prompt (large_string, lengths 70-991k) | completion (large_string, lengths 0-1.02k)
---|---|
<|file_name|>t_ab_state.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import unittest
import pentai.base.human_player as h_m
import pentai.base.rules as r_m
import pentai.base.game as g_m
import pentai.ai.priority_filter as pf_m
import pentai.ai.utility_calculator as uc_m
from pentai.ai.ab_state import *
def get_black_line_counts(ab_game_state):
return ab_game_state.get_utility_stats().lines[P1]
def get_white_line_counts(ab_game_state):
return ab_game_state.get_utility_stats().lines[P2]
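# Judging by the assertions below, lines[k] (for k = 0..4) counts the 5-cell
# windows ("substrips") that contain exactly k+1 stones of the given colour
# and none of the opponent's. For example, a lone stone in the middle of a
# 13x13 board sits in 20 such windows (4 directions x 5 offsets).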
class AlphaBetaBridgeTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(13, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_update_substrips_middle_of_board(self):
self.gs.set_occ((7,7), P1)
"""
self.assertEquals(self.bl, [20, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_empty_board(self):
self.assertEquals(self.bl, [0, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_SW_corner(self):
self.gs.set_occ((0,0), P1)
self.assertEquals(self.bl, [3, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_near_SW_corner(self):
self.gs.set_occ((1,0), P1)
self.assertEquals(self.bl, [4, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_NE_corner(self):
self.gs.set_occ((12,12), P1)
self.assertEquals(self.bl, [3, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_remove_single_stone(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,0), EMPTY)
self.assertEquals(self.bl, [0, 0, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_two_blacks_SW(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((1,1), P1)
self.assertEquals(self.bl, [7, 1, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_2_opp_colour_pieces(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P2)
self.assertEquals(self.bl, [2, 0, 0, 0, 0])
self.assertEquals(self.wl, [3, 0, 0, 0, 0])
def test_update_substrips_2_pieces(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P1)
self.assertEquals(self.bl, [5, 1, 0, 0, 0])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
def test_update_substrips_5_in_a_row(self):
self.gs.set_occ((0,0), P1)
self.gs.set_occ((0,1), P1)
self.gs.set_occ((0,2), P1)
self.gs.set_occ((0,3), P1)
self.gs.set_occ((0,4), P1)
self.assertEquals(self.bl, [12, 1, 1, 1, 1])
self.assertEquals(self.wl, [0, 0, 0, 0, 0])
class LengthCountingTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(9, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_middle_for_black_diag_2_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((2,2), P2)
self.assertEquals(self.bl, [17, 0, 0, 0, 0])
self.assertEquals(self.wl, [7, 0, 0, 0, 0])
def test_middle_for_black_left_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((3,4), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_right_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((5,4), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_up_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((4,5), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
def test_middle_for_black_down_1_for_white(self):
self.gs.set_occ((4,4), P1)
self.gs.set_occ((4,3), P2)
self.assertEquals(self.bl, [16, 0, 0, 0, 0])
self.assertEquals(self.wl, [5+4+4, 0, 0, 0, 0])
###############
class MoreAlphaBetaBridgeTests(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(5, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_initial_state_black_to_move(self):
self.assertEquals(self.s.to_move_colour(), P1)
def test_create_state(self):
child = self.s.create_state((2,2))
self.assertEquals(child.to_move_colour(), P2)
self.assertEquals(child.terminal(), False)
board = child.board()
self.assertEquals(board.get_occ((2,2)), P1)
self.assertEquals(board.get_occ((3,3)), EMPTY)
self.assertEquals(board.get_occ((1,1)), EMPTY)
def test_length_counters_after_sw_corner(self):
g1 = self.s.create_state((0,0)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_nw_corner(self):
g1 = self.s.create_state((0,4)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_ne_corner(self):
g1 = self.s.create_state((4,4)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])
def test_length_counters_after_se_corner(self):
g1 = self.s.create_state((4,0)) # B
self.assertEquals(get_black_line_counts(g1), [3, 0, 0, 0, 0])<|fim▁hole|> g1 = self.s.create_state((-1,2)) # B
except IllegalMoveException:
return
self.fail()
def test_length_counters_after_two_moves(self):
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((1,1)) # W
self.assertEquals(get_black_line_counts(g2), [2, 0, 0, 0, 0])
self.assertEquals(get_white_line_counts(g2), [2, 0, 0, 0, 0])
def test_length_counters_after_two_moves_b(self):
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((2,2)) # W
self.assertEquals(get_black_line_counts(g2), [2, 0, 0, 0, 0])
# One across the other diagonal
self.assertEquals(get_white_line_counts(g2), [3, 0, 0, 0, 0])
def test_length_counters_after_five_moves(self):
# along the NE diagonal
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,3)) # B
g4 = g3.create_state((4,4)) # W
g5 = g4.create_state((0,0)) # B
self.assertEquals(get_black_line_counts(g5), [6, 0, 0, 0, 0])
self.assertEquals(get_white_line_counts(g5), [5, 0, 0, 0, 0])
def test_length_counters_after_five_moves_in_cnrs_and_middle(self):
# four in the corners and one in the middle
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((0,4)) # W
g3 = g2.create_state((4,4)) # B
g4 = g3.create_state((4,0)) # W
g5 = g4.create_state((2,2)) # B
self.assertEquals(get_black_line_counts(g5), [2, 0, 1, 0, 0])
self.assertEquals(get_white_line_counts(g5), [0, 0, 0, 0, 0])
def test_make_a_capture(self):
g1 = self.s.create_state((0,1)) # B
g2 = g1.create_state((1,2)) # W
g3 = g2.create_state((1,3)) # B
g4 = g3.create_state((2,3)) # W
g5 = g4.create_state((3,4)) # B
self.assertEquals(g5.to_move_colour(), P2)
self.assertEquals(g5.terminal(), False)
board = g5.board()
self.assertEquals(board.get_occ((0,1)), P1)
self.assertEquals(board.get_occ((1,3)), P1)
self.assertEquals(board.get_occ((3,4)), P1)
self.assertEquals(board.get_occ((1,2)), EMPTY)
self.assertEquals(board.get_occ((2,3)), EMPTY)
class ThreatTest(unittest.TestCase):
def setUp(self):
player1 = h_m.HumanPlayer("Blomp")
player2 = h_m.HumanPlayer("Kubba")
r = r_m.Rules(5, "standard")
my_game = g_m.Game(r, player1, player2)
self.gs = my_game.current_state
self.search_filter = pf_m.PriorityFilter()
self.util_calc = uc_m.UtilityCalculator()
self.s = ABState(search_filter=self.search_filter,
utility_calculator=self.util_calc)
self.bl = self.s.utility_stats.lines[P1]
self.wl = self.s.utility_stats.lines[P2]
self.s.set_state(self.gs)
def test_add_one_take_for_white(self):
g1 = self.s.create_state((2,4)) # B
g2 = g1.create_state((1,4)) # W
g3 = g2.create_state((3,4)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_SW_valid(self):
g1 = self.s.create_state((1,1)) # B
g2 = g1.create_state((3,3)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_NW_valid(self):
g1 = self.s.create_state((1,3)) # B
g2 = g1.create_state((3,1)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_NE_valid(self):
g1 = self.s.create_state((3,3)) # B
g2 = g1.create_state((1,1)) # W
g3 = g2.create_state((2,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
def test_SE_valid(self):
g1 = self.s.create_state((2,2)) # B
g2 = g1.create_state((1,3)) # W
g3 = g2.create_state((3,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 1])
##########################################
def test_SW_invalid(self):
g1 = self.s.create_state((0,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_NW_invalid(self):
g1 = self.s.create_state((0,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_NE_invalid(self):
g1 = self.s.create_state((4,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_SE_invalid(self):
g1 = self.s.create_state((4,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
##########################################
def test_W_invalid(self):
g1 = self.s.create_state((0,2)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((1,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_E_invalid(self):
g1 = self.s.create_state((4,2)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((3,2)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_N_invalid(self):
g1 = self.s.create_state((2,4)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((2,3)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_S_invalid(self):
g1 = self.s.create_state((2,0)) # B
g2 = g1.create_state((2,2)) # W
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
##########################################
def test_SW_invalid_take2(self):
g1 = self.s.create_state((1,0)) # B
g2 = g1.create_state((3,2)) # W
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_takes(), [0, 0, 0])
def test_SW_invalid_threat2(self):
g1 = self.s.create_state((1,0)) # B
g2 = g1.create_state((3,4)) # W (irrel.)
g3 = g2.create_state((2,1)) # B
self.assertEquals(g3.get_threats(), [0, 0, 0])
##########################################
'''
def test_seen(self):
self.s.set_seen(set([(1,2)]))
moves = list(self.s.successors())
'''
"""
# TODO: lots of threat cases, or unify stuff
if __name__ == "__main__":
unittest.main()<|fim▁end|> |
def test_cannot_place_off_e_edge(self):
try: |
<|file_name|>operator.cpp<|end_file_name|><|fim▁begin|>/*******************************************************************************
*
* MIT License
*
* Copyright (c) 2017 Advanced Micro Devices, Inc.
*<|fim▁hole|> * of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*******************************************************************************/
#include <cassert>
#include <miopen/fusion.hpp>
#include <miopen/logger.hpp>
namespace miopen {
std::ostream& operator<<(std::ostream& stream, const FusionOpDescriptor& x)
{
MIOPEN_LOG_ENUM(stream,
x.kind(),
miopenFusionOpConvForward,
miopenFusionOpActivForward,
miopenFusionOpBatchNormInference,
miopenFusionOpBiasForward,
miopenFusionOpBatchNormFwdTrain,
miopenFusionOpBatchNormBwdTrain,
miopenFusionOpActivBackward);
return stream;
}
std::ostream& operator<<(std::ostream& stream, const MDGraph_op_t& o)
{
MIOPEN_LOG_ENUM(stream, o, OpEqual, OpNotEqual, OpAny, OpModulo, OpGTE, OpLTE);
return stream;
}
std::ostream& operator<<(std::ostream& stream, const boost::any& a)
{
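// boost::any erases the static type, so probe each expected type in turn
// and cast explicitly before streaming it.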
if(a.type() == typeid(std::string))
stream << boost::any_cast<std::string>(a);
else if(a.type() == typeid(int))
stream << boost::any_cast<int>(a);
else if(a.type() == typeid(miopenConvolutionMode_t))
stream << boost::any_cast<miopenConvolutionMode_t>(a);
else if(a.type() == typeid(miopenPaddingMode_t))
stream << boost::any_cast<miopenPaddingMode_t>(a);
else if(a.type() == typeid(size_t))
stream << boost::any_cast<size_t>(a);
else if(a.type() == typeid(miopenBatchNormMode_t))
stream << boost::any_cast<miopenBatchNormMode_t>(a);
else if(a.type() == typeid(miopenActivationMode_t))
stream << boost::any_cast<miopenActivationMode_t>(a);
else if(a.type() == typeid(miopenDataType_t))
stream << boost::any_cast<miopenDataType_t>(a);
else
stream << "Unsupported any type: " << a.type().name();
return stream;
}
} // namespace miopen<|fim▁end|> | * Permission is hereby granted, free of charge, to any person obtaining a copy |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | module.exports = require('./lib/Redback'); |
<|file_name|>thumbv7neon_unknown_linux_musleabihf.rs<|end_file_name|><|fim▁begin|>use crate::spec::{Target, TargetOptions};
// This target is for musl Linux on ARMv7 with thumb mode enabled
// (for consistency with Android and Debian-based distributions)
// and with NEON unconditionally enabled and, therefore, with 32 FPU
// registers enabled as well. See section A2.6.2 on page A2-56 in
// https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf
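// The resulting triple is thumbv7neon-unknown-linux-musleabihf (matching this
// file's name), e.g. `rustc --target thumbv7neon-unknown-linux-musleabihf`.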
pub fn target() -> Target {
Target {
// It's important we use "gnueabihf" and not "musleabihf" here. LLVM
// uses it to determine the calling convention and float ABI, and LLVM
// doesn't support the "musleabihf" value.
llvm_target: "armv7-unknown-linux-gnueabihf".to_string(),
pointer_width: 32,
data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".to_string(),
arch: "arm".to_string(),
// Most of these settings are copied from the thumbv7neon_unknown_linux_gnueabihf
// target.
options: TargetOptions {
abi: "eabihf".to_string(),<|fim▁hole|> mcount: "\u{1}mcount".to_string(),
..super::linux_musl_base::opts()
},
}
}<|fim▁end|> | features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".to_string(),
max_atomic_width: Some(64), |
<|file_name|>test.ts<|end_file_name|><|fim▁begin|>import * as tv from "./index";<|fim▁hole|> // Shows.getMetadata(val,console.log);
tv.shows.getEpisodes(val,(err,seasons) => {
if(!seasons[2]) {return;}
console.log(seasons[2].episodes[4])
tv.shows.streamEpisode(seasons[2].episodes[4],(err,url) => {
if(err) {return console.log("failed ",val.title,err)}
console.log("stream url ",url);
});
});
});
console.log(shows);
});<|fim▁end|> |
tv.search.shows("Stargate Universe",(err,shows) => {
shows.forEach((val,i) => { |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
The MIT License (MIT)
Copyright (c) 2014 DutchCoders [https://github.com/dutchcoders/]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
package main
import (
"bufio"
"flag"
"fmt"
"github.com/dutchcoders/gonest"
"log"
"os"
"strings"
)
var clientid string
var token string
var secret string
func init() {
// ok this should actually be a secret, but there is no way to keep this a real secret, same as when it
// is in an executable or APK. So just have it plain here.
clientid = "7348d6f1-1437-4935-a6e6-2a5567c96036"
secret = "xUVHKSKw8RTzxQrVH2UKiCnCb"
token = os.Getenv("NEST_ACCESS_TOKEN")
}
func main() {
var err error
var nest *gonest.Nest
if nest, err = gonest.Connect(clientid, token); err != nil {
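// gonest reports a missing or expired token as an UnauthorizedError carrying
// a login URL; in that case, walk the user through the PIN exchange below.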
if ue, ok := err.(*gonest.UnauthorizedError); ok {
for {<|fim▁hole|> fmt.Printf("Login to nest using url: %s\n\n%s", ue.Url, "Enter pincode: ")
reader := bufio.NewReader(os.Stdin)
code, _ := reader.ReadString('\n')
code = strings.Replace(code, "\n", "", -1)
if err = nest.Authorize(secret, code); err != nil {
fmt.Printf("%s\n%s", err, "Enter pincode: ")
}
break
}
fmt.Printf("Successfully authorized.\nYou can now persist the accesstoken using: \nexport NEST_ACCESS_TOKEN=%s\n", nest.Token)
} else {
fmt.Println("Could not login to nest.")
os.Exit(1)
}
}
structureid := flag.String("structure", "", "operate on structure")
thermostatid := flag.String("thermostat", "", "operate on thermostat")
flag.Parse()
if len(flag.Args()) == 0 {
fmt.Printf("Usage:\n\n")
fmt.Printf("nest --structure {structureid} [home|away]\n")
fmt.Printf("nest structures\n")
fmt.Printf("nest thermostats\n")
os.Exit(0)
}
switch flag.Args()[0] {
case "away", "home":
if *structureid == "" {
fmt.Println("Structure not set\n\nUsage: nest --structure {structureid} [home|away]")
os.Exit(1)
}
away := flag.Args()[0]
if err = nest.Set(fmt.Sprintf("structures/%s", *structureid), map[string]interface{}{"away": away}); err != nil {
log.Panic(err)
}
fmt.Printf("Set to: %s\n", away)
case "structures":
var structures map[string]gonest.Structure
if err = nest.Structures(&structures); err != nil {
log.Panic(err)
}
fmt.Printf("Structures:\n")
for _, structure := range structures {
fmt.Printf("Id: %s\n", structure.StructureId)
fmt.Printf("Name: %s\n", structure.Name)
fmt.Printf("Away: %s\n", structure.Away)
}
case "thermostats":
var devices gonest.Devices
if err = nest.Devices(&devices); err != nil {
log.Panic(err)
}
fmt.Printf("Thermostats:\n")
for _, device := range devices.Thermostats {
fmt.Printf("Id: %s\n", device.DeviceId)
fmt.Printf("Name: %s\n", device.Name)
fmt.Printf("Name(long): %s\n", device.NameLong)
fmt.Printf("Temperature: %f C\n", device.AmbientTemperatureC)
fmt.Printf("Humidity: %f\n", device.Humidity)
if device.IsOnline {
fmt.Printf("Status: online\n")
} else {
fmt.Printf("Status: offline\n")
}
}
default:
if *thermostatid != "" {
var devices gonest.Devices
if err = nest.Devices(&devices); err != nil {
log.Panic(err)
}
var thermostat gonest.Thermostat
var match bool
if thermostat, match = devices.Thermostats[*thermostatid]; !match {
fmt.Printf("Thermostat %s not found.", thermostatid)
os.Exit(1)
}
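// Map CLI argument names onto the thermostat's fields, so that e.g.
// `nest --thermostat <id> target-temperature` prints a single value.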
args := map[string]interface{}{
"deviceid": thermostat.DeviceId,
"version": thermostat.SoftwareVersion,
"name": thermostat.Name,
"online": thermostat.IsOnline,
"away-temperature-high": thermostat.AwayTemperatureHighC,
"away-temperature-low": thermostat.AwayTemperatureLowC,
"ambient-temperature": thermostat.AmbientTemperatureC,
"target-temperature": thermostat.TargetTemperatureC,
}
if val, ok := args[flag.Args()[0]]; ok {
fmt.Println(val)
} else {
fmt.Printf("Unsupported argument %s\n", flag.Args()[0])
}
} else if *structureid != "" {
fmt.Printf("Not implemented yet\n")
} else {
fmt.Printf("Unknown command: %s\n", flag.Args()[0])
}
}
}<|fim▁end|> | |
<|file_name|>series-bar-coverage.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|> | oid sha256:3e0c436bb4be5a94bea0b2ce84121c3a8c6fbe226e7559f80cd0db5b4b93fdb0
size 17334 |
<|file_name|>LODPointFeaturesPreprocessor.java<|end_file_name|><|fim▁begin|>package com.glob3mobile.vectorial.processing;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import com.glob3mobile.utils.Progress;
import com.glob3mobile.vectorial.lod.PointFeatureLODStorage;
import com.glob3mobile.vectorial.lod.mapdb.PointFeatureLODMapDBStorage;
import com.glob3mobile.vectorial.storage.PointFeature;
import com.glob3mobile.vectorial.storage.PointFeatureStorage;
import com.glob3mobile.vectorial.storage.mapdb.PointFeatureMapDBStorage;
public class LODPointFeaturesPreprocessor {
private static class LeafNodesImporter
implements
PointFeatureStorage.NodeVisitor {
private final long _nodesCount;
private final PointFeatureLODStorage _lodStorage;
private final boolean _verbose;
private Progress _progress;
private LeafNodesImporter(final long nodesCount,
final PointFeatureLODStorage lodStorage,
final boolean verbose) {
_nodesCount = nodesCount;
_lodStorage = lodStorage;
_verbose = verbose;
}
@Override
public void start() {
_progress = new Progress(_nodesCount) {
@Override
public void informProgress(final long stepsDone,
final double percent,
final long elapsed,
final long estimatedMsToFinish) {
if (_verbose) {
System.out.println(_lodStorage.getName() + ": 1/4 Importing leaf nodes: "
+ progressString(stepsDone, percent, elapsed, estimatedMsToFinish));
}
}
};
}
@Override
public void stop() {
_progress.finish();
_progress = null;
}
@Override
public boolean visit(final PointFeatureStorage.Node node) {
final List<PointFeature> features = new ArrayList<>(node.getFeatures());
_lodStorage.addLeafNode( //
node.getID(), //
node.getNodeSector(), //
node.getMinimumSector(), //
features //<|fim▁hole|> );
_progress.stepDone();
return true;
}
}
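/**
 * Imports the leaf nodes of a point-feature storage into an LOD storage,
 * builds the LOD pyramid, and optimizes the result (the four steps echoed
 * by the progress messages).
 */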
public static void process(final File storageDir,
final String storageName,
final File lodDir,
final String lodName,
final int maxFeaturesPerNode,
final Comparator<PointFeature> featuresComparator,
final boolean createClusters,
final boolean verbose) throws IOException {
try (final PointFeatureStorage storage = PointFeatureMapDBStorage.openReadOnly(storageDir, storageName)) {
try (final PointFeatureLODStorage lodStorage = PointFeatureLODMapDBStorage.createEmpty(storage.getSector(), lodDir,
lodName, maxFeaturesPerNode, featuresComparator, createClusters)) {
final PointFeatureStorage.Statistics statistics = storage.getStatistics(verbose);
if (verbose) {
statistics.show();
System.out.println();
}
final int nodesCount = statistics.getNodesCount();
storage.acceptDepthFirstVisitor(new LeafNodesImporter(nodesCount, lodStorage, verbose));
lodStorage.createLOD(verbose);
if (verbose) {
System.out.println(lodStorage.getName() + ": 4/4 Optimizing storage...");
}
lodStorage.optimize();
if (verbose) {
System.out.println();
final PointFeatureLODStorage.Statistics lodStatistics = lodStorage.getStatistics(verbose);
lodStatistics.show();
}
}
}
}
private LODPointFeaturesPreprocessor() {
}
public static void main(final String[] args) throws IOException {
System.out.println("LODPointFeaturesPreprocessor 0.1");
System.out.println("--------------------------------\n");
final File sourceDir = new File("PointFeaturesStorage");
// final String sourceName = "Cities1000";
// final String sourceName = "AR";
// final String sourceName = "ES";
// final String sourceName = "GEONames-PopulatedPlaces";
// final String sourceName = "SpanishBars";
final String sourceName = "Tornados";
final File lodDir = new File("PointFeaturesLOD");
final String lodName = sourceName + "_LOD";
final int maxFeaturesPerNode = 64;
// final int maxFeaturesPerNode = 96;
final boolean createClusters = true;
final Comparator<PointFeature> featuresComparator = createClusters ? null : new GEONamesComparator();
final boolean verbose = true;
LODPointFeaturesPreprocessor.process( //
sourceDir, sourceName, //
lodDir, lodName, //
maxFeaturesPerNode, //
featuresComparator, //
createClusters, //
verbose);
System.out.println("\n- done!");
}
}<|fim▁end|> | |
<|file_name|>init.js<|end_file_name|><|fim▁begin|>ConstructIndex(); //non ui
for(let B of Bakteriler) {
AddBacteriaToDisplay(B); // only names and families // only ui with variables
BakteriRouterSearch(B); //non ui +
}
ConstructBottomPanel(); //only ui with variables
IndexFamilies(); //non ui
PremakeLeftPanel(); //only ui<|fim▁hole|>
if(HaveNotification) {
document.querySelector("#notificationCircle").style.display = "block";
}
FilterRuleQueExec(); // for the counts etc. // non ui
//asd
//SozlukBuilderStart();<|fim▁end|> | |
<|file_name|>TestBreakpointCommand.py<|end_file_name|><|fim▁begin|>"""
Test lldb breakpoint command add/list/delete.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import side_effect
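# side_effect is a scratch module: breakpoint callbacks write canary values
# into it, and the tests assert on those values to prove the callbacks ran.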
class BreakpointCommandTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
@expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr24528")
def test_breakpoint_command_sequence(self):
"""Test a sequence of breakpoint command add, list, and delete."""
self.build()
self.breakpoint_command_sequence()
def test_script_parameters(self):
"""Test a sequence of breakpoint command add, list, and delete."""
self.build()
self.breakpoint_command_script_parameters()
def test_commands_on_creation(self):
self.build()
self.breakpoint_commands_on_creation()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.c', '// Set break point at this line.')
# disable "There is a running process, kill it and restart?" prompt
self.runCmd("settings set auto-confirm true")
self.addTearDownHook(
lambda: self.runCmd("settings clear auto-confirm"))
def test_delete_all_breakpoints(self):
"""Test that deleting all breakpoints works."""
self.build()
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol(self, "main")
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("breakpoint delete")
self.runCmd("process continue")
self.expect("process status", PROCESS_STOPPED,
patterns=['Process .* exited with status = 0'])
def breakpoint_command_sequence(self):
"""Test a sequence of breakpoint command add, list, and delete."""
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Add three breakpoints on the same line. The first time we don't specify the file,
# since the default file is the one containing main:
lldbutil.run_break_set_by_file_and_line(
self, None, self.line, num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True)
# Breakpoint 4 - set at the same location as breakpoint 1 to test
# setting breakpoint commands on two breakpoints at a time
lldbutil.run_break_set_by_file_and_line(
self, None, self.line, num_expected_locations=1, loc_exact=True)
# Make sure relative path source breakpoints work as expected. We test
# with partial paths with and without "./" prefixes.
lldbutil.run_break_set_by_file_and_line(
self, "./main.c", self.line,
num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "basic/main.c", self.line,
num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "./basic/main.c", self.line,
num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "breakpoint/basic/main.c", self.line,
num_expected_locations=1, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "./breakpoint/basic/main.c", self.line,
num_expected_locations=1, loc_exact=True)
# Test relative breakpoints with incorrect paths and make sure we get
# no breakpoint locations
lldbutil.run_break_set_by_file_and_line(
self, "invalid/main.c", self.line,
num_expected_locations=0, loc_exact=True)
lldbutil.run_break_set_by_file_and_line(
self, "./invalid/main.c", self.line,
num_expected_locations=0, loc_exact=True)
# Now add callbacks for the breakpoints just created.
self.runCmd(
"breakpoint command add -s command -o 'frame variable --show-types --scope' 1 4")
self.runCmd(
"breakpoint command add -s python -o 'import side_effect; side_effect.one_liner = \"one liner was here\"' 2")
self.runCmd(
"breakpoint command add --python-function bktptcmd.function 3")
# Check that the breakpoint commands are correctly set.
# The breakpoint list now only contains breakpoint 1.
self.expect(
"breakpoint list", "Breakpoints 1 & 2 created", substrs=[
"2: file = 'main.c', line = %d, exact_match = 0, locations = 1" %
self.line], patterns=[
"1: file = '.*main.c', line = %d, exact_match = 0, locations = 1" %
self.line])
self.expect(
"breakpoint list -f",
"Breakpoints 1 & 2 created",
substrs=[
"2: file = 'main.c', line = %d, exact_match = 0, locations = 1" %
self.line],
patterns=[
"1: file = '.*main.c', line = %d, exact_match = 0, locations = 1" %
self.line,
"1.1: .+at main.c:%d:?[0-9]*, .+unresolved, hit count = 0" %
self.line,
"2.1: .+at main.c:%d:?[0-9]*, .+unresolved, hit count = 0" %
self.line])
self.expect("breakpoint command list 1", "Breakpoint 1 command ok",
substrs=["Breakpoint commands:",
"frame variable --show-types --scope"])
self.expect("breakpoint command list 2", "Breakpoint 2 command ok",
substrs=["Breakpoint commands (Python):",
"import side_effect",
"side_effect.one_liner"])
self.expect("breakpoint command list 3", "Breakpoint 3 command ok",
substrs=["Breakpoint commands (Python):",
"bktptcmd.function(frame, bp_loc, internal_dict)"])
self.expect("breakpoint command list 4", "Breakpoint 4 command ok",
substrs=["Breakpoint commands:",
"frame variable --show-types --scope"])
self.runCmd("breakpoint delete 4")
self.runCmd("command script import --allow-reload ./bktptcmd.py")<|fim▁hole|> # the second time only one:
lldbutil.run_break_set_by_regexp(
self, r"._MyFunction", num_expected_locations=2)
lldbutil.run_break_set_by_regexp(
self,
r"._MyFunction",
extra_options="-f a.c",
num_expected_locations=1)
lldbutil.run_break_set_by_regexp(
self,
r"._MyFunction",
extra_options="-f a.c -f b.c",
num_expected_locations=2)
# Now try a source regex breakpoint:
lldbutil.run_break_set_by_source_regexp(
self,
r"is about to return [12]0",
extra_options="-f a.c -f b.c",
num_expected_locations=2)
lldbutil.run_break_set_by_source_regexp(
self,
r"is about to return [12]0",
extra_options="-f a.c",
num_expected_locations=1)
# Reset our canary variables and run the program.
side_effect.one_liner = None
side_effect.bktptcmd = None
self.runCmd("run", RUN_SUCCEEDED)
# Check the value of canary variables.
self.assertEquals("one liner was here", side_effect.one_liner)
self.assertEquals("function was here", side_effect.bktptcmd)
# Finish the program.
self.runCmd("process continue")
# Remove the breakpoint command associated with breakpoint 1.
self.runCmd("breakpoint command delete 1")
# Remove breakpoint 2.
self.runCmd("breakpoint delete 2")
self.expect(
"breakpoint command list 1",
startstr="Breakpoint 1 does not have an associated command.")
self.expect(
"breakpoint command list 2",
error=True,
startstr="error: '2' is not a currently valid breakpoint ID.")
# The breakpoint list now only contains breakpoint 1.
self.expect(
"breakpoint list -f",
"Breakpoint 1 exists",
patterns=[
"1: file = '.*main.c', line = %d, exact_match = 0, locations = 1, resolved = 1" %
self.line,
"hit count = 1"])
# Not breakpoint 2.
self.expect(
"breakpoint list -f",
"No more breakpoint 2",
matching=False,
substrs=[
"2: file = 'main.c', line = %d, exact_match = 0, locations = 1, resolved = 1" %
self.line])
# Run the program again, with breakpoint 1 remaining.
self.runCmd("run", RUN_SUCCEEDED)
# We should be stopped again due to breakpoint 1.
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# The breakpoint should have a hit count of 2.
self.expect("breakpoint list -f", BREAKPOINT_HIT_TWICE,
substrs=['resolved, hit count = 2'])
def breakpoint_command_script_parameters(self):
"""Test that the frame and breakpoint location are being properly passed to the script breakpoint command function."""
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Add a breakpoint.
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True)
# Now add callbacks for the breakpoints just created.
self.runCmd("breakpoint command add -s python -o 'import side_effect; side_effect.frame = str(frame); side_effect.bp_loc = str(bp_loc)' 1")
# Reset canary variables and run.
side_effect.frame = None
side_effect.bp_loc = None
self.runCmd("run", RUN_SUCCEEDED)
self.expect(side_effect.frame, exe=False, startstr="frame #0:")
self.expect(side_effect.bp_loc, exe=False,
patterns=["1.* where = .*main .* resolved, hit count = 1"])
def breakpoint_commands_on_creation(self):
"""Test that setting breakpoint commands when creating the breakpoint works"""
exe = self.getBuildArtifact("a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target.IsValid(), "Created an invalid target.")
# Add a breakpoint.
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line, num_expected_locations=1, loc_exact=True,
extra_options='-C bt -C "thread list" -C continue')
bkpt = target.FindBreakpointByID(1)
self.assertTrue(bkpt.IsValid(), "Couldn't find breakpoint 1")
com_list = lldb.SBStringList()
bkpt.GetCommandLineCommands(com_list)
self.assertEqual(com_list.GetSize(), 3, "Got the wrong number of commands")
self.assertEqual(com_list.GetStringAtIndex(0), "bt", "First bt")
self.assertEqual(com_list.GetStringAtIndex(1), "thread list", "Next thread list")
self.assertEqual(com_list.GetStringAtIndex(2), "continue", "Last continue")<|fim▁end|> |
# Next lets try some other breakpoint kinds. First break with a regular expression
# and then specify only one file. The first time we should get two locations, |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-28 01:00
from __future__ import unicode_literals
import caching.base
from django.db import migrations, models
import django.db.models.deletion
import sorl.thumbnail.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('is_live', models.BooleanField(default=True, verbose_name='Display on site')),
('show_in_lists', models.BooleanField(default=True, verbose_name='Show on Organization list page')),
('name', models.CharField(max_length=255)),
('slug', models.SlugField(unique=True)),
('email', models.EmailField(blank=True, max_length=254, verbose_name='Email address')),
('twitter_username', models.CharField(blank=True, max_length=32)),
('github_username', models.CharField(blank=True, max_length=32)),
('github_repos_num', models.PositiveIntegerField(blank=True, null=True)),
('github_gists_num', models.PositiveIntegerField(blank=True, null=True)),
('homepage', models.URLField(blank=True)),
('description', models.TextField(blank=True)),
('address', models.CharField(blank=True, max_length=255)),
('city', models.CharField(blank=True, max_length=64)),
('state', models.CharField(blank=True, max_length=32)),
('country', models.CharField(blank=True, help_text='Only necessary if outside the U.S.', max_length=32)),
('logo', sorl.thumbnail.fields.ImageField(blank=True, help_text='Resized to fit 200x50 box in template', null=True, upload_to='img/uploads/org_logos')),
],
options={
'ordering': ('name',),
},
bases=(caching.base.CachingMixin, models.Model),
),
migrations.CreateModel(
name='OrganizationLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=128)),
('url', models.URLField()),
('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='people.Organization')),
],
options={
'verbose_name': 'Organization Link',
'ordering': ('organization', 'name'),
},
bases=(caching.base.CachingMixin, models.Model),
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),<|fim▁hole|> ('modified', models.DateTimeField(auto_now=True)),
('is_live', models.BooleanField(default=True, verbose_name='Display on site')),
('show_in_lists', models.BooleanField(default=True, verbose_name='Show on People list page')),
('first_name', models.CharField(max_length=128)),
('last_name', models.CharField(max_length=128)),
('slug', models.SlugField(unique=True)),
('email', models.EmailField(blank=True, max_length=254, verbose_name='Email address')),
('twitter_username', models.CharField(blank=True, max_length=32)),
('twitter_bio', models.TextField(blank=True)),
('twitter_profile_image_url', models.URLField(blank=True)),
('github_username', models.CharField(blank=True, max_length=32)),
('github_repos_num', models.PositiveIntegerField(blank=True, null=True)),
('github_gists_num', models.PositiveIntegerField(blank=True, null=True)),
('description', models.TextField(blank=True, verbose_name='Bio')),
('organizations', models.ManyToManyField(blank=True, to='people.Organization')),
],
options={
'verbose_name_plural': 'People',
'ordering': ('last_name', 'first_name'),
},
bases=(caching.base.CachingMixin, models.Model),
),
migrations.CreateModel(
name='PersonLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=128)),
('url', models.URLField()),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='people.Person')),
],
options={
'verbose_name': 'Person Link',
'ordering': ('person', 'name'),
},
bases=(caching.base.CachingMixin, models.Model),
),
]<|fim▁end|> | ('created', models.DateTimeField(auto_now_add=True)), |
<|file_name|>globalize-tests.ts<|end_file_name|><|fim▁begin|>/*Globalize.culture("fr");
console.log(Globalize.culture().name);
Globalize.culture("fr-FR");
console.log(Globalize.culture().name);
Globalize.culture(["es-MX", "fr-FR"]);
console.log(Globalize.culture().name);
Globalize.culture("fr;q=0.4, es;q=0.5, he");
Globalize.format(1234.567, "n");
Globalize.format(1234.567, "n1");
Globalize.format(1234.567, "n0");
Globalize.format(new Date(1955, 10, 5), "yyyy/MM/dd");
Globalize.format(new Date(1955, 10, 5), "dddd MMMM d, yyyy");
Globalize.addCultureInfo("fr", { messages: { "translate": "traduire" } });
console.log(Globalize.localize("translate", "fr"));
Globalize.parseInt("1,234.56");
Globalize.parseInt("1.234,56", 10);
Globalize.parseInt("1.234,56", 10, "de");
Globalize.parseFloat("1,234.56");
Globalize.parseFloat("1.234,56", 10);
Globalize.parseFloat("1.234,56", 10, "de");
Globalize.parseDate("1/2/2003");
Globalize.parseDate("15 Jun 2012", "dd MMM yyyy");
Globalize.parseDate("15 Jun 2012", ["dd MMM yyyy"]);<|fim▁hole|>Globalize.addCultureInfo("fr", { numberFormat: { billionZeroes: 12 } });
Globalize.addCultureInfo("de-DE", "de", { numberFormat: { billionZeroes: 12 } });
//Globalize.culture().calendar = Globalize.culture().calendars.SomeOtherCalendar;
//Globalize.culture().calendar = Globalize.culture().calendars.standard;
Globalize.format(123.45, "n");
Globalize.format(123.45, "n0");
Globalize.format(123.45, "n1");
Globalize.format(123.45, "d");
Globalize.format(12, "d3");
Globalize.format(123.45, "c");
Globalize.format(123.45, "c0");
Globalize.format(123.45, "c1");
Globalize.format(-123.45, "c");
Globalize.format(0.12345, "p");
Globalize.format(0.12345, "p0");
Globalize.format(0.12345, "p4");
Globalize.format(1234.56, "c");
Globalize.culture("en-US").numberFormat.currency.symbol = '\u20ac';
var currSym;
Globalize.culture().numberFormat.currency.symbol = currSym;
Globalize.format(new Date(2012, 1, 20), 'd');
Globalize.format(new Date(2012, 1, 20), 'D');
Globalize.load[ "default" ] = {
name: "English",
englishName: "English",
nativeName: "English",
isRTL: false,
language: "en",
numberFormat: {
pattern: [ "-n" ],
decimals: 2,
",": ",",
".": ".",
groupSizes: [ 3 ],
"+": "+",
"-": "-",
percent: {
pattern: [ "-n %", "n %" ],
decimals: 2,
groupSizes: [ 3 ],
",": ",",
".": ".",
symbol: "%"
},
currency: {
pattern: [ "($n)", "$n" ],
decimals: 2,
groupSizes: [ 3 ],
",": ",",
".": ".",
symbol: "$"
}
},
calendars: {
standard: {
name: "Gregorian_USEnglish",
"/": "/",
":": ":",
firstDay: 0,
days: {
names: [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ],
namesAbbr: [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ],
namesShort: [ "Su", "Mo", "Tu", "We", "Th", "Fr", "Sa" ]
},
//months: [
// names: [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December", "" ],
// namesAbbr: [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", "" ]
//],
AM: [ "AM", "am", "AM" ],
PM: [ "PM", "pm", "PM" ],
eras: [
{"name":"A.D.","start":null,"offset":0}
],
twoDigitYearMax: 2029,
patterns: {
d: "M/d/yyyy",
D: "dddd, MMMM dd, yyyy",
t: "h:mm tt",
T: "h:mm:ss tt",
f: "dddd, MMMM dd, yyyy h:mm tt",
F: "dddd, MMMM dd, yyyy h:mm:ss tt",
M: "MMMM dd",
Y: "yyyy MMMM",
S: "yyyy\u0027-\u0027MM\u0027-\u0027dd\u0027T\u0027HH\u0027:\u0027mm\u0027:\u0027ss"
}
}
},
messages: {}
}*/<|fim▁end|> | Globalize.culture("fr");
Globalize.parseDate("1/2/2003");
Globalize.addCultureInfo({ numberFormat: { billionZeroes: 12 } }); |
<|file_name|>ipn_merchant_order.py<|end_file_name|><|fim▁begin|># coding: UTF-8
import os, sys
import mercadopago
def index(req, **kwargs):
mp = mercadopago.MP("CLIENT_ID", "CLIENT_SECRET")
topic = kwargs["topic"]
merchant_order_info = None
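# The IPN sends a topic plus an id: "payment" notifications are resolved to
# their merchant_order via the collection, while "merchant_order"
# notifications can be fetched directly.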
if topic == "payment"
payment_info = mp.get("/collections/notifications/"+kwargs["id"])
merchant_order_info = mp.get("/merchant_orders/"+payment_info["response"]["collection"]["merchant_order_id"])
elif topic == "merchant_order":
merchant_order_info = mp.get("/merchant_orders/"+kwargs["id"])
if merchant_order_info is None:<|fim▁hole|> return {
"payment": merchant_order_info["response"]["payments"],
"shipment": merchant_order_info["response"]["shipments"]
}<|fim▁end|> | raise ValueError("Error obtaining the merchant_order")
if merchant_order_info["status"] == 200 |
<|file_name|>make-pathway2list.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
## A name of directory containing 'path:...' file
## You can download them using 'make-wget_pathway.sh' script
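## Usage: ./make-pathway2list.py <dir_name>
## Writes <dir_name>.summary, <dir_name>.genes and <dir_name>.compounds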
dir_name = sys.argv[1]
f_summary = open('%s.summary'%dir_name,'w')
f_genes = open('%s.genes'%dir_name,'w')
f_compounds = open('%s.compounds'%dir_name,'w')
gene_total = []
for filename in os.listdir( dir_name ):
if( not filename.startswith('path:') ):
continue
#sys.stderr.write('Read %s ... '%filename)
path_id = ''
path_name = ''
gene_list = []
comp_list = []
prev_tag = ''
f = open(os.path.join(dir_name,filename),'r')
for line in f:
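# KEGG flat files keep the record tag in a fixed-width leading column; a
# blank tag means the line continues the previous tag's record (prev_tag).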
tmp_tag = line[:11].strip()
if( tmp_tag == 'ENTRY' ):
path_id = line.strip().split()[1]
if( tmp_tag == 'NAME' ):
path_name = line[11:].split(' - ')[0].strip()
if( tmp_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == '' and prev_tag == 'COMPOUND' ):
comp_list.append( line[11:].strip().split()[0] )
f_compounds.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
elif( tmp_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] )<|fim▁hole|> f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
if( tmp_tag != '' ):
prev_tag = tmp_tag
f.close()
if( len(gene_list) == 0 ):
sys.stderr.write('//SKIP// %s(%d) %s\n'%(path_id, len(gene_list), path_name))
continue
f_summary.write('path:%s\t%s\t%d\t%d\n'%(path_id, path_name, len(gene_list), len(comp_list)))
f_summary.close()
f_genes.close()
f_compounds.close()<|fim▁end|> | f_genes.write('path:%s\t%s\n'%(path_id,line[11:].strip()))
#print line[11:].strip()
elif( tmp_tag == '' and prev_tag == 'GENE' ):
gene_list.append( line[11:].strip().split()[0] ) |
<|file_name|>test_frontend.py<|end_file_name|><|fim▁begin|># Authors:
# Jason Gerard DeRose <[email protected]>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipalib.frontend` module.
"""
# FIXME: Pylint errors
# pylint: disable=no-member
from ipatests.util import raises, read_only
from ipatests.util import ClassChecker, create_test_api
from ipatests.util import assert_equal
from ipalib.constants import TYPE_ERROR
from ipalib.base import NameSpace
from ipalib import frontend, backend, plugable, errors, parameters, config
from ipalib import output, messages
from ipalib.parameters import Str
from ipapython.version import API_VERSION
def test_RULE_FLAG():
assert frontend.RULE_FLAG == 'validation_rule'
def test_rule():
"""
Test the `ipalib.frontend.rule` function.
"""
flag = frontend.RULE_FLAG
rule = frontend.rule
def my_func():
pass
assert not hasattr(my_func, flag)
rule(my_func)
assert getattr(my_func, flag) is True
@rule
def my_func2():
pass
assert getattr(my_func2, flag) is True
def test_is_rule():
"""
Test the `ipalib.frontend.is_rule` function.
"""
is_rule = frontend.is_rule
flag = frontend.RULE_FLAG
class no_call(object):
def __init__(self, value):
if value is not None:
assert value in (True, False)
setattr(self, flag, value)
class call(no_call):
def __call__(self):
pass
assert is_rule(call(True))
assert not is_rule(no_call(True))
assert not is_rule(call(False))
assert not is_rule(call(None))
class test_HasParam(ClassChecker):
"""
Test the `ipalib.frontend.Command` class.
"""
_cls = frontend.HasParam
def test_get_param_iterable(self):
"""
Test the `ipalib.frontend.HasParam._get_param_iterable` method.
"""
api = 'the api instance'
class WithTuple(self.cls):
takes_stuff = ('one', 'two')
o = WithTuple(api)
assert o._get_param_iterable('stuff') is WithTuple.takes_stuff
junk = ('three', 'four')
class WithCallable(self.cls):
def takes_stuff(self):
return junk
o = WithCallable(api)
assert o._get_param_iterable('stuff') is junk
class WithParam(self.cls):
takes_stuff = parameters.Str('five')
o = WithParam(api)
assert o._get_param_iterable('stuff') == (WithParam.takes_stuff,)
class WithStr(self.cls):
takes_stuff = 'six'
o = WithStr(api)
assert o._get_param_iterable('stuff') == ('six',)
class Wrong(self.cls):
takes_stuff = ['seven', 'eight']
o = Wrong(api)
e = raises(TypeError, o._get_param_iterable, 'stuff')
assert str(e) == '%s.%s must be a tuple, callable, or spec; got %r' % (
'Wrong', 'takes_stuff', Wrong.takes_stuff
)
def test_filter_param_by_context(self):
"""
Test the `ipalib.frontend.HasParam._filter_param_by_context` method.
"""
api = 'the api instance'
class Example(self.cls):
def get_stuff(self):
return (
'one', # Make sure create_param() is called for each spec
'two',
parameters.Str('three', include='cli'),
parameters.Str('four', exclude='server'),
parameters.Str('five', exclude=['whatever', 'cli']),
)
o = Example(api)
# Test when env is None:
params = list(o._filter_param_by_context('stuff'))
assert list(p.name for p in params) == [
'one', 'two', 'three', 'four', 'five'
]
for p in params:
assert type(p) is parameters.Str
# Test when env.context == 'cli':
cli = config.Env(context='cli')
assert cli.context == 'cli'
params = list(o._filter_param_by_context('stuff', cli))
assert list(p.name for p in params) == ['one', 'two', 'three', 'four']
for p in params:
assert type(p) is parameters.Str
# Test when env.context == 'server'
server = config.Env(context='server')
assert server.context == 'server'
params = list(o._filter_param_by_context('stuff', server))
assert list(p.name for p in params) == ['one', 'two', 'five']
for p in params:
assert type(p) is parameters.Str
# Test with no get_stuff:
class Missing(self.cls):
pass
o = Missing(api)
gen = o._filter_param_by_context('stuff')
e = raises(NotImplementedError, list, gen)
assert str(e) == 'Missing.get_stuff()'
# Test when get_stuff is not callable:
class NotCallable(self.cls):
get_stuff = ('one', 'two')
o = NotCallable(api)
gen = o._filter_param_by_context('stuff')
e = raises(TypeError, list, gen)
assert str(e) == '%s.%s must be a callable; got %r' % (
'NotCallable', 'get_stuff', NotCallable.get_stuff
)
class test_Command(ClassChecker):
"""
Test the `ipalib.frontend.Command` class.
"""
_cls = frontend.Command
def get_subcls(self):
"""
Return a standard subclass of `ipalib.frontend.Command`.
"""
class Rule(object):
def __init__(self, name):
self.name = name
def __call__(self, _, value):
if value != self.name:
return _('must equal %r') % self.name
default_from = parameters.DefaultFrom(
lambda arg: arg,
'default_from'
)
normalizer = lambda value: value.lower()
class example(self.cls):
takes_options = (
parameters.Str('option0', Rule('option0'),
normalizer=normalizer,
default_from=default_from,
),
parameters.Str('option1', Rule('option1'),
normalizer=normalizer,
default_from=default_from,
),
)
return example
def get_instance(self, args=tuple(), options=tuple()):
"""
Helper method used to test args and options.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class example(self.cls):
takes_args = args
takes_options = options
o = example(api)
o.finalize()
return o
def test_class(self):
"""
Test the `ipalib.frontend.Command` class.
"""
assert self.cls.takes_options == tuple()
assert self.cls.takes_args == tuple()
def test_get_args(self):
"""
Test the `ipalib.frontend.Command.get_args` method.
"""
api = 'the api instance'<|fim▁hole|> assert list(self.cls(api).get_args()) == []
args = ('login', 'stuff')
o = self.get_instance(args=args)
assert tuple(o.get_args()) == args
def test_get_options(self):
"""
Test the `ipalib.frontend.Command.get_options` method.
"""
api = 'the api instance'
options = list(self.cls(api).get_options())
assert len(options) == 1
assert options[0].name == 'version'
options = ('verbose', 'debug')
o = self.get_instance(options=options)
assert len(tuple(o.get_options())) == 3
assert 'verbose' in tuple(o.get_options())
assert 'debug' in tuple(o.get_options())
def test_args(self):
"""
Test the ``ipalib.frontend.Command.args`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert type(o.args) is plugable.NameSpace
assert len(o.args) == 0
args = ('destination', 'source?')
ns = self.get_instance(args=args).args
assert type(ns) is plugable.NameSpace
assert len(ns) == len(args)
assert list(ns) == ['destination', 'source']
assert type(ns.destination) is parameters.Str
assert type(ns.source) is parameters.Str
assert ns.destination.required is True
assert ns.destination.multivalue is False
assert ns.source.required is False
assert ns.source.multivalue is False
# Test TypeError:
e = raises(TypeError, self.get_instance, args=(u'whatever',))
assert str(e) == TYPE_ERROR % (
'spec', (str, parameters.Param), u'whatever', unicode)
# Test ValueError, required after optional:
e = raises(ValueError, self.get_instance, args=('arg1?', 'arg2'))
assert str(e) == "arg2: required argument after optional in %s arguments ['arg1?', 'arg2']" % (self.get_instance().name)
# Test ValueError, scalar after multivalue:
e = raises(ValueError, self.get_instance, args=('arg1+', 'arg2'))
assert str(e) == 'arg2: only final argument can be multivalue'
def test_max_args(self):
"""
Test the ``ipalib.frontend.Command.max_args`` instance attribute.
"""
o = self.get_instance()
assert o.max_args == 0
o = self.get_instance(args=('one?',))
assert o.max_args == 1
o = self.get_instance(args=('one', 'two?'))
assert o.max_args == 2
o = self.get_instance(args=('one', 'multi+',))
assert o.max_args is None
o = self.get_instance(args=('one', 'multi*',))
assert o.max_args is None
def test_options(self):
"""
Test the ``ipalib.frontend.Command.options`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert type(o.options) is plugable.NameSpace
assert len(o.options) == 1
options = ('target', 'files*')
ns = self.get_instance(options=options).options
assert type(ns) is plugable.NameSpace
assert len(ns) == len(options) + 1
assert list(ns) == ['target', 'files', 'version']
assert type(ns.target) is parameters.Str
assert type(ns.files) is parameters.Str
assert ns.target.required is True
assert ns.target.multivalue is False
assert ns.files.required is False
assert ns.files.multivalue is True
def test_output(self):
"""
Test the ``ipalib.frontend.Command.output`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
inst = self.cls(api)
inst.finalize()
assert type(inst.output) is plugable.NameSpace
assert list(inst.output) == ['result']
assert type(inst.output.result) is output.Output
def test_iter_output(self):
"""
Test the ``ipalib.frontend.Command._iter_output`` instance attribute.
"""
api = 'the api instance'
class Example(self.cls):
pass
inst = Example(api)
inst.has_output = tuple()
assert list(inst._iter_output()) == []
wrong = ['hello', 'world']
inst.has_output = wrong
e = raises(TypeError, list, inst._iter_output())
assert str(e) == 'Example.has_output: need a %r; got a %r: %r' % (
tuple, list, wrong
)
wrong = ('hello', 17)
inst.has_output = wrong
e = raises(TypeError, list, inst._iter_output())
assert str(e) == 'Example.has_output[1]: need a %r; got a %r: %r' % (
(str, output.Output), int, 17
)
okay = ('foo', output.Output('bar'), 'baz')
inst.has_output = okay
items = list(inst._iter_output())
assert len(items) == 3
assert list(o.name for o in items) == ['foo', 'bar', 'baz']
for o in items:
assert type(o) is output.Output
def test_soft_validate(self):
"""
Test the `ipalib.frontend.Command.soft_validate` method.
"""
class api(object):
env = config.Env(context='cli')
@staticmethod
def is_production_mode():
return False
class user_add(frontend.Command):
takes_args = parameters.Str('uid',
normalizer=lambda value: value.lower(),
default_from=lambda givenname, sn: givenname[0] + sn,
)
takes_options = ('givenname', 'sn')
cmd = user_add(api)
cmd.finalize()
assert list(cmd.params) == ['givenname', 'sn', 'uid', 'version']
ret = cmd.soft_validate({})
assert sorted(ret['values']) == ['version']
assert sorted(ret['errors']) == ['givenname', 'sn', 'uid']
assert cmd.soft_validate(dict(givenname=u'First', sn=u'Last')) == dict(
values=dict(givenname=u'First', sn=u'Last', uid=u'flast',
version=None),
errors=dict(),
)
def test_convert(self):
"""
Test the `ipalib.frontend.Command.convert` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
kw = dict(
option0=u'1.5',
option1=u'7',
)
o = self.subcls(api)
o.finalize()
for (key, value) in o.convert(**kw).iteritems():
assert_equal(unicode(kw[key]), value)
def test_normalize(self):
"""
Test the `ipalib.frontend.Command.normalize` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
kw = dict(
option0=u'OPTION0',
option1=u'OPTION1',
)
norm = dict((k, v.lower()) for (k, v) in kw.items())
sub = self.subcls(api)
sub.finalize()
assert sub.normalize(**kw) == norm
def test_get_default(self):
"""
Test the `ipalib.frontend.Command.get_default` method.
"""
        # FIXME: Add updated unit tests for get_default()
def test_default_from_chaining(self):
"""
Test chaining of parameters through default_from.
"""
class my_cmd(self.cls):
takes_options = (
Str('option0'),
Str('option1', default_from=lambda option0: option0),
Str('option2', default_from=lambda option1: option1),
)
def run(self, *args, **options):
return dict(result=options)
kw = dict(option0=u'some value')
(api, home) = create_test_api()
api.finalize()
o = my_cmd(api)
o.finalize()
e = o(**kw) # pylint: disable=not-callable
assert type(e) is dict
assert 'result' in e
assert 'option2' in e['result']
assert e['result']['option2'] == u'some value'
def test_validate(self):
"""
Test the `ipalib.frontend.Command.validate` method.
"""
class api(object):
env = config.Env(context='cli')
@staticmethod
def is_production_mode():
return False
sub = self.subcls(api)
sub.finalize()
# Check with valid values
okay = dict(
option0=u'option0',
option1=u'option1',
another_option='some value',
version=API_VERSION,
)
sub.validate(**okay)
# Check with an invalid value
fail = dict(okay)
fail['option0'] = u'whatever'
e = raises(errors.ValidationError, sub.validate, **fail)
assert_equal(e.name, 'option0')
assert_equal(e.value, u'whatever')
assert_equal(e.error, u"must equal 'option0'")
assert e.rule.__class__.__name__ == 'Rule'
assert e.index is None
# Check with a missing required arg
fail = dict(okay)
fail.pop('option1')
e = raises(errors.RequirementError, sub.validate, **fail)
assert e.name == 'option1'
def test_execute(self):
"""
Test the `ipalib.frontend.Command.execute` method.
"""
api = 'the api instance'
o = self.cls(api)
e = raises(NotImplementedError, o.execute)
assert str(e) == 'Command.execute()'
def test_args_options_2_params(self):
"""
Test the `ipalib.frontend.Command.args_options_2_params` method.
"""
# Test that ZeroArgumentError is raised:
o = self.get_instance()
e = raises(errors.ZeroArgumentError, o.args_options_2_params, 1)
assert e.name == 'example'
# Test that MaxArgumentError is raised (count=1)
o = self.get_instance(args=('one?',))
e = raises(errors.MaxArgumentError, o.args_options_2_params, 1, 2)
assert e.name == 'example'
assert e.count == 1
assert str(e) == "command 'example' takes at most 1 argument"
# Test that MaxArgumentError is raised (count=2)
o = self.get_instance(args=('one', 'two?'))
e = raises(errors.MaxArgumentError, o.args_options_2_params, 1, 2, 3)
assert e.name == 'example'
assert e.count == 2
assert str(e) == "command 'example' takes at most 2 arguments"
# Test that OptionError is raised when an extra option is given:
o = self.get_instance()
e = raises(errors.OptionError, o.args_options_2_params, bad_option=True)
assert e.option == 'bad_option'
# Test that OverlapError is raised:
o = self.get_instance(args=('one', 'two'), options=('three', 'four'))
e = raises(errors.OverlapError, o.args_options_2_params,
1, 2, three=3, two=2, four=4, one=1)
assert e.names == ['one', 'two']
# Test the permutations:
o = self.get_instance(args=('one', 'two*'), options=('three', 'four'))
mthd = o.args_options_2_params
assert mthd() == dict()
assert mthd(1) == dict(one=1)
assert mthd(1, 2) == dict(one=1, two=(2,))
assert mthd(1, 21, 22, 23) == dict(one=1, two=(21, 22, 23))
assert mthd(1, (21, 22, 23)) == dict(one=1, two=(21, 22, 23))
assert mthd(three=3, four=4) == dict(three=3, four=4)
assert mthd(three=3, four=4, one=1, two=2) == \
dict(one=1, two=2, three=3, four=4)
assert mthd(1, 21, 22, 23, three=3, four=4) == \
dict(one=1, two=(21, 22, 23), three=3, four=4)
assert mthd(1, (21, 22, 23), three=3, four=4) == \
dict(one=1, two=(21, 22, 23), three=3, four=4)
def test_args_options_2_entry(self):
"""
Test `ipalib.frontend.Command.args_options_2_entry` method.
"""
class my_cmd(self.cls):
takes_args = (
parameters.Str('one', attribute=True),
parameters.Str('two', attribute=False),
)
takes_options = (
parameters.Str('three', attribute=True, multivalue=True),
parameters.Str('four', attribute=True, multivalue=False),
)
def run(self, *args, **kw):
return self.args_options_2_entry(*args, **kw)
args = ('one', 'two')
kw = dict(three=('three1', 'three2'), four='four')
(api, home) = create_test_api()
api.finalize()
o = my_cmd(api)
o.finalize()
e = o.run(*args, **kw)
assert type(e) is dict
assert 'one' in e
assert 'two' not in e
assert 'three' in e
assert 'four' in e
assert e['one'] == 'one'
assert e['three'] == ['three1', 'three2']
assert e['four'] == 'four'
def test_params_2_args_options(self):
"""
Test the `ipalib.frontend.Command.params_2_args_options` method.
"""
o = self.get_instance(args='one', options='two')
assert o.params_2_args_options() == ((None,), {})
assert o.params_2_args_options(one=1) == ((1,), {})
assert o.params_2_args_options(two=2) == ((None,), dict(two=2))
assert o.params_2_args_options(two=2, one=1) == ((1,), dict(two=2))
def test_run(self):
"""
Test the `ipalib.frontend.Command.run` method.
"""
class my_cmd(self.cls):
def execute(self, *args, **kw):
return ('execute', args, kw)
def forward(self, *args, **kw):
return ('forward', args, kw)
args = ('Hello,', 'world,')
kw = dict(how_are='you', on_this='fine day?', version=API_VERSION)
# Test in server context:
(api, home) = create_test_api(in_server=True)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
out = o.run(*args, **kw)
assert ('execute', args, kw) == out
# Test in non-server context
(api, home) = create_test_api(in_server=False)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert ('forward', args, kw) == o.run(*args, **kw)
def test_messages(self):
"""
Test correct handling of messages
"""
class TestMessage(messages.PublicMessage):
type = 'info'
format = 'This is a message.'
errno = 1234
class my_cmd(self.cls):
def execute(self, *args, **kw):
result = {'name': 'execute'}
messages.add_message(kw['version'], result, TestMessage())
return result
def forward(self, *args, **kw):
result = {'name': 'forward'}
messages.add_message(kw['version'], result, TestMessage())
return result
args = ('Hello,', 'world,')
kw = dict(how_are='you', on_this='fine day?', version=API_VERSION)
expected = [TestMessage().to_dict()]
# Test in server context:
(api, home) = create_test_api(in_server=True)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert {'name': 'execute', 'messages': expected} == o.run(*args, **kw)
# Test in non-server context
(api, home) = create_test_api(in_server=False)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert {'name': 'forward', 'messages': expected} == o.run(*args, **kw)
def test_validate_output_basic(self):
"""
Test the `ipalib.frontend.Command.validate_output` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Example(self.cls):
has_output = ('foo', 'bar', 'baz')
inst = Example(api)
inst.finalize()
# Test with wrong type:
wrong = ('foo', 'bar', 'baz')
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): need a %r; got a %r: %r' % (
'Example', dict, tuple, wrong
)
        # Test with missing keys:
wrong = dict(bar='hello')
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): missing keys %r in %r' % (
'Example', ['baz', 'foo'], wrong
)
# Test with extra keys:
wrong = dict(foo=1, bar=2, baz=3, fee=4, azz=5)
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): unexpected keys %r in %r' % (
'Example', ['azz', 'fee'], wrong
)
# Test with different keys:
wrong = dict(baz=1, xyzzy=2, quux=3)
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): missing keys %r in %r' % (
'Example', ['bar', 'foo'], wrong
), str(e)
def test_validate_output_per_type(self):
"""
Test `ipalib.frontend.Command.validate_output` per-type validation.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Complex(self.cls):
has_output = (
output.Output('foo', int),
output.Output('bar', list),
)
inst = Complex(api)
inst.finalize()
wrong = dict(foo=17.9, bar=[18])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s:\n output[%r]: need %r; got %r: %r' % (
'Complex.validate_output()', 'foo', int, float, 17.9
)
wrong = dict(foo=18, bar=17)
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s:\n output[%r]: need %r; got %r: %r' % (
'Complex.validate_output()', 'bar', list, int, 17
)
def test_validate_output_nested(self):
"""
Test `ipalib.frontend.Command.validate_output` nested validation.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Subclass(output.ListOfEntries):
pass
# Test nested validation:
class nested(self.cls):
has_output = (
output.Output('hello', int),
Subclass('world'),
)
inst = nested(api)
inst.finalize()
okay = dict(foo='bar')
nope = ('aye', 'bee')
wrong = dict(hello=18, world=[okay, nope, okay])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == output.emsg % (
'nested', 'Subclass', 'world', 1, dict, tuple, nope
)
wrong = dict(hello=18, world=[okay, okay, okay, okay, nope])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == output.emsg % (
'nested', 'Subclass', 'world', 4, dict, tuple, nope
)
def test_get_output_params(self):
"""
Test the `ipalib.frontend.Command.get_output_params` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class example(self.cls):
has_output_params = (
'one',
'two',
'three',
)
takes_args = (
'foo',
)
takes_options = (
Str('bar', flags='no_output'),
'baz',
)
inst = example(api)
inst.finalize()
assert list(inst.get_output_params()) == [
'one', 'two', 'three', inst.params.foo, inst.params.baz
]
assert list(inst.output_params) == ['one', 'two', 'three', 'foo', 'baz']
class test_LocalOrRemote(ClassChecker):
"""
Test the `ipalib.frontend.LocalOrRemote` class.
"""
_cls = frontend.LocalOrRemote
def test_init(self):
"""
Test the `ipalib.frontend.LocalOrRemote.__init__` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert list(o.args) == []
assert list(o.options) == ['server', 'version']
op = o.options.server
assert op.required is False
assert op.default is False
def test_run(self):
"""
Test the `ipalib.frontend.LocalOrRemote.run` method.
"""
class example(self.cls):
takes_args = 'key?'
def forward(self, *args, **options):
return dict(result=('forward', args, options))
def execute(self, *args, **options):
return dict(result=('execute', args, options))
# Test when in_server=False:
(api, home) = create_test_api(in_server=False)
api.add_plugin(example)
api.finalize()
cmd = api.Command.example
assert cmd(version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=False))
)
assert cmd(u'var', version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=False))
)
assert cmd(server=True, version=u'2.47') == dict(
result=('forward', (None,), dict(version=u'2.47', server=True))
)
assert cmd(u'var', server=True, version=u'2.47') == dict(
result=('forward', (u'var',), dict(version=u'2.47', server=True))
)
# Test when in_server=True (should always call execute):
(api, home) = create_test_api(in_server=True)
api.add_plugin(example)
api.finalize()
cmd = api.Command.example
assert cmd(version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=False))
)
assert cmd(u'var', version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=False))
)
assert cmd(server=True, version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=True))
)
assert cmd(u'var', server=True, version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=True))
)
class test_Object(ClassChecker):
"""
Test the `ipalib.frontend.Object` class.
"""
_cls = frontend.Object
def test_class(self):
"""
Test the `ipalib.frontend.Object` class.
"""
assert self.cls.backend is None
assert self.cls.methods is None
assert self.cls.params is None
assert self.cls.params_minus_pk is None
assert self.cls.takes_params == tuple()
def test_init(self):
"""
Test the `ipalib.frontend.Object.__init__` method.
"""
# Setup for test:
class DummyAttribute(object):
def __init__(self, obj_name, attr_name, name=None):
self.obj_name = obj_name
self.attr_name = attr_name
if name is None:
self.name = '%s_%s' % (obj_name, attr_name)
else:
self.name = name
self.param = frontend.create_param(attr_name)
def __clone__(self, attr_name):
return self.__class__(
self.obj_name,
self.attr_name,
getattr(self, attr_name)
)
def get_attributes(cnt, format):
for name in ['other', 'user', 'another']:
for i in xrange(cnt):
yield DummyAttribute(name, format % i)
cnt = 10
methods_format = 'method_%d'
class FakeAPI(object):
Method = plugable.NameSpace(
get_attributes(cnt, methods_format)
)
def __contains__(self, key):
return hasattr(self, key)
def __getitem__(self, key):
return getattr(self, key)
def is_production_mode(self):
return False
api = FakeAPI()
assert len(api.Method) == cnt * 3
class user(self.cls):
pass
# Actually perform test:
o = user(api)
assert read_only(o, 'api') is api
namespace = o.methods
assert isinstance(namespace, plugable.NameSpace)
assert len(namespace) == cnt
f = methods_format
for i in xrange(cnt):
attr_name = f % i
attr = namespace[attr_name]
assert isinstance(attr, DummyAttribute)
assert attr is getattr(namespace, attr_name)
assert attr.obj_name == 'user'
assert attr.attr_name == attr_name
assert attr.name == '%s_%s' % ('user', attr_name)
# Test params instance attribute
o = self.cls(api)
ns = o.params
assert type(ns) is plugable.NameSpace
assert len(ns) == 0
class example(self.cls):
takes_params = ('banana', 'apple')
o = example(api)
ns = o.params
assert type(ns) is plugable.NameSpace
assert len(ns) == 2, repr(ns)
assert list(ns) == ['banana', 'apple']
for p in ns():
assert type(p) is parameters.Str
assert p.required is True
assert p.multivalue is False
def test_primary_key(self):
"""
Test the `ipalib.frontend.Object.primary_key` attribute.
"""
(api, home) = create_test_api()
api.finalize()
# Test with no primary keys:
class example1(self.cls):
takes_params = (
'one',
'two',
)
o = example1(api)
assert o.primary_key is None
# Test with 1 primary key:
class example2(self.cls):
takes_params = (
'one',
'two',
parameters.Str('three', primary_key=True),
'four',
)
o = example2(api)
pk = o.primary_key
assert type(pk) is parameters.Str
assert pk.name == 'three'
assert pk.primary_key is True
assert o.params[2] is o.primary_key
assert isinstance(o.params_minus_pk, plugable.NameSpace)
assert list(o.params_minus_pk) == ['one', 'two', 'four']
# Test with multiple primary_key:
class example3(self.cls):
takes_params = (
parameters.Str('one', primary_key=True),
parameters.Str('two', primary_key=True),
'three',
parameters.Str('four', primary_key=True),
)
o = example3(api)
e = raises(ValueError, o.finalize)
assert str(e) == \
'example3 (Object) has multiple primary keys: one, two, four'
def test_backend(self):
"""
Test the `ipalib.frontend.Object.backend` attribute.
"""
(api, home) = create_test_api()
class ldap(backend.Backend):
whatever = 'It worked!'
api.add_plugin(ldap)
class user(frontend.Object):
backend_name = 'ldap'
api.add_plugin(user)
api.finalize()
b = api.Object.user.backend
assert isinstance(b, ldap)
assert b.whatever == 'It worked!'
def test_get_dn(self):
"""
Test the `ipalib.frontend.Object.get_dn` method.
"""
api = 'the api instance'
o = self.cls(api)
e = raises(NotImplementedError, o.get_dn, 'primary key')
assert str(e) == 'Object.get_dn()'
class user(self.cls):
pass
o = user(api)
e = raises(NotImplementedError, o.get_dn, 'primary key')
assert str(e) == 'user.get_dn()'
def test_params_minus(self):
"""
Test the `ipalib.frontend.Object.params_minus` method.
"""
class example(self.cls):
takes_params = ('one', 'two', 'three', 'four')
(api, home) = create_test_api()
api.finalize()
o = example(api)
p = o.params
assert tuple(o.params_minus()) == tuple(p())
assert tuple(o.params_minus([])) == tuple(p())
assert tuple(o.params_minus('two', 'three')) == (p.one, p.four)
assert tuple(o.params_minus(['two', 'three'])) == (p.one, p.four)
assert tuple(o.params_minus(p.two, p.three)) == (p.one, p.four)
assert tuple(o.params_minus([p.two, p.three])) == (p.one, p.four)
ns = NameSpace([p.two, p.three])
assert tuple(o.params_minus(ns)) == (p.one, p.four)
class test_Attribute(ClassChecker):
"""
Test the `ipalib.frontend.Attribute` class.
"""
_cls = frontend.Attribute
def test_class(self):
"""
Test the `ipalib.frontend.Attribute` class.
"""
assert self.cls.__bases__ == (plugable.Plugin,)
assert type(self.cls.obj) is property
assert type(self.cls.obj_name) is property
assert type(self.cls.attr_name) is property
def test_init(self):
"""
Test the `ipalib.frontend.Attribute.__init__` method.
"""
user_obj = 'The user frontend.Object instance'
class api(object):
Object = dict(user=user_obj)
@staticmethod
def is_production_mode():
return False
class user_add(self.cls):
pass
o = user_add(api)
assert read_only(o, 'api') is api
assert read_only(o, 'obj') is user_obj
assert read_only(o, 'obj_name') == 'user'
assert read_only(o, 'attr_name') == 'add'
class test_Method(ClassChecker):
"""
Test the `ipalib.frontend.Method` class.
"""
_cls = frontend.Method
def get_api(self, args=tuple(), options=tuple()):
"""
Return a finalized `ipalib.plugable.API` instance.
"""
(api, home) = create_test_api()
class user(frontend.Object):
takes_params = (
'givenname',
'sn',
frontend.Param('uid', primary_key=True),
'initials',
)
class user_verb(self.cls):
takes_args = args
takes_options = options
api.add_plugin(user)
api.add_plugin(user_verb)
api.finalize()
return api
def test_class(self):
"""
Test the `ipalib.frontend.Method` class.
"""
assert self.cls.__bases__ == (frontend.Attribute, frontend.Command)
def test_init(self):
"""
Test the `ipalib.frontend.Method.__init__` method.
"""
api = 'the api instance'
class user_add(self.cls):
pass
o = user_add(api)
assert o.name == 'user_add'
assert o.obj_name == 'user'
assert o.attr_name == 'add'<|fim▁end|> | |
<|file_name|>TexUnpackBlob.cpp<|end_file_name|><|fim▁begin|>/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "TexUnpackBlob.h"
#include "GLBlitHelper.h"
#include "GLContext.h"
#include "mozilla/dom/Element.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/RefPtr.h"
#include "nsLayoutUtils.h"
#include "WebGLBuffer.h"
#include "WebGLContext.h"
#include "WebGLTexelConversions.h"
#include "WebGLTexture.h"
namespace mozilla {
namespace webgl {
static bool
IsPIValidForDOM(const webgl::PackingInfo& pi)
{
// https://www.khronos.org/registry/webgl/specs/latest/2.0/#TEXTURE_TYPES_FORMATS_FROM_DOM_ELEMENTS_TABLE
// Just check for invalid individual formats and types, not combinations.
switch (pi.format) {
case LOCAL_GL_RGB:
case LOCAL_GL_RGBA:
case LOCAL_GL_LUMINANCE_ALPHA:
case LOCAL_GL_LUMINANCE:
case LOCAL_GL_ALPHA:
case LOCAL_GL_RED:
case LOCAL_GL_RED_INTEGER:
case LOCAL_GL_RG:
case LOCAL_GL_RG_INTEGER:
case LOCAL_GL_RGB_INTEGER:
case LOCAL_GL_RGBA_INTEGER:
break;
case LOCAL_GL_SRGB:
case LOCAL_GL_SRGB_ALPHA:
// Allowed in WebGL1+EXT_srgb
break;
default:
return false;
}
switch (pi.type) {
case LOCAL_GL_UNSIGNED_BYTE:
case LOCAL_GL_UNSIGNED_SHORT_5_6_5:
case LOCAL_GL_UNSIGNED_SHORT_4_4_4_4:
case LOCAL_GL_UNSIGNED_SHORT_5_5_5_1:
case LOCAL_GL_HALF_FLOAT:
case LOCAL_GL_HALF_FLOAT_OES:
case LOCAL_GL_FLOAT:
case LOCAL_GL_UNSIGNED_INT_10F_11F_11F_REV:
break;
default:
return false;
}
return true;
}
static bool
ValidatePIForDOM(WebGLContext* webgl, const char* funcName,
const webgl::PackingInfo& pi)
{
if (!IsPIValidForDOM(pi)) {
webgl->ErrorInvalidOperation("%s: Format or type is invalid for DOM sources.",
funcName);
return false;
}
return true;
}
static WebGLTexelFormat
FormatForPackingInfo(const PackingInfo& pi)
{
switch (pi.type) {
case LOCAL_GL_UNSIGNED_BYTE:
switch (pi.format) {
case LOCAL_GL_RED:
case LOCAL_GL_LUMINANCE:
case LOCAL_GL_RED_INTEGER:
return WebGLTexelFormat::R8;
case LOCAL_GL_ALPHA:
return WebGLTexelFormat::A8;
case LOCAL_GL_LUMINANCE_ALPHA:
return WebGLTexelFormat::RA8;
case LOCAL_GL_RGB:
case LOCAL_GL_RGB_INTEGER:
return WebGLTexelFormat::RGB8;
case LOCAL_GL_RGBA:
case LOCAL_GL_RGBA_INTEGER:
return WebGLTexelFormat::RGBA8;
case LOCAL_GL_RG:
case LOCAL_GL_RG_INTEGER:
return WebGLTexelFormat::RG8;
default:
break;
}
break;
case LOCAL_GL_UNSIGNED_SHORT_5_6_5:
if (pi.format == LOCAL_GL_RGB)
return WebGLTexelFormat::RGB565;
break;
case LOCAL_GL_UNSIGNED_SHORT_5_5_5_1:
if (pi.format == LOCAL_GL_RGBA)
return WebGLTexelFormat::RGBA5551;
break;
case LOCAL_GL_UNSIGNED_SHORT_4_4_4_4:
if (pi.format == LOCAL_GL_RGBA)
return WebGLTexelFormat::RGBA4444;
break;
case LOCAL_GL_HALF_FLOAT:
case LOCAL_GL_HALF_FLOAT_OES:
switch (pi.format) {
case LOCAL_GL_RED:
case LOCAL_GL_LUMINANCE:
return WebGLTexelFormat::R16F;
case LOCAL_GL_ALPHA: return WebGLTexelFormat::A16F;
case LOCAL_GL_LUMINANCE_ALPHA: return WebGLTexelFormat::RA16F;
case LOCAL_GL_RG: return WebGLTexelFormat::RG16F;
case LOCAL_GL_RGB: return WebGLTexelFormat::RGB16F;
case LOCAL_GL_RGBA: return WebGLTexelFormat::RGBA16F;
default:
break;
}
break;
case LOCAL_GL_FLOAT:
switch (pi.format) {
case LOCAL_GL_RED:
case LOCAL_GL_LUMINANCE:
return WebGLTexelFormat::R32F;
case LOCAL_GL_ALPHA: return WebGLTexelFormat::A32F;
case LOCAL_GL_LUMINANCE_ALPHA: return WebGLTexelFormat::RA32F;
case LOCAL_GL_RG: return WebGLTexelFormat::RG32F;
case LOCAL_GL_RGB: return WebGLTexelFormat::RGB32F;
case LOCAL_GL_RGBA: return WebGLTexelFormat::RGBA32F;
default:
break;
}
break;
case LOCAL_GL_UNSIGNED_INT_10F_11F_11F_REV:
if (pi.format == LOCAL_GL_RGB)
return WebGLTexelFormat::RGB11F11F10F;
break;
default:
break;
}
return WebGLTexelFormat::FormatNotSupportingAnyConversion;
}
////////////////////
static bool
ValidateUnpackPixels(WebGLContext* webgl, const char* funcName, uint32_t fullRows,
uint32_t tailPixels, webgl::TexUnpackBlob* blob)
{
if (!blob->mWidth || !blob->mHeight || !blob->mDepth)
return true;
const auto usedPixelsPerRow = CheckedUint32(blob->mSkipPixels) + blob->mWidth;
if (!usedPixelsPerRow.isValid() || usedPixelsPerRow.value() > blob->mRowLength) {
webgl->ErrorInvalidOperation("%s: UNPACK_SKIP_PIXELS + width >"
" UNPACK_ROW_LENGTH.",
funcName);
return false;
}
if (blob->mHeight > blob->mImageHeight) {
webgl->ErrorInvalidOperation("%s: height > UNPACK_IMAGE_HEIGHT.", funcName);
return false;
}
//////
// The spec doesn't bound SKIP_ROWS + height <= IMAGE_HEIGHT, unfortunately.
auto skipFullRows = CheckedUint32(blob->mSkipImages) * blob->mImageHeight;
skipFullRows += blob->mSkipRows;
MOZ_ASSERT(blob->mDepth >= 1);
MOZ_ASSERT(blob->mHeight >= 1);
auto usedFullRows = CheckedUint32(blob->mDepth - 1) * blob->mImageHeight;
usedFullRows += blob->mHeight - 1; // Full rows in the final image, excluding the tail.
const auto fullRowsNeeded = skipFullRows + usedFullRows;
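    // Worked example (illustrative numbers only): with mSkipImages=1,
    // mImageHeight=4, mSkipRows=2, mDepth=2, mHeight=3, we get
    // skipFullRows = 1*4 + 2 = 6 and usedFullRows = (2-1)*4 + (3-1) = 6,
    // so fullRowsNeeded = 12 full rows, plus the final tail row of pixels.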
if (!fullRowsNeeded.isValid()) {
webgl->ErrorOutOfMemory("%s: Invalid calculation for required row count.",
funcName);
return false;
}
if (fullRows > fullRowsNeeded.value())
return true;
if (fullRows == fullRowsNeeded.value() && tailPixels >= usedPixelsPerRow.value()) {
blob->mNeedsExactUpload = true;
return true;
}
webgl->ErrorInvalidOperation("%s: Desired upload requires more data than is"
" available: (%u rows plus %u pixels needed, %u rows"
" plus %u pixels available)",
funcName, fullRowsNeeded.value(),
usedPixelsPerRow.value(), fullRows, tailPixels);
return false;
}
static bool
ValidateUnpackBytes(WebGLContext* webgl, const char* funcName,
const webgl::PackingInfo& pi, size_t availByteCount,
webgl::TexUnpackBlob* blob)
{
if (!blob->mWidth || !blob->mHeight || !blob->mDepth)
return true;
const auto bytesPerPixel = webgl::BytesPerPixel(pi);
const auto bytesPerRow = CheckedUint32(blob->mRowLength) * bytesPerPixel;
const auto rowStride = RoundUpToMultipleOf(bytesPerRow, blob->mAlignment);
const auto fullRows = availByteCount / rowStride;
if (!fullRows.isValid()) {
webgl->ErrorOutOfMemory("%s: Unacceptable upload size calculated.");
return false;
}
const auto bodyBytes = fullRows.value() * rowStride.value();
const auto tailPixels = (availByteCount - bodyBytes) / bytesPerPixel;
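    // Worked example (illustrative numbers only): for RGB/UNSIGNED_BYTE
    // (bytesPerPixel=3) with mRowLength=3 and mAlignment=4: bytesPerRow=9,
    // rowStride=12. Given availByteCount=30: fullRows=2, bodyBytes=24, and
    // tailPixels = (30-24)/3 = 2.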
return ValidateUnpackPixels(webgl, funcName, fullRows.value(), tailPixels, blob);
}
////////////////////
static uint32_t
ZeroOn2D(TexImageTarget target, uint32_t val)
{
return (IsTarget3D(target) ? val : 0);
}
static uint32_t
FallbackOnZero(uint32_t val, uint32_t fallback)
{
return (val ? val : fallback);
}
TexUnpackBlob::TexUnpackBlob(const WebGLContext* webgl, TexImageTarget target,
uint32_t rowLength, uint32_t width, uint32_t height,
uint32_t depth, bool srcIsPremult)
: mAlignment(webgl->mPixelStore_UnpackAlignment)
, mRowLength(rowLength)
, mImageHeight(FallbackOnZero(ZeroOn2D(target, webgl->mPixelStore_UnpackImageHeight),
height))
, mSkipPixels(webgl->mPixelStore_UnpackSkipPixels)
, mSkipRows(webgl->mPixelStore_UnpackSkipRows)
, mSkipImages(ZeroOn2D(target, webgl->mPixelStore_UnpackSkipImages))
, mWidth(width)
, mHeight(height)
, mDepth(depth)
, mSrcIsPremult(srcIsPremult)
, mNeedsExactUpload(false)
{
MOZ_ASSERT_IF(!IsTarget3D(target), mDepth == 1);
}
bool
TexUnpackBlob::ConvertIfNeeded(WebGLContext* webgl, const char* funcName,
const uint32_t rowLength, const uint32_t rowCount,
WebGLTexelFormat srcFormat,
const uint8_t* const srcBegin, const ptrdiff_t srcStride,
WebGLTexelFormat dstFormat, const ptrdiff_t dstStride,
const uint8_t** const out_begin,
UniqueBuffer* const out_anchoredBuffer) const
{
MOZ_ASSERT(srcFormat != WebGLTexelFormat::FormatNotSupportingAnyConversion);
MOZ_ASSERT(dstFormat != WebGLTexelFormat::FormatNotSupportingAnyConversion);
*out_begin = srcBegin;
if (!rowLength || !rowCount)
return true;
const auto& dstIsPremult = webgl->mPixelStore_PremultiplyAlpha;
const auto srcOrigin = (webgl->mPixelStore_FlipY ? gl::OriginPos::TopLeft
: gl::OriginPos::BottomLeft);
const auto dstOrigin = gl::OriginPos::BottomLeft;
if (srcFormat != dstFormat) {
webgl->GenerateWarning("%s: Conversion requires pixel reformatting.", funcName);
} else if (mSrcIsPremult != dstIsPremult) {
webgl->GenerateWarning("%s: Conversion requires change in"
"alpha-premultiplication.",
funcName);
} else if (srcOrigin != dstOrigin) {
webgl->GenerateWarning("%s: Conversion requires y-flip.", funcName);
} else if (srcStride != dstStride) {
webgl->GenerateWarning("%s: Conversion requires change in stride.", funcName);
} else {
return true;
}
////
const auto dstTotalBytes = CheckedUint32(rowCount) * dstStride;
if (!dstTotalBytes.isValid()) {
webgl->ErrorOutOfMemory("%s: Calculation failed.", funcName);
return false;
}
UniqueBuffer dstBuffer = calloc(1, dstTotalBytes.value());
if (!dstBuffer.get()) {
webgl->ErrorOutOfMemory("%s: Failed to allocate dest buffer.", funcName);
return false;
}
const auto dstBegin = static_cast<uint8_t*>(dstBuffer.get());
////
// And go!:
bool wasTrivial;
if (!ConvertImage(rowLength, rowCount,
srcBegin, srcStride, srcOrigin, srcFormat, mSrcIsPremult,
dstBegin, dstStride, dstOrigin, dstFormat, dstIsPremult,
&wasTrivial))
{
webgl->ErrorImplementationBug("%s: ConvertImage failed.", funcName);
return false;
}
*out_begin = dstBegin;
*out_anchoredBuffer = Move(dstBuffer);
return true;
}
static GLenum
DoTexOrSubImage(bool isSubImage, gl::GLContext* gl, TexImageTarget target, GLint level,
const DriverUnpackInfo* dui, GLint xOffset, GLint yOffset, GLint zOffset,
GLsizei width, GLsizei height, GLsizei depth, const void* data)
{
if (isSubImage) {
return DoTexSubImage(gl, target, level, xOffset, yOffset, zOffset, width, height,
depth, dui->ToPacking(), data);
} else {
return DoTexImage(gl, target, level, dui, width, height, depth, data);
}
}
//////////////////////////////////////////////////////////////////////////////////////////
// TexUnpackBytes
TexUnpackBytes::TexUnpackBytes(const WebGLContext* webgl, TexImageTarget target,
uint32_t width, uint32_t height, uint32_t depth,
bool isClientData, const uint8_t* ptr, size_t availBytes)
: TexUnpackBlob(webgl, target,
FallbackOnZero(webgl->mPixelStore_UnpackRowLength, width),
width, height, depth, false)
, mIsClientData(isClientData)
, mPtr(ptr)
, mAvailBytes(availBytes)
{ }
bool
TexUnpackBytes::Validate(WebGLContext* webgl, const char* funcName,
const webgl::PackingInfo& pi)
{
if (mIsClientData && !mPtr)
return true;
return ValidateUnpackBytes(webgl, funcName, pi, mAvailBytes, this);
}
bool
TexUnpackBytes::TexOrSubImage(bool isSubImage, bool needsRespec, const char* funcName,
WebGLTexture* tex, TexImageTarget target, GLint level,
const webgl::DriverUnpackInfo* dui, GLint xOffset,
GLint yOffset, GLint zOffset, GLenum* const out_error) const
{
WebGLContext* webgl = tex->mContext;
const auto pi = dui->ToPacking();
const auto format = FormatForPackingInfo(pi);
const auto bytesPerPixel = webgl::BytesPerPixel(pi);
const uint8_t* uploadPtr = mPtr;
UniqueBuffer tempBuffer;
do {
if (!mIsClientData || !mPtr)
break;
if (!webgl->mPixelStore_FlipY &&
!webgl->mPixelStore_PremultiplyAlpha)
{
break;
}
if (webgl->mPixelStore_UnpackImageHeight ||
webgl->mPixelStore_UnpackSkipImages ||
webgl->mPixelStore_UnpackRowLength ||
webgl->mPixelStore_UnpackSkipRows ||
webgl->mPixelStore_UnpackSkipPixels)
{
webgl->ErrorInvalidOperation("%s: Non-DOM-Element uploads with alpha-premult"
" or y-flip do not support subrect selection.",
funcName);
return false;
}
webgl->GenerateWarning("%s: Alpha-premult and y-flip are deprecated for"
" non-DOM-Element uploads.",
funcName);
const uint32_t rowLength = mWidth;
const uint32_t rowCount = mHeight * mDepth;
const auto stride = RoundUpToMultipleOf(rowLength * bytesPerPixel, mAlignment);
if (!ConvertIfNeeded(webgl, funcName, rowLength, rowCount, format, mPtr, stride,
format, stride, &uploadPtr, &tempBuffer))
{
return false;
}
} while (false);
//////
const auto& gl = webgl->gl;
bool useParanoidHandling = false;
if (mNeedsExactUpload && webgl->mBoundPixelUnpackBuffer) {
webgl->GenerateWarning("%s: Uploads from a buffer with a final row with a byte"
" count smaller than the row stride can incur extra"
" overhead.",
funcName);
if (gl->WorkAroundDriverBugs()) {
useParanoidHandling |= (gl->Vendor() == gl::GLVendor::NVIDIA);
}
}
if (!useParanoidHandling) {
if (webgl->mBoundPixelUnpackBuffer) {
gl->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER,
webgl->mBoundPixelUnpackBuffer->mGLName);
}
*out_error = DoTexOrSubImage(isSubImage, gl, target, level, dui, xOffset, yOffset,
zOffset, mWidth, mHeight, mDepth, uploadPtr);
if (webgl->mBoundPixelUnpackBuffer) {
gl->fBindBuffer(LOCAL_GL_PIXEL_UNPACK_BUFFER, 0);
}
return true;
}
//////
MOZ_ASSERT(webgl->mBoundPixelUnpackBuffer);
if (!isSubImage) {
// Alloc first to catch OOMs.
AssertUintParamCorrect(gl, LOCAL_GL_PIXEL_UNPACK_BUFFER, 0);
*out_error = DoTexOrSubImage(false, gl, target, level, dui, xOffset, yOffset,
zOffset, mWidth, mHeight, mDepth, nullptr);
if (*out_error)
return true;
}
const ScopedLazyBind bindPBO(gl, LOCAL_GL_PIXEL_UNPACK_BUFFER,
webgl->mBoundPixelUnpackBuffer);
//////
// Make our sometimes-implicit values explicit. Also this keeps them constant when we
// ask for height=mHeight-1 and such.
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH, mRowLength);
gl->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, mImageHeight);
if (mDepth > 1) {
*out_error = DoTexOrSubImage(true, gl, target, level, dui, xOffset, yOffset,
zOffset, mWidth, mHeight, mDepth-1, uploadPtr);
}
// Skip the images we uploaded.
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES, mSkipImages + mDepth - 1);
if (mHeight > 1) {
*out_error = DoTexOrSubImage(true, gl, target, level, dui, xOffset, yOffset,
zOffset+mDepth-1, mWidth, mHeight-1, 1, uploadPtr);
}
const auto totalSkipRows = CheckedUint32(mSkipImages) * mImageHeight + mSkipRows;
const auto totalFullRows = CheckedUint32(mDepth - 1) * mImageHeight + mHeight - 1;
const auto tailOffsetRows = totalSkipRows + totalFullRows;
const auto bytesPerRow = CheckedUint32(mRowLength) * bytesPerPixel;
const auto rowStride = RoundUpToMultipleOf(bytesPerRow, mAlignment);
if (!rowStride.isValid()) {
MOZ_CRASH("Should be checked earlier.");
}
const auto tailOffsetBytes = tailOffsetRows * rowStride;
uploadPtr += tailOffsetBytes.value();
//////
gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, 1); // No stride padding.
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH, 0); // No padding in general.
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES, 0); // Don't skip images,
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS, 0); // or rows.
// Keep skipping pixels though!
*out_error = DoTexOrSubImage(true, gl, target, level, dui, xOffset,
yOffset+mHeight-1, zOffset+mDepth-1, mWidth, 1, 1,
uploadPtr);
// Reset all our modified state.
gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, webgl->mPixelStore_UnpackAlignment);
gl->fPixelStorei(LOCAL_GL_UNPACK_IMAGE_HEIGHT, webgl->mPixelStore_UnpackImageHeight);
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH, webgl->mPixelStore_UnpackRowLength);
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_IMAGES, webgl->mPixelStore_UnpackSkipImages);
gl->fPixelStorei(LOCAL_GL_UNPACK_SKIP_ROWS, webgl->mPixelStore_UnpackSkipRows);
return true;
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// TexUnpackImage
TexUnpackImage::TexUnpackImage(const WebGLContext* webgl, TexImageTarget target,
uint32_t width, uint32_t height, uint32_t depth,
layers::Image* image, bool isAlphaPremult)
: TexUnpackBlob(webgl, target, image->GetSize().width, width, height, depth,
isAlphaPremult)
, mImage(image)
{ }
TexUnpackImage::~TexUnpackImage()
{ }
bool
TexUnpackImage::Validate(WebGLContext* webgl, const char* funcName,
const webgl::PackingInfo& pi)
{
if (!ValidatePIForDOM(webgl, funcName, pi))
return false;
const auto fullRows = mImage->GetSize().height;
return ValidateUnpackPixels(webgl, funcName, fullRows, 0, this);
}
bool
TexUnpackImage::TexOrSubImage(bool isSubImage, bool needsRespec, const char* funcName,
WebGLTexture* tex, TexImageTarget target, GLint level,
const webgl::DriverUnpackInfo* dui, GLint xOffset,
GLint yOffset, GLint zOffset, GLenum* const out_error) const
{
MOZ_ASSERT_IF(needsRespec, !isSubImage);
WebGLContext* webgl = tex->mContext;
gl::GLContext* gl = webgl->GL();
gl->MakeCurrent();
if (needsRespec) {
*out_error = DoTexOrSubImage(isSubImage, gl, target.get(), level, dui, xOffset,
yOffset, zOffset, mWidth, mHeight, mDepth,
nullptr);
if (*out_error)
return true;
}
do {
if (mDepth != 1)
break;<|fim▁hole|> break;
if (dui->unpackFormat != LOCAL_GL_RGB && dui->unpackFormat != LOCAL_GL_RGBA)
break;
if (dui->unpackType != LOCAL_GL_UNSIGNED_BYTE)
break;
gl::ScopedFramebuffer scopedFB(gl);
gl::ScopedBindFramebuffer bindFB(gl, scopedFB.FB());
{
gl::GLContext::LocalErrorScope errorScope(*gl);
gl->fFramebufferTexture2D(LOCAL_GL_FRAMEBUFFER, LOCAL_GL_COLOR_ATTACHMENT0,
target.get(), tex->mGLName, level);
if (errorScope.GetError())
break;
}
const GLenum status = gl->fCheckFramebufferStatus(LOCAL_GL_FRAMEBUFFER);
if (status != LOCAL_GL_FRAMEBUFFER_COMPLETE)
break;
const gfx::IntSize destSize(mWidth, mHeight);
const auto dstOrigin = (webgl->mPixelStore_FlipY ? gl::OriginPos::TopLeft
: gl::OriginPos::BottomLeft);
if (!gl->BlitHelper()->BlitImageToFramebuffer(mImage, destSize, scopedFB.FB(),
dstOrigin))
{
break;
}
// Blitting was successful, so we're done!
*out_error = 0;
return true;
} while (false);
webgl->GenerateWarning("%s: Failed to hit GPU-copy fast-path. Falling back to CPU"
" upload.",
funcName);
const RefPtr<gfx::SourceSurface> surf = mImage->GetAsSourceSurface();
RefPtr<gfx::DataSourceSurface> dataSurf;
if (surf) {
// WARNING: OSX can lose our MakeCurrent here.
dataSurf = surf->GetDataSurface();
}
if (!dataSurf) {
webgl->ErrorOutOfMemory("%s: GetAsSourceSurface or GetDataSurface failed after"
" blit failed for TexUnpackImage.",
funcName);
return false;
}
const TexUnpackSurface surfBlob(webgl, target, mWidth, mHeight, mDepth, dataSurf,
mSrcIsPremult);
return surfBlob.TexOrSubImage(isSubImage, needsRespec, funcName, tex, target, level,
dui, xOffset, yOffset, zOffset, out_error);
}
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// TexUnpackSurface
TexUnpackSurface::TexUnpackSurface(const WebGLContext* webgl, TexImageTarget target,
uint32_t width, uint32_t height, uint32_t depth,
gfx::DataSourceSurface* surf, bool isAlphaPremult)
: TexUnpackBlob(webgl, target, surf->GetSize().width, width, height, depth,
isAlphaPremult)
, mSurf(surf)
{ }
//////////
static bool
GetFormatForSurf(gfx::SourceSurface* surf, WebGLTexelFormat* const out_texelFormat,
uint8_t* const out_bpp)
{
const auto surfFormat = surf->GetFormat();
switch (surfFormat) {
case gfx::SurfaceFormat::B8G8R8A8:
*out_texelFormat = WebGLTexelFormat::BGRA8;
*out_bpp = 4;
return true;
case gfx::SurfaceFormat::B8G8R8X8:
*out_texelFormat = WebGLTexelFormat::BGRX8;
*out_bpp = 4;
return true;
case gfx::SurfaceFormat::R8G8B8A8:
*out_texelFormat = WebGLTexelFormat::RGBA8;
*out_bpp = 4;
return true;
case gfx::SurfaceFormat::R8G8B8X8:
*out_texelFormat = WebGLTexelFormat::RGBX8;
*out_bpp = 4;
return true;
case gfx::SurfaceFormat::R5G6B5_UINT16:
*out_texelFormat = WebGLTexelFormat::RGB565;
*out_bpp = 2;
return true;
case gfx::SurfaceFormat::A8:
*out_texelFormat = WebGLTexelFormat::A8;
*out_bpp = 1;
return true;
case gfx::SurfaceFormat::YUV:
// Ugh...
NS_ERROR("We don't handle uploads from YUV sources yet.");
// When we want to, check out gfx/ycbcr/YCbCrUtils.h. (specifically
// GetYCbCrToRGBDestFormatAndSize and ConvertYCbCrToRGB)
return false;
default:
return false;
}
}
//////////
bool
TexUnpackSurface::Validate(WebGLContext* webgl, const char* funcName,
const webgl::PackingInfo& pi)
{
if (!ValidatePIForDOM(webgl, funcName, pi))
return false;
const auto fullRows = mSurf->GetSize().height;
return ValidateUnpackPixels(webgl, funcName, fullRows, 0, this);
}
bool
TexUnpackSurface::TexOrSubImage(bool isSubImage, bool needsRespec, const char* funcName,
WebGLTexture* tex, TexImageTarget target, GLint level,
const webgl::DriverUnpackInfo* dstDUI, GLint xOffset,
GLint yOffset, GLint zOffset,
GLenum* const out_error) const
{
const auto& webgl = tex->mContext;
////
const auto rowLength = mSurf->GetSize().width;
const auto rowCount = mSurf->GetSize().height;
const auto& dstPI = dstDUI->ToPacking();
const auto& dstBPP = webgl::BytesPerPixel(dstPI);
const auto dstFormat = FormatForPackingInfo(dstPI);
////
WebGLTexelFormat srcFormat;
uint8_t srcBPP;
if (!GetFormatForSurf(mSurf, &srcFormat, &srcBPP)) {
webgl->ErrorImplementationBug("%s: GetFormatForSurf failed for"
" WebGLTexelFormat::%u.",
funcName, uint32_t(mSurf->GetFormat()));
return false;
}
gfx::DataSourceSurface::ScopedMap map(mSurf, gfx::DataSourceSurface::MapType::READ);
if (!map.IsMapped()) {
webgl->ErrorOutOfMemory("%s: Failed to map source surface for upload.", funcName);
return false;
}
const auto& srcBegin = map.GetData();
const auto& srcStride = map.GetStride();
////
const auto srcRowLengthBytes = rowLength * srcBPP;
const uint8_t maxGLAlignment = 8;
uint8_t srcAlignment = 1;
for (; srcAlignment <= maxGLAlignment; srcAlignment *= 2) {
const auto strideGuess = RoundUpToMultipleOf(srcRowLengthBytes, srcAlignment);
if (strideGuess == srcStride)
break;
}
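    // Worked example (illustrative numbers only): srcRowLengthBytes=100 with
    // srcStride=104 fails the guesses 1, 2, and 4 (each rounds up to 100), but
    // matches 8, since RoundUpToMultipleOf(100, 8) == 104.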
const uint32_t dstAlignment = (srcAlignment > maxGLAlignment) ? 1 : srcAlignment;
const auto dstRowLengthBytes = rowLength * dstBPP;
const auto dstStride = RoundUpToMultipleOf(dstRowLengthBytes, dstAlignment);
////
const uint8_t* dstBegin = srcBegin;
UniqueBuffer tempBuffer;
if (!ConvertIfNeeded(webgl, funcName, rowLength, rowCount, srcFormat, srcBegin,
srcStride, dstFormat, dstStride, &dstBegin, &tempBuffer))
{
return false;
}
////
const auto& gl = webgl->gl;
MOZ_ALWAYS_TRUE( gl->MakeCurrent() );
gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, dstAlignment);
if (webgl->IsWebGL2()) {
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH, rowLength);
}
*out_error = DoTexOrSubImage(isSubImage, gl, target.get(), level, dstDUI, xOffset,
yOffset, zOffset, mWidth, mHeight, mDepth, dstBegin);
gl->fPixelStorei(LOCAL_GL_UNPACK_ALIGNMENT, webgl->mPixelStore_UnpackAlignment);
if (webgl->IsWebGL2()) {
gl->fPixelStorei(LOCAL_GL_UNPACK_ROW_LENGTH, webgl->mPixelStore_UnpackRowLength);
}
return true;
}
} // namespace webgl
} // namespace mozilla<|fim▁end|> |
const auto& dstIsPremult = webgl->mPixelStore_PremultiplyAlpha;
if (mSrcIsPremult != dstIsPremult) |
<|file_name|>Guild.js<|end_file_name|><|fim▁begin|>const Long = require('long');
const User = require('./User');
const Role = require('./Role');
const Emoji = require('./Emoji');
const Invite = require('./Invite');
const GuildAuditLogs = require('./GuildAuditLogs');
const Webhook = require('./Webhook');
const { Presence } = require('./Presence');
const GuildChannel = require('./GuildChannel');
const GuildMember = require('./GuildMember');
const VoiceRegion = require('./VoiceRegion');
const Constants = require('../util/Constants');
const Collection = require('../util/Collection');
const Util = require('../util/Util');
const Snowflake = require('../util/Snowflake');
const Permissions = require('../util/Permissions');
const Shared = require('./shared');
const { Error, TypeError } = require('../errors');
/**
* Represents a guild (or a server) on Discord.
* <info>It's recommended to see if a guild is available before performing operations or reading data from it. You can
* check this with `guild.available`.</info>
*/
class Guild {
constructor(client, data) {
/**
* The client that created the instance of the guild
* @name Guild#client
* @type {Client}
* @readonly
*/
Object.defineProperty(this, 'client', { value: client });
/**
* A collection of members that are in this guild. The key is the member's ID, the value is the member
* @type {Collection<Snowflake, GuildMember>}
*/
this.members = new Collection();
<|fim▁hole|> * A collection of channels that are in this guild. The key is the channel's ID, the value is the channel
* @type {Collection<Snowflake, GuildChannel>}
*/
this.channels = new Collection();
/**
* A collection of roles that are in this guild. The key is the role's ID, the value is the role
* @type {Collection<Snowflake, Role>}
*/
this.roles = new Collection();
/**
* A collection of presences in this guild
* @type {Collection<Snowflake, Presence>}
*/
this.presences = new Collection();
if (!data) return;
if (data.unavailable) {
/**
* Whether the guild is available to access. If it is not available, it indicates a server outage
* @type {boolean}
*/
this.available = false;
/**
* The Unique ID of the guild, useful for comparisons
* @type {Snowflake}
*/
this.id = data.id;
} else {
this.setup(data);
if (!data.channels) this.available = false;
}
}
/**
* Sets up the guild.
* @param {*} data The raw data of the guild
* @private
*/
setup(data) {
/**
* The name of the guild
* @type {string}
*/
this.name = data.name;
/**
* The hash of the guild icon
* @type {?string}
*/
this.icon = data.icon;
/**
* The hash of the guild splash image (VIP only)
* @type {?string}
*/
this.splash = data.splash;
/**
* The region the guild is located in
* @type {string}
*/
this.region = data.region;
/**
* The full amount of members in this guild as of `READY`
* @type {number}
*/
this.memberCount = data.member_count || this.memberCount;
/**
* Whether the guild is "large" (has more than 250 members)
* @type {boolean}
*/
this.large = Boolean('large' in data ? data.large : this.large);
/**
* An array of guild features
* @type {Object[]}
*/
this.features = data.features;
/**
* The ID of the application that created this guild (if applicable)
* @type {?Snowflake}
*/
this.applicationID = data.application_id;
/**
* The time in seconds before a user is counted as "away from keyboard"
* @type {?number}
*/
this.afkTimeout = data.afk_timeout;
/**
* The ID of the voice channel where AFK members are moved
* @type {?Snowflake}
*/
this.afkChannelID = data.afk_channel_id;
/**
* Whether embedded images are enabled on this guild
* @type {boolean}
*/
this.embedEnabled = data.embed_enabled;
/**
* The verification level of the guild
* @type {number}
*/
this.verificationLevel = data.verification_level;
/**
* The explicit content filter level of the guild
* @type {number}
*/
this.explicitContentFilter = data.explicit_content_filter;
/**
* The timestamp the client user joined the guild at
* @type {number}
*/
this.joinedTimestamp = data.joined_at ? new Date(data.joined_at).getTime() : this.joinedTimestamp;
this.id = data.id;
this.available = !data.unavailable;
this.features = data.features || this.features || [];
if (data.members) {
this.members.clear();
for (const guildUser of data.members) this._addMember(guildUser, false);
}
if (data.owner_id) {
/**
* The user ID of this guild's owner
* @type {Snowflake}
*/
this.ownerID = data.owner_id;
}
if (data.channels) {
this.channels.clear();
for (const channel of data.channels) this.client.dataManager.newChannel(channel, this);
}
if (data.roles) {
this.roles.clear();
for (const role of data.roles) {
const newRole = new Role(this, role);
this.roles.set(newRole.id, newRole);
}
}
if (data.presences) {
for (const presence of data.presences) {
this._setPresence(presence.user.id, presence);
}
}
this._rawVoiceStates = new Collection();
if (data.voice_states) {
for (const voiceState of data.voice_states) {
this._rawVoiceStates.set(voiceState.user_id, voiceState);
const member = this.members.get(voiceState.user_id);
if (member) {
member.serverMute = voiceState.mute;
member.serverDeaf = voiceState.deaf;
member.selfMute = voiceState.self_mute;
member.selfDeaf = voiceState.self_deaf;
member.voiceSessionID = voiceState.session_id;
member.voiceChannelID = voiceState.channel_id;
          const channel = this.channels.get(voiceState.channel_id);
          if (channel) channel.members.set(member.user.id, member);
}
}
}
if (!this.emojis) {
/**
* A collection of emojis that are in this guild. The key is the emoji's ID, the value is the emoji.
* @type {Collection<Snowflake, Emoji>}
*/
this.emojis = new Collection();
for (const emoji of data.emojis) this.emojis.set(emoji.id, new Emoji(this, emoji));
} else {
this.client.actions.GuildEmojisUpdate.handle({
guild_id: this.id,
emojis: data.emojis,
});
}
}
/**
* The timestamp the guild was created at
* @type {number}
* @readonly
*/
get createdTimestamp() {
return Snowflake.deconstruct(this.id).timestamp;
}
/**
* The time the guild was created
* @type {Date}
* @readonly
*/
get createdAt() {
return new Date(this.createdTimestamp);
}
/**
* The time the client user joined the guild
* @type {Date}
* @readonly
*/
get joinedAt() {
return new Date(this.joinedTimestamp);
}
/**
* Gets the URL to this guild's icon
* @param {Object} [options={}] Options for the icon url
* @param {string} [options.format='webp'] One of `webp`, `png`, `jpg`
   * @param {number} [options.size=128] One of `128`, `256`, `512`, `1024`, `2048`
* @returns {?string}
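   * @example
   * // Illustrative usage; returns null when the guild has no icon
   * const url = guild.iconURL({ format: 'png', size: 256 });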
*/
iconURL({ format, size } = {}) {
if (!this.icon) return null;
return Constants.Endpoints.CDN(this.client.options.http.cdn).Icon(this.id, this.icon, format, size);
}
/**
* Gets the acronym that shows up in place of a guild icon
* @type {string}
* @readonly
*/
get nameAcronym() {
return this.name.replace(/\w+/g, name => name[0]).replace(/\s/g, '');
}
/**
* The URL to this guild's splash
* @param {Object} [options={}] Options for the splash url
* @param {string} [options.format='webp'] One of `webp`, `png`, `jpg`
   * @param {number} [options.size=128] One of `128`, `256`, `512`, `1024`, `2048`
* @returns {?string}
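   * @example
   * // Illustrative usage; returns null when the guild has no splash
   * const url = guild.splashURL({ format: 'png', size: 256 });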
*/
splashURL({ format, size } = {}) {
if (!this.splash) return null;
return Constants.Endpoints.CDN(this.client.options.http.cdn).Splash(this.id, this.splash, format, size);
}
/**
* The owner of the guild
* @type {GuildMember}
* @readonly
*/
get owner() {
return this.members.get(this.ownerID);
}
/**
* If the client is connected to any voice channel in this guild, this will be the relevant VoiceConnection
* @type {?VoiceConnection}
* @readonly
*/
get voiceConnection() {
if (this.client.browser) return null;
return this.client.voice.connections.get(this.id) || null;
}
/**
* The position of this guild
* <warn>This is only available when using a user account.</warn>
* @type {?number}
* @readonly
*/
get position() {
if (this.client.user.bot) return null;
if (!this.client.user.settings.guildPositions) return null;
return this.client.user.settings.guildPositions.indexOf(this.id);
}
/**
* Whether the guild is muted
* <warn>This is only available when using a user account.</warn>
* @type {?boolean}
* @readonly
*/
get muted() {
if (this.client.user.bot) return null;
try {
return this.client.user.guildSettings.get(this.id).muted;
} catch (err) {
return false;
}
}
/**
   * The type of message that should notify you:
   * one of `EVERYTHING`, `MENTIONS`, or `NOTHING`
* <warn>This is only available when using a user account.</warn>
* @type {?string}
* @readonly
*/
get messageNotifications() {
if (this.client.user.bot) return null;
try {
return this.client.user.guildSettings.get(this.id).messageNotifications;
} catch (err) {
return null;
}
}
/**
* Whether to receive mobile push notifications
* <warn>This is only available when using a user account.</warn>
* @type {?boolean}
* @readonly
*/
get mobilePush() {
if (this.client.user.bot) return null;
try {
return this.client.user.guildSettings.get(this.id).mobilePush;
} catch (err) {
return false;
}
}
/**
* Whether to suppress everyone messages
* <warn>This is only available when using a user account.</warn>
* @type {?boolean}
* @readonly
*/
get suppressEveryone() {
if (this.client.user.bot) return null;
try {
return this.client.user.guildSettings.get(this.id).suppressEveryone;
} catch (err) {
return null;
}
}
  /**
* The `@everyone` role of the guild
* @type {Role}
* @readonly
*/
get defaultRole() {
return this.roles.get(this.id);
}
/**
* The client user as a GuildMember of this guild
* @type {?GuildMember}
* @readonly
*/
get me() {
return this.members.get(this.client.user.id);
}
/**
* Fetches a collection of roles in the current guild sorted by position
* @type {Collection<Snowflake, Role>}
* @readonly
* @private
*/
get _sortedRoles() {
return this._sortPositionWithID(this.roles);
}
/**
* Returns the GuildMember form of a User object, if the user is present in the guild.
* @param {UserResolvable} user The user that you want to obtain the GuildMember of
* @returns {?GuildMember}
* @example
* // Get the guild member of a user
* const member = guild.member(message.author);
*/
member(user) {
return this.client.resolver.resolveGuildMember(this, user);
}
/**
* Fetch a collection of banned users in this guild.
   * Each value in the returned collection is an object with `user` and `reason` properties, keyed by user ID.
* @returns {Promise<Collection<Snowflake, Object>>}
*/
fetchBans() {
return this.client.api.guilds(this.id).bans.get().then(bans =>
bans.reduce((collection, ban) => {
collection.set(ban.user.id, {
reason: ban.reason,
user: this.client.dataManager.newUser(ban.user),
});
return collection;
}, new Collection())
);
}
/**
* Fetch a collection of invites to this guild. Resolves with a collection mapping invites by their codes.
* @returns {Promise<Collection<string, Invite>>}
*/
fetchInvites() {
return this.client.api.guilds(this.id).invites.get()
.then(inviteItems => {
const invites = new Collection();
for (const inviteItem of inviteItems) {
const invite = new Invite(this.client, inviteItem);
invites.set(invite.code, invite);
}
return invites;
});
}
/**
* Fetch all webhooks for the guild.
* @returns {Promise<Collection<Snowflake, Webhook>>}
*/
fetchWebhooks() {
return this.client.api.guilds(this.id).webhooks.get().then(data => {
const hooks = new Collection();
for (const hook of data) hooks.set(hook.id, new Webhook(this.client, hook));
return hooks;
});
}
/**
* Fetch available voice regions.
* @returns {Promise<Collection<string, VoiceRegion>>}
*/
fetchVoiceRegions() {
return this.client.api.guilds(this.id).regions.get().then(res => {
const regions = new Collection();
for (const region of res) regions.set(region.id, new VoiceRegion(region));
return regions;
});
}
/**
* Fetch audit logs for this guild.
* @param {Object} [options={}] Options for fetching audit logs
* @param {Snowflake|GuildAuditLogsEntry} [options.before] Limit to entries from before specified entry
* @param {Snowflake|GuildAuditLogsEntry} [options.after] Limit to entries from after specified entry
* @param {number} [options.limit] Limit number of entries
* @param {UserResolvable} [options.user] Only show entries involving this user
* @param {string|number} [options.type] Only show entries involving this action type
* @returns {Promise<GuildAuditLogs>}
*/
fetchAuditLogs(options = {}) {
if (options.before && options.before instanceof GuildAuditLogs.Entry) options.before = options.before.id;
if (options.after && options.after instanceof GuildAuditLogs.Entry) options.after = options.after.id;
if (typeof options.type === 'string') options.type = GuildAuditLogs.Actions[options.type];
return this.client.api.guilds(this.id)['audit-logs'].get({ query: {
before: options.before,
after: options.after,
limit: options.limit,
user_id: this.client.resolver.resolveUserID(options.user),
action_type: options.type,
} })
.then(data => GuildAuditLogs.build(this, data));
}
/**
* Adds a user to the guild using OAuth2. Requires the `CREATE_INSTANT_INVITE` permission.
* @param {UserResolvable} user User to add to the guild
* @param {Object} options Options for the addition
* @param {string} options.accessToken An OAuth2 access token for the user with the `guilds.join` scope granted to the
* bot's application
* @param {string} [options.nick] Nickname to give the member (requires `MANAGE_NICKNAMES`)
* @param {Collection<Snowflake, Role>|RoleResolvable[]} [options.roles] Roles to add to the member
* (requires `MANAGE_ROLES`)
* @param {boolean} [options.mute] Whether the member should be muted (requires `MUTE_MEMBERS`)
* @param {boolean} [options.deaf] Whether the member should be deafened (requires `DEAFEN_MEMBERS`)
* @returns {Promise<GuildMember>}
*/
addMember(user, options) {
if (this.members.has(user.id)) return Promise.resolve(this.members.get(user.id));
options.access_token = options.accessToken;
if (options.roles) {
const roles = [];
for (let role of options.roles instanceof Collection ? options.roles.values() : options.roles) {
role = this.client.resolver.resolveRole(this, role);
if (!role) {
return Promise.reject(new TypeError('INVALID_TYPE', 'options.roles',
'Array or Collection of Roles or Snowflakes', true));
}
roles.push(role.id);
}
}
return this.client.api.guilds(this.id).members(user.id).put({ data: options })
.then(data => this.client.actions.GuildMemberGet.handle(this, data).member);
}
/**
* Fetch a single guild member from a user.
* @param {UserResolvable} user The user to fetch the member for
* @param {boolean} [cache=true] Insert the user into the users cache
* @returns {Promise<GuildMember>}
*/
fetchMember(user, cache = true) {
user = this.client.resolver.resolveUser(user);
if (!user) return Promise.reject(new Error('USER_NOT_CACHED'));
if (this.members.has(user.id)) return Promise.resolve(this.members.get(user.id));
return this.client.api.guilds(this.id).members(user.id).get()
.then(data => {
if (cache) return this.client.actions.GuildMemberGet.handle(this, data).member;
else return new GuildMember(this, data);
});
}
/**
* Fetches all the members in the guild, even if they are offline. If the guild has less than 250 members,
* this should not be necessary.
* @param {Object} [options] Options for the fetch operation
* @param {string} [options.query=''] Limit fetch to members with similar usernames
* @param {number} [options.limit=0] Maximum number of members to request
* @returns {Promise<Collection<Snowflake, GuildMember>>}
*/
fetchMembers({ query = '', limit = 0 } = {}) {
return new Promise((resolve, reject) => {
if (this.memberCount === this.members.size) {
resolve((query || limit) ? new Collection() : this.members);
return;
}
this.client.ws.send({
op: Constants.OPCodes.REQUEST_GUILD_MEMBERS,
d: {
guild_id: this.id,
query,
limit,
},
});
const fetchedMembers = new Collection();
const handler = (members, guild) => {
if (guild.id !== this.id) return;
for (const member of members.values()) {
if (query || limit) fetchedMembers.set(member.user.id, member);
}
if (this.memberCount === this.members.size || ((query || limit) && members.size < 1000)) {
this.client.removeListener(Constants.Events.GUILD_MEMBERS_CHUNK, handler);
resolve((query || limit) ? fetchedMembers : this.members);
}
};
this.client.on(Constants.Events.GUILD_MEMBERS_CHUNK, handler);
this.client.setTimeout(() => {
this.client.removeListener(Constants.Events.GUILD_MEMBERS_CHUNK, handler);
reject(new Error('GUILD_MEMBERS_TIMEOUT'));
}, 120e3);
});
}
/**
* Performs a search within the entire guild.
* <warn>This is only available when using a user account.</warn>
* @param {MessageSearchOptions} [options={}] Options to pass to the search
* @returns {Promise<MessageSearchResult>}
* @example
* guild.search({
* content: 'discord.js',
* before: '2016-11-17'
* }).then(res => {
* const hit = res.results[0].find(m => m.hit).content;
* console.log(`I found: **${hit}**, total results: ${res.total}`);
* }).catch(console.error);
*/
search(options = {}) {
return Shared.search(this, options);
}
/**
* The data for editing a guild.
* @typedef {Object} GuildEditData
* @property {string} [name] The name of the guild
* @property {string} [region] The region of the guild
* @property {number} [verificationLevel] The verification level of the guild
* @property {number} [explicitContentFilter] The level of the explicit content filter
* @property {ChannelResolvable} [afkChannel] The AFK channel of the guild
* @property {number} [afkTimeout] The AFK timeout of the guild
* @property {Base64Resolvable} [icon] The icon of the guild
* @property {GuildMemberResolvable} [owner] The owner of the guild
* @property {Base64Resolvable} [splash] The splash screen of the guild
*/
/**
* Updates the guild with new information - e.g. a new name.
* @param {GuildEditData} data The data to update the guild with
* @param {string} [reason] Reason for editing this guild
* @returns {Promise<Guild>}
* @example
* // Set the guild name and region
* guild.edit({
* name: 'Discord Guild',
* region: 'london',
* })
* .then(updated => console.log(`New guild name ${updated.name} in region ${updated.region}`))
* .catch(console.error);
*/
edit(data, reason) {
const _data = {};
if (data.name) _data.name = data.name;
if (data.region) _data.region = data.region;
if (typeof data.verificationLevel !== 'undefined') _data.verification_level = Number(data.verificationLevel);
if (data.afkChannel) _data.afk_channel_id = this.client.resolver.resolveChannel(data.afkChannel).id;
if (data.afkTimeout) _data.afk_timeout = Number(data.afkTimeout);
if (data.icon) _data.icon = this.client.resolver.resolveBase64(data.icon);
if (data.owner) _data.owner_id = this.client.resolver.resolveUser(data.owner).id;
if (data.splash) _data.splash = this.client.resolver.resolveBase64(data.splash);
if (typeof data.explicitContentFilter !== 'undefined') {
_data.explicit_content_filter = Number(data.explicitContentFilter);
}
return this.client.api.guilds(this.id).patch({ data: _data, reason })
.then(newData => this.client.actions.GuildUpdate.handle(newData).updated);
}
/**
* Edit the level of the explicit content filter.
* @param {number} explicitContentFilter The new level of the explicit content filter
* @param {string} [reason] Reason for changing the level of the guild's explicit content filter
* @returns {Promise<Guild>}
*/
setExplicitContentFilter(explicitContentFilter, reason) {
return this.edit({ explicitContentFilter }, reason);
}
/**
* Edit the name of the guild.
* @param {string} name The new name of the guild
* @param {string} [reason] Reason for changing the guild's name
* @returns {Promise<Guild>}
* @example
* // Edit the guild name
* guild.setName('Discord Guild')
* .then(updated => console.log(`Updated guild name to ${guild.name}`))
* .catch(console.error);
*/
setName(name, reason) {
return this.edit({ name }, reason);
}
/**
* Edit the region of the guild.
* @param {string} region The new region of the guild
* @param {string} [reason] Reason for changing the guild's region
* @returns {Promise<Guild>}
* @example
* // Edit the guild region
* guild.setRegion('london')
* .then(updated => console.log(`Updated guild region to ${guild.region}`))
* .catch(console.error);
*/
setRegion(region, reason) {
return this.edit({ region }, reason);
}
/**
* Edit the verification level of the guild.
* @param {number} verificationLevel The new verification level of the guild
* @param {string} [reason] Reason for changing the guild's verification level
* @returns {Promise<Guild>}
* @example
* // Edit the guild verification level
* guild.setVerificationLevel(1)
* .then(updated => console.log(`Updated guild verification level to ${guild.verificationLevel}`))
* .catch(console.error);
*/
setVerificationLevel(verificationLevel, reason) {
return this.edit({ verificationLevel }, reason);
}
/**
* Edit the AFK channel of the guild.
* @param {ChannelResolvable} afkChannel The new AFK channel
* @param {string} [reason] Reason for changing the guild's AFK channel
* @returns {Promise<Guild>}
* @example
* // Edit the guild AFK channel
* guild.setAFKChannel(channel)
* .then(updated => console.log(`Updated guild AFK channel to ${guild.afkChannel}`))
* .catch(console.error);
*/
setAFKChannel(afkChannel, reason) {
return this.edit({ afkChannel }, reason);
}
/**
* Edit the AFK timeout of the guild.
* @param {number} afkTimeout The time in seconds that a user must be idle to be considered AFK
* @param {string} [reason] Reason for changing the guild's AFK timeout
* @returns {Promise<Guild>}
* @example
* // Edit the guild AFK timeout
* guild.setAFKTimeout(60)
* .then(updated => console.log(`Updated guild AFK timeout to ${guild.afkTimeout}`))
* .catch(console.error);
*/
setAFKTimeout(afkTimeout, reason) {
return this.edit({ afkTimeout }, reason);
}
/**
* Set a new guild icon.
* @param {Base64Resolvable} icon The new icon of the guild
* @param {string} [reason] Reason for changing the guild's icon
* @returns {Promise<Guild>}
* @example
* // Edit the guild icon
* guild.setIcon(fs.readFileSync('./icon.png'))
* .then(updated => console.log('Updated the guild icon'))
* .catch(console.error);
*/
setIcon(icon, reason) {
return this.edit({ icon }, reason);
}
/**
* Sets a new owner of the guild.
* @param {GuildMemberResolvable} owner The new owner of the guild
* @param {string} [reason] Reason for setting the new owner
* @returns {Promise<Guild>}
* @example
* // Edit the guild owner
* guild.setOwner(guild.members.first())
* .then(updated => console.log(`Updated the guild owner to ${updated.owner.username}`))
* .catch(console.error);
*/
setOwner(owner, reason) {
return this.edit({ owner }, reason);
}
/**
* Set a new guild splash screen.
* @param {Base64Resolvable} splash The new splash screen of the guild
* @param {string} [reason] Reason for changing the guild's splash screen
* @returns {Promise<Guild>}
* @example
* // Edit the guild splash
* guild.setSplash(fs.readFileSync('./splash.png'))
* .then(updated => console.log('Updated the guild splash'))
* .catch(console.error);
*/
setSplash(splash, reason) {
return this.edit({ splash }, reason);
}
/**
* Sets the position of the guild in the guild listing.
* <warn>This is only available when using a user account.</warn>
* @param {number} position Absolute or relative position
* @param {boolean} [relative=false] Whether to position relatively or absolutely
* @returns {Promise<Guild>}
*/
setPosition(position, relative) {
if (this.client.user.bot) {
return Promise.reject(new Error('FEATURE_USER_ONLY'));
}
return this.client.user.settings.setGuildPosition(this, position, relative);
}
/**
* Marks all messages in this guild as read.
* <warn>This is only available when using a user account.</warn>
* @returns {Promise<Guild>}
*/
acknowledge() {
return this.client.api.guilds(this.id).ack
.post({ data: { token: this.client.rest._ackToken } })
.then(res => {
if (res.token) this.client.rest._ackToken = res.token;
return this;
});
}
/**
* Allow direct messages from guild members.
* @param {boolean} allow Whether to allow direct messages
* @returns {Promise<Guild>}
*/
allowDMs(allow) {
const settings = this.client.user.settings;
if (allow) return settings.removeRestrictedGuild(this);
else return settings.addRestrictedGuild(this);
}
/**
* Bans a user from the guild.
* @param {UserResolvable} user The user to ban
* @param {Object|number|string} [options] Ban options. If a number, the number of days to delete messages for, if a
* string, the ban reason. Supplying an object allows you to do both.
* @param {number} [options.days=0] Number of days of messages to delete
* @param {string} [options.reason] Reason for banning
* @returns {Promise<GuildMember|User|string>} Result object will be resolved as specifically as possible.
* If the GuildMember cannot be resolved, the User will be resolved instead. If that also fails,
* the user ID will be the result.
* @example
* // Ban a user by ID (or with a user/guild member object)
* guild.ban('some user ID')
* .then(user => console.log(`Banned ${user.username || user.id || user} from ${guild.name}`))
* .catch(console.error);
*/
ban(user, options = { days: 0 }) {
if (options.days) options['delete-message-days'] = options.days;
const id = this.client.resolver.resolveUserID(user);
if (!id) return Promise.reject(new Error('BAN_RESOLVE_ID', true));
return this.client.api.guilds(this.id).bans[id].put({ query: options })
.then(() => {
if (user instanceof GuildMember) return user;
const _user = this.client.resolver.resolveUser(id);
if (_user) {
const member = this.client.resolver.resolveGuildMember(this, _user);
return member || _user;
}
return id;
});
}
/**
* Unbans a user from the guild.
* @param {UserResolvable} user The user to unban
* @param {string} [reason] Reason for unbanning user
* @returns {Promise<User>}
* @example
* // Unban a user by ID (or with a user/guild member object)
* guild.unban('some user ID')
* .then(user => console.log(`Unbanned ${user.username} from ${guild.name}`))
* .catch(console.error);
*/
unban(user, reason) {
const id = this.client.resolver.resolveUserID(user);
if (!id) throw new Error('BAN_RESOLVE_ID');
return this.client.api.guilds(this.id).bans[id].delete({ reason })
.then(() => user);
}
/**
* Prunes members from the guild based on how long they have been inactive.
* @param {Object} [options] Options for the prune operation
* @param {number} [options.days=7] Number of days of inactivity required to kick
* @param {boolean} [options.dry=false] Get number of users that will be kicked, without actually kicking them
* @param {string} [options.reason] Reason for this prune
* @returns {Promise<number>} The number of members that were/will be kicked
* @example
* // See how many members will be pruned
* guild.pruneMembers({ dry: true })
* .then(pruned => console.log(`This will prune ${pruned} people!`))
* .catch(console.error);
* @example
* // Actually prune the members
* guild.pruneMembers({ days: 1, reason: 'too many people!' })
* .then(pruned => console.log(`I just pruned ${pruned} people!`))
* .catch(console.error);
*/
pruneMembers({ days = 7, dry = false, reason } = {}) {
if (typeof days !== 'number') throw new TypeError('PRUNE_DAYS_TYPE');
return this.client.api.guilds(this.id).prune[dry ? 'get' : 'post']({ query: { days }, reason })
.then(data => data.pruned);
}
/**
* Syncs this guild (already done automatically every 30 seconds).
* <warn>This is only available when using a user account.</warn>
*/
sync() {
if (!this.client.user.bot) this.client.syncGuilds([this]);
}
/**
* Can be used to overwrite permissions when creating a channel.
* @typedef {Object} ChannelCreationOverwrites
* @property {PermissionResolvable[]|number} [allow] The permissions to allow
* @property {PermissionResolvable[]|number} [deny] The permissions to deny
* @property {RoleResolvable|UserResolvable} id ID of the group or member this overwrite is for
*/
/**
* Creates a new channel in the guild.
* @param {string} name The name of the new channel
* @param {string} type The type of the new channel, either `text` or `voice`
* @param {Object} [options={}] Options
* @param {Array<PermissionOverwrites|ChannelCreationOverwrites>} [options.overwrites] Permission overwrites
* to apply to the new channel
* @param {string} [options.reason] Reason for creating this channel
* @returns {Promise<TextChannel|VoiceChannel>}
* @example
* // Create a new text channel
* guild.createChannel('new-general', 'text')
* .then(channel => console.log(`Created new channel ${channel}`))
* .catch(console.error);
*/
createChannel(name, type, { overwrites, reason } = {}) {
if (overwrites instanceof Collection || overwrites instanceof Array) {
overwrites = overwrites.map(overwrite => {
let allow = overwrite.allow || overwrite._allowed;
let deny = overwrite.deny || overwrite._denied;
if (allow instanceof Array) allow = Permissions.resolve(allow);
if (deny instanceof Array) deny = Permissions.resolve(deny);
const role = this.client.resolver.resolveRole(this, overwrite.id);
if (role) {
overwrite.id = role.id;
overwrite.type = 'role';
} else {
overwrite.id = this.client.resolver.resolveUserID(overwrite.id);
overwrite.type = 'member';
}
return {
allow,
deny,
type: overwrite.type,
id: overwrite.id,
};
});
}
return this.client.api.guilds(this.id).channels.post({
data: {
name, type, permission_overwrites: overwrites,
},
reason,
}).then(data => this.client.actions.ChannelCreate.handle(data).channel);
}
/**
* The data needed for updating a channel's position.
* @typedef {Object} ChannelPosition
* @property {ChannelResolvable} channel Channel to update
* @property {number} position New position for the channel
*/
/**
* Batch-updates the guild's channels' positions.
* @param {ChannelPosition[]} channelPositions Channel positions to update
* @returns {Promise<Guild>}
* @example
* guild.setChannelPositions([{ channel: channelID, position: newChannelIndex }])
* .then(guild => console.log(`Updated channel positions for ${guild.id}`))
* .catch(console.error);
*/
setChannelPositions(channelPositions) {
const data = new Array(channelPositions.length);
for (let i = 0; i < channelPositions.length; i++) {
data[i] = {
id: this.client.resolver.resolveChannelID(channelPositions[i].channel),
position: channelPositions[i].position,
};
}
return this.client.api.guilds(this.id).channels.patch({ data: {
guild_id: this.id,
channels: data,
} }).then(() =>
this.client.actions.GuildChannelsPositionUpdate.handle({
guild_id: this.id,
channels: data,
}).guild
);
}
/**
* Creates a new role in the guild with given information
* @param {Object} [options] Options
* @param {RoleData} [options.data] The data to update the role with
* @param {string} [options.reason] Reason for creating this role
* @returns {Promise<Role>}
* @example
* // Create a new role
* guild.createRole()
* .then(role => console.log(`Created role ${role}`))
* .catch(console.error);
* @example
* // Create a new role with data and a reason
* guild.createRole({
* data: {
* name: 'Super Cool People',
* color: 'BLUE',
* },
* reason: 'we needed a role for Super Cool People',
* })
* .then(role => console.log(`Created role ${role}`))
* .catch(console.error)
*/
createRole({ data = {}, reason } = {}) {
if (data.color) data.color = Util.resolveColor(data.color);
if (data.permissions) data.permissions = Permissions.resolve(data.permissions);
return this.client.api.guilds(this.id).roles.post({ data, reason }).then(role =>
this.client.actions.GuildRoleCreate.handle({
guild_id: this.id,
role,
}).role
);
}
/**
* Creates a new custom emoji in the guild.
* @param {BufferResolvable|Base64Resolvable} attachment The image for the emoji
* @param {string} name The name for the emoji
* @param {Object} [options] Options
* @param {Collection<Snowflake, Role>|RoleResolvable[]} [options.roles] Roles to limit the emoji to
* @param {string} [options.reason] Reason for creating the emoji
* @returns {Promise<Emoji>} The created emoji
* @example
* // Create a new emoji from a url
* guild.createEmoji('https://i.imgur.com/w3duR07.png', 'rip')
* .then(emoji => console.log(`Created new emoji with name ${emoji.name}!`))
* .catch(console.error);
* @example
* // Create a new emoji from a file on your computer
* guild.createEmoji('./memes/banana.png', 'banana')
* .then(emoji => console.log(`Created new emoji with name ${emoji.name}!`))
* .catch(console.error);
*/
createEmoji(attachment, name, { roles, reason } = {}) {
if (typeof attachment === 'string' && attachment.startsWith('data:')) {
const data = { image: attachment, name };
if (roles) {
data.roles = [];
for (let role of roles instanceof Collection ? roles.values() : roles) {
role = this.client.resolver.resolveRole(this, role);
if (!role) {
return Promise.reject(new TypeError('INVALID_TYPE', 'options.roles',
'Array or Collection of Roles or Snowflakes', true));
}
data.roles.push(role.id);
}
}
return this.client.api.guilds(this.id).emojis.post({ data, reason })
.then(emoji => this.client.actions.GuildEmojiCreate.handle(this, emoji).emoji);
}
return this.client.resolver.resolveBuffer(attachment)
.then(data => {
const dataURI = this.client.resolver.resolveBase64(data);
return this.createEmoji(dataURI, name, { roles, reason });
});
}
/**
* Delete an emoji.
* @param {Emoji|string} emoji The emoji to delete
* @param {string} [reason] Reason for deleting the emoji
* @returns {Promise}
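* @example
* // Delete an emoji by ID (or with an emoji object)
* guild.deleteEmoji('emoji ID', 'no longer needed')
* .then(() => console.log('Deleted the emoji'))
* .catch(console.error);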
*/
deleteEmoji(emoji, reason) {
if (!(emoji instanceof Emoji)) emoji = this.emojis.get(emoji);
return this.client.api.guilds(this.id).emojis(emoji.id).delete({ reason })
.then(() => this.client.actions.GuildEmojiDelete.handle(emoji).data);
}
/**
* Causes the client to leave the guild.
* @returns {Promise<Guild>}
* @example
* // Leave a guild
* guild.leave()
* .then(g => console.log(`Left the guild ${g}`))
* .catch(console.error);
*/
leave() {
if (this.ownerID === this.client.user.id) return Promise.reject(new Error('GUILD_OWNED'));
return this.client.api.users('@me').guilds(this.id).delete()
.then(() => this.client.actions.GuildDelete.handle({ id: this.id }).guild);
}
/**
* Causes the client to delete the guild.
* @returns {Promise<Guild>}
* @example
* // Delete a guild
* guild.delete()
* .then(g => console.log(`Deleted the guild ${g}`))
* .catch(console.error);
*/
delete() {
return this.client.api.guilds(this.id).delete()
.then(() => this.client.actions.GuildDelete.handle({ id: this.id }).guild);
}
/**
* Whether this guild equals another guild. It compares all properties, so for most operations
* it is advisable to just compare `guild.id === guild2.id`, which is much faster and usually
* sufficient.
* @param {Guild} guild The guild to compare with
* @returns {boolean}
*/
equals(guild) {
let equal =
guild &&
guild instanceof this.constructor &&
this.id === guild.id &&
this.available === guild.available &&
this.splash === guild.splash &&
this.region === guild.region &&
this.name === guild.name &&
this.memberCount === guild.memberCount &&
this.large === guild.large &&
this.icon === guild.icon &&
Util.arraysEqual(this.features, guild.features) &&
this.ownerID === guild.ownerID &&
this.verificationLevel === guild.verificationLevel &&
this.embedEnabled === guild.embedEnabled;
if (equal) {
if (this.embedChannel) {
if (!guild.embedChannel || this.embedChannel.id !== guild.embedChannel.id) equal = false;
} else if (guild.embedChannel) {
equal = false;
}
}
return equal;
}
/**
* When concatenated with a string, this automatically concatenates the guild's name instead of the guild object.
* @returns {string}
* @example
* // Logs: Hello from My Guild!
* console.log(`Hello from ${guild}!`);
* @example
* // Logs: Hello from My Guild!
* console.log('Hello from ' + guild + '!');
*/
toString() {
return this.name;
}
_addMember(guildUser, emitEvent = true) {
const existing = this.members.has(guildUser.user.id);
if (!(guildUser.user instanceof User)) guildUser.user = this.client.dataManager.newUser(guildUser.user);
guildUser.joined_at = guildUser.joined_at || 0;
const member = new GuildMember(this, guildUser);
this.members.set(member.id, member);
if (this._rawVoiceStates && this._rawVoiceStates.has(member.user.id)) {
const voiceState = this._rawVoiceStates.get(member.user.id);
member.serverMute = voiceState.mute;
member.serverDeaf = voiceState.deaf;
member.selfMute = voiceState.self_mute;
member.selfDeaf = voiceState.self_deaf;
member.voiceSessionID = voiceState.session_id;
member.voiceChannelID = voiceState.channel_id;
if (this.client.channels.has(voiceState.channel_id)) {
this.client.channels.get(voiceState.channel_id).members.set(member.user.id, member);
} else {
this.client.emit('warn', `Member ${member.id} added in guild ${this.id} with an uncached voice channel`);
}
}
/**
* Emitted whenever a user joins a guild.
* @event Client#guildMemberAdd
* @param {GuildMember} member The member that has joined a guild
*/
if (this.client.ws.connection.status === Constants.Status.READY && emitEvent && !existing) {
this.client.emit(Constants.Events.GUILD_MEMBER_ADD, member);
}
return member;
}
_updateMember(member, data) {
const oldMember = Util.cloneObject(member);
if (data.roles) member._roles = data.roles;
if (typeof data.nick !== 'undefined') member.nickname = data.nick;
const notSame = member.nickname !== oldMember.nickname || !Util.arraysEqual(member._roles, oldMember._roles);
if (this.client.ws.connection.status === Constants.Status.READY && notSame) {
/**
* Emitted whenever a guild member changes - i.e. new role, removed role, nickname.
* @event Client#guildMemberUpdate
* @param {GuildMember} oldMember The member before the update
* @param {GuildMember} newMember The member after the update
*/
this.client.emit(Constants.Events.GUILD_MEMBER_UPDATE, oldMember, member);
}
return {
old: oldMember,
mem: member,
};
}
_removeMember(guildMember) {
this.members.delete(guildMember.id);
}
_memberSpeakUpdate(user, speaking) {
const member = this.members.get(user);
if (member && member.speaking !== speaking) {
member.speaking = speaking;
/**
* Emitted once a guild member starts/stops speaking.
* @event Client#guildMemberSpeaking
* @param {GuildMember} member The member that started/stopped speaking
* @param {boolean} speaking Whether or not the member is speaking
*/
this.client.emit(Constants.Events.GUILD_MEMBER_SPEAKING, member, speaking);
}
}
_setPresence(id, presence) {
if (this.presences.get(id)) {
this.presences.get(id).update(presence);
return;
}
this.presences.set(id, new Presence(presence));
}
/**
* Set the position of a role in this guild.
* @param {string|Role} role The role to edit, can be a role object or a role ID
* @param {number} position The new position of the role
* @param {boolean} [relative=false] Whether to move the role relative to its current position
* @returns {Promise<Guild>}
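* @example
* // Move a role by one position relative to its current one (sketch)
* guild.setRolePosition(role, 1, true)
* .then(updated => console.log(`Moved the role in ${updated.name}`))
* .catch(console.error);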
*/
setRolePosition(role, position, relative = false) {
if (typeof role === 'string') {
role = this.roles.get(role);
}
if (!(role instanceof Role)) return Promise.reject(new TypeError('INVALID_TYPE', 'role', 'Role nor a Snowflake'));
position = Number(position);
if (isNaN(position)) return Promise.reject(new TypeError('INVALID_TYPE', 'position', 'number'));
let updatedRoles = this._sortedRoles.array();
Util.moveElementInArray(updatedRoles, role, position, relative);
updatedRoles = updatedRoles.map((r, i) => ({ id: r.id, position: i }));
return this.client.api.guilds(this.id).roles.patch({ data: updatedRoles })
.then(() =>
this.client.actions.GuildRolesPositionUpdate.handle({
guild_id: this.id,
roles: updatedRoles,
}).guild
);
}
/**
* Set the position of a channel in this guild.
* @param {string|GuildChannel} channel The channel to edit, can be a channel object or a channel ID
* @param {number} position The new position of the channel
* @param {boolean} [relative=false] Whether to move the channel relative to its current position
* @returns {Promise<Guild>}
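* @example
* // Move a channel to the top of its list (sketch)
* guild.setChannelPosition(channel, 0)
* .then(updated => console.log(`Moved the channel in ${updated.name}`))
* .catch(console.error);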
*/
setChannelPosition(channel, position, relative = false) {
if (typeof channel === 'string') {
channel = this.channels.get(channel);
}
if (!(channel instanceof GuildChannel)) {
return Promise.reject(new TypeError('INVALID_TYPE', 'channel', 'GuildChannel nor a Snowflake'));
}
position = Number(position);
if (isNaN(position)) return Promise.reject(new TypeError('INVALID_TYPE', 'position', 'number'));
let updatedChannels = this._sortedChannels(channel.type).array();
Util.moveElementInArray(updatedChannels, channel, position, relative);
updatedChannels = updatedChannels.map((r, i) => ({ id: r.id, position: i }));
return this.client.api.guilds(this.id).channels.patch({ data: updatedChannels })
.then(() =>
this.client.actions.GuildChannelsPositionUpdate.handle({
guild_id: this.id,
channels: updatedChannels,
}).guild
);
}
/**
* Fetches a collection of channels in the current guild sorted by position.
* @param {string} type The channel type
* @returns {Collection<Snowflake, GuildChannel>}
* @private
*/
_sortedChannels(type) {
return this._sortPositionWithID(this.channels.filter(c => {
if (type === 'voice' && c.type === 'voice') return true;
else if (type !== 'voice' && c.type !== 'voice') return true;
else return type === c.type;
}));
}
/**
* Sorts a collection by object position or ID if the positions are equivalent.
* Intended to be identical to Discord's sorting method.
* @param {Collection} collection The collection to sort
* @returns {Collection}
* @private
*/
_sortPositionWithID(collection) {
return collection.sort((a, b) =>
a.position !== b.position ?
a.position - b.position :
Long.fromString(a.id).sub(Long.fromString(b.id)).toNumber()
);
}
}
module.exports = Guild;<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// ***********************************************************<|fim▁hole|>//
// You can change the location of this file or turn off loading
// the plugins file with the 'pluginsFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/plugins-guide
// ***********************************************************
// This function is called when a project is opened or re-opened (e.g. due to
// the project's config changing)
module.exports = (on, config) => {
// `on` is used to hook into various events Cypress emits
// `config` is the resolved Cypress config
if (process.env.CYPRESS_CONNECTION_TYPE) {
on(`before:browser:launch`, (browser = {}, args) => {
if (
browser.name === `chrome` &&
process.env.CYPRESS_CONNECTION_TYPE === `slow`
) {
args.push(`--force-effective-connection-type=2G`)
}
return args
})
}
}<|fim▁end|> | // This example plugins/index.js can be used to load plugins |
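// Usage sketch: because the block above reads process.env directly, setting the
// variable before a Chrome run enables the 2G throttling flag, e.g.
//   CYPRESS_CONNECTION_TYPE=slow npx cypress run --browser chrome
// (the `npx cypress run` invocation is the standard CLI; adapt to your setup)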
<|file_name|>ssp_Hector_Fit3D_mix.py<|end_file_name|><|fim▁begin|>#-------------------------------------------------------------------------------------
from bin.dazer_methods import Dazer
from bin.lib.ssp_functions.ssp_synthesis_tools import ssp_fitter
#-------------------------------------------------------------------------------------
#!/usr/bin/python
import sys
import numpy as np
from numpy import float_, absolute as abs, random as ran
import time
import ssp_Hector_Fit3D_tools as ssp
import pyfits as pyf
from pyfits import getheader as ghead, getdata as gdata, writeto as wfits
from scipy.interpolate.interpolate import interp1d
import ssp_Hector_Fit3D_my as my
import os.path as pt
import matplotlib
import os
#Example command
#
'''
cd workspace/dazer/bin/lib/ssp_functions/
Python command to run the original file
python ssp_Hector_Fit3D.py NGC5947.spec_5.txt ssp_lib.fits,ssp_lib.3.fits auto_ssp.NGC5947.cen.only.out mask_elines.txt auto_ssp_V500_several_Hb.config 1 -1 40 3850 6800 emission_lines.txt 0.02 0.001 0.015 0.025 2 0.5 1 9 0.5 0.1 0.0 1.6
'''
def sycall(comand):
from subprocess import call
line=comand.split(" ")
fcomand=[]
fcomand.extend(line)
linp=''
nx=len(fcomand)
for i in range(0, nx):
linp=linp+fcomand[i]+" "
print linp
call(fcomand)
sys.argv=filter(None,sys.argv)
ran.seed(None)
vel_light=299792.458
red_elines=0.0
sec_ini=ssp.print_time()
time1=ssp.get_time()
if len(sys.argv) < 7:
print "USE: auto_ssp.py SPEC1.txt SSP_SFH.fits,SSP_KIN.fits OUTFILE MASK_LIST CONFIG_FILE PLOT [min max] [wmin wmax] [redshift_elines_to_mask] [input_redshift delta_redshift min_redshift max_redshift] [input_sigma delta_sigma min_sigma max_sigma] [input_Av delta_Av min_Av max_Av]"
print "CONFIG_FILE:"
print "redshift delta_redshift min_redshift max_redshift"
print "sigma delta_sigma min_sigma max_sigma"
print "Av delta_Av min_Av max_Av [Same range for all]"
print "N_SYSTEMS"
print "(1) START_W END_W MASK_FILE CONFIG_FILE NPOLY MASK_FILE_POLY"
print "..."
print "(N) START_W END_W MASK_FILE CONFIG_FILE NPOLY MASK_FILE_POLY"
print "MIN_DELTA_CHISQ MAX_NITER CUT_MEDIAN_FLUX"
print "start_w_peak end_w_peak"
print "wavelength_to_norm width_AA new_back_templates.fits"
inline_params = ['ssp_Hector_Fit3D.py', 'NGC5947.spec_5.txt','ssp_lib.fits,ssp_lib.fits','auto_ssp.NGC5947.cen.only.out','mask_elines.txt','auto_ssp_V500_several_Hb.config' ,'1', '-1', '40', '3850', '6800', 'emission_lines.txt', '0.02', '0.001', '0.015', '0.025', '2', '0.5', '1', '9', '0.5', '0.1', '0.0', '1.6']
sys.argv = inline_params
unc_file=sys.argv[1]
clean_file="clean_"+sys.argv[1]
junk_back_list=sys.argv[2]
data=junk_back_list.split(',')
if len(data) == 2:
back_list=data[0]
back_list2=data[1]
else:
back_list=junk_back_list
back_list2=junk_back_list
outfile=sys.argv[3]
out_elines="elines_"+outfile
out_single="single_"+outfile
out_fit="fit_"+outfile
out_coeffs_file="coeffs_"+outfile
out_fit="output."+outfile+".fits"
out_ps=outfile
#######################################
# Clean previous results
#######################################
call="rm -rf "+outfile
sycall(call)
call="rm -rf "+out_elines
sycall(call)
call="rm -rf "+out_single
sycall(call)
call="rm -rf "+out_fit
sycall(call)
D_SYS_VEL=100
mask_list=sys.argv[4]
config_file=sys.argv[5]
plot=int(sys.argv[6])
if plot == 2:
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cm as cmx
dev_plot=outfile+".pdf"
dev_plot_single="single_"+outfile+".pdf"
else:
if plot == 0:
matplotlib.use('Agg')
dev_plot="null"
dev_plot_single="null"
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cm as cmx
smooth=1
MIN_CHISQ=1e12
out_file="junk.junk"
factor=1
box=1
deft=0
if len(sys.argv) == 9:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
deft=1
if len(sys.argv) == 11:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
deft=2
if len(sys.argv) == 12:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
elines_mask=sys.argv[11]
deft=2
input_redshift=0
if len(sys.argv) == 13:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
elines_mask=sys.argv[11]
input_redshift=float_(sys.argv[12])
redshift=input_redshift
deft=2
f=open(config_file,'r')
line=f.readline()
data=line.split(" ")
data=filter(None,data)
redshift=float_(data[0])
d_redshift=float_(data[1])
min_redshift=float_(data[2])
max_redshift=float_(data[3])
DV=float_(data[4])
RV=float_(data[5])
DS=float_(data[6])
RS=float_(data[7])
MIN_W=float_(data[8])
MAX_W=float_(data[9])
if len(sys.argv) == 16:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
elines_mask=sys.argv[11]
input_redshift=float_(sys.argv[12])
input_d_redshift=float_(sys.argv[13])
input_min_redshift=float_(sys.argv[14])
input_max_redshift=float_(sys.argv[15])
redshift=input_redshift
d_redshift=input_d_redshift
min_redshift=input_min_redshift
max_redshift=input_max_redshift
deft=2
line=f.readline()
data=line.split(" ")
data=filter(None,data)
sigma=data[0]
d_sigma=data[1]
min_sigma=data[2]
max_sigma=data[3]
if len(sys.argv) == 20:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
elines_mask=sys.argv[11]
input_redshift=float_(sys.argv[12])
input_d_redshift=float_(sys.argv[13])
input_min_redshift=float_(sys.argv[14])
input_max_redshift=float_(sys.argv[15])
sigma=float_(sys.argv[16])
d_sigma=float_(sys.argv[17])
min_sigma=float_(sys.argv[18])
max_sigma=float_(sys.argv[19])
redshift=input_redshift
d_redshift=input_d_redshift
min_redshift=input_min_redshift
max_redshift=input_max_redshift
deft=2
line=f.readline()
data=line.split(" ")
data=filter(None,data)
Av_IN=data[0]
d_Av_IN=data[1]
min_Av=data[2]
max_Av=data[3]
if len(sys.argv) == 24:
min=float_(sys.argv[7])
max=float_(sys.argv[8])
min_wave=sys.argv[9]
max_wave=sys.argv[10]
elines_mask=sys.argv[11]
input_redshift=float_(sys.argv[12])
input_d_redshift=float_(sys.argv[13])
input_min_redshift=float_(sys.argv[14])
input_max_redshift=float_(sys.argv[15])
sigma=float_(sys.argv[16])
d_sigma=float_(sys.argv[17])
min_sigma=float_(sys.argv[18])
max_sigma=float_(sys.argv[19])
Av_IN=float_(sys.argv[20])
d_Av_IN=float_(sys.argv[21])
min_Av=float_(sys.argv[22])
max_Av=float_(sys.argv[23])
redshift=input_redshift
d_redshift=input_d_redshift
min_redshift=input_min_redshift
max_redshift=input_max_redshift
deft=2
data=min_wave.split(',')
if len(data) == 2:
min_wave=float_(data[0])
min_wave2=float_(data[1])
else:
min_wave=float_(min_wave)
min_wave2=min_wave
data=max_wave.split(',')
if len(data) == 2:
max_wave=float_(data[0])
max_wave2=float_(data[1])
else:
max_wave=float_(max_wave)
max_wave2=max_wave
REDSHIFT=redshift
Av_ini=Av_IN
if d_redshift !=0:
fit_redshift=1
else:
fit_redshift=0
#print "FIT_RED "+str(fit_redshift)+" "+str(d_redshift)+" "+str(len(sys.argv))
line=f.readline()
data=line.split(" ")
data=filter(None,data)
ns=int(data[0])
start_w_min=1e12
end_w_max=-1e12
start_w_E=[]
end_w_E=[]
mask_E=[]
config_E=[]
n_line_E=[]
npoly_E=[]
mask_poly_E=[]
nmin_E=[]
nmax_E=[]
config_line_E=[]
for i in range(0, ns):
line=f.readline()
data=line.split(" ")
data=filter(None,data)
start_w_e=float_(data[0])
end_w_e=float_(data[1])
mask_e=data[2]
config_e=data[3]
npoly_e=int(data[4])
mask_poly_e=data[5]
nmin_e=float_(data[6])
nmax_e=float_(data[7])
start_w_E.extend([start_w_e])
end_w_E.extend([end_w_e])
mask_E.extend([mask_e])
config_E.extend([config_e])
#
# We read all the information
#
n_line=0
linef=""
f2=open(config_e,'r')
for line in f2:
linef=linef+line+";"
n_line=n_line+1
config_line_E.extend([linef])
f2.close()
n_line_E.extend([n_line])
npoly_E.extend([npoly_e])
mask_poly_E.extend([mask_poly_e])
nmin_E.extend([nmin_e])
nmax_E.extend([nmax_e])
if start_w_e < start_w_min:
start_w_min=start_w_e
if end_w_e > end_w_max:
end_w_max=end_w_e
line=f.readline()
data=line.split(" ")
data=filter(None,data)
MIN_DELTA_CHISQ=float_(data[0])
MAX_NITER=int(data[1])
CUT_MEDIAN_FLUX=float_(data[2])
ABS_MIN=0.5*CUT_MEDIAN_FLUX
line=f.readline()
data=line.split(" ")
data=filter(None,data)
start_w_peak=float_(data[0])
end_w_peak=float_(data[1])
line=f.readline()
data=line.split(" ")
data=filter(None,data)
if len(data) == 3:
wave_norm=data[0]
w_wave_norm=data[1]
new_back_file=data[2]
else:
wave_norm=[]
w_wave_norm=[]
new_back_file=[]
f.close()
#
# SFH template
#
[pdl_flux_c_ini,hdr]=gdata(back_list, 0, header=True)
[nf,n_c]=pdl_flux_c_ini.shape
coeffs=np.zeros([nf,3])
crpix=hdr["CRPIX1"]
cdelt=hdr["CDELT1"]
crval=hdr["CRVAL1"]
n_mc=30
#
# Kinematics template
#
[pdl_flux_c_ini2,hdr2]=gdata(back_list2, 0, header=True)
[nf2,n_c2]=pdl_flux_c_ini2.shape
coeffs2=np.zeros([nf2,3])
crpix2=hdr2["CRPIX1"]
cdelt2=hdr2["CDELT1"]
crval2=hdr2["CRVAL1"]
Av=np.zeros(nf)
d_Av=np.zeros(nf)
for i in range(0, nf):
Av[i]=Av_IN
d_Av[i]=d_Av_IN
if mask_list == "none":
nmask=0
else:
f=open(mask_list,'r')
start_mask=[]
end_mask=[]
for line in f:
data=line.split(" ")
data=filter(None,data)
if len(data) != 0 and data[0] != "\n":
start_mask.extend([float_(data[0])])
end_mask.extend([float_(data[1])])
nmask=len(start_mask)
f.close()
n_mask_org=nmask
if elines_mask == "none":
nmask_e=0
nline=0
else:
f=open(elines_mask,'r')
nmask_e=0
nline=0
w_eline=[]
start_mask_e=[]
end_mask_e=[]
for line in f:
data=line.split(" ")
data=filter(None,data)
if data[0] != "#":
w_eline.extend([float_(data[0])])
start_mask_e.extend([w_eline[nline]*(1+input_redshift)-4*sigma])
end_mask_e.extend([w_eline[nline]*(1+input_redshift)+4*sigma])
nmask_e=nmask_e+1
nline=nline+1
f.close()
#
# We read the input spectrum
#
n_unc=0
y_min=1e12
y_max=-1e12
f=open(unc_file,'r')
i_scale=0
FLUX=0
have_error=0
index_unc=[]
wave_unc=[]
flux_unc=[]
flux_unc_org=[]
flux_unc_input=[]
e_flux_unc=[]
color_unc=[]
masked=[]
masked2=[]
masked_Av=[]
flux_masked=[]
flux_masked2=[]
e_flux_unc_kin=[]
wave_scale=0
for line in f:
data=line.split(' ')
data=filter(None,data)
if data[0] != "#":
index_unc.extend([float_(data[0])])
wave_unc.extend([float_(data[1])])
flux_unc.extend([float_(data[2])])
flux_unc_org.extend([float_(data[2])])
flux_unc_input.extend([float_(data[2])])
if len(data) > 3:
# Variance Column!
e_flux_unc.extend([np.sqrt(abs(float_(data[3])))])
color_unc.extend([1]) # $data[4] in the original Perl version
have_error=1
else:
e_flux_unc.extend([np.sqrt(abs(float_(data[2])))/10])
color_unc.extend([1])
if np.isnan(flux_unc[n_unc]):
flux_unc[n_unc]=flux_unc[n_unc-1]
if flux_unc[n_unc] < y_min:
y_min=flux_unc[n_unc]
if flux_unc[n_unc] > y_max:
y_max=flux_unc[n_unc]
if n_unc > 0:
if wave_unc[n_unc-1] <= wave_scale and wave_unc[n_unc] > wave_scale:
i_scale=n_unc
masked.extend([1])
masked2.extend([1])
masked_Av.extend([1])
if flux_unc[n_unc] == 0:
masked[n_unc]=0
masked2[n_unc]=0
w_test=wave_unc[n_unc-1]
for j in range(0, nmask):
if w_test > start_mask[j] and w_test < end_mask[j]:
masked[n_unc]=0
masked2[n_unc]=0
masked_Av[n_unc]=0
if deft == 2:
if w_test < min_wave:
masked[n_unc]=0
masked_Av[n_unc]=0
if w_test > max_wave:
masked[n_unc]=0
masked_Av[n_unc]=0
if w_test < min_wave2:
masked2[n_unc]=0
if w_test > max_wave2:
masked2[n_unc]=0
for j in range(0, nmask_e):
if w_test > start_mask_e[j] and w_test < end_mask_e[j]:
masked2[n_unc]=0
masked_Av[n_unc]=0
flux_masked.extend([flux_unc[n_unc]*masked[n_unc]])
flux_masked2.extend([flux_unc[n_unc]*masked2[n_unc]])
if wave_unc[n_unc] > min_wave and wave_unc[n_unc] < max_wave:
FLUX=FLUX+flux_masked[n_unc]
e_flux_unc_kin.extend([e_flux_unc[n_unc]])
n_unc=n_unc+1
f.close()
sigma_e=np.median(e_flux_unc)
#print "SIGMA_E = "+str(sigma_e)
for i in range(0, n_unc):
if e_flux_unc[i] > 1.5*sigma_e:
e_flux_unc[i]=1.5*sigma_e
e_flux_unc_kin[i]=1.5*sigma_e
if deft == 2:
y_min=min
y_max=max
else:
min_wave=np.amin(wave_unc)
max_wave=np.amax(wave_unc)
if deft == 1:
y_min=min
y_max=max
median_flux=np.median(flux_masked)
dpix_unc=wave_unc[1]-wave_unc[0]
max=3*median_flux
pdl_output=np.zeros([6,n_unc])
#
# We create a kernel
#
med_flux=np.median(flux_unc)
chi_sq_min_now=1e12
min_chi_sq=chi_sq_min_now
print '-----The redshift', redshift
print '-----The sigma', sigma
print '-----The Av', Av.shape
print '-----The crval2', crval2
print '-----The cdelt2', cdelt2
print '-----The crpix2', crpix2
print '-----back_list2', back_list2
print '-----nf2', nf2
print '-----n_c2', n_c2
print '-----pdl_flux_c_ini2', pdl_flux_c_ini2.shape
#print '-----hdr2', hdr2
print '-----wave_unc', wave_unc
print '-----masked_Av', len(masked_Av)
print '-----e_flux_unc', e_flux_unc
print '-----flux_unc', flux_unc
print '-----n_mc', n_mc
print '-----chi_sq_min_now', chi_sq_min_now
print '-----min_chi_sq', min_chi_sq
ssp_dat, mis_cosas = ssp.fit_ssp_lin_no_zero(redshift,sigma,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked_Av,e_flux_unc,flux_unc,n_mc,chi_sq_min_now,min_chi_sq,plot)
min_chi_sq=ssp_dat[0]
# print "CUT = "+str(med_flux)+" "+str(CUT_MEDIAN_FLUX)
# print str(redshift)+","+str(sigma)
#--------------------------------------------------------------------
print '\n----------------------------------------------------------------------------\n'
dzp = Dazer()
dz = ssp_fitter()
#Data folder location
data_folder = '/home/vital/workspace/Fit_3D/example_files/'
defaut_conf = 'auto_ssp_V500_several_Hb.config'
#Read parameters from command line
command_fit_dict = dz.load_command_params()
#Read parameters from config file
conf_file_address = command_fit_dict['config_file_address'] if 'config_file_address' in command_fit_dict else data_folder + defaut_conf
config_fit_dict = dz.load_config_params(conf_file_address)
#Update the fit configuration giving preference to the values from the command line
config_fit_dict.update(command_fit_dict)
#Import input data: spectrum, masks, emision line loc, stellar bases...
dz.load_input_data(config_fit_dict)
dz.fit_conf['zero_mask'] = np.array(mis_cosas[1])
obs_fit_spectrum = dz.fit_ssp()
dzp.FigConf()
#dzp.data_plot(dz.fit_conf['obs_wave'], dz.fit_conf['obs_flux'], label='obs_flux')
dzp.data_plot(dz.fit_conf['obs_wave'], dz.fit_conf['zero_mask'], label='my mask')
dzp.data_plot(mis_cosas[0], mis_cosas[1], label='Hector mask')
dzp.data_plot(mis_cosas[0], mis_cosas[2], label='Hector fit')
dzp.data_plot(mis_cosas[0], obs_fit_spectrum, label='my fit')
dzp.FigWording('Wave', 'Flux', 'Input spectra')
dzp.display_fig()
print '\n----------------------------------------------------------------------------\n'
#--------------------------------------------------------------------
print 'We are done here'
sys.exit(0)
if med_flux < ABS_MIN:
# WHAT TO DO???
# We print all!!!
sys.exit(0)
if med_flux > CUT_MEDIAN_FLUX:
if MIN_W == 0:
MIN_W = min_wave
if MAX_W == 0:
MAX_W=max_wave
################
# REDSHIFT DETERMINATION
my_plot=2
K=0
nr=0
chi_r=[]
red_r=[]
if d_redshift > 0:
min_chi_sq=1e30
RED=min_redshift
while RED < max_redshift:
ssp_dat1=ssp.fit_ssp_lin_no_zero_no_cont(RED,sigma,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked2,e_flux_unc_kin,flux_masked2,n_mc,chi_sq_min_now,min_chi_sq,my_plot)
chi_now=ssp_dat1[0]
chi_r.extend([chi_now])
red_r.extend([RED])
# print RED,chi_now,d_redshift
if nr > 1 and chi_r[nr-1] < min_chi_sq and chi_r[nr-1] > 0:
redshift=red_r[nr-1]
min_chi_sq=chi_r[nr-1]
K=nr-1
nr=nr+1
RED=RED+d_redshift
#
# Second pass: refine the redshift on a finer grid around the first minimum
#
e_redshift=d_redshift
nr=0
chi_r=[]
red_r=[]
RED=redshift-1.5*d_redshift
max_redshift=redshift+1.5*d_redshift
d_redshift=0.1*d_redshift
while RED < max_redshift:
ssp_dat2=ssp.fit_ssp_lin_no_zero_no_cont(RED,sigma,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked2,e_flux_unc,flux_masked2,n_mc,chi_sq_min_now,min_chi_sq,my_plot)
chi_now=ssp_dat2[0]
chi_r.extend([chi_now])
red_r.extend([RED])
if nr > 1 and chi_r[nr-1] < chi_r[nr-2] and chi_r[nr-1] < chi_r[nr] and chi_r[nr-1] <= min_chi_sq:
a=red_r[nr-2]
b=red_r[nr-1]
c=red_r[nr]
fa=chi_r[nr-2]
fb=chi_r[nr-1]
fc=chi_r[nr]
den=(fc-2*fb+fa)
redshift=c-(b-a)*((fc-fb)/den+0.5)
slope=abs(0.5*(fc-fb)/(c-b))+abs(0.5*(fa-fb)/(a-b))
if slope > 0:
e_redshift=0.01*redshift/slope
else:
e_redshift=0.01*redshift
min_chi_sq=chi_r[nr-1]
K=nr-1
nr=nr+1
a_rnd=ran.rand(2)
RED=RED+d_redshift*(a_rnd[0])
fit_redshift=0
d_redshift=0
else:
fit_redshift=0
e_redshift=0
print "REDSHIFT = "+str(redshift)+" +- "+str(e_redshift)
#sys.exit()
REDSHIFT=redshift
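# A minimal sketch of the three-point parabolic refinement inlined in the loops
# above and below: given (approximately) equally spaced chi^2 samples
# (a,fa),(b,fb),(c,fc) that bracket a minimum, the vertex of the interpolating
# parabola gives the refined parameter value. Hypothetical helper name; the
# script writes the expression inline instead of calling a function.
def parabolic_min_sketch(a, b, c, fa, fb, fc):
    den = fc - 2.0*fb + fa
    if den == 0:
        return b  # degenerate case: keep the sampled minimum
    return c - (b - a)*((fc - fb)/den + 0.5)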
# sigma DETERMINATION
K=0
nr=0
chi_s=[]
sigma_s=[]
print "D_SIGMA = "+str(d_sigma)
if d_sigma > 0:
min_chi_sq = 1e30
SIGMA=min_sigma
while SIGMA < max_sigma:
ssp_dat3=ssp.fit_ssp_lin_no_zero_no_cont(redshift,SIGMA,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked2,e_flux_unc,flux_masked2,n_mc,chi_sq_min_now,min_chi_sq,my_plot)
chi_now=ssp_dat3[0]
chi_s.extend([chi_now])
sigma_s.extend([SIGMA])
if chi_s[nr-1] < min_chi_sq:
sigma=sigma_s[nr-1]
min_chi_sq=chi_s[nr-1]
K=nr
nr=nr+1
SIGMA=SIGMA+d_sigma
SIGMA=sigma-1.5*d_sigma
max_sigma=sigma+1.5*d_sigma
d_sigma=0.33*d_sigma
#
# Second pass: refine sigma on a finer grid around the first minimum
#
nr=0
chi_s=[]
sigma_s=[]
e_sigma=d_sigma
while SIGMA < max_sigma:
ssp_dat4=ssp.fit_ssp_lin_no_zero_no_cont(redshift,SIGMA,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked2,e_flux_unc,flux_masked2,n_mc,chi_sq_min_now,min_chi_sq,my_plot)
chi_now=ssp_dat4[0]
chi_s.extend([chi_now])
sigma_s.extend([SIGMA])
if nr > 1 and chi_s[nr-1] < chi_s[nr-2] and chi_s[nr-1] < chi_s[nr] and chi_s[nr-1] <= min_chi_sq:
a=sigma_s[nr-2]
b=sigma_s[nr-1]
c=sigma_s[nr]
fa=chi_s[nr-2]
fb=chi_s[nr-1]
fc=chi_s[nr]
den=(fc-2*fb+fa)
sigma=c-(b-a)*((fc-fb)/den+0.5)
min_chi_sq=chi_s[nr-1]
K=nr
SIGMA=max_sigma
nr=nr+1
a_rnd=ran.rand(2)
SIGMA=SIGMA+d_sigma*(a_rnd[0])
slope=(chi_s[nr-1]-min_chi_sq)/(sigma_s[nr-1]-sigma)
if slope > 0:
e_sigma=sigma/slope/10.
else:
e_sigma=sigma/10.
fit_sigma=0
d_sigma=0
else:
fit_sigma=0
e_sigma=0
sigma=abs(sigma)
e_sigma=abs(e_sigma)
print "SIGMA = "+str(sigma)+"+-"+str(e_sigma)
else:
#
# Below the cut!
#
for i in range(0, nf):
Av[i]=0
d_Av[i]=0
# Av DETERMINATION
K=0
nr=0
chi_Av=[]
Av_s=[]
Av_p_chi=[]
print "D_Av = "+str(d_Av_IN)
nr_min=0
if d_Av_IN > 0:
min_chi_sq = 1e30
Av_NOW=min_Av
while Av_NOW < max_Av:
for i in range(0, nf):
Av[i]=Av_NOW
#
# Do not allow negative coefficients!
#
ssp_dat5=ssp.fit_ssp_lin_no_zero(redshift,sigma,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked_Av,e_flux_unc,flux_masked,n_mc,chi_sq_min_now,min_chi_sq,my_plot)
chi_now=ssp_dat5[0]
chi_Av.extend([chi_now])
Av_s.extend([Av_NOW])
if chi_now > 0:
Av_p_chi.extend([Av_NOW/(chi_now)])
if K == 0 and chi_Av[nr] < min_chi_sq:
Av_F=Av_s[nr]
nr_min=nr
min_chi_sq=chi_now
if nr > 1 and chi_Av[nr-1] < chi_Av[nr-2] and chi_Av[nr-1] < chi_Av[nr] and chi_Av[nr-1] <= min_chi_sq:
a=Av_s[nr-2]
b=Av_s[nr-1]
c=Av_s[nr]
fa=chi_Av[nr-2]
fb=chi_Av[nr-1]
fc=chi_Av[nr]
den=(fc-2*fb+fa)
Av_F=c-(b-a)*((fc-fb)/den+0.5)
min_chi_sq=chi_Av[nr-1]
K=nr
nr=nr+1
a_rnd=ran.rand(2)
Av_NOW=Av_NOW+d_Av_IN*(a_rnd[0])
if Av_s[nr-1] != Av_F:
slope=(chi_Av[nr-1]-min_chi_sq)/(Av_s[nr-1]-Av_F)
if slope > 0 :
e_Av=abs(Av_F/slope/3.)
else:
e_Av=d_Av_IN
else:
e_Av=d_Av_IN
fit_Av=0
d_Av_NOW=0
else:
fit_Av=0
if d_Av_IN == 0:
Av_F=Av_IN
if e_Av == 0:
e_Av=d_Av_IN
print "AV = "+str(Av_F)+" +- "+str(e_Av)
for i in range(0, nf):
Av[i]=Av_F
fit_redshift=0
redshift_abs=redshift
delta_chi=10
NITER=0
niter_tmp_max=10
chi_sq_min_now=1e12
min_chi_sq_limit=min_chi_sq
n_mc=10
pdl_rat_master=np.ones(n_unc+1)
[min_chi_sq,pdl_age_mod,pdl_met_mod,pdl_ml,pdl_Av,coeffs,coeffs_N,coeffs_NM,pdl_model_spec_min,pdl_res]=ssp.fit_ssp_lin_no_zero(redshift,sigma,Av,crval2,cdelt2,crpix2,nf2,n_c2,pdl_flux_c_ini2,hdr2,wave_unc,masked_Av,e_flux_unc,flux_unc,n_mc,chi_sq_min_now,min_chi_sq,plot)
#
# We subtract the continuum!
#
pdl_mod_SSP=pdl_model_spec_min
pdl_res_SSP=pdl_res
nx=n_unc
i0_now=int(0.4*nx)
i1_now=int(0.6*nx)
stats_res=np.std(pdl_res[i0_now:i1_now])+np.mean(pdl_res[i0_now:i1_now])
stats_mod=np.mean(pdl_model_spec_min[i0_now:i1_now])
SN=0
if stats_res > 0:
SN=stats_mod/stats_res
print "Signal-to-Noise = "+str(SN)
old=1
if old == 1 and SN > 10:
pdl_model_spec_min[np.where(pdl_model_spec_min == 0)[0]]=1.
pdl_rat=pdl_res/pdl_model_spec_min+1
rat=pdl_rat
med_rat=my.median_filter(int(5*2.354*sigma),rat)
pdl_med_rat=np.array(med_rat)
n_unc_1=n_unc-1
pdl_wave_unc=wave_unc[0]+(wave_unc[1]-wave_unc[0])*np.arange(0,n_unc_1)
med_rat=my.median_filter(int(7*2.354*sigma),rat)
med_sigma=int(1.5*sigma)
if med_sigma < 3:
med_sigma=3
med_rat_box=my.median_box(med_sigma,med_rat)
med_wave_box=my.median_box(med_sigma,wave_unc)
y_rat = interp1d(med_wave_box, med_rat_box,bounds_error=False,fill_value=0.)(wave_unc)
if plot > 0:
out_ps_now="junk2"
title="ratio"
ssp.plot_results_min_max(2,wave_unc,[flux_unc,pdl_model_spec_min,pdl_res,pdl_rat,y_rat],out_ps_now,title,-0.2,1.5)
i0_now=int(0.4*n_unc)
i1_now=int(0.6*n_unc)
stats_rat0=np.mean(y_rat[i0_now:i1_now])
stats_rat1=np.std(y_rat[i0_now:i1_now])+stats_rat0
if stats_rat0 > 0 and stats_rat1 > 0.02:
for i in range(0, n_unc):
val=y_rat[i]
if val > 0:
flux_unc[i]=flux_unc[i]/val
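# Sketch of the continuum-shape correction just applied: the observed/model flux
# ratio is median-filtered on a scale of several FWHM (FWHM = 2.354*sigma pixels)
# so that only the smooth flux-calibration residual is divided out, leaving
# narrow features untouched. A self-contained version of the same idea
# (hypothetical helper; assumes scipy.ndimage is available, scipy being a
# dependency of this script already):
def smooth_ratio_sketch(flux, model, sigma_pix):
    import numpy as np
    from scipy.ndimage import median_filter
    model = np.asarray(model, dtype=float).copy()
    model[model == 0] = 1.0  # guard against division by zero, as the script does
    rat = np.asarray(flux, dtype=float)/model
    return median_filter(rat, size=max(3, int(5*2.354*sigma_pix)))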
print "Deriving SFH...."
# Leftover variable declarations from the original Perl version, kept for
# reference: pdl_mod_JOINT, pdl_res_JOINT (=pdl_res_SSP; modified 12 March 2015,
# for the case where the while loop below is not entered), pdl_no_gas, age_min,
# met_min, Av_min, age_min_mass, met_min_mass, Av_min_mass
coeffs_cat=np.zeros([nf+1,n_mc])
while MIN_CHISQ > MIN_DELTA_CHISQ and NITER < MAX_NITER:
if NITER == 1:
MIN_CHISQ=1e12
######################################################################
# Fitting the emission lines
######################################################################
a_fixed=np.zeros([1,9])
a_type_fixed=[]
n_mod_fixed=0
if ns > 0:
ks=0
SYS_VEL=vel_light*REDSHIFT
REN=[]
e_REN=[]
sycall(call) # note: 'call' still holds the last "rm -rf" command defined above
for ist in range(0,ns):
if red_elines > 0:
SYS_VEL=vel_light*red_elines
if ist == 0:
SYS_VEL_MAX=vel_light*red_elines+D_SYS_VEL
SYS_VEL_MIN=vel_light*red_elines-D_SYS_VEL
else:
SYS_VEL_MAX=vel_light*red_elines+D_SYS_VEL
SYS_VEL_MIN=vel_light*red_elines-D_SYS_VEL
else:
SYS_VEL=vel_light*REDSHIFT
if ist == 0:
SYS_VEL_MAX=vel_light*REDSHIFT+D_SYS_VEL
SYS_VEL_MIN=vel_light*REDSHIFT-D_SYS_VEL
else:
SYS_VEL_MAX=vel_light*REDSHIFT+D_SYS_VEL
SYS_VEL_MIN=vel_light*REDSHIFT-D_SYS_VEL
start_w_e=start_w_E[ist]
end_w_e=end_w_E[ist]
mask_e=mask_E[ist]
config_e=config_E[ist]
npoly_e=npoly_E[ist]
mask_poly_e=mask_poly_E[ist]
nmin_e=nmin_E[ist]
nmax_e=nmax_E[ist]
print "CONF="+config_e
wave_elines=[]
flux_elines=[]
flux_e_elines=[]
masked_elines=[]
n_e=0
for i in range(0, n_unc):
if wave_unc[i] > start_w_e and wave_unc[i] < end_w_e:
wave_elines.extend([wave_unc[i]])
flux_elines.extend([flux_unc_org[i]-pdl_mod_SSP[i]])
flux_e_elines.extend([abs(e_flux_unc[i])])
masked_elines.extend([1])
n_e=n_e+1
pdl_wave_elines=np.array(wave_elines)
pdl_flux_elines=np.array(flux_elines)
pdl_flux_e_elines=np.array(flux_e_elines)
pdl_masked_elines=np.array(masked_elines)
stats0=np.mean(pdl_flux_elines)
stats4=np.amax(pdl_flux_elines)
y_max=stats4-stats0
deft=1
data=filter(None, config_line_E[ist].split(';')[0].split(" "))
#print float_(filter(None, config_line_E[0].split(';')[4].split(" ")))[2]
junk=data[0]
n_mod=int(data[1])
chi_goal=float_(data[2])
d_chi_goal=float_(data[3])
n_line=n_line_E[ist]
i_mod=1
typef=[]
a=np.zeros([n_mod,9])
ia=np.zeros([n_mod,9])
ea=np.zeros([n_mod,9])
a0=np.zeros([n_mod,9])
a1=np.zeros([n_mod,9])
link=np.zeros([n_mod,9])
for ii in range(0, n_mod):
cnf=filter(None, config_line_E[ist].split(';')[i_mod].split(" "))
i_mod=i_mod+1
typef.extend(cnf)
for j in range(0, 9):
data=config_line_E[ist].split(';')[i_mod].replace('\t',' ')
data=filter(None, data.split(' '))
i_mod=i_mod+1
a[ii][j]=float_(data[0])
ia[ii][j]=float_(data[1])
ea[ii][j]=0
a0[ii][j]=float_(data[2])
a1[ii][j]=float_(data[3])
link[ii][j]=float_(data[4])
if deft == 1:
a1_max=2*y_max*(a[ii][2]*((2*3.1416)**0.5))
a0_min=0.01*1.2*y_max*(a[ii][2]*((2*3.1416)**0.5))
if a1[ii][1] > a1_max:
a1[ii][1]=a1_max
a0[ii][1]=a0_min
#
# Constrain the velocity to the vicinity of the systemic value
#
a[0][3]=SYS_VEL
ia[0][3]=1
a0[0][3]=SYS_VEL_MIN
a1[0][3]=SYS_VEL_MAX
i_ter=0
n_loops=5
n_mc_e=30
breakt=0
scale_ini=0.15
deft=0
pdl_model=np.zeros(n_e)
pdl_model_cont=np.zeros(n_e)
pdl_model_tmp=np.zeros(n_e)
pdl_model_cont_tmp=np.zeros(n_e)
a_out_now=ssp.copy_a(n_mod,a)
a_out_tmp=ssp.copy_a(n_mod,a)
chi_sq_now=1e12
a_results=np.zeros([1, n_mod, 9])
nnk=0
max_time=5
time=ssp.get_seconds()
d_time=ssp.get_seconds()-time
i_loops=0
ran.seed(None)
while i_ter < n_loops and breakt == 0:
chi_iter=chi_sq_now
chi_single=0
[chi_sq_now,pdl_a,pdl_model_tmp,pdl_model_cont_tmp]=ssp.fit_elines_grad_rnd_new(pdl_wave_elines,pdl_flux_elines,pdl_flux_e_elines,n_mod,chi_goal,d_chi_goal,typef,a_out_tmp,ia,a0,a1,link,n_mc_e,pdl_masked_elines,deft,scale_ini)#,max_time)
a_out_now=ssp.copy_a_pdl(n_mod,pdl_a)
#print chi_sq_now, pdl_a[:,1],a_out_tmp[:,1]
if chi_sq_now < chi_iter:
#####################################################
# Close to a result, narrow the range
for i in range(0, n_mod):
for j in range(0, 9):
if typef[i] == "eline\n":
if ia[i][j] == 1:
if link[i][j] == -1:
delta_now=abs(a1[i][j]-a0[i][j])/(2.)
a0_tmp=a0[i][j]
a1_tmp=a1[i][j]
if j != 3:
a0_tmp=a_out_now[i][j]-delta_now
a1_tmp=a_out_now[i][j]+delta_now
else:
a0_tmp=a_out_now[i][j]-0.5*delta_now
a1_tmp=a_out_now[i][j]+0.5*delta_now
if a0_tmp < a0[i][j]:
a0_tmp=a0[i][j]
if a1_tmp > a1[i][j]:
a1_tmp=a1[i][j]
a0[i][j]=a0_tmp
a1[i][j]=a1_tmp
####################################################
a_out_tmp=ssp.copy_a(n_mod,a_out_now)
a_results=ssp.copy_a_results(n_mod,nnk,a_out_now,a_results)
pdl_model=pdl_model_tmp
pdl_model_cont=pdl_model_cont_tmp
nnk=nnk+1
i_ter=i_ter+1
else:
rnd_a=ran.rand(10)
a_out_now=ssp.copy_a(n_mod,a_out_now)
i_loops=i_loops+1
if i_loops > 5*n_loops:
breakt=1
out_ps_now="fit_"+outfile+"."+str(start_w_e)+"_"+str(end_w_e)
title="["+str(start_w_e)+","+str(end_w_e)+"]"
if pdl_model.shape[0] == len(pdl_wave_elines):
pdl_model=np.transpose(pdl_model)
ssp.plot_results(plot,pdl_wave_elines,[pdl_flux_elines,pdl_model[0,:],(pdl_flux_elines-pdl_model[0,:])],out_ps_now,title)
print "----------------------------------------";
a_final=ssp.mean_a_results_last(n_mod,nnk,a_results,ia)
#
# Background noise
#
pdl_res_now=pdl_flux_elines-pdl_model
stats_back1=np.mean(pdl_res_now)+np.std(pdl_res_now)
a_final=ssp.add_back_noise(n_mod,a_final,typef,chi_sq_now,stats_back1)
ssp.print_a_final(n_mod,a_final,typef,chi_sq_now)
out_fit_spectra=out_elines
ssp.print_a_final_file_add(n_mod,a_final,typef,chi_sq_now,out_fit_spectra)
[n_mod_fixed,junk_a_fixed,junk_a_type_fixed]=ssp.add_a_results_elines(n_mod,a_final,typef,n_mod_fixed,a_fixed,a_type_fixed)
a_fixed=junk_a_fixed
a_type_fixed=junk_a_type_fixed
nmin_e=int(0.1*n_unc)
nmax_e=int(0.9*n_unc)
###############################
# Low-order polynomial!
out_fit_now=out_fit+"."+str(start_w_e)+"_"+str(end_w_e)+".pdf"
box=int(sigma*6)
print "DONE FIT ELINES CONFIG "+str(ist)
#
# We create a FIXED model of the emission lines
#
pdl_model_elines=np.zeros(n_unc)
pdl_model_cont=np.zeros(n_unc)
pdl_wave_elines=np.array(wave_unc)
NN=len(pdl_wave_elines)
NN1=len(pdl_model_elines)
for i in range(0, n_mod_fixed):
pdl_tmp=ssp.create_single_model(pdl_wave_elines,i,a_type_fixed,a_fixed)
NN2=len(pdl_tmp[0,:])
pdl_model_elines=pdl_model_elines+pdl_tmp[0,:]
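# Sketch of what a single "eline" component above contributes, assuming the
# FIT3D parameterization (a[i][0]=rest wavelength, a[i][1]=integrated flux,
# a[i][2]=sigma in AA, a[i][3]=velocity in km/s); the real model is built by
# ssp.create_single_model, so this is illustrative only:
def eline_sketch(wave, wl0, flux, sig, vel, c_light=299792.458):
    import numpy as np
    mu = wl0*(1.0 + vel/c_light)  # redshifted line center
    return flux*np.exp(-0.5*((np.asarray(wave) - mu)/sig)**2)/(sig*np.sqrt(2.0*np.pi))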
#
# We remove the gas before a new iteration
#
for i in range(0, n_unc):
flux_unc[i]=flux_unc_org[i]-pdl_model_elines[i]
pdl_mod_JOINT=pdl_mod_SSP+pdl_model_elines
pdl_res_JOINT=pdl_res_SSP-pdl_model_elines
pdl_no_gas=np.array(flux_unc)
#############################################################
# We rescale!
##############################################################
y_rat=np.ones(nx+1)
jy=0
if SN > 10:
pdl_mod_JOINT[np.where(pdl_mod_JOINT == 0)[0]]=1.
pdl_rat=pdl_res_JOINT/pdl_mod_JOINT+1
rat=pdl_rat
n_unc_1=n_unc-1
pdl_wave_unc=wave_unc[0]+(wave_unc[1]-wave_unc[0])*np.arange(0, n_unc_1)
med_rat=my.median_filter(int(5*2.354*sigma),rat)
med_sigma=int(1.5*sigma)
if med_sigma < 3:
med_sigma=3
med_rat_box=my.median_box(med_sigma,med_rat)
med_wave_box=my.median_box(med_sigma,wave_unc)
y_rat = interp1d(med_wave_box, med_rat_box,bounds_error=False,fill_value=0.)(wave_unc)
i0_now=int(0.4*nx)
i1_now=int(0.6*nx)
stats_rat0=np.mean(y_rat[i0_now:i1_now])
stats_rat1=np.std(y_rat[i0_now:i1_now])+stats_rat0
if plot > 1:
out_ps_now="junk3"
title="ratio = "+str(stats_rat0)+", rms="+str(stats_rat1)
print title
ssp.plot_results_min_max(2,wave_unc,[flux_unc,pdl_model_spec_min,pdl_res,pdl_rat,y_rat],out_ps_now,title,-0.2,1.5)
if stats_rat0 > 0 and stats_rat1 > 0.02:
if jy == 0:
# Continuum shape correction on/off
pdl_rat_master=y_rat
pdl_rat_master[np.where(pdl_rat_master == 0)[0]]=1.
y_rat=pdl_rat_master
else:
y_rat=pdl_rat_master
for i in range(0, n_unc):
val=y_rat[i]
if val > 0:
flux_unc[i]=flux_unc[i]/val
flux_unc_org[i]=flux_unc_org[i]/val
##############################################################
# End re-scale
##############################################################
ML=0
if med_flux > CUT_MEDIAN_FLUX:
n_mc=20
[min_chi_sq,pdl_age_mod,pdl_met_mod,pdl_ml,pdl_Av,coeffs,coeffs_N,coeffs_NM,pdl_mod_SSP,pdl_res_SSP,coeffs_N_input,e_coeffs_N_input]=ssp.fit_ssp_lin_MC(redshift,sigma,Av,crval,cdelt,crpix,nf,n_c,pdl_flux_c_ini,hdr,wave_unc,masked,e_flux_unc,flux_unc,n_mc,chi_sq_min_now,MIN_CHISQ,plot)
smooth_ratiot=ssp.smooth_ratio(flux_unc,pdl_mod_SSP,int(sigma))
pdl_mod_SSP_no_cor=np.copy(pdl_mod_SSP)
pdl_mod_SSP=pdl_mod_SSP*smooth_ratiot
f1=open(out_coeffs_file, "w")
f1.write("# ID AGE MET COEFF Min.Coeff log(M/L) AV N.Coeff Err.Coeff\n")
print "------------------------------------------------------------------------------"
print "ID AGE MET COEFF Min.Coeff log(M/L) AV N.Coeff Err.Coeff"
print "------------------------------------------------------------------------------"
age_mod=pdl_age_mod
met_mod=pdl_met_mod
Av_mod=pdl_Av
ml=pdl_ml
a_coeffs=coeffs[:,0]
a_e_coeffs=coeffs[:,1]
a_min_coeffs=coeffs[:,2]
a_coeffs_N=coeffs_N
a_e_coeffs_N=a_e_coeffs
l_age_min=0
l_met_min=0
l_Av_min=0
l_age_min_mass=0
l_met_min_mass=0
l_Av_min_mass=0
e_l_age_min=0
e_l_met_min=0
e_l_Av_min=0
e_l_age_min_mass=0
e_l_met_min_mass=0
e_l_Av_min_mass=0
for k in range(0, nf):
if a_coeffs[k] > 0:
a_e_coeffs_N[k]=a_e_coeffs[k]*(a_coeffs_N[k]/a_coeffs[k])
else:
a_e_coeffs_N[k]=0
l_age_min=l_age_min+a_coeffs[k]*np.log10(age_mod[k])
l_met_min=l_met_min+a_coeffs[k]*np.log10(met_mod[k])
l_Av_min=l_Av_min+a_coeffs[k]*np.log10(Av_mod[k])
l_age_min_mass=l_age_min_mass+ml[k]*a_coeffs_N[k]*np.log10(age_mod[k])
l_met_min_mass=l_met_min_mass+ml[k]*a_coeffs_N[k]*np.log10(met_mod[k])
l_Av_min_mass=l_Av_min_mass+ml[k]*a_coeffs_N[k]*np.log10(Av_mod[k])
e_l_age_min=e_l_age_min+a_e_coeffs[k]*np.log10(age_mod[k])
e_l_met_min=e_l_met_min+a_e_coeffs[k]*np.log10(met_mod[k])
e_l_Av_min=e_l_Av_min+a_e_coeffs[k]*np.log10(Av_mod[k])
e_l_age_min_mass=e_l_age_min_mass+ml[k]*a_e_coeffs_N[k]*np.log10(age_mod[k])
e_l_met_min_mass=e_l_met_min_mass+ml[k]*a_e_coeffs_N[k]*np.log10(met_mod[k])
e_l_Av_min_mass=e_l_Av_min_mass+ml[k]*a_e_coeffs_N[k]*np.log10(Av_mod[k])
ML=ML+ml[k]*a_coeffs_N[k]
C_ini=coeffs_N_input[k]
e_C_ini=e_coeffs_N_input[k]
f1.write(("%2d" % k)+" "+("%7.4f" % age_mod[k])+" "+("%7.4f" % met_mod[k])+" "+("%7.4f" % a_coeffs_N[k])+" "+("%7.4f" % a_min_coeffs[k])+" "+("%4.4f" % np.log10(ml[k]))+" "+("%4.2f" % Av_mod[k])+" "+("%7.4f" % a_coeffs[k])+" "+("%7.4f" % a_e_coeffs[k])+"\n")
if a_coeffs[k] > 1e-5:
print ("%2d" % k)+" "+("%7.4f" % age_mod[k])+" "+("%7.4f" % met_mod[k])+" "+("%7.4f" % a_coeffs_N[k])+" "+("%7.4f" % a_min_coeffs[k])+" "+("%4.4f" % np.log10(ml[k]))+" "+("%4.2f" % Av_mod[k])+" "+("%7.4f" % a_coeffs[k])+" "+("%7.4f" % a_e_coeffs[k])+" "+("%7.4f" % C_ini)+" "+("%7.4f" % e_C_ini)
print "------------------------------------------------------------------------------"
f1.close()
age_min=10**(l_age_min)
met_min=10**(l_met_min)
Av_min=10**(l_Av_min)
age_min_mass=10**(l_age_min_mass/ML)
met_min_mass=10**(l_met_min_mass/ML)
Av_min_mass=10**(l_Av_min_mass/ML)
e_age_min=abs(0.43*e_l_age_min*age_min)
e_met_min=abs(0.43*e_l_met_min*met_min)
e_Av_min=abs(0.43*e_l_Av_min*Av_min)
e_age_min_mass=abs(0.43*e_l_age_min*age_min_mass)
e_met_min_mass=abs(0.43*e_l_met_min*met_min_mass)
e_Av_min_mass=abs(0.43*e_l_Av_min*Av_min_mass)
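# 0.43 ~ log10(e) = 1/ln(10), the usual factor for converting between
# base-10 logarithmic and linear fractional uncertainties.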
if min_chi_sq > 0:
delta_chi=abs((chi_sq_min_now-min_chi_sq)/min_chi_sq)
wpeak=6562
Fpeak=-1e12
pdl_mod_JOINT=pdl_mod_SSP+pdl_model_elines
pdl_res_JOINT=pdl_res_SSP-pdl_model_elines
pdl_no_gas=np.array(flux_unc)
# Copy output!
pdl_output[0,:]=np.array(flux_unc_org)
pdl_output[1,:]=pdl_mod_SSP
pdl_output[2,:]=pdl_mod_JOINT
pdl_res_SSP=np.array(flux_unc_org)-pdl_mod_SSP
pdl_res_SSP_no_cor=np.array(flux_unc_input)-pdl_mod_SSP_no_cor
pdl_output[3,:]=pdl_res_SSP_no_cor
pdl_tmp=np.array(flux_unc_org)
nx_1=n_unc#-1
if len(pdl_rat_master)-len(pdl_mod_JOINT)==1:<|fim▁hole|> pdl_output[5,:]=np.array(flux_unc_org)-(pdl_res_SSP-pdl_res_JOINT)
title="X="+str(chi_sq_now)+" T="+str(age_min)+" ("+str(age_min_mass)+") Z="+str(met_min)+" ("+str(met_min_mass)+") Av="+str(Av_min)+" z="+str(redshift)+" sigma="+str(sigma)
ssp.plot_results(plot,pdl_wave_elines,pdl_output,out_ps,title)
print "I.Iter = "+str(NITER)+" DONE"
NITER=NITER+1
# Write output file
h=pyf.PrimaryHDU().header
h["NAXIS"]=2
h["NAXIS1"]=n_unc
h["NAXIS2"]=6
h["COMMENT"]="OUTPUT auto_ssp_elines_rnd.pl FITs"
h["CRVAL1"]=wave_unc[0]
h["CDELT1"]=wave_unc[1]-wave_unc[0]
h["CRPIX1"]=1
if pt.exists(out_fit) == False:
wfits(out_fit,pdl_output,h)
else:
sycall("rm "+out_fit)
wfits(out_fit,pdl_output,h)
################################
print "--------------------------------------------------------------"
pdl_masked=np.array(masked)
pdl_chi_now=((pdl_masked*pdl_res_JOINT)**2)/((np.array(e_flux_unc))**2)
pdl_chi_now[np.isnan(pdl_chi_now)]=0
chi_joint=np.sum(pdl_chi_now)
chi_joint=(chi_joint/(n_unc-n_mod_fixed-nf-1))**0.5
rms=np.std(pdl_masked*pdl_res_JOINT)
j1=int(0.4*n_unc)
j2=int(0.6*n_unc)
rms=np.std(pdl_res_JOINT[j1:j2])
pdl_flux_unc_now=np.array(flux_unc)
med_flux=np.median(pdl_flux_unc_now[j1:j2])
title="X="+str(chi_joint)+" T="+str(age_min)+" ("+str(age_min_mass)+") Z="+str(met_min)+" ("+str(met_min_mass)+") Av="+str(Av_min)+" z="+str(redshift)+" sigma="+str(sigma)
ssp.plot_results(plot,wave_unc,pdl_output,out_ps,title)
MASS=ML*med_flux
lML=np.log10(ML)
print "MSP CHISQ="+str(chi_joint)+" AGE="+str(age_min)+"+-"+str(e_age_min)+" MET="+str(met_min)+"+-"+str(e_met_min)+" AV="+str(Av_min)+"+-"+str(e_Av_min)+" REDSHIFT="+str(redshift)+"+-"+str(e_redshift)+" SIGMA_DISP="+str(sigma)+"+-"+str(e_sigma)+" RMS="+str(rms)+" MED_FLUX="+str(med_flux)+" AGE_mass="+str(age_min_mass)+"+-"+str(e_age_min_mass)+" MET_mass="+str(met_min_mass)+"+-"+str(e_met_min_mass)+" MASS="+str(MASS)+" log_M/L="+str(lML)
j1=int(0.4*n_unc)
j2=int(0.6*n_unc)
wave_norm=(wave_unc[j1]+wave_unc[j2])/2.
f=open(outfile, "w")
f.write("# (1) MIN_CHISQ\n")
f.write("# (2) LW Age (Gyr)\n")
f.write("# (3) LW Age error\n")
f.write("# (4) LW metallicity\n")
f.write("# (5) LW metallicity error\n")
f.write("# (6) Av\n")
f.write("# (7) AV error\n")
f.write("# (8) redshift \n")
f.write("# (9) redshift error\n")
f.write("# (10) velocity dispersion sigma, in AA\n")
f.write("# (11) velocity dispersion error\n")
f.write("# (12) median_FLUX\n")
f.write("# (13) redshift_ssp\n")
f.write("# (14) med_flux \n")
f.write("# (15) StdDev_residual \n")
f.write("# (16) MW Age (Gyr)\n")
f.write("# (17) MW Age error\n")
f.write("# (18) MW metallicity\n")
f.write("# (19) MW metallicity error\n")
f.write("# (20) Systemic Velocity km/s \n")
f.write("# (21) Log10 Average Mass-to-Light Ratio \n")
f.write("# SSP_SFH $back_list \n")
f.write("# SSP_KIN $back_list2 \n")
f.write("# WAVE_NORM $wave_norm AA\n")
if chi_joint == 0:
chi_joint=1
f.write(str(chi_joint)+","+str(age_min)+","+str(e_age_min)+","+str(met_min)+","+str(e_met_min)+","+str(Av_min)+","+str(e_Av_min)+","+str(redshift)+","+str(e_redshift)+","+str(sigma)+","+str(e_sigma)+","+str(FLUX)+","+str(redshift_abs)+","+str(med_flux)+","+str(rms)+","+str(age_min_mass)+","+str(e_age_min_mass)+","+str(met_min_mass)+","+str(e_met_min_mass)+","+str(SYS_VEL)+","+str(lML)+"\n")
f.close()
sec_end=ssp.print_time()
sec_total=sec_end-sec_ini
print "# SECONDS = "+str(sec_total)
#
# Write the output!
#
#<|fim▁end|> | pdl_res_JOINT=pdl_tmp/(pdl_rat_master[0:nx_1])-pdl_mod_JOINT
else:
pdl_res_JOINT=pdl_tmp/(pdl_rat_master)-pdl_mod_JOINT
pdl_output[4,:]=pdl_res_JOINT |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># ######## KADEMLIA CONSTANTS ###########
BIT_NODE_ID_LEN = 160
HEX_NODE_ID_LEN = BIT_NODE_ID_LEN // 4
# Small number representing the degree of
# parallelism in network calls
ALPHA = 3
# Maximum number of contacts stored in a bucket
# NOTE: Should be an even number.
K = 8 # pylint: disable=invalid-name
# Maximum number of contacts stored in the
# replacement cache of a bucket
# NOTE: Should be an even number.
CACHE_K = 32
# Timeout for network operations
# [seconds]
RPC_TIMEOUT = 0.1
# Delay between iterations of iterative node lookups
# (for loose parallelism)
# [seconds]
ITERATIVE_LOOKUP_DELAY = RPC_TIMEOUT / 2
# If a KBucket has not been used for this amount of time, refresh it.
# [seconds]
REFRESH_TIMEOUT = 60 * 60 * 1000  # 1 hour in ms (the unit header above says seconds)
# The interval at which nodes replicate (republish/refresh)
# the data they hold
# [seconds]
REPLICATE_INTERVAL = REFRESH_TIMEOUT
# The time it takes for data to expire in the network;
# the original publisher of the data will also republish
# the data at this time if it is still valid
# [seconds]
DATE_EXPIRE_TIMEOUT = 86400 # 24 hours
# ####### IMPLEMENTATION-SPECIFIC CONSTANTS ###########
# The interval in which the node should check whether any buckets
# need refreshing or whether any data needs to be republished
# [seconds]
CHECK_REFRESH_INTERVAL = REFRESH_TIMEOUT / 5
<|fim▁hole|># Any larger message will be spread across several UDP packets.
# [bytes]
UDP_DATAGRAM_MAX_SIZE = 8192 # 8 KB
DB_PATH = "db/ob.db"
VERSION = "0.3.1"
SATOSHIS_IN_BITCOIN = 100000000
# The IP of the default DNSChain Server used to validate namecoin addresses
DNSCHAIN_SERVER_IP = "192.184.93.146"<|fim▁end|> | # Max size of a single UDP datagram. |
<|file_name|>imports.js<|end_file_name|><|fim▁begin|>'use strict';
import _ from 'lodash';
import bluebird from 'bluebird';
import fs from 'fs';
import requireDir from 'require-dir';
import Logger from '../../logger';
<|fim▁hole|>
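// main() gathers every module in ./importers, takes each one's default
// export, kicks off importer.run() on all of them, and resolves once
// every import has completed.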
function main() {
const imports = _.chain(requireDir('./importers'))
.map('default')
.map((importer) => importer.run())
.value();
return Promise.all(imports);
}
Logger.info('base.data.imports.imports: Running...');
main()
.then(() => {
Logger.info('base.data.imports.imports: Done!');
process.exit(0);
})
.catch((error) => {
Logger.error('base.data.imports.imports:', error);
process.exit(1);
});<|fim▁end|> | bluebird.promisifyAll(fs); |
<|file_name|>ace-tests.ts<|end_file_name|><|fim▁begin|>/// <reference path="ace.d.ts" />
var assert: any;
var editor = ace.edit("editor");
editor.setTheme("ace/theme/monokai");
editor.getSession().setMode("ace/mode/javascript");
editor.setTheme("ace/theme/twilight");
editor.getSession().setMode("ace/mode/javascript");
editor.setValue("the new text here"); // or session.setValue
editor.getValue(); // or session.getValue
editor.session.getTextRange(editor.getSelectionRange());
editor.insert("Something cool");
editor.selection.getCursor();
editor.gotoLine(123);
editor.session.getLength();
editor.getSession().setTabSize(4);
editor.getSession().setUseSoftTabs(true);
document.getElementById('editor').style.fontSize = '12px';
editor.getSession().setUseWrapMode(true);
editor.setHighlightActiveLine(false);
editor.setShowPrintMargin(false);
editor.setReadOnly(true); // false to make it editable
editor.resize();
editor.find('needle', {
backwards: false,
wrap: false,
caseSensitive: false,
wholeWord: false,
regExp: false
});
editor.findNext();
editor.findPrevious();
editor.find('foo');
editor.replace('bar');
editor.replaceAll('bar');
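// replace()/replaceAll() operate on matches of the needle set by the
// preceding find() call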
editor.getSession().on('change', function (e) {
// e.type, etc
});
editor.getSession().selection.on('changeSelection', function (e) {
});
editor.getSession().selection.on('changeCursor', function (e) {
});
editor.commands.addCommand({
name: 'myCommand',
bindKey: { win: 'Ctrl-M', mac: 'Command-M' },
exec: function (editor) {
//...
},
readOnly: true // false if this command should not apply in readOnly mode
});
editor.moveCursorTo(1, 1);
editor.removeLines();
editor.removeLines();
editor.removeLines();
editor.removeLines();
editor.moveCursorTo(1, 1);
editor.getSelection().selectDown();
editor.removeLines();
editor.removeLines();
editor.moveCursorTo(3, 0);
editor.removeLines();
editor.removeLines();
editor.moveCursorTo(1, 3);
editor.getSelection().selectDown();
editor.indent();
var range = editor.getSelectionRange();
editor.moveCursorTo(1, 0);
editor.getSelection().selectDown();
editor.indent();
editor.moveCursorTo(0, 0);
editor.onTextInput("\n");
editor.moveCursorTo(0, 5);
editor.getSelection().selectDown();
editor.getSelection().selectDown();
editor.blockOutdent();
editor.moveCursorTo(1, 1);
editor.removeLines();
var session = new AceAjax.EditSession(["a", "b", "c", "d"].join("\n"));
assert.equal(session.toString(), "a\nc\nd");
assert.position(editor.getCursorPosition(), 1, 0);
editor.removeLines();
assert.equal(session.toString(), "a\nd");
assert.position(editor.getCursorPosition(), 1, 0);
editor.removeLines();
assert.equal(session.toString(), "a");
assert.position(editor.getCursorPosition(), 0, 1);
editor.removeLines();
assert.equal(session.toString(), "");
assert.position(editor.getCursorPosition(), 0, 0);
editor.moveCursorTo(1, 1);
editor.getSelection().selectDown();
editor.removeLines();
assert.equal(session.toString(), "a\nd");
assert.position(editor.getCursorPosition(), 1, 0);
editor.removeLines();
assert.equal(session.toString(), "b\nc");
assert.position(editor.getCursorPosition(), 0, 0);
editor.moveCursorTo(3, 0);
editor.removeLines();
assert.equal(session.toString(), "a\nb\nc");
assert.position(editor.getCursorPosition(), 2, 1);
editor.removeLines();
assert.equal(session.toString(), "a\nb");
assert.position(editor.getCursorPosition(), 1, 1);
editor.moveCursorTo(1, 3);
editor.getSelection().selectDown();
editor.indent();
assert.equal(["a12345", " b12345", " c12345"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 2, 7);
range = editor.getSelectionRange();
assert.position(range.start, 1, 7);
assert.position(range.end, 2, 7);
editor.moveCursorTo(1, 0);
editor.getSelection().selectDown();
editor.indent();
assert.equal(["a12345", " b12345", "c12345"].join("\n"), session.toString());
editor.moveCursorTo(0, 0);
editor.onTextInput("\n");
assert.equal(["", "{"].join("\n"), session.toString());
editor.moveCursorTo(0, 5);
editor.getSelection().selectDown();
editor.getSelection().selectDown();
editor.blockOutdent();
assert.equal(session.toString(), [" a12345", "b12345", " c12345"].join("\n"));
assert.position(editor.getCursorPosition(), 2, 1);
range = editor.getSelectionRange();
assert.position(range.start, 0, 1);
assert.position(range.end, 2, 1);
editor.blockOutdent();
assert.equal(session.toString(), ["a12345", "b12345", "c12345"].join("\n"));
range = editor.getSelectionRange();
assert.position(range.start, 0, 0);
assert.position(range.end, 2, 0);
editor.moveCursorTo(0, 3);
editor.blockOutdent(" ");
assert.equal(session.toString(), " 12");
assert.position(editor.getCursorPosition(), 0, 0);
editor.moveCursorTo(0, 2);
editor.getSelection().selectDown();
editor.toggleCommentLines();
assert.equal(["// abc", "//cde"].join("\n"), session.toString());
var selection = editor.getSelectionRange();<|fim▁hole|>assert.position(selection.start, 0, 4);
assert.position(selection.end, 1, 4);
editor.moveCursorTo(0, 1);
editor.getSelection().selectDown();
editor.getSelection().selectRight();
editor.getSelection().selectRight();
editor.toggleCommentLines();
assert.equal([" abc", "cde"].join("\n"), session.toString());
assert.range(editor.getSelectionRange(), 0, 0, 1, 1);
editor.moveCursorTo(0, 0);
editor.getSelection().selectDown();
editor.getSelection().selectDown();
editor.toggleCommentLines();
editor.toggleCommentLines();
assert.equal([" abc", "cde", "fg"].join("\n"), session.toString());
editor.moveCursorTo(0, 0);
editor.getSelection().selectDown();
editor.toggleCommentLines();
assert.range(editor.getSelectionRange(), 0, 2, 1, 0);
editor.moveCursorTo(1, 0);
editor.getSelection().selectUp();
editor.toggleCommentLines();
assert.range(editor.getSelectionRange(), 0, 2, 1, 0);
editor.moveCursorTo(0, 1);
editor.getSelection().selectDown();
editor.moveLinesDown();
assert.equal(["33", "11", "22", "44"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 1, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 3, 0);
assert.position(editor.getSelection().getSelectionLead(), 1, 0);
editor.moveLinesDown();
assert.equal(["33", "44", "11", "22"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 2, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 3, 2);
assert.position(editor.getSelection().getSelectionLead(), 2, 0);
// moving again should have no effect
editor.moveLinesDown();
assert.equal(["33", "44", "11", "22"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 2, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 3, 2);
assert.position(editor.getSelection().getSelectionLead(), 2, 0);
editor.moveCursorTo(2, 1);
editor.getSelection().selectDown();
editor.moveLinesUp();
assert.equal(session.toString(), ["11", "33", "44", "22"].join("\n"));
assert.position(editor.getCursorPosition(), 1, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 3, 0);
assert.position(editor.getSelection().getSelectionLead(), 1, 0);
editor.moveLinesUp();
assert.equal(session.toString(), ["33", "44", "11", "22"].join("\n"));
assert.position(editor.getCursorPosition(), 0, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 2, 0);
assert.position(editor.getSelection().getSelectionLead(), 0, 0);
editor.moveCursorTo(1, 1);
editor.clearSelection();
editor.moveLinesDown();
assert.equal(["11", "33", "22", "44"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 2, 1);
editor.clearSelection();
editor.moveLinesUp();
assert.equal(["11", "22", "33", "44"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 1, 1);
editor.moveCursorTo(1, 1);
editor.getSelection().selectDown();
editor.copyLinesDown();
assert.equal(["11", "22", "33", "22", "33", "44"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 3, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 5, 0);
assert.position(editor.getSelection().getSelectionLead(), 3, 0);
editor.moveCursorTo(1, 1);
editor.getSelection().selectDown();
editor.copyLinesUp();
assert.equal(["11", "22", "33", "22", "33", "44"].join("\n"), session.toString());
assert.position(editor.getCursorPosition(), 1, 0);
assert.position(editor.getSelection().getSelectionAnchor(), 3, 0);
assert.position(editor.getSelection().getSelectionLead(), 1, 0);
session.setTabSize(2);
session.setUseSoftTabs(true);
editor.onTextInput("\t");
assert.equal(session.toString(), " ");
session.setTabSize(5);
editor.onTextInput("\t");
assert.equal(session.toString(), " ");
session.setUseSoftTabs(false);
editor.onTextInput("\t");
assert.equal(session.toString(), "\t");
editor.removeLines();
var step1 = session.toString();
assert.equal(step1, "222\n333");
editor.removeLines();
var step2 = session.toString();
assert.equal(step2, "333");
editor.removeLines();
var step3 = session.toString();
assert.equal(step3, "");
var undoManager = new AceAjax.UndoManager();
undoManager.undo();
assert.equal(session.toString(), step2);
undoManager.undo();
assert.equal(session.toString(), step1);
undoManager.undo();
assert.equal(session.toString(), "");
undoManager.undo();
assert.equal(session.toString(), "");
editor.moveCursorTo(1, 1);
editor.remove("left");
assert.equal(session.toString(), "123\n56");
editor.moveCursorTo(1, 0);
editor.remove("left");
assert.equal(session.toString(), "123456");
session.setUseSoftTabs(true);
session.setTabSize(4);
editor.moveCursorTo(1, 8);
editor.remove("left");
assert.equal(session.toString(), "123\n 456");
editor.moveCursorTo(1, 0);
editor.transposeLetters();
assert.equal(session.getValue(), ["123", "4567", "89"].join("\n"));
editor.moveCursorTo(1, 2);
editor.transposeLetters();
assert.equal(session.getValue(), ["123", "4657", "89"].join("\n"));
editor.moveCursorTo(1, 4);
editor.transposeLetters();
assert.equal(session.getValue(), ["123", "4576", "89"].join("\n"));
editor.moveCursorTo(1, 1);
editor.getSelection().selectRight();
editor.transposeLetters();
assert.equal(session.getValue(), ["123", "4567", "89"].join("\n"));
editor.moveCursorTo(1, 2);
editor.transposeLetters();
assert.position(editor.getCursorPosition(), 1, 3);
editor.moveCursorTo(1, 2);
editor.removeToLineEnd();
assert.equal(session.getValue(), ["123", "45", "89"].join("\n"));
editor.moveCursorTo(1, 4);
editor.removeToLineEnd();
assert.position(editor.getCursorPosition(), 1, 4);
assert.equal(session.getValue(), ["123", "456789"].join("\n"));
editor.moveCursorTo(1, 0);
editor.getSelection().selectLineEnd();
editor.toUpperCase();
assert.equal(session.getValue(), ["ajax", "DOT", "org"].join("\n"));
editor.moveCursorTo(1, 0);
editor.toUpperCase();
assert.equal(session.getValue(), ["ajax", "DOT", "org"].join("\n"));
assert.position(editor.getCursorPosition(), 1, 0);
editor.moveCursorTo(1, 0);
editor.getSelection().selectLineEnd();
editor.toLowerCase();
assert.equal(session.getValue(), ["AJAX", "dot", "ORG"].join("\n"));
editor.moveCursorTo(1, 0);
editor.toLowerCase();
assert.equal(session.getValue(), ["AJAX", "dot", "ORG"].join("\n"));
assert.position(editor.getCursorPosition(), 1, 0);<|fim▁end|> | |
<|file_name|>Message.js<|end_file_name|><|fim▁begin|>const Mentions = require('./MessageMentions');
const Attachment = require('./MessageAttachment');
const Embed = require('./MessageEmbed');
const MessageReaction = require('./MessageReaction');
const ReactionCollector = require('./ReactionCollector');
const ClientApplication = require('./ClientApplication');
const Util = require('../util/Util');
const Collection = require('../util/Collection');
const Constants = require('../util/Constants');
const Permissions = require('../util/Permissions');
const { Error, TypeError } = require('../errors');
let GuildMember;
/**
* Represents a message on Discord.
*/
class Message {
constructor(channel, data, client) {
/**
* The client that instantiated the Message
* @name Message#client
* @type {Client}
* @readonly
*/
Object.defineProperty(this, 'client', { value: client });
/**
* The channel that the message was sent in
* @type {TextChannel|DMChannel|GroupDMChannel}
*/
this.channel = channel;
if (data) this.setup(data);
}
setup(data) { // eslint-disable-line complexity
/**
* The ID of the message
* @type {Snowflake}
*/
this.id = data.id;
/**
* The type of the message
* @type {MessageType}
*/
this.type = Constants.MessageTypes[data.type];
/**
* The content of the message
* @type {string}
*/
this.content = data.content;
/**
* The author of the message
* @type {User}
*/
this.author = this.client.dataManager.newUser(data.author);
/**
* Represents the author of the message as a guild member
* Only available if the message comes from a guild where the author is still a member
* @type {?GuildMember}
*/
this.member = this.guild ? this.guild.member(this.author) || null : null;
/**
* Whether or not this message is pinned
* @type {boolean}
*/
this.pinned = data.pinned;
/**
* Whether or not the message was Text-To-Speech
* @type {boolean}
*/
this.tts = data.tts;
/**
* A random number or string used for checking message delivery
* @type {string}
*/
this.nonce = data.nonce;
<|fim▁hole|> */
this.system = data.type === 6;
/**
* A list of embeds in the message - e.g. YouTube Player
* @type {MessageEmbed[]}
*/
this.embeds = data.embeds.map(e => new Embed(e));
/**
* A collection of attachments in the message - e.g. Pictures - mapped by their ID
* @type {Collection<Snowflake, MessageAttachment>}
*/
this.attachments = new Collection();
for (const attachment of data.attachments) this.attachments.set(attachment.id, new Attachment(this, attachment));
/**
* The timestamp the message was sent at
* @type {number}
*/
this.createdTimestamp = new Date(data.timestamp).getTime();
/**
* The timestamp the message was last edited at (if applicable)
* @type {?number}
*/
this.editedTimestamp = data.edited_timestamp ? new Date(data.edited_timestamp).getTime() : null;
/**
* A collection of reactions to this message, mapped by the reaction ID
* @type {Collection<Snowflake, MessageReaction>}
*/
this.reactions = new Collection();
if (data.reactions && data.reactions.length > 0) {
for (const reaction of data.reactions) {
const id = reaction.emoji.id ? `${reaction.emoji.name}:${reaction.emoji.id}` : reaction.emoji.name;
this.reactions.set(id, new MessageReaction(this, reaction.emoji, reaction.count, reaction.me));
}
}
/**
* All valid mentions that the message contains
* @type {MessageMentions}
*/
this.mentions = new Mentions(this, data.mentions, data.mention_roles, data.mention_everyone);
/**
* ID of the webhook that sent the message, if applicable
* @type {?Snowflake}
*/
this.webhookID = data.webhook_id || null;
/**
     * Supplemental application information for group activities
* @type {?ClientApplication}
*/
this.application = data.application ? new ClientApplication(this.client, data.application) : null;
/**
* Group activity
* @type {?Object}
*/
this.activity = data.activity ? {
partyID: data.activity.party_id,
type: data.activity.type,
} : null;
/**
* Whether this message is a hit in a search
* @type {?boolean}
*/
this.hit = typeof data.hit === 'boolean' ? data.hit : null;
/**
* The previous versions of the message, sorted with the most recent first
* @type {Message[]}
* @private
*/
this._edits = [];
}
/**
* Updates the message.
* @param {Object} data Raw Discord message update data
* @private
*/
patch(data) {
const clone = Util.cloneObject(this);
this._edits.unshift(clone);
this.editedTimestamp = new Date(data.edited_timestamp).getTime();
if ('content' in data) this.content = data.content;
if ('pinned' in data) this.pinned = data.pinned;
if ('tts' in data) this.tts = data.tts;
if ('embeds' in data) this.embeds = data.embeds.map(e => new Embed(e));
else this.embeds = this.embeds.slice();
if ('attachments' in data) {
this.attachments = new Collection();
for (const attachment of data.attachments) this.attachments.set(attachment.id, new Attachment(this, attachment));
} else {
this.attachments = new Collection(this.attachments);
}
this.mentions = new Mentions(
this,
'mentions' in data ? data.mentions : this.mentions.users,
      'mention_roles' in data ? data.mention_roles : this.mentions.roles,
'mention_everyone' in data ? data.mention_everyone : this.mentions.everyone
);
}
/**
* The time the message was sent
* @type {Date}
* @readonly
*/
get createdAt() {
return new Date(this.createdTimestamp);
}
/**
* The time the message was last edited at (if applicable)
* @type {?Date}
* @readonly
*/
get editedAt() {
return this.editedTimestamp ? new Date(this.editedTimestamp) : null;
}
/**
* The guild the message was sent in (if in a guild channel)
* @type {?Guild}
* @readonly
*/
get guild() {
return this.channel.guild || null;
}
/**
* The message contents with all mentions replaced by the equivalent text.
* If mentions cannot be resolved to a name, the relevant mention in the message content will not be converted.
* @type {string}
* @readonly
*/
get cleanContent() {
return this.content
.replace(/@(everyone|here)/g, '@\u200b$1')
.replace(/<@!?[0-9]+>/g, input => {
const id = input.replace(/<|!|>|@/g, '');
if (this.channel.type === 'dm' || this.channel.type === 'group') {
return this.client.users.has(id) ? `@${this.client.users.get(id).username}` : input;
}
const member = this.channel.guild.members.get(id);
if (member) {
if (member.nickname) return `@${member.nickname}`;
return `@${member.user.username}`;
} else {
const user = this.client.users.get(id);
if (user) return `@${user.username}`;
return input;
}
})
.replace(/<#[0-9]+>/g, input => {
const channel = this.client.channels.get(input.replace(/<|#|>/g, ''));
if (channel) return `#${channel.name}`;
return input;
})
.replace(/<@&[0-9]+>/g, input => {
if (this.channel.type === 'dm' || this.channel.type === 'group') return input;
const role = this.guild.roles.get(input.replace(/<|@|>|&/g, ''));
if (role) return `@${role.name}`;
return input;
});
}
/**
* Creates a reaction collector.
* @param {CollectorFilter} filter The filter to apply
* @param {ReactionCollectorOptions} [options={}] Options to send to the collector
* @returns {ReactionCollector}
* @example
* // Create a reaction collector
* const collector = message.createReactionCollector(
* (reaction, user) => reaction.emoji.name === '👌' && user.id === 'someID',
* { time: 15000 }
* );
* collector.on('collect', r => console.log(`Collected ${r.emoji.name}`));
* collector.on('end', collected => console.log(`Collected ${collected.size} items`));
*/
createReactionCollector(filter, options = {}) {
return new ReactionCollector(this, filter, options);
}
/**
* An object containing the same properties as CollectorOptions, but a few more:
* @typedef {ReactionCollectorOptions} AwaitReactionsOptions
* @property {string[]} [errors] Stop/end reasons that cause the promise to reject
*/
/**
* Similar to createCollector but in promise form.
* Resolves with a collection of reactions that pass the specified filter.
* @param {CollectorFilter} filter The filter function to use
* @param {AwaitReactionsOptions} [options={}] Optional options to pass to the internal collector
* @returns {Promise<Collection<string, MessageReaction>>}
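   * @example
   * // Wait 15 seconds for 👌 reactions from a specific user
   * // (mirrors the collector example above; 'someID' is a placeholder)
   * message.awaitReactions(
   *   (reaction, user) => reaction.emoji.name === '👌' && user.id === 'someID',
   *   { time: 15000 }
   * )
   *   .then(collected => console.log(`Collected ${collected.size} reactions`))
   *   .catch(console.error);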
*/
awaitReactions(filter, options = {}) {
return new Promise((resolve, reject) => {
const collector = this.createReactionCollector(filter, options);
collector.once('end', (reactions, reason) => {
if (options.errors && options.errors.includes(reason)) reject(reactions);
else resolve(reactions);
});
});
}
/**
* An array of cached versions of the message, including the current version
* Sorted from latest (first) to oldest (last)
* @type {Message[]}
* @readonly
*/
get edits() {
const copy = this._edits.slice();
copy.unshift(this);
return copy;
}
/**
* Whether the message is editable by the client user
* @type {boolean}
* @readonly
*/
get editable() {
return this.author.id === this.client.user.id;
}
/**
* Whether the message is deletable by the client user
* @type {boolean}
* @readonly
*/
get deletable() {
return this.author.id === this.client.user.id || (this.guild &&
this.channel.permissionsFor(this.client.user).has(Permissions.FLAGS.MANAGE_MESSAGES)
);
}
/**
* Whether the message is pinnable by the client user
* @type {boolean}
* @readonly
*/
get pinnable() {
return !this.guild ||
this.channel.permissionsFor(this.client.user).has(Permissions.FLAGS.MANAGE_MESSAGES);
}
/**
* Options that can be passed into editMessage.
* @typedef {Object} MessageEditOptions
* @property {string} [content] Content to be edited
* @property {Object} [embed] An embed to be added/edited
* @property {string|boolean} [code] Language for optional codeblock formatting to apply
*/
/**
* Edit the content of the message.
* @param {StringResolvable} [content] The new content for the message
* @param {MessageEditOptions} [options] The options to provide
* @returns {Promise<Message>}
* @example
* // Update the content of a message
* message.edit('This is my new content!')
* .then(msg => console.log(`Updated the content of a message from ${msg.author}`))
* .catch(console.error);
*/
edit(content, options) {
if (!options && typeof content === 'object' && !(content instanceof Array)) {
options = content;
content = '';
} else if (!options) {
options = {};
}
if (typeof options.content !== 'undefined') content = options.content;
if (typeof content !== 'undefined') content = Util.resolveString(content);
let { embed, code, reply } = options;
if (embed) embed = new Embed(embed)._apiTransform();
// Wrap everything in a code block
if (typeof code !== 'undefined' && (typeof code !== 'boolean' || code === true)) {
content = Util.escapeMarkdown(Util.resolveString(content), true);
content = `\`\`\`${typeof code !== 'boolean' ? code || '' : ''}\n${content}\n\`\`\``;
}
// Add the reply prefix
if (reply && this.channel.type !== 'dm') {
const id = this.client.resolver.resolveUserID(reply);
const mention = `<@${reply instanceof GuildMember && reply.nickname ? '!' : ''}${id}>`;
content = `${mention}${content ? `, ${content}` : ''}`;
}
return this.client.api.channels[this.channel.id].messages[this.id]
.patch({ data: { content, embed } })
.then(data => this.client.actions.MessageUpdate.handle(data).updated);
}
/**
* Pins this message to the channel's pinned messages.
* @returns {Promise<Message>}
*/
pin() {
return this.client.api.channels(this.channel.id).pins(this.id).put()
.then(() => this);
}
/**
* Unpins this message from the channel's pinned messages.
* @returns {Promise<Message>}
*/
unpin() {
return this.client.api.channels(this.channel.id).pins(this.id).delete()
.then(() => this);
}
/**
* Add a reaction to the message.
* @param {string|Emoji|ReactionEmoji} emoji The emoji to react with
* @returns {Promise<MessageReaction>}
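   * @example
   * // React with a unicode emoji (usage sketch)
   * message.react('👍')
   *   .then(console.log)
   *   .catch(console.error);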
*/
react(emoji) {
emoji = this.client.resolver.resolveEmojiIdentifier(emoji);
if (!emoji) throw new TypeError('EMOJI_TYPE');
return this.client.api.channels(this.channel.id).messages(this.id).reactions(emoji, '@me')
.put()
.then(() => this._addReaction(Util.parseEmoji(emoji), this.client.user));
}
/**
* Remove all reactions from a message.
* @returns {Promise<Message>}
*/
clearReactions() {
return this.client.api.channels(this.channel.id).messages(this.id).reactions.delete()
.then(() => this);
}
/**
* Deletes the message.
* @param {Object} [options] Options
* @param {number} [options.timeout=0] How long to wait to delete the message in milliseconds
* @param {string} [options.reason] Reason for deleting this message, if it does not belong to the client user
* @returns {Promise<Message>}
* @example
* // Delete a message
* message.delete()
* .then(msg => console.log(`Deleted message from ${msg.author}`))
* .catch(console.error);
*/
delete({ timeout = 0, reason } = {}) {
if (timeout <= 0) {
return this.client.api.channels(this.channel.id).messages(this.id)
.delete({ reason })
.then(() =>
this.client.actions.MessageDelete.handle({
id: this.id,
channel_id: this.channel.id,
}).message);
} else {
return new Promise(resolve => {
this.client.setTimeout(() => {
resolve(this.delete({ reason }));
}, timeout);
});
}
}
/**
* Reply to the message.
* @param {StringResolvable} [content] The content for the message
* @param {MessageOptions} [options] The options to provide
* @returns {Promise<Message|Message[]>}
* @example
* // Reply to a message
* message.reply('Hey, I\'m a reply!')
* .then(msg => console.log(`Sent a reply to ${msg.author}`))
* .catch(console.error);
*/
reply(content, options) {
if (!options && typeof content === 'object' && !(content instanceof Array)) {
options = content;
content = '';
} else if (!options) {
options = {};
}
return this.channel.send(content, Object.assign(options, { reply: this.member || this.author }));
}
/**
* Marks the message as read.
* <warn>This is only available when using a user account.</warn>
* @returns {Promise<Message>}
*/
acknowledge() {
return this.client.api.channels(this.channel.id).messages(this.id).ack
.post({ data: { token: this.client.rest._ackToken } })
.then(res => {
if (res.token) this.client.rest._ackToken = res.token;
return this;
});
}
/**
* Fetches the webhook used to create this message.
* @returns {Promise<?Webhook>}
*/
fetchWebhook() {
if (!this.webhookID) return Promise.reject(new Error('WEBHOOK_MESSAGE'));
return this.client.fetchWebhook(this.webhookID);
}
/**
* Used mainly internally. Whether two messages are identical in properties. If you want to compare messages
* without checking all the properties, use `message.id === message2.id`, which is much more efficient. This
* method allows you to see if there are differences in content, embeds, attachments, nonce and tts properties.
* @param {Message} message The message to compare it to
* @param {Object} rawData Raw data passed through the WebSocket about this message
* @returns {boolean}
*/
equals(message, rawData) {
if (!message) return false;
const embedUpdate = !message.author && !message.attachments;
if (embedUpdate) return this.id === message.id && this.embeds.length === message.embeds.length;
let equal = this.id === message.id &&
this.author.id === message.author.id &&
this.content === message.content &&
this.tts === message.tts &&
this.nonce === message.nonce &&
this.embeds.length === message.embeds.length &&
      this.attachments.size === message.attachments.size;
if (equal && rawData) {
equal = this.mentions.everyone === message.mentions.everyone &&
this.createdTimestamp === new Date(rawData.timestamp).getTime() &&
this.editedTimestamp === new Date(rawData.edited_timestamp).getTime();
}
return equal;
}
/**
* When concatenated with a string, this automatically concatenates the message's content instead of the object.
* @returns {string}
* @example
* // Logs: Message: This is a message!
* console.log(`Message: ${message}`);
*/
toString() {
return this.content;
}
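  /**
   * Registers a reaction in this message's reaction cache, creating the
   * MessageReaction entry if needed and adding the reacting user to it.
   * @param {Object} emoji The raw emoji data ({ id, name })
   * @param {User} user The user who reacted
   * @returns {MessageReaction}
   * @private
   */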
_addReaction(emoji, user) {
const emojiID = emoji.id ? `${emoji.name}:${emoji.id}` : encodeURIComponent(emoji.name);
let reaction;
if (this.reactions.has(emojiID)) {
reaction = this.reactions.get(emojiID);
if (!reaction.me) reaction.me = user.id === this.client.user.id;
} else {
reaction = new MessageReaction(this, emoji, 0, user.id === this.client.user.id);
this.reactions.set(emojiID, reaction);
}
if (!reaction.users.has(user.id)) {
reaction.users.set(user.id, user);
reaction.count++;
}
return reaction;
}
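  /**
   * Removes a user's reaction from this message's reaction cache, deleting
   * the MessageReaction entry once its count drops to zero.
   * @param {Object} emoji The raw emoji data ({ id, name })
   * @param {User} user The user whose reaction was removed
   * @returns {?MessageReaction}
   * @private
   */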
_removeReaction(emoji, user) {
const emojiID = emoji.id ? `${emoji.name}:${emoji.id}` : encodeURIComponent(emoji.name);
if (this.reactions.has(emojiID)) {
const reaction = this.reactions.get(emojiID);
if (reaction.users.has(user.id)) {
reaction.users.delete(user.id);
reaction.count--;
if (user.id === this.client.user.id) reaction.me = false;
if (reaction.count <= 0) this.reactions.delete(emojiID);
return reaction;
}
}
return null;
}
_clearReactions() {
this.reactions.clear();
}
}
module.exports = Message;<|fim▁end|> | /**
* Whether or not this message was sent by Discord, not actually a user (e.g. pin notifications)
* @type {boolean} |
<|file_name|>authentication.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('users').controller('AuthenticationController', ['$scope', '$http', '$location', 'Authentication',
function($scope, $http, $location, Authentication) {
$scope.authentication = Authentication;
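    // Make the sign-in view's parent <section> fill its container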
$(".signin").parent("section").css("height", "100%");
// If user is signed in then redirect back home
if ($scope.authentication.user) $location.path('/timeline');
$scope.signup = function() {
$http.post('/auth/signup', $scope.credentials).success(function(response) {
// If successful we assign the response to the global user model
$scope.authentication.user = response;
            // And redirect to the sign-in page
$location.path('/signin');
}).error(function(response) {
$scope.error = response.message;
});
};
$scope.signin = function() {
$http.post('/auth/signin', $scope.credentials).success(function(response) {
// If successful we assign the response to the global user model
$scope.authentication.user = response;<|fim▁hole|> }).error(function(response) {
$scope.error = response.message;
});
};
}
]);<|fim▁end|> |
            // And redirect to the timeline page
$location.path('/timeline'); |
<|file_name|>datastructures.py<|end_file_name|><|fim▁begin|>"""Utilities for working with data structures.
Version Added:
2.1
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django_evolution.compat import six
def filter_dup_list_items(items):
"""Return list items with duplicates filtered out.
The order of items will be preserved, but only the first occurrence of
any given item will remain in the list.
Version Added:
2.1
Args:
items (list):
The list of items.
Returns:
list:
The resulting de-duplicated list of items.
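    Example:
        A quick illustration of the first-occurrence ordering:
        >>> filter_dup_list_items([3, 1, 3, 2, 1])
        [3, 1, 2]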
"""
return list(six.iterkeys(OrderedDict(
(item, True)
for item in items
)))
def merge_dicts(dest, source):
"""Merge two dictionaries together.<|fim▁hole|> This will recursively merge a source dictionary into a destination
dictionary with the following rules:
* Any keys in the source that aren't in the destination will be placed
      directly into the destination (using the same instance of the value, not
a copy).
* Any lists that are in both the source and destination will be combined
      by appending the source list to the destination list (and this will not
recurse into lists).
    * Any dictionaries that are in both the source and destination will be
merged using this function.
* Any keys that are not a list or dictionary that exist in both
dictionaries will result in a :py:exc:`TypeError`.
Version Added:
2.1
Args:
dest (dict):
The destination dictionary to merge into.
source (dict):
The source dictionary to merge into the destination.
Raises:
TypeError:
A key was present in both dictionaries with a type that could not
be merged.
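    Example:
        A small sketch of the merge rules:
        >>> dest = {'a': [1], 'b': {'c': 1}}
        >>> merge_dicts(dest, {'a': [2], 'b': {'d': 2}, 'e': 3})
        >>> dest
        {'a': [1, 2], 'b': {'c': 1, 'd': 2}, 'e': 3}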
"""
for key, value in six.iteritems(source):
if key in dest:
if isinstance(value, list):
if not isinstance(dest[key], list):
raise TypeError(
'Cannot merge a list into a %r for key "%s".'
% (type(dest[key]), key))
dest[key] += value
elif isinstance(value, dict):
if not isinstance(dest[key], dict):
raise TypeError(
'Cannot merge a dictionary into a %r for key "%s".'
% (type(dest[key]), key))
merge_dicts(dest[key], value)
else:
raise TypeError(
'Key "%s" was not an expected type (found %r) '
'when merging dictionaries.'
% (key, type(value)))
else:
dest[key] = value<|fim▁end|> | |
<|file_name|>test_magicbot_sm.py<|end_file_name|><|fim▁begin|>from magicbot.state_machine import (
default_state,
state,
timed_state,
AutonomousStateMachine,
StateMachine,
IllegalCallError,
NoFirstStateError,
MultipleFirstStatesError,
MultipleDefaultStatesError,
InvalidStateName,
)
from magicbot.magic_tunable import setup_tunables
import pytest
def test_no_timed_state_duration():
with pytest.raises(TypeError):
class _TM(StateMachine):
@timed_state()
def tmp(self):
pass
def test_no_start_state():
class _TM(StateMachine):
pass
with pytest.raises(NoFirstStateError):
_TM()
def test_multiple_first_states():
class _TM(StateMachine):
@state(first=True)
def tmp1(self):
pass
@state(first=True)
def tmp2(self):
pass
with pytest.raises(MultipleFirstStatesError):
_TM()
def test_sm(wpitime):
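    # Lifecycle under test: engage() requests that the machine run on the
    # next execute(); each execute() advances one iteration, and the machine
    # stops whenever a loop passes without engage() (unless must_finish).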
class _TM(StateMachine):
def __init__(self):
self.executed = []
def some_fn(self):
self.executed.append("sf")
@state(first=True)
def first_state(self):
self.executed.append(1)
self.next_state("second_state")
@timed_state(duration=1, next_state="third_state")
def second_state(self):
self.executed.append(2)
@state
def third_state(self):
self.executed.append(3)
sm = _TM()
setup_tunables(sm, "cname")
sm.some_fn()
# should not be able to directly call
with pytest.raises(IllegalCallError):
sm.first_state()
assert sm.current_state == ""
assert not sm.is_executing
sm.engage()
assert sm.current_state == "first_state"
assert not sm.is_executing
sm.execute()
assert sm.current_state == "second_state"
assert sm.is_executing
# should not change
sm.engage()
assert sm.current_state == "second_state"
assert sm.is_executing
sm.execute()
assert sm.current_state == "second_state"
assert sm.is_executing
wpitime.step(1.5)
sm.engage()
sm.execute()
assert sm.current_state == "third_state"
assert sm.is_executing
sm.engage()
sm.execute()
assert sm.current_state == "third_state"
assert sm.is_executing
# should be done
sm.done()
assert sm.current_state == ""
assert not sm.is_executing
# should be able to start directly at second state
sm.engage(initial_state="second_state")
sm.execute()
assert sm.current_state == "second_state"
assert sm.is_executing
wpitime.step(1.5)
sm.engage()
sm.execute()
assert sm.current_state == "third_state"
assert sm.is_executing
# test force
sm.engage()
sm.execute()
assert sm.current_state == "third_state"
assert sm.is_executing
sm.engage(force=True)
assert sm.current_state == "first_state"
assert sm.is_executing
sm.execute()
sm.execute()
assert not sm.is_executing
assert sm.current_state == ""
assert sm.executed == ["sf", 1, 2, 3, 3, 2, 3, 3, 1]
def test_sm_inheritance():
class _TM1(StateMachine):
@state
def second_state(self):
self.done()
class _TM2(_TM1):
@state(first=True)
def first_state(self):
self.next_state("second_state")
sm = _TM2()
setup_tunables(sm, "cname")
sm.engage()
assert sm.current_state == "first_state"
sm.execute()
assert sm.current_state == "second_state"
sm.execute()
assert sm.current_state == ""
def test_must_finish(wpitime):
class _TM(StateMachine):
def __init__(self):
self.executed = []
@state(first=True)
def ordinary1(self):
self.next_state("ordinary2")
self.executed.append(1)
@state
def ordinary2(self):
self.next_state("must_finish")
self.executed.append(2)
@state(must_finish=True)
def must_finish(self):
self.executed.append("mf")
@state
def ordinary3(self):
self.executed.append(3)
self.next_state_now("timed_must_finish")
@timed_state(duration=1, must_finish=True)
def timed_must_finish(self):
self.executed.append("tmf")
sm = _TM()
setup_tunables(sm, "cname")
sm.engage()
sm.execute()
sm.execute()
assert sm.current_state == ""
assert not sm.is_executing
sm.engage()
sm.execute()
sm.engage()
sm.execute()
sm.execute()
sm.execute()
assert sm.current_state == "must_finish"
assert sm.is_executing
sm.next_state("ordinary3")
sm.engage()
sm.execute()
assert sm.current_state == "timed_must_finish"
sm.execute()
assert sm.is_executing
assert sm.current_state == "timed_must_finish"
for _ in range(7):
wpitime.step(0.1)<|fim▁hole|> assert sm.is_executing
assert sm.current_state == "timed_must_finish"
wpitime.step(1)
sm.execute()
assert not sm.is_executing
assert sm.executed == [1, 1, 2, "mf", "mf", 3] + ["tmf"] * 9
def test_autonomous_sm():
class _TM(AutonomousStateMachine):
i = 0
VERBOSE_LOGGING = False
@state(first=True)
def something(self):
self.i += 1
if self.i == 6:
self.done()
sm = _TM()
setup_tunables(sm, "cname")
sm.on_enable()
for _ in range(5):
sm.on_iteration(None)
assert sm.is_executing
sm.on_iteration(None)
assert not sm.is_executing
for _ in range(5):
sm.on_iteration(None)
assert not sm.is_executing
assert sm.i == 6
def test_autonomous_sm_end_timed_state(wpitime):
class _TM(AutonomousStateMachine):
i = 0
j = 0
VERBOSE_LOGGING = False
@state(first=True)
def something(self):
self.i += 1
if self.i == 3:
self.next_state("timed")
@timed_state(duration=1)
def timed(self):
self.j += 1
sm = _TM()
setup_tunables(sm, "cname")
sm.on_enable()
for _ in range(5):
wpitime.step(0.7)
sm.on_iteration(None)
assert sm.is_executing
for _ in range(5):
wpitime.step(0.7)
sm.on_iteration(None)
assert not sm.is_executing
assert sm.i == 3
assert sm.j == 2
def test_next_fn():
class _TM(StateMachine):
@state(first=True)
def first_state(self):
self.next_state(self.second_state)
@state
def second_state(self):
self.done()
sm = _TM()
setup_tunables(sm, "cname")
sm.engage()
assert sm.current_state == "first_state"
sm.execute()
assert sm.current_state == "second_state"
sm.engage()
sm.execute()
assert sm.current_state == ""
def test_next_fn2(wpitime):
class _TM(StateMachine):
@state
def second_state(self):
pass
@timed_state(first=True, duration=0.1, next_state=second_state)
def first_state(self):
pass
sm = _TM()
setup_tunables(sm, "cname")
sm.engage()
sm.execute()
assert sm.current_state == "first_state"
assert sm.is_executing
wpitime.step(0.5)
sm.engage()
sm.execute()
assert sm.current_state == "second_state"
assert sm.is_executing
sm.execute()
assert sm.current_state == ""
assert not sm.is_executing
def test_mixup():
from robotpy_ext.autonomous import state as _ext_state
from robotpy_ext.autonomous import timed_state as _ext_timed_state
with pytest.raises(RuntimeError) as exc_info:
class _SM1(StateMachine):
@_ext_state(first=True)
def the_state(self):
pass
assert isinstance(exc_info.value.__cause__, TypeError)
with pytest.raises(RuntimeError) as exc_info:
class _SM2(StateMachine):
@_ext_timed_state(first=True, duration=1)
def the_state(self):
pass
assert isinstance(exc_info.value.__cause__, TypeError)
def test_forbidden_state_names():
with pytest.raises(InvalidStateName):
class _SM(StateMachine):
@state
def done(self):
pass
def test_mixins():
class _SM1(StateMachine):
@state
def state1(self):
pass
class _SM2(StateMachine):
@state
def state2(self):
pass
class _SM(_SM1, _SM2):
@state(first=True)
def first_state(self):
pass
s = _SM()
states = s._StateMachine__states
assert "state1" in states
assert "state2" in states
assert "first_state" in states
def test_multiple_default_states():
class _SM(StateMachine):
@state(first=True)
def state(self):
pass
@default_state
def state1(self):
pass
@default_state
def state2(self):
pass
with pytest.raises(MultipleDefaultStatesError):
_SM()
def test_default_state_machine():
class _SM(StateMachine):
def __init__(self):
self.didOne = None
self.didDefault = None
self.defaultInit = None
self.didDone = None
@state(first=True)
def stateOne(self):
self.didOne = True
self.didDefault = False
self.didDone = False
@state
def doneState(self):
self.didOne = False
self.didDefault = False
self.didDone = True
self.done()
@default_state
def defaultState(self, initial_call):
self.didOne = False
self.didDefault = True
self.defaultInit = initial_call
self.didDone = False
sm = _SM()
setup_tunables(sm, "cname")
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == True
assert sm.didDone == False
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == False
assert sm.didDone == False
# do a thing
sm.engage()
sm.execute()
assert sm.didOne == True
assert sm.didDefault == False
assert sm.didDone == False
# should go back (test for initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == True
assert sm.didDone == False
# should happen again (no initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == False
assert sm.didDone == False
# do another thing
sm.engage()
sm.execute()
assert sm.didOne == True
assert sm.didDefault == False
assert sm.didDone == False
# should go back (test for initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == True
assert sm.didDone == False
# should happen again (no initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == False
assert sm.didDone == False
# enagage a state that will call done, check to see
# if we come back
sm.engage("doneState")
sm.execute()
assert sm.didOne == False
assert sm.didDefault == False
assert sm.defaultInit == False
assert sm.didDone == True
# should go back (test for initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == True
assert sm.didDone == False
# should happen again (no initial)
sm.execute()
assert sm.didOne == False
assert sm.didDefault == True
assert sm.defaultInit == False
assert sm.didDone == False
def test_short_timed_state(wpitime):
"""
Tests two things:
- A timed state that expires before it executes
- Ensures that the default state won't execute if the machine is always
executing
"""
class _SM(StateMachine):
def __init__(self):
self.executed = []
@default_state
def d(self):
self.executed.append("d")
@state(first=True)
def a(self):
self.executed.append("a")
self.next_state("b")
@timed_state(duration=0.01)
def b(self):
self.executed.append("b")
def done(self):
super().done()
self.executed.append("d")
sm = _SM()
setup_tunables(sm, "cname")
assert sm.current_state == ""
assert not sm.is_executing
for _ in [1, 2, 3, 4]:
sm.engage()
sm.execute()
assert sm.current_state == "b"
wpitime.step(0.02)
sm.engage()
sm.execute()
assert sm.current_state == "b"
wpitime.step(0.02)
assert sm.executed == ["a", "b", "d", "a", "b", "d", "a", "b", "d", "a", "b"]<|fim▁end|> |
sm.execute() |
<|file_name|>stream.d.ts<|end_file_name|><|fim▁begin|>declare module "stream" {
import * as events from "events";
class internal extends events.EventEmitter {
pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T;
}
namespace internal {
class Stream extends internal { }
interface ReadableOptions {
highWaterMark?: number;
encoding?: string;
objectMode?: boolean;
read?(this: Readable, size: number): void;
destroy?(this: Readable, error: Error | null, callback: (error: Error | null) => void): void;
autoDestroy?: boolean;
}
class Readable extends Stream implements NodeJS.ReadableStream {
/**
* A utility method for creating Readable Streams out of iterators.
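             * @example
             * // Emits 'hello' then 'world' (arrays are iterable):
             * Readable.from(['hello', 'world']).on('data', chunk => console.log(chunk));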
*/
static from(iterable: Iterable<any> | AsyncIterable<any>, options?: ReadableOptions): Readable;
readable: boolean;
readonly readableEncoding: BufferEncoding | null;
readonly readableEnded: boolean;
readonly readableFlowing: boolean | null;
readonly readableHighWaterMark: number;
readonly readableLength: number;
readonly readableObjectMode: boolean;
destroyed: boolean;
constructor(opts?: ReadableOptions);
_read(size: number): void;
read(size?: number): any;
setEncoding(encoding: string): this;
pause(): this;
resume(): this;
isPaused(): boolean;
unpipe(destination?: NodeJS.WritableStream): this;
unshift(chunk: any, encoding?: BufferEncoding): void;
wrap(oldStream: NodeJS.ReadableStream): this;
push(chunk: any, encoding?: string): boolean;
_destroy(error: Error | null, callback: (error?: Error | null) => void): void;
destroy(error?: Error): void;
/**
* Event emitter
* The defined events on documents including:
* 1. close
* 2. data
* 3. end
* 4. readable
* 5. error
*/
addListener(event: "close", listener: () => void): this;
addListener(event: "data", listener: (chunk: any) => void): this;
addListener(event: "end", listener: () => void): this;
addListener(event: "readable", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
emit(event: "close"): boolean;
emit(event: "data", chunk: any): boolean;
emit(event: "end"): boolean;
emit(event: "readable"): boolean;
emit(event: "error", err: Error): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
on(event: "close", listener: () => void): this;
on(event: "data", listener: (chunk: any) => void): this;
on(event: "end", listener: () => void): this;
on(event: "readable", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
once(event: "close", listener: () => void): this;
once(event: "data", listener: (chunk: any) => void): this;
once(event: "end", listener: () => void): this;
once(event: "readable", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "data", listener: (chunk: any) => void): this;
prependListener(event: "end", listener: () => void): this;
prependListener(event: "readable", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "data", listener: (chunk: any) => void): this;
prependOnceListener(event: "end", listener: () => void): this;
prependOnceListener(event: "readable", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "data", listener: (chunk: any) => void): this;
removeListener(event: "end", listener: () => void): this;
removeListener(event: "readable", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
[Symbol.asyncIterator](): AsyncIterableIterator<any>;
}
interface WritableOptions {
highWaterMark?: number;
decodeStrings?: boolean;
defaultEncoding?: string;
objectMode?: boolean;
emitClose?: boolean;
write?(this: Writable, chunk: any, encoding: string, callback: (error?: Error | null) => void): void;
writev?(this: Writable, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void;
destroy?(this: Writable, error: Error | null, callback: (error: Error | null) => void): void;
final?(this: Writable, callback: (error?: Error | null) => void): void;
autoDestroy?: boolean;
}
class Writable extends Stream implements NodeJS.WritableStream {
readonly writable: boolean;
readonly writableEnded: boolean;
readonly writableFinished: boolean;
readonly writableHighWaterMark: number;
readonly writableLength: number;
readonly writableObjectMode: boolean;
destroyed: boolean;
constructor(opts?: WritableOptions);
_write(chunk: any, encoding: string, callback: (error?: Error | null) => void): void;
_writev?(chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void;
_destroy(error: Error | null, callback: (error?: Error | null) => void): void;
_final(callback: (error?: Error | null) => void): void;
write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean;
write(chunk: any, encoding: string, cb?: (error: Error | null | undefined) => void): boolean;
setDefaultEncoding(encoding: string): this;
end(cb?: () => void): void;
end(chunk: any, cb?: () => void): void;
end(chunk: any, encoding: string, cb?: () => void): void;
cork(): void;
uncork(): void;
destroy(error?: Error): void;
/**
* Event emitter
* The defined events on documents including:
* 1. close
* 2. drain
* 3. error
* 4. finish
* 5. pipe
* 6. unpipe
*/
addListener(event: "close", listener: () => void): this;
addListener(event: "drain", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "finish", listener: () => void): this;
addListener(event: "pipe", listener: (src: Readable) => void): this;
addListener(event: "unpipe", listener: (src: Readable) => void): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
emit(event: "close"): boolean;
emit(event: "drain"): boolean;
emit(event: "error", err: Error): boolean;
emit(event: "finish"): boolean;
emit(event: "pipe", src: Readable): boolean;
emit(event: "unpipe", src: Readable): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
on(event: "close", listener: () => void): this;
on(event: "drain", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "finish", listener: () => void): this;
on(event: "pipe", listener: (src: Readable) => void): this;
on(event: "unpipe", listener: (src: Readable) => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;<|fim▁hole|> once(event: "error", listener: (err: Error) => void): this;
once(event: "finish", listener: () => void): this;
once(event: "pipe", listener: (src: Readable) => void): this;
once(event: "unpipe", listener: (src: Readable) => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "drain", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "finish", listener: () => void): this;
prependListener(event: "pipe", listener: (src: Readable) => void): this;
prependListener(event: "unpipe", listener: (src: Readable) => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "drain", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "finish", listener: () => void): this;
prependOnceListener(event: "pipe", listener: (src: Readable) => void): this;
prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "drain", listener: () => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: "finish", listener: () => void): this;
removeListener(event: "pipe", listener: (src: Readable) => void): this;
removeListener(event: "unpipe", listener: (src: Readable) => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
}
interface DuplexOptions extends ReadableOptions, WritableOptions {
allowHalfOpen?: boolean;
readableObjectMode?: boolean;
writableObjectMode?: boolean;
readableHighWaterMark?: number;
writableHighWaterMark?: number;
read?(this: Duplex, size: number): void;
write?(this: Duplex, chunk: any, encoding: string, callback: (error?: Error | null) => void): void;
writev?(this: Duplex, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void;
final?(this: Duplex, callback: (error?: Error | null) => void): void;
destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void;
}
// Note: Duplex extends both Readable and Writable.
class Duplex extends Readable implements Writable {
readonly writable: boolean;
readonly writableEnded: boolean;
readonly writableFinished: boolean;
readonly writableHighWaterMark: number;
readonly writableLength: number;
readonly writableObjectMode: boolean;
constructor(opts?: DuplexOptions);
_write(chunk: any, encoding: string, callback: (error?: Error | null) => void): void;
_writev?(chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void;
_destroy(error: Error | null, callback: (error: Error | null) => void): void;
_final(callback: (error?: Error | null) => void): void;
write(chunk: any, encoding?: string, cb?: (error: Error | null | undefined) => void): boolean;
write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean;
setDefaultEncoding(encoding: string): this;
end(cb?: () => void): void;
end(chunk: any, cb?: () => void): void;
end(chunk: any, encoding?: string, cb?: () => void): void;
cork(): void;
uncork(): void;
}
type TransformCallback = (error?: Error | null, data?: any) => void;
interface TransformOptions extends DuplexOptions {
read?(this: Transform, size: number): void;
write?(this: Transform, chunk: any, encoding: string, callback: (error?: Error | null) => void): void;
writev?(this: Transform, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void;
final?(this: Transform, callback: (error?: Error | null) => void): void;
destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void;
transform?(this: Transform, chunk: any, encoding: string, callback: TransformCallback): void;
flush?(this: Transform, callback: TransformCallback): void;
}
class Transform extends Duplex {
constructor(opts?: TransformOptions);
_transform(chunk: any, encoding: string, callback: TransformCallback): void;
_flush(callback: TransformCallback): void;
}
class PassThrough extends Transform { }
interface FinishedOptions {
error?: boolean;
readable?: boolean;
writable?: boolean;
}
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
namespace finished {
function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
}
function pipeline<T extends NodeJS.WritableStream>(stream1: NodeJS.ReadableStream, stream2: T, callback?: (err: NodeJS.ErrnoException | null) => void): T;
function pipeline<T extends NodeJS.WritableStream>(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: T, callback?: (err: NodeJS.ErrnoException | null) => void): T;
function pipeline<T extends NodeJS.WritableStream>(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream,
stream3: NodeJS.ReadWriteStream,
stream4: T,
callback?: (err: NodeJS.ErrnoException | null) => void,
): T;
function pipeline<T extends NodeJS.WritableStream>(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream,
stream3: NodeJS.ReadWriteStream,
stream4: NodeJS.ReadWriteStream,
stream5: T,
callback?: (err: NodeJS.ErrnoException | null) => void,
): T;
function pipeline(
streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>,
callback?: (err: NodeJS.ErrnoException | null) => void,
): NodeJS.WritableStream;
function pipeline(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void)>
): NodeJS.WritableStream;
namespace pipeline {
function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.WritableStream): Promise<void>;
function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.WritableStream): Promise<void>;
function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.ReadWriteStream, stream4: NodeJS.WritableStream): Promise<void>;
function __promisify__(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream,
stream3: NodeJS.ReadWriteStream,
stream4: NodeJS.ReadWriteStream,
stream5: NodeJS.WritableStream,
): Promise<void>;
function __promisify__(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>): Promise<void>;
function __promisify__(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream>
): Promise<void>;
}
interface Pipe {
close(): void;
hasRef(): boolean;
ref(): void;
unref(): void;
}
}
export = internal;
}<|fim▁end|> |
once(event: "close", listener: () => void): this;
once(event: "drain", listener: () => void): this; |
<|file_name|>ms.js<|end_file_name|><|fim▁begin|>/* -*- Mode: Javascript; indent-tabs-mode:nil; js-indent-level: 2 -*- */
/* vim: set ts=2 et sw=2 tw=80: */
/*************************************************************
*
* MathJax/jax/output/SVG/autoload/ms.js
*
* Implements the SVG output for <ms> elements.
*
* ---------------------------------------------------------------------
*
* Copyright (c) 2011-2017 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.Hub.Register.StartupHook("SVG Jax Ready",function () {
var VERSION = "2.7.2";
var MML = MathJax.ElementJax.mml,
SVG = MathJax.OutputJax.SVG;
MML.ms.Augment({
toSVG: function () {
this.SVGgetStyles();
var svg = this.SVG(); this.SVGhandleSpace(svg);
var values = this.getValues("lquote","rquote","mathvariant");
if (!this.hasValue("lquote") || values.lquote === '"') values.lquote = "\u201C";
if (!this.hasValue("rquote") || values.rquote === '"') values.rquote = "\u201D";
if (values.lquote === "\u201C" && values.mathvariant === "monospace") values.lquote = '"';
if (values.rquote === "\u201D" && values.mathvariant === "monospace") values.rquote = '"';
var variant = this.SVGgetVariant(), scale = this.SVGgetScale();
var text = values.lquote+this.data.join("")+values.rquote; // FIXME: handle mglyph?
svg.Add(this.SVGhandleVariant(variant,scale,text));
svg.Clean();
this.SVGhandleColor(svg);
this.SVGsaveData(svg);
return svg;
}
});
MathJax.Hub.Startup.signal.Post("SVG ms Ready");
MathJax.Ajax.loadComplete(SVG.autoloadDir+"/ms.js");
});<|fim▁end|> | * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* |
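For context, a short sketch (editor's addition, not part of the MathJax sources) of the configuration and markup that exercise this autoload module:
// Page-level setup assumed by this example: MathML input, SVG output.
MathJax.Hub.Config({ jax: ["input/MathML", "output/SVG"] });
// The page would then contain an <ms> ("string literal") element, e.g.:
//   <math><ms lquote="«" rquote="»">raw text</ms></math>
// toSVG() above wraps the element's text in the lquote/rquote characters,
// defaulting to curly quotes (or straight quotes for mathvariant="monospace").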
<|file_name|>managed-pointer-within-unique-vec.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-android: FIXME(#10381)
#[feature(managed_boxes)];
// compile-flags:-Z extra-debug-info
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print unique->elements[0]->val
// check:$1 = 10
// debugger:print unique->elements[1]->val
// check:$2 = 11
// debugger:print unique->elements[2]->val
// check:$3 = 12
// debugger:print unique->elements[3]->val
// check:$4 = 13<|fim▁hole|>
fn main() {
let unique: ~[@i64] = ~[@10, @11, @12, @13];
zzz();
}
fn zzz() {()}<|fim▁end|> |
#[allow(unused_variable)]; |
<|file_name|>gluster.py<|end_file_name|><|fim▁begin|>#
#
# Copyright (C) 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Gluster storage class.
This class is very similar to FileStorage, given that Gluster when mounted
behaves essentially like a regular file system. Unlike RBD, there are no
special provisions for block device abstractions (yet).
"""
import logging
import os
import socket
from ganeti import utils
from ganeti import errors
from ganeti import netutils
from ganeti import constants
from ganeti import ssconf
from ganeti.utils import io
from ganeti.storage import base
from ganeti.storage.filestorage import FileDeviceHelper
class GlusterVolume(object):
"""This class represents a Gluster volume.
Volumes are uniquely identified by:
- their IP address
- their port
- the volume name itself
Two GlusterVolume objects x, y with same IP address, port and volume name
are considered equal.
"""
def __init__(self, server_addr, port, volume, _run_cmd=utils.RunCmd,
_mount_point=None):
"""Creates a Gluster volume object.
@type server_addr: str
@param server_addr: The address to connect to
@type port: int
@param port: The port to connect to (Gluster standard is 24007)
@type volume: str
@param volume: The gluster volume to use for storage.
"""
self.server_addr = server_addr
server_ip = netutils.Hostname.GetIP(self.server_addr)
self._server_ip = server_ip
port = netutils.ValidatePortNumber(port)
self._port = port
self._volume = volume
if _mount_point: # tests
self.mount_point = _mount_point
else:
self.mount_point = ssconf.SimpleStore().GetGlusterStorageDir()
self._run_cmd = _run_cmd
@property
def server_ip(self):
return self._server_ip
@property
def port(self):
return self._port
@property
def volume(self):
return self._volume
def __eq__(self, other):
return (self.server_ip, self.port, self.volume) == \
(other.server_ip, other.port, other.volume)
def __repr__(self):
return """GlusterVolume("{ip}", {port}, "{volume}")""" \
.format(ip=self.server_ip, port=self.port, volume=self.volume)
def __hash__(self):
return (self.server_ip, self.port, self.volume).__hash__()
def _IsMounted(self):
"""Checks if we are mounted or not.
@rtype: bool
@return: True if this volume is mounted.
"""
if not os.path.exists(self.mount_point):
return False
return os.path.ismount(self.mount_point)
def _GuessMountFailReasons(self):
"""Try and give reasons why the mount might've failed.
@rtype: str
@return: A semicolon-separated list of problems found with the current setup
suitable for display to the user.
"""
reasons = []
# Does the mount point exist?
if not os.path.exists(self.mount_point):
reasons.append("%r: does not exist" % self.mount_point)
# Okay, it exists, but is it a directory?
elif not os.path.isdir(self.mount_point):
reasons.append("%r: not a directory" % self.mount_point)
# If, for some unfortunate reason, this folder exists before mounting:
#
# /var/run/ganeti/gluster/gv0/10.0.0.1:30000:gv0/
# '--------- cwd ------------'
#
# and you _are_ trying to mount the gluster volume gv0 on 10.0.0.1:30000,
# then the mount.glusterfs command parser gets confused and this command:
#
# mount -t glusterfs 10.0.0.1:30000:gv0 /var/run/ganeti/gluster/gv0
# '-- remote end --' '------ mountpoint -------'
#
# gets parsed instead like this:
#
# mount -t glusterfs 10.0.0.1:30000:gv0 /var/run/ganeti/gluster/gv0
# '-- mountpoint --' '----- syntax error ------'
#
# and if there _is_ a gluster server running locally at the default remote
# end, localhost:24007, then this is not a network error and therefore... no
# usage message gets printed out. All you get is a Byson parser error in the
# gluster log files about an unexpected token in line 1, "". (That's stdin.)
#
# Not that we rely on that output in any way whatsoever...
parser_confusing = io.PathJoin(self.mount_point,
self._GetFUSEMountString())
if os.path.exists(parser_confusing):
reasons.append("%r: please delete, rename or move." % parser_confusing)
# Let's try something else: can we connect to the server?
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((self.server_ip, self.port))
sock.close()
except socket.error as err:
reasons.append("%s:%d: %s" % (self.server_ip, self.port, err.strerror))
reasons.append("try running 'gluster volume info %s' on %s to ensure"
" it exists, it is started and it is using the tcp"
" transport" % (self.volume, self.server_ip))
return "; ".join(reasons)
def _GetFUSEMountString(self):
"""Return the string FUSE needs to mount this volume.
@rtype: str
"""
return "-o server-port={port} {ip}:/{volume}" \
.format(port=self.port, ip=self.server_ip, volume=self.volume)
def GetKVMMountString(self, path):
"""Return the string KVM needs to use this volume.
@rtype: str
"""
ip = self.server_ip
if netutils.IPAddress.GetAddressFamily(ip) == socket.AF_INET6:
ip = "[%s]" % ip
return "gluster://{ip}:{port}/{volume}/{path}" \
.format(ip=ip, port=self.port, volume=self.volume, path=path)
def Mount(self):
"""Try and mount the volume. No-op if the volume is already mounted.
@raises BlockDeviceError: if the mount was unsuccessful
@rtype: context manager
@return: A simple context manager that lets you use this volume for
short lived operations like so::
with volume.mount():
# Do operations on volume
# Volume is now unmounted
"""
class _GlusterVolumeContextManager(object):
def __init__(self, volume):
self.volume = volume
def __enter__(self):
# We're already mounted.
return self
def __exit__(self, *exception_information):
self.volume.Unmount()
return False # do not swallow exceptions.
if self._IsMounted():
return _GlusterVolumeContextManager(self)
command = ["mount",
"-t", "glusterfs",
self._GetFUSEMountString(),
self.mount_point]
io.Makedirs(self.mount_point)
self._run_cmd(" ".join(command),
# Why set cwd? Because it's an area we control. If,
# for some unfortunate reason, this folder exists:
# "/%s/" % _GetFUSEMountString()
# ...then the gluster parser gets confused and treats
# _GetFUSEMountString() as your mount point and
# self.mount_point becomes a syntax error.
cwd=self.mount_point)
# mount.glusterfs exits with code 0 even after failure.
# https://bugzilla.redhat.com/show_bug.cgi?id=1031973
if not self._IsMounted():
reasons = self._GuessMountFailReasons()
if not reasons:
reasons = "%r failed." % (" ".join(command))
base.ThrowError("%r: mount failure: %s",
self.mount_point,
reasons)
return _GlusterVolumeContextManager(self)
def Unmount(self):
"""Try and unmount the volume.
Failures are logged but otherwise ignored.
@raises BlockDeviceError: if the volume was not mounted to begin with.
"""
if not self._IsMounted():
base.ThrowError("%r: should be mounted but isn't.", self.mount_point)
result = self._run_cmd(["umount",
self.mount_point])
if result.failed:
logging.warning("Failed to unmount %r from %r: %s",
self, self.mount_point, result.fail_reason)
class GlusterStorage(base.BlockDev):
"""File device using the Gluster backend.
This class represents a file storage backend device stored on Gluster. Ganeti
mounts and unmounts the Gluster devices automatically.
The unique_id for the file device is a (file_driver, file_path) tuple.
"""
def __init__(self, unique_id, children, size, params, dyn_params, **kwargs):
"""Initalizes a file device backend.
"""
if children:
base.ThrowError("Invalid setup for file device")
try:
self.driver, self.path = unique_id
except ValueError: # wrong number of arguments
raise ValueError("Invalid configuration data %s" % repr(unique_id))
server_addr = params[constants.GLUSTER_HOST]
port = params[constants.GLUSTER_PORT]
volume = params[constants.GLUSTER_VOLUME]
self.volume = GlusterVolume(server_addr, port, volume)
self.full_path = io.PathJoin(self.volume.mount_point, self.path)
self.file = None
super(GlusterStorage, self).__init__(unique_id, children, size,
params, dyn_params, **kwargs)
self.Attach()
def Assemble(self):
"""Assemble the device.
Checks whether the file device exists, raises BlockDeviceError otherwise.
"""
assert self.attached, "Gluster file assembled without being attached"
self.file.Exists(assert_exists=True)
def Shutdown(self):
"""Shutdown the device.
<|fim▁hole|> self.dev_path = None
self.attached = False
def Open(self, force=False, exclusive=True):
"""Make the device ready for I/O.
This is a no-op for the file type.
"""
assert self.attached, "Gluster file opened without being attached"
def Close(self):
"""Notifies that the device will no longer be used for I/O.
This is a no-op for the file type.
"""
pass
def Remove(self):
"""Remove the file backing the block device.
@rtype: boolean
@return: True if the removal was successful
"""
with self.volume.Mount():
self.file = FileDeviceHelper(self.full_path)
if self.file.Remove():
self.file = None
return True
else:
return False
def Rename(self, new_id):
"""Renames the file.
"""
# TODO: implement rename for file-based storage
base.ThrowError("Rename is not supported for Gluster storage")
def Grow(self, amount, dryrun, backingstore, excl_stor):
"""Grow the file
@param amount: the amount (in mebibytes) to grow with
"""
self.file.Grow(amount, dryrun, backingstore, excl_stor)
def Attach(self, **kwargs):
"""Attach to an existing file.
Check if this file already exists.
@rtype: boolean
@return: True if file exists
"""
try:
self.volume.Mount()
self.file = FileDeviceHelper(self.full_path)
self.dev_path = self.full_path
except Exception as err:
self.volume.Unmount()
raise err
self.attached = self.file.Exists()
return self.attached
def GetActualSize(self):
"""Return the actual disk size.
@note: the device needs to be active when this is called
"""
return self.file.Size()
def GetUserspaceAccessUri(self, hypervisor):
"""Generate KVM userspace URIs to be used as `-drive file` settings.
@see: L{BlockDev.GetUserspaceAccessUri}
@see: https://github.com/qemu/qemu/commit/8d6d89cb63c57569864ecdeb84d3a1c2eb
"""
if hypervisor == constants.HT_KVM:
return self.volume.GetKVMMountString(self.path)
else:
base.ThrowError("Hypervisor %s doesn't support Gluster userspace access" %
hypervisor)
@classmethod
def Create(cls, unique_id, children, size, spindles, params, excl_stor,
dyn_params, **kwargs):
"""Create a new file.
@param size: the size of file in MiB
@rtype: L{bdev.FileStorage}
@return: an instance of FileStorage
"""
if excl_stor:
raise errors.ProgrammerError("FileStorage device requested with"
" exclusive_storage")
if not isinstance(unique_id, (tuple, list)) or len(unique_id) != 2:
raise ValueError("Invalid configuration data %s" % str(unique_id))
full_path = unique_id[1]
server_addr = params[constants.GLUSTER_HOST]
port = params[constants.GLUSTER_PORT]
volume = params[constants.GLUSTER_VOLUME]
volume_obj = GlusterVolume(server_addr, port, volume)
full_path = io.PathJoin(volume_obj.mount_point, full_path)
# Possible optimization: defer actual creation to first Attach, rather
# than mounting and unmounting here, then remounting immediately after.
with volume_obj.Mount():
FileDeviceHelper.CreateFile(full_path, size, create_folders=True)
return GlusterStorage(unique_id, children, size, params, dyn_params,
**kwargs)<|fim▁end|> | """
self.file = None |
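A brief usage sketch for the volume wrapper defined above (editor's addition; the address, port and volume name are placeholders and assume a reachable GlusterFS daemon):
volume = GlusterVolume("10.0.0.1", 24007, "gv0")
with volume.Mount():
    # While inside the block the volume is FUSE-mounted at volume.mount_point.
    print volume.GetKVMMountString("disk0")   # gluster://10.0.0.1:24007/gv0/disk0
# Leaving the block unmounts the volume again; mount failures raise BlockDeviceError.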
<|file_name|>foo.py<|end_file_name|><|fim▁begin|>def main(**kwargs):<|fim▁hole|><|fim▁end|> | print('foo foo') |
<|file_name|>dumb_agent.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -tt
# An incredibly simple agent. All we do is find the closest enemy tank, drive
# towards it, and shoot. Note that if friendly fire is allowed, you will very
# often kill your own tanks with this code.
#################################################################
# NOTE TO STUDENTS
# This is a starting point for you. You will need to greatly
# modify this code if you want to do anything useful. But this
# should help you to know how to interact with BZRC in order to
# get the information you need.
#
# After starting the bzrflag server, this is one way to start
# this code:
# python agent0.py [hostname] [port]
#
# Often this translates to something like the following (with the
# port name being printed out by the bzrflag server):
# python agent0.py localhost 49857
#################################################################
import sys
import math
import time
import random
from bzrc import BZRC, Command
class dumb_agent(object):
"""Class handles all command and control logic for a teams tanks."""
new_angles = []
running_time = []
shooting_time = 2
ENEMY_TANK_MIN_DISTANCE = 1
ENEMY_TANK_MAX_DISTANCE = 5
OBSTACLE_MAX_DISTANCE = 10
OBSTACLE_MIN_DISTANCE = 1
BULLET_MAX_DISTANCE = 10
BULLET_MIN_DISTANCE = 1
FLAG_MIN_DISTANCE = 1
FLAG_MAX_DISTANCE = 5
FLAG_MAX_SPEED = 5
def __init__(self, bzrc):
self.bzrc = bzrc
self.constants = self.bzrc.get_constants()
random.seed();
mytanks, othertanks, flags, shots = self.bzrc.get_lots_o_stuff()
self.new_angles = []
self.running_time = []
for tank in mytanks:
self.new_angles.append(self.normalize_angle(tank.angle - math.pi/3))
self.running_time.append(random.uniform(3,8))
self.shooting_time = random.uniform(1.5,2.5)
self.commands = []
def tick(self, time_diff):
"""Some time has passed; decide what to do next."""
mytanks, othertanks, flags, shots = self.bzrc.get_lots_o_stuff()
obstacles = self.bzrc.get_obstacles()
self.mytanks = mytanks
self.othertanks = othertanks
self.flags = flags
self.shots = shots
self.enemies = [tank for tank in othertanks if tank.color !=
self.constants['team']]
self.friendlies = [tank for tank in othertanks if tank.color ==
self.constants['team']]
self.obstacles = obstacles
self.commands = []
shoot = False
if self.shooting_time > 0 :
self.shooting_time = self.shooting_time - time_diff
else:
shoot = True
self.shooting_time = random.uniform(1.5,2.5)
i = 0
"we need a new speed, a new angle, and whether or not to shoot"
for tank in mytanks:
speed = 0
angle = 0
if self.running_time[i] > 0:
self.running_time[i] = self.running_time[i] - time_diff
speed = self.FLAG_MAX_SPEED
angle = 0
else:
if self.new_angles[i]+.001 > tank.angle and self.new_angles[i]-.001 < tank.angle:
self.running_time[i] = random.uniform(3,8)
self.new_angles[i] = self.normalize_angle(tank.angle - math.pi/3)
else:
if self.new_angles[i] > 0:
if tank.angle - self.new_angles[i] > 1:
angle = -1
else:
angle = -(tank.angle-self.new_angles[i])
elif self.new_angles[i] < 0:
if self.new_angles[i] - tank.angle < -1:
angle = -1
else:
angle = self.new_angles[i] - tank.angle
command = Command(tank.index, speed, angle, shoot)
self.commands.append(command)
i = i + 1
results = self.bzrc.do_commands(self.commands)
def normalize_angle(self, angle):
"""Make any angle be between +/- pi."""
angle -= 2 * math.pi * int (angle / (2 * math.pi))
if angle <= -math.pi:
angle += 2 * math.pi
elif angle > math.pi:
angle -= 2 * math.pi
return angle
def get_desired_movement(self, tank, flags, shots, obstacles):
final_angle = 0
final_speed = 0
vectors = []
vectors.extend(self.get_repulsive_vectors(tank, shots))
vectors.extend(self.get_attractive_vectors(tank, flags))
vectors.extend(self.get_tangential_vectors(tank, obstacles))
for speed, angle in vectors:
final_speed += speed
final_angle += angle
return final_speed, final_angle
def main():
# Process CLI arguments.
try:
execname, host, port = sys.argv
except ValueError:
execname = sys.argv[0]
print >>sys.stderr, '%s: incorrect number of arguments' % execname
print >>sys.stderr, 'usage: %s hostname port' % sys.argv[0]
sys.exit(-1)
# Connect.
#bzrc = BZRC(host, int(port), debug=True)
bzrc = BZRC(host, int(port))
agent = dumb_agent(bzrc)
prev_time = time.time()
# Run the agent
try:
while True:
time_diff = time.time() - prev_time
prev_time = prev_time + time_diff
#print >> sys.stderr, 'time dif %f' % time_diff
agent.tick(time_diff)
except KeyboardInterrupt:
print "Exiting due to keyboard interrupt."
bzrc.close()<|fim▁hole|>
if __name__ == '__main__':
main()
# vim: et sw=4 sts=4<|fim▁end|> | |
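A quick sanity sketch (editor's addition) of the angle wrapping used above; the function is restated standalone so it can run without a BZRC server:
import math

def normalize_angle(angle):
    """Make any angle be between +/- pi (same logic as dumb_agent.normalize_angle)."""
    angle -= 2 * math.pi * int(angle / (2 * math.pi))
    if angle <= -math.pi:
        angle += 2 * math.pi
    elif angle > math.pi:
        angle -= 2 * math.pi
    return angle

for a in (3 * math.pi / 2, -3 * math.pi / 2, 2 * math.pi):
    print a, '->', normalize_angle(a)   # 4.712 -> -1.571, -4.712 -> 1.571, 6.283 -> 0.0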
<|file_name|>IntervalFeature.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.data.interval;
import htsjdk.tribble.Feature;
import htsjdk.tribble.annotation.Strand;<|fim▁hole|>import java.awt.Color;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
*
* @author sol
*/
public interface IntervalFeature<T> extends Feature {
Strand getStrand();
T getValue();
}<|fim▁end|> | import htsjdk.tribble.bed.FullBEDFeature; |
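A minimal implementation sketch of the interface above (editor's addition; the class name and fields are hypothetical, and depending on the htsjdk version Feature may require getChr() rather than getContig()):
class SimpleIntervalFeature implements IntervalFeature<Double> {
    private final String contig;
    private final int start;
    private final int end;
    private final Strand strand;
    private final double value;

    SimpleIntervalFeature(String contig, int start, int end, Strand strand, double value) {
        this.contig = contig;
        this.start = start;
        this.end = end;
        this.strand = strand;
        this.value = value;
    }

    @Override public String getContig() { return contig; }
    @Override public int getStart() { return start; }
    @Override public int getEnd() { return end; }
    @Override public Strand getStrand() { return strand; }
    @Override public Double getValue() { return value; }
}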
<|file_name|>ventilator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import zmq
import random
import time
def run():
context = zmq.Context()
sender = context.socket(zmq.PUSH)<|fim▁hole|> sender.bind('tcp://*:5557')
sink = context.socket(zmq.PUSH)
sink.connect('tcp://localhost:5558')
print 'Press Enter when the workers are ready: '
_ = raw_input()
print('sending tasks to workders...')
sink.send(b'0')
random.seed()
total_msec = 0
for task_nbr in xrange(100):
workload = random.randint(1, 100)
total_msec += workload
sender.send_string(u'%i' % workload)
print 'Total expected cost: %s msec' % total_msec
time.sleep(1)
if __name__ == '__main__':
run()<|fim▁end|> | |
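The matching worker for this ventilator (editor's sketch of the classic ZeroMQ task-worker role; the ports follow the bindings above):
import time
import zmq

context = zmq.Context()
receiver = context.socket(zmq.PULL)        # tasks arrive from the ventilator (5557)
receiver.connect('tcp://localhost:5557')
sender = context.socket(zmq.PUSH)          # completion signals go to the sink (5558)
sender.connect('tcp://localhost:5558')
while True:
    workload = int(receiver.recv())        # simulated cost in msec, as sent above
    time.sleep(workload * 0.001)           # pretend to do the work
    sender.send(b'')                       # tell the sink this task is done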
<|file_name|>tsUnit.ts<|end_file_name|><|fim▁begin|>module tsUnit {
export interface ITestClass {
}
export class Test {
private tests: TestDefintion[] = [];
addTestClass(testClass: ITestClass, name: string = 'Tests'): void {
this.tests.push(new TestDefintion(testClass, name));
}
run() {
var testContext = new TestContext();
var testResult = new TestResult();
for (var i = 0; i < this.tests.length; ++i) {
var testClass = this.tests[i].testClass;
var testName = this.tests[i].name;
for (var prop in testClass) {
if (typeof testClass[prop] === 'function') {
try {
testClass[prop](testContext);
testResult.passes.push(new TestDescription(testName, prop, 'OK'));
} catch (err) {
testResult.errors.push(new TestDescription(testName, prop, err));
}
}
}
}
return testResult;
}
showResults(target: HTMLElement, result: TestResult) {
var template = '<article>' +
'<h1>' + this.getTestResult(result) + '</h1>' +
'<p>' + this.getTestSummary(result) + '</p>' +
'<section id="tsFail">' +
'<h2>Errors</h2>' +
'<ul class="bad">' + this.getTestResultList(result.errors) + '</ul>' +
'</section>' +
'<section id="tsOkay">' +
'<h2>Passing Tests</h2>' +
'<ul class="good">' + this.getTestResultList(result.passes) + '</ul>' +
<|fim▁hole|> }
private getTestResult(result: TestResult) {
return result.errors.length === 0 ? 'Test Passed' : 'Test Failed';
}
private getTestSummary(result: TestResult) {
return 'Total tests: <span id="tsUnitTotalCout">' + (result.passes.length + result.errors.length).toString() + '</span>. ' +
'Passed tests: <span id="tsUnitPassCount" class="good">' + result.passes.length + '</span>. ' +
'Failed tests: <span id="tsUnitFailCount" class="bad">' + result.errors.length + '</span>.';
}
private getTestResultList(testResults: TestDescription[]) {
var list = '';
var group = '';
var isFirst = true;
for (var i = 0; i < testResults.length; ++i) {
var result = testResults[i];
if (result.testName !== group) {
group = result.testName;
if (isFirst) {
isFirst = false;
} else {
list += '</li></ul>';
}
list += '<li>' + result.testName + '<ul>';
}
list += '<li>' + result.funcName + '(): ' + this.htmlEscape(result.message) + '</li>';
}
return list + '</ul>';
}
private htmlEscape(str:string):string {
return String(str)
.replace(/&/g, '&')
.replace(/"/g, '"')
.replace(/'/g, ''')
.replace(/</g, '<')
.replace(/>/g, '>');
}
}
export class TestContext {
isNull(a: any): void {
if (a != null) {
throw 'isNull failed when passed non-null value ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
isUndefined(a: any): void {
if (a != undefined) {
throw 'isUndefined failed when passed non-null value ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
isNotNull(a: any): void {
if (a == null) {
throw 'isNotNull failed when passed null value';
}
}
isNotUndefined(a: any): void {
if (a == undefined) {
throw 'isUndefined failed when passed undefined value ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
areIdentical(a: any, b: any): void {
if (a !== b) {
throw 'areIdentical failed when passed ' +
'{' + (typeof a) + '} "' + a + '" and ' +
'{' + (typeof b) + '} "' + b + '"';
}
}
areNotIdentical(a: any, b: any): void {
if (a === b) {
throw 'areNotIdentical failed when passed ' +
'{' + (typeof a) + '} "' + a + '" and ' +
'{' + (typeof b) + '} "' + b + '"';
}
}
areEqual(a: any, b: any): void {
if (a != b) {
throw 'areEqual failed when passed ' +
'{' + (typeof a) + '} "' + a + '" and ' +
'{' + (typeof b) + '} "' + b + '"';
}
}
areNotEqual(a: any, b: any): void {
if (a == b) {
throw 'areNotEqual failed when passed ' +
'{' + (typeof a) + '} "' + a + '" and ' +
'{' + (typeof b) + '} "' + b + '"';
}
}
isTrue(a: bool) {
if (!a) {
throw 'isTrue failed when passed ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
isFalse(a: bool) {
if (a) {
throw 'isFalse failed when passed ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
isTruthy(a: any) {
if (!a) {
throw 'isTrue failed when passed ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
isFalsey(a: any) {
if (a) {
throw 'isFalse failed when passed ' +
'{' + (typeof a) + '} "' + a + '"';
}
}
throws(a: { (): void; }) {
var isThrown = false;
try {
a();
} catch (ex) {
isThrown = true;
}
if (!isThrown) {
throw 'did not throw an error';
}
}
fail() {
throw 'fail';
}
}
export class FakeFunction {
constructor (public name: string, public delgate: { (...args: any[]): any; }) {
}
}
export class Fake {
constructor (obj: any) {
for (var prop in obj) {
if (typeof obj[prop] === 'function') {
this[prop] = function () { };
} else {
this[prop] = null;
}
}
}
create(): any {
return this;
}
addFunction(name: string, delegate: { (...args: any[]): any; }) {
this[name] = delegate;
}
addProperty(name: string, value: any) {
this[name] = value;
}
}
class TestDefintion {
constructor (public testClass: ITestClass, public name: string) {
}
}
class TestError implements Error {
constructor (public name: string, public message: string) {
}
}
export class TestDescription {
constructor (public testName: string, public funcName: string, public message: string) {
}
}
export class TestResult {
public passes: TestDescription[] = [];
public errors: TestDescription[] = [];
}
}<|fim▁end|> | '</section>' +
'</article>';
target.innerHTML = template;
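A short harness sketch showing how the classes above are wired together (editor's addition; CalcTests and the element id are hypothetical):
class CalcTests implements tsUnit.ITestClass {
    addsNumbers(c: tsUnit.TestContext) {
        c.areIdentical(4, 2 + 2);
    }
}

var runner = new tsUnit.Test();
runner.addTestClass(new CalcTests(), 'CalcTests');
runner.showResults(document.getElementById('results'), runner.run());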
|
<|file_name|>Easel.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _IconBase = require('./../components/IconBase/IconBase');
var _IconBase2 = _interopRequireDefault(_IconBase);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var Easel = function (_React$Component) {
_inherits(Easel, _React$Component);
<|fim▁hole|>
return _possibleConstructorReturn(this, Object.getPrototypeOf(Easel).apply(this, arguments));
}
_createClass(Easel, [{
key: 'render',
value: function render() {
if (this.props.bare) {
return _react2.default.createElement(
'g',
null,
_react2.default.createElement(
'g',
null,
_react2.default.createElement('rect', { x: '80', y: '144', width: '352', height: '192' }),
_react2.default.createElement('path', { d: 'M464,96H48c-8.837,0-16,7.163-16,16v256c0,8.837,6.932,16,15.768,16H464c8.837,0,16-7.163,16-16V112 C480,103.163,472.837,96,464,96z M448,352H64V128h384V352z' }),
_react2.default.createElement('polygon', { points: '80,480 112,480 140.147,400 108.835,400 \t' }),
_react2.default.createElement('polygon', { points: '270.83,32 241.472,32 224.171,80 288.099,80 \t' }),
_react2.default.createElement('polygon', { points: '371.853,400 400,480 432,480 403.22,400 \t' }),
_react2.default.createElement('rect', { x: '240', y: '400', width: '32', height: '48' })
)
);
}return _react2.default.createElement(
_IconBase2.default,
null,
_react2.default.createElement(
'g',
null,
_react2.default.createElement('rect', { x: '80', y: '144', width: '352', height: '192' }),
_react2.default.createElement('path', { d: 'M464,96H48c-8.837,0-16,7.163-16,16v256c0,8.837,6.932,16,15.768,16H464c8.837,0,16-7.163,16-16V112 C480,103.163,472.837,96,464,96z M448,352H64V128h384V352z' }),
_react2.default.createElement('polygon', { points: '80,480 112,480 140.147,400 108.835,400 \t' }),
_react2.default.createElement('polygon', { points: '270.83,32 241.472,32 224.171,80 288.099,80 \t' }),
_react2.default.createElement('polygon', { points: '371.853,400 400,480 432,480 403.22,400 \t' }),
_react2.default.createElement('rect', { x: '240', y: '400', width: '32', height: '48' })
)
);
}
}]);
return Easel;
}(_react2.default.Component);
exports.default = Easel;
;Easel.defaultProps = { bare: false };<|fim▁end|> | function Easel() {
_classCallCheck(this, Easel); |
<|file_name|>ManageAccounts.java<|end_file_name|><|fim▁begin|>import java.text.NumberFormat;
// ****************************************************************
// ManageAccounts.java
// Use Account class to create and manage Sally and Joe's bank accounts
public class ManageAccounts
{
public static void main(String[] args)
{
Account acct1, acct2;
NumberFormat usMoney = NumberFormat.getCurrencyInstance();
//create account1 for Sally with $1000
acct1 = new Account(1000, "Sally", 1111);
//create account2 for Joe with $500
acct2 = new Account(500, "Joe", 1212);
//deposit $100 to Joe's account
acct2.deposit(100);
//print Joe's new balance (use getBalance())
System.out.println("Joe's new balance: " + usMoney.format(acct2.getBalance()));
//withdraw $50 from Sally's account
acct1.withdraw(50);<|fim▁hole|> //charge fees to both accounts
System.out.println("Sally's new balance after the fee is charged: " + usMoney.format(acct1.chargeFee()));
System.out.println("Joe's new balance after the fee is charged: " + usMoney.format(acct2.chargeFee()));
//change the name on Joe's account to Joseph
acct2.changeName("Joseph");
//print summary for both accounts
System.out.println(acct1);
System.out.println(acct2);
//close and display Sally's account
acct1.close();
System.out.println(acct1);
//consolidate account test (doesn't work as acct1 is closed)
Account newAcct = Account.consolidate(acct1, acct2);
System.out.println(acct1);
}
}<|fim▁end|> |
//print Sally's new balance (use getBalance())
System.out.println("Sally's new balance: " + usMoney.format(acct1.getBalance()));
|
<|file_name|>bp.py<|end_file_name|><|fim▁begin|>import math
import net
SIGMOID = 0
TANH = 1
class bp:
def __init__(self, net, learning_rate, momentum):
self.type = net.getType()
self.net = net
self.lr = learning_rate
self.m = momentum
self.layer = net.getLayer()
# Build independent sub-lists; list multiplication would alias the same rows.
self.lc = [[[0] * max(self.layer) for _ in range(max(self.layer))] for _ in range(len(self.layer))]
def _dfunc(self, y):
if self.type==SIGMOID:
return y * (1.0 - y)
else:
return 1.0 - y**2
def setLearningRate(self,x):
self.lr = x
def setMomentum(self, x):
self.m = x
def backPropagate(self, input, target):
if len(target)!=self.layer[-1]:
print len(target)
print self.layer[-1]
raise ValueError('Wrong number of target values')
self.net.process(input)
nlayer = len(self.layer)<|fim▁hole|>
for i in range(0,self.layer[nlayer-1]):
node = self.net.getNode(nlayer-1, i)
error = target[i] - node
delta[nlayer-1][i] = self._dfunc(node) * error
for l in range(nlayer-2, 0, -1):
for i in range(0, self.layer[l]):
error = 0.0
for j in range(0, self.layer[l+1]):
error = error + delta[l+1][j] * self.net.getWeight(l+1, i, j)
delta[l][i] = self._dfunc(self.net.getNode(l,i)) * error
for l in range(nlayer-2, -1, -1):
for i in range(0, self.layer[l]):
for j in range(0, self.layer[l+1]):
change = delta[l+1][j] * self.net.getNode(l, i)
w = self.net.getWeight(l+1, i, j) + self.lr * change + self.m * self.lc[l+1][i][j]
self.net.setWeight(l+1, i, j, w)
self.lc[l+1][i][j] = change
for i in range(0, self.layer[l+1]):
b = self.net.getBias(l+1, i) + delta[l+1][i]
self.net.setBias(l+1, i, b)
error = 0.0
for i in range(0, len(target)):
error = error + 0.5 * (target[i] - self.net.getNode(nlayer-1, i))**2
return error<|fim▁end|> |
delta = []
for i in range(0, nlayer):
delta.append([0.0] * self.layer[i]) |
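A hypothetical training loop for the trainer above (editor's sketch; the net.net constructor and its argument order are assumptions -- only getType/getLayer/process/getNode/getWeight/setWeight/getBias/setBias are actually required by bp):
n = net.net([2, 3, 1], SIGMOID)   # assumed constructor: layer sizes plus activation type
trainer = bp(n, 0.5, 0.1)         # learning rate 0.5, momentum 0.1
for epoch in range(1000):
    err = 0.0
    for x, t in [([0, 0], [0]), ([0, 1], [1]), ([1, 0], [1]), ([1, 1], [0])]:
        err += trainer.backPropagate(x, t)   # returns 0.5 * sum of squared errors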
<|file_name|>WSDLHostCustomizer.java<|end_file_name|><|fim▁begin|>package org.wildfly.swarm.webservices.runtime;
<|fim▁hole|>import org.wildfly.swarm.spi.api.Customizer;
import org.wildfly.swarm.spi.runtime.annotations.Post;
import org.wildfly.swarm.webservices.WebServicesFraction;
/**
* @author Bob McWhirter
*/
@Post
@ApplicationScoped
public class WSDLHostCustomizer implements Customizer {
@Inject
Interface iface;
@Inject
WebServicesFraction fraction;
@Override
public void customize() {
if (fraction.wsdlHost() == null) {
fraction.wsdlHost(this.iface.getExpression());
}
}
}<|fim▁end|> | import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import org.wildfly.swarm.container.Interface; |
<|file_name|>remote_fs_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import httpretty
import pytest
from selenium.common.exceptions import InvalidArgumentException
from appium.webdriver.webdriver import WebDriver
from test.unit.helper.test_helper import android_w3c_driver, appium_command, get_httpretty_request_body
class TestWebDriverRemoteFs(object):
@httpretty.activate<|fim▁hole|> httpretty.register_uri(
httpretty.POST,
appium_command('/session/1234567890/appium/device/push_file'),
)
dest_path = '/path/to/file.txt'
data = base64.b64encode(bytes('HelloWorld', 'utf-8')).decode('utf-8')
assert isinstance(driver.push_file(dest_path, data), WebDriver)
d = get_httpretty_request_body(httpretty.last_request())
assert d['path'] == dest_path
assert d['data'] == str(data)
@httpretty.activate
def test_push_file_invalid_arg_exception_without_src_path_and_base64data(self):
driver = android_w3c_driver()
httpretty.register_uri(
httpretty.POST,
appium_command('/session/1234567890/appium/device/push_file'),
)
dest_path = '/path/to/file.txt'
with pytest.raises(InvalidArgumentException):
driver.push_file(dest_path)
@httpretty.activate
def test_push_file_invalid_arg_exception_with_src_file_not_found(self):
driver = android_w3c_driver()
httpretty.register_uri(
httpretty.POST,
appium_command('/session/1234567890/appium/device/push_file'),
)
dest_path = '/dest_path/to/file.txt'
src_path = '/src_path/to/file.txt'
with pytest.raises(InvalidArgumentException):
driver.push_file(dest_path, source_path=src_path)
@httpretty.activate
def test_pull_file(self):
driver = android_w3c_driver()
httpretty.register_uri(
httpretty.POST,
appium_command('/session/1234567890/appium/device/pull_file'),
body='{"value": "SGVsbG9Xb3JsZA=="}',
)
dest_path = '/path/to/file.txt'
assert driver.pull_file(dest_path) == str(base64.b64encode(bytes('HelloWorld', 'utf-8')).decode('utf-8'))
d = get_httpretty_request_body(httpretty.last_request())
assert d['path'] == dest_path
@httpretty.activate
def test_pull_folder(self):
driver = android_w3c_driver()
httpretty.register_uri(
httpretty.POST,
appium_command('/session/1234567890/appium/device/pull_folder'),
body='{"value": "base64EncodedZippedFolderData"}',
)
dest_path = '/path/to/file.txt'
assert driver.pull_folder(dest_path) == 'base64EncodedZippedFolderData'
d = get_httpretty_request_body(httpretty.last_request())
assert d['path'] == dest_path<|fim▁end|> | def test_push_file(self):
driver = android_w3c_driver() |
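For context, the client-side calls these tests exercise look like this in application code (editor's sketch; the server URL and capabilities are placeholders):
import base64

from appium import webdriver

caps = {'platformName': 'Android', 'automationName': 'UiAutomator2'}   # placeholder capabilities
driver = webdriver.Remote('http://localhost:4723/wd/hub', caps)        # assumed local Appium server
payload = base64.b64encode(b'HelloWorld').decode('utf-8')
driver.push_file('/sdcard/file.txt', payload)      # push base64-encoded data to the device
data = driver.pull_file('/sdcard/file.txt')        # returns base64-encoded file contents
assert base64.b64decode(data) == b'HelloWorld'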
<|file_name|>test_help.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
[tests/stdlib/test_help.py]
Test the help command.
"""
import unittest
#import os<|fim▁hole|>
#from ergonomica import ergo, ENV
class TestHelp(unittest.TestCase):
"""Tests the 'help' command."""
def test_list_commands(self):
"""
Tests listing all commands using the 'help commands' command.
"""<|fim▁end|> | |
<|file_name|>SacrificedSouls.js<|end_file_name|><|fim▁begin|>import React from 'react';
import Analyzer, { SELECTED_PLAYER } from 'parser/core/Analyzer';
import Events from 'parser/core/Events';
import calculateEffectiveDamage from 'parser/core/calculateEffectiveDamage';
import SPELLS from 'common/SPELLS';<|fim▁hole|>import STATISTIC_CATEGORY from 'parser/ui/STATISTIC_CATEGORY';
import Statistic from 'parser/ui/Statistic';
import BoringSpellValueText from 'parser/ui/BoringSpellValueText';
import ItemDamageDone from 'parser/ui/ItemDamageDone';
import DemoPets from '../pets/DemoPets';
const BONUS_DAMAGE_PER_PET = 0.04;
const MAX_TRAVEL_TIME = 3000; // Shadow Bolt is the slowest, takes around 2 seconds to land from max distance, add a little more to account for target movement
const debug = false;
/*
Sacrificed Souls:
Shadow Bolt and Demonbolt deal 5% additional damage per demon you have summoned.
*/
class SacrificedSouls extends Analyzer {
get totalBonusDamage() {
return this._shadowBoltDamage + this._demonboltDamage;
}
static dependencies = {
demoPets: DemoPets,
};
_shadowBoltDamage = 0;
_demonboltDamage = 0;
_queue = [];
constructor(...args) {
super(...args);
this.active = this.selectedCombatant.hasTalent(SPELLS.SACRIFICED_SOULS_TALENT.id);
this.addEventListener(Events.cast.by(SELECTED_PLAYER).spell([SPELLS.SHADOW_BOLT_DEMO, SPELLS.DEMONBOLT]), this.handleCast);
this.addEventListener(Events.damage.by(SELECTED_PLAYER).spell([SPELLS.SHADOW_BOLT_DEMO, SPELLS.DEMONBOLT]), this.handleDamage);
}
// essentially same snapshotting mechanic as in Destruction's Eradication
handleCast(event) {
const bonus = this.demoPets.getPetCount() * BONUS_DAMAGE_PER_PET;
this._queue.push({
timestamp: event.timestamp,
spellId: event.ability.guid,
targetID: event.targetID,
targetInstance: event.targetInstance,
bonus,
});
debug && this.log('Pushed a cast into queue', JSON.parse(JSON.stringify(this._queue)));
}
handleDamage(event) {
// filter out old casts if there are any
this._queue = this._queue.filter(cast => cast.timestamp > (event.timestamp - MAX_TRAVEL_TIME));
const castIndex = this._queue
.findIndex(cast => cast.targetID === event.targetID
&& cast.targetInstance === event.targetInstance
&& cast.spellId === event.ability.guid);
if (castIndex === -1) {
debug && this.error('Encountered damage event with no cast associated. Queue', JSON.parse(JSON.stringify(this._queue)), 'event', event);
return;
}
debug && this.log('Paired damage event', event, 'with queued cast', JSON.parse(JSON.stringify(this._queue[castIndex])));
const bonusDamage = calculateEffectiveDamage(event, this._queue[castIndex].bonus);
this._queue.splice(castIndex, 1);
if (event.ability.guid === SPELLS.SHADOW_BOLT_DEMO.id) {
this._shadowBoltDamage += bonusDamage;
} else {
this._demonboltDamage += bonusDamage;
}
}
statistic() {
const hasPS = this.selectedCombatant.hasTalent(SPELLS.POWER_SIPHON_TALENT.id);
return (
<Statistic
category={STATISTIC_CATEGORY.TALENTS}
size="flexible"
tooltip={(
<>
{formatThousands(this.totalBonusDamage)} bonus damage<br />
Bonus Shadow Bolt damage: {formatThousands(this._shadowBoltDamage)} ({this.owner.formatItemDamageDone(this._shadowBoltDamage)})<br />
Bonus Demonbolt damage: {formatThousands(this._demonboltDamage)} ({this.owner.formatItemDamageDone(this._demonboltDamage)})
{hasPS && (
<>
<br /><br />* Since you have Power Siphon talent, it's highly likely that it messes up getting current pets at certain time because sometimes
the number of Imps we sacrifice in code doesn't agree with what happens in logs. Therefore, this value is most likely a little wrong.
</>
)}
</>
)}
>
<BoringSpellValueText spell={SPELLS.SACRIFICED_SOULS_TALENT}>
<ItemDamageDone amount={this.totalBonusDamage} />
</BoringSpellValueText>
</Statistic>
);
}
}
export default SacrificedSouls;<|fim▁end|> | import { formatThousands } from 'common/format'; |
<|file_name|>bitcoin_bg.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="bg" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About sportcoin</source>
<translation>За sportcoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>sportcoin</b> version</source>
<translation><b>sportcoin</b> версия</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Това е експериментален софтуер.
Разпространява се под MIT/X11 софтуерен лиценз, виж COPYING или http://www.opensource.org/licenses/mit-license.php.
Използван е софтуер, разработен от OpenSSL Project за употреба в OpenSSL Toolkit (http://www.openssl.org/), криптографски софтуер разработен от Eric Young ([email protected]) и UPnP софтуер разработен от Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Sportcoin Developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Адреси</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Двоен клик за редакция на адрес или име</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Създава нов адрес</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Копира избрания адрес</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Нов адрес</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your sportcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Това са вашите sportcoin адреси за получаване на плащания. За по-лесно проследяване на плащанията и повишена анонимност можете да използвате нов адрес за всяко плащане.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Копирай</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Покажи &QR код</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a sportcoin address</source>
<translation>Подпишете съобщение като доказателство, че притежавате определен адрес</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Подпиши &съобщение</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Изтрий избрания адрес от списъка</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Запишете данните от текущия раздел във файл</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified sportcoin address</source>
<translation>Проверете съобщение, за да сте сигурни че е подписано с определен sportcoin адрес</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Провери съобщение</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Изтрий</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your sportcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Копирай &име</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Редактирай</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Запазване на адреси</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>CSV файл (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Грешка при записа</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Неуспешен запис в %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Име</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адрес</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(без име)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Парола</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Нова парола</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Още веднъж</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Въведете нова парола за портфейла.<br/>Моля използвайте <b>поне 10 случайни символа</b> или <b>8 или повече думи</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Криптиране на портфейла</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Тази операция изисква Вашата парола за отключване на портфейла.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Отключване на портфейла</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Тази операция изисква Вашата парола за декриптиране на портфейла.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Декриптиране на портфейла</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Смяна на паролата</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Въведете текущата и новата парола за портфейла.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Потвърждаване на криптирането</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR DARKCOINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Портфейлът е криптиран</translation>
</message>
<message>
<location line="-56"/>
<source>sportcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your sportcoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Криптирането беше неуспешно</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Криптирането на портфейла беше неуспешно поради вътрешна грешка. Портфейлът не е криптиран.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Паролите не съвпадат.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Отключването беше неуспешно</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Паролата въведена за декриптиране на портфейла е грешна.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Декриптирането беше неуспешно</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Паролата на портфейла беше променена успешно.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Подписване на &съобщение...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Синхронизиране с мрежата...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Баланс</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Обобщена информация за портфейла</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Транзакции</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>История на входящите и изходящи транзакции</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Редактиране на адреси</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Списък на адресите за получаване</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>Из&ход</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Затваря приложението</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about sportcoin</source>
<translation>Показва информация за sportcoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>За &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Опции...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Криптиране на портфейла...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Запазване на портфейла...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Смяна на паролата...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a sportcoin address</source>
<translation>Изпращане към sportcoin адрес</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for sportcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Променя паролата за портфейла</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Проверка на съобщение...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>sportcoin</source>
<translation>sportcoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Портфейл</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About sportcoin</source>
<translation>&За sportcoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your sportcoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified sportcoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Файл</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Настройки</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Помощ</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Раздели</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>sportcoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to sportcoin network</source>
<translation><numerusform>%n връзка към sportcoin мрежата</numerusform><numerusform>%n връзки към sportcoin мрежата</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Синхронизиран</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Зарежда блокове...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Потвърждение за такса</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Изходяща транзакция</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Входяща транзакция</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid sportcoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Портфейлът е <b>криптиран</b> и <b>отключен</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Портфейлът е <b>криптиран</b> и <b>заключен</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. sportcoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Редактиране на адрес</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Име</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Името свързано с този запис в списъка с адреси</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Адрес</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Адресът свързан с този запис в списъка с адреси. Може да се променя само за изходящи адреси.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Нов адрес за получаване</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Нов адрес за изпращане</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Редактиране на входящ адрес</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Редактиране на изходящ адрес</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Вече има адрес "%1" в списъка с адреси.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid sportcoin address.</source>
<translation>"%1" не е валиден sportcoin адрес.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Отключването на портфейла беше неуспешно.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Създаването на ключ беше неуспешно.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>sportcoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>UI опции</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Опции</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Основни</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>&Такса за изходяща транзакция</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start sportcoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start sportcoin on system login</source>
<translation>&Пускане на sportcoin при вход в системата</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>&Мрежа</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the sportcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Автоматично отваряне на входящия sportcoin порт. Работи само с рутери поддържащи UPnP.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Отваряне на входящия порт чрез &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the sportcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>IP адрес на прокси сървъра (например 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Прозорец</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>След минимизиране ще е видима само иконата в системния трей.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Минимизиране в системния трей</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>При затваряне на прозореца приложението остава минимизирано. Ако изберете тази опция, приложението може да се затвори само чрез Изход в менюто.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>М&инимизиране при затваряне</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Интерфейс</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Език:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting sportcoin.</source>
<translation>Промяната на езика ще влезе в сила след рестартиране на sportcoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Мерни единици:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Изберете единиците, показвани по подразбиране в интерфейса.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show sportcoin addresses in the transaction list or not.</source>
<translation>Ще се показват адресите в списъка с транзакции независимо от наличието на кратко име.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Показвай и адресите в списъка с транзакции</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting sportcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Прокси адресът е невалиден.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the sportcoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Баланс:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Непотвърдени:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Портфейл</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Последни транзакции</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Вашият текущ баланс</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Сборът на все още непотвърдените транзакции, които не са част от текущия баланс</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>несинхронизиран</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start sportcoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Изискай плащане</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Сума:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Име:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Съобщение:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Запази като...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Грешка при създаването на QR Code от URI.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Въведената сума е невалидна, моля проверете.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Мрежа</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the sportcoin-Qt help message to get a list with possible sportcoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>sportcoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>sportcoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the sportcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Изчисти конзолата</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the sportcoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Използвайте стрелките нагоре и надолу за разглеждане на историята от команди и <b>Ctrl-L</b> за изчистване на конзолата.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Изпращане</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Изпращане към повече от един получател</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Добави &получател</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Изчистване на всички полета</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>&Изчисти</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Баланс:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Потвърдете изпращането</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>И&зпрати</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> на %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Потвърждаване</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Сигурни ли сте, че искате да изпратите %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> и </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Невалиден адрес на получателя.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Сумата трябва да е по-голяма от 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Грешка: транзакцията беше отхвърлена. Това е възможно, ако част от парите в портфейла са вече похарчени, например при паралелно използване на копие на wallet.dat.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>С&ума:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Плати &На:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Въведете име за този адрес, за да го добавите в списъка с адреси</translation>
</message>
<message>
<location line="-78"/><|fim▁hole|> <source>&Label:</source>
<translation>&Име:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Изберете от списъка с адреси</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Вмъкни от клипборда</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Махни този получател</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a sportcoin address (e.g. GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</source>
<translation>Въведете sportcoin адрес (например GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Подпиши / Провери съобщение</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Подпиши</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Можете да подпишете съобщение като доказателство, че притежавате определен адрес. Бъдете внимателни и не подписвайте съобщения, които биха разкрили лична информация без вашето съгласие.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</source>
<translation>Адресът, с който ще подпишете съобщението (например GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Изберете от списъка с адреси</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Вмъкни от клипборда</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Въведете съобщението тук</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Копиране на текущия подпис</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this sportcoin address</source>
<translation>Подпишете съобщението като доказателство, че притежавате този sportcoin адрес</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>&Изчисти</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Провери</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</source>
<translation>Адресът, с който е подписано съобщението (например GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified sportcoin address</source>
<translation>Проверете съобщението, за да сте сигурни, че е подписано с определения sportcoin адрес</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a sportcoin address (e.g. GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</source>
<translation>Въведете sportcoin адрес (например GKttXqpmwf2DYadeoueA6GpqwVcwggnu9N)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Натиснете "Подписване на съобщение" за да създадете подпис</translation>
</message>
<message>
<location line="+3"/>
<source>Enter sportcoin signature</source>
<translation>Въведете sportcoin подпис</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Въведеният адрес е невалиден.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Моля проверете адреса и опитайте отново.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Не е наличен частният ключ за въведеният адрес.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Подписването на съобщение бе неуспешно.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Съобщението е подписано.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Подписът не може да бъде декодиран.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Проверете подписа и опитайте отново.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Подписът не отговаря на комбинацията от съобщение и адрес.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Проверката на съобщението беше неуспешна.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Съобщението е потвърдено.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Sportcoin Developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Подлежи на промяна до %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/офлайн</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/непотвърдени</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>включена в %1 блока</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Статус</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Дата</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Източник</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Издадени</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>От</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>За</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>собствен адрес</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>име</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Кредит</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Дебит</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Такса</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Сума нето</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Съобщение</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Коментар</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Транзакция</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Сума</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>true</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>false</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, все още не е изпратено</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>неизвестен</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Транзакция</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Описание на транзакцията</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Дата</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Тип</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адрес</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Сума</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Подлежи на промяна до %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Офлайн (%1 потвърждения)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Непотвърдени (%1 от %2 потвърждения)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Потвърдени (%1 потвърждения)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Блокът не е получен от останалите участници и най-вероятно няма да бъде одобрен.</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Получени с</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Изпратени на</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Емитирани</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Състояние на транзакцията. Задръжте върху това поле за брой потвърждения.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Дата и час на получаване.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Тип на транзакцията.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Получател на транзакцията.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Сума извадена или добавена към баланса.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Всички</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Днес</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Тази седмица</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Този месец</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Предния месец</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Тази година</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>От - до...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Получени</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Изпратени на</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Собствени</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Емитирани</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Други</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Търсене по адрес или име</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Минимална сума</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Копирай адрес</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Копирай име</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Редактирай име</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>CSV файл (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Потвърдени</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Дата</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Тип</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Име</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Адрес</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Сума</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Грешка при записа</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Неуспешен запис в %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>От:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>до</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Изпращане</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Запишете данните от текущия раздел във файл</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>sportcoin version</source>
<translation>sportcoin версия</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or sportcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Опции:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: sportcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: sportcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=sportcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "sportcoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. sportcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong sportcoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Невалиден -tor адрес: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the sportcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Зареждане на адресите...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of sportcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart sportcoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
        <translation>Невалиден -proxy адрес: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Зареждане на блок индекса...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. sportcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Зареждане на портфейла...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
        <translation>Преразглеждане на последователността от блокове...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Зареждането е завършено</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Грешка</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>coffeehandlers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''coffeehandlers.py - Waqas Bhatti ([email protected]) - Jul 2014
This contains the URL handlers for the astroph-coffee web-server.
'''
import os.path
import logging
import base64
import re
from datetime import datetime, timedelta
from pytz import utc, timezone
import tornado.web
from tornado.escape import xhtml_escape, xhtml_unescape, url_unescape, squeeze
import arxivdb
import webdb
import fulltextsearch as fts
import ipaddress
LOGGER = logging.getLogger(__name__)
######################
## USEFUL CONSTANTS ##
######################
ARCHIVEDATE_REGEX = re.compile(r'^(\d{4})(\d{2})(\d{2})$')
MONTH_NAMES = {x:datetime(year=2014,month=x,day=12)
for x in range(1,13)}
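# A quick note on these constants (a sketch, not used at runtime):
# ARCHIVEDATE_REGEX.match('20140712').groups() returns ('2014', '07', '12'),
# while a malformed date such as '2014-07-12' fails to match and returns
# None. MONTH_NAMES maps a month number to an arbitrary datetime in that
# month, so templates can render the month name via strftime('%B').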
######################
## USEFUL FUNCTIONS ##
######################
def msgencode(message, signer):
'''This escapes a message, then base64 encodes it.
Uses an itsdangerous.Signer instance provided as the signer arg to sign the
message to protect against tampering.
'''
try:
msg = base64.b64encode(signer.sign(xhtml_escape(message)))
msg = msg.replace('=','*')
return msg
except Exception as e:
return ''
def msgdecode(message, signer):
'''This base64 decodes a message, then unescapes it.
Uses an itsdangerous.Signer instance provided as the signer arg to verify
the message to protect against tampering.
'''
try:
msg = message.replace('*','=')
decoded_message = base64.b64decode(msg)
decoded_message = signer.unsign(decoded_message)
return xhtml_unescape(decoded_message)
except Exception as e:
return ''
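# Example round-trip for the two helpers above (a sketch; the Signer secret
# here is hypothetical):
#
#   from itsdangerous import Signer
#   signer = Signer('not-a-real-secret')
#   token = msgencode('Vote recorded!', signer)
#   msgdecode(token, signer)    # -> 'Vote recorded!'
#
# Tampering with the token makes Signer.unsign() raise inside msgdecode,
# which then returns '' instead of the original message.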
def group_arxiv_dates(dates, npapers, nlocal, nvoted):
'''
    This takes a list of datetime.dates and the parallel lists of paper,
    local-paper, and voted-paper counts for each date, then builds a nested
    dict keyed by year and month, allowing the following listing (in
    rev-chron order) to be made:
YEAR X
Month X:
Date X --- <strong>YY<strong> papers
.
.
.
YEAR 1
Month 1:
Date 1 --- <strong>YY<strong> papers
'''
years, months = [], []
for x in dates:
years.append(x.year)
months.append(x.month)
unique_years = set(years)
unique_months = set(months)
yeardict = {}
for year in unique_years:
yeardict[year] = {}
for month in unique_months:
yeardict[year][MONTH_NAMES[month]] = [
(x,y,z,w) for (x,y,z,w) in zip(dates, npapers, nlocal, nvoted)
if (x.year == year and x.month == month)
]
for month in yeardict[year].copy():
if not yeardict[year][month]:
del yeardict[year][month]
return yeardict
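# Example of the structure group_arxiv_dates() builds (sketch):
#
#   from datetime import date
#   yd = group_arxiv_dates([date(2014, 7, 1), date(2014, 7, 2)],
#                          [10, 12], [1, 0], [3, 4])
#   # yd[2014][MONTH_NAMES[7]] == [(date(2014, 7, 1), 10, 1, 3),
#   #                              (date(2014, 7, 2), 12, 0, 4)]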
##################
## URL HANDLERS ##
##################
class CoffeeHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee and redirects based on
time of day.
'''
def initialize(self,
database,<|fim▁hole|> voting_start,
voting_end,
coffee_time,
server_tz,
signer,
room,
building,
department,
institution):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.coffee_time = coffee_time
self.local_tz = timezone(server_tz)
self.signer = signer
self.room = room
self.building = building
self.department = department
self.institution = institution
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
' <a class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                            new_user=new_user)
                return
# there's no existing user session
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
# construct the current dt and use it to figure out the local-to-server
# voting times
dtnow = datetime.now(tz=utc)
dtstart = dtnow.replace(hour=self.voting_start.hour,
minute=self.voting_start.minute,
second=0)
local_start = dtstart.astimezone(self.local_tz)
local_start = local_start.strftime('%H:%M %Z')
dtend = dtnow.replace(hour=self.voting_end.hour,
minute=self.voting_end.minute,
second=0)
local_end = dtend.astimezone(self.local_tz)
local_end = local_end.strftime('%H:%M %Z')
dtcoffee = dtnow.replace(hour=self.coffee_time.hour,
minute=self.coffee_time.minute,
second=0)
local_coffee = dtcoffee.astimezone(self.local_tz)
local_coffee = local_coffee.strftime('%H:%M %Z')
utc_start = self.voting_start.strftime('%H:%M %Z')
utc_end = self.voting_end.strftime('%H:%M %Z')
utc_coffee = self.coffee_time.strftime('%H:%M %Z')
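        # (illustration, assuming a hypothetical server_tz of 'US/Eastern':
        # a UTC voting_start of 10:30 renders local_start as '06:30 EDT'
        # in summer and as '05:30 EST' in winter)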
self.render("index.html",
user_name=user_name,
local_today=local_today,
voting_localstart=local_start,
voting_localend=local_end,
voting_start=utc_start,
voting_end=utc_end,
coffeetime_local=local_coffee,
coffeetime_utc=utc_coffee,
flash_message=flash_message,
new_user=new_user,
coffee_room=self.room,
coffee_building=self.building,
coffee_department=self.department,
coffee_institution=self.institution)
class ArticleListHandler(tornado.web.RequestHandler):
'''This handles all requests for the listing of selected articles and voting
pages. Note: if nobody voted on anything, the default is to return all
articles with local authors at the top.
'''
def initialize(self, database,
voting_start,
voting_end,
server_tz,
reserve_interval,
signer):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.server_tz = server_tz
self.signer = signer
self.reserve_interval = reserve_interval
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
todays_date = datetime.now(tz=utc).strftime('%A, %b %d %Y')
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
todays_localdate = (
datetime.now(tz=timezone(self.server_tz)).strftime('%Y-%m-%d')
)
todays_utcdow = datetime.now(tz=utc).weekday()
todays_localdate_str = (
datetime.now(tz=timezone(self.server_tz)).strftime('%A, %b %d %Y')
)
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                            new_user=new_user)
                return
# there's no existing user session
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
############################
## SERVE THE PAGE REQUEST ##
############################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then show the voting page
if (self.voting_start < timenow < self.voting_end):
# get the articles for today
(local_articles, voted_articles,
other_articles, reserved_articles) = (
arxivdb.get_articles_for_voting(database=self.database)
)
# if today's papers aren't ready yet, redirect to the papers display
if not local_articles and not voted_articles and not other_articles:
LOGGER.warning('no papers for today yet, '
'redirecting to previous day papers')
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(
database=self.database
)
)
todays_date = datetime.strptime(
latestdate,
'%Y-%m-%d'
).strftime('%A, %b %d %Y')
# don't show a message on the weekend when no papers are loaded
if todays_utcdow in (5,6):
flash_message = ""
else:
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"Papers for today haven't been imported yet. "
"Here are the most recent papers. "
"Please wait a few minutes and try again."
"<a href=\"#\" class=\"close\">×</a></div>"
)
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("listing.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
flash_message=flash_message,
reserve_interval_days=self.reserve_interval,
new_user=new_user)
# if today's papers are ready, show them and ask for votes
else:
# get this user's votes
user_articles = arxivdb.get_user_votes(todays_utcdate,
user_name,
database=self.database)
user_reserved = arxivdb.get_user_reservations(
todays_utcdate,
user_name,
database=self.database
)
LOGGER.info('user has votes on: %s, has reservations on: %s'
% (user_articles, user_reserved))
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the voting page
self.render("voting.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
flash_message=flash_message,
new_user=new_user,
reserve_interval_days=self.reserve_interval,
user_articles=user_articles,
user_reserved=user_reserved)
# otherwise, show the article list
else:
# get the articles for today
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(utcdate=todays_utcdate,
database=self.database)
)
# if today's papers aren't ready yet, show latest papers
if not local_articles and not voted_articles and not other_articles:
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(
database=self.database
)
)
todays_date = datetime.strptime(
latestdate,
'%Y-%m-%d'
).strftime('%A, %b %d %Y')
# don't show a message on the weekend when no papers are loaded
if todays_utcdow in (5,6):
flash_message = ""
else:
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"Papers for today haven't been imported yet. "
"Here are the most recent papers. "
"Please wait a few minutes and try again."
"<a href=\"#\" class=\"close\">×</a></div>"
)
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("listing.html",
user_name=user_name,
local_today=local_today,
todays_date=todays_date,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
reserve_interval_days=self.reserve_interval,
flash_message=flash_message,
new_user=new_user)
class ReservationHandler(tornado.web.RequestHandler):
'''
This handles all requests for the voting page.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''
This handles a POST request for a paper reservation.
'''
arxivid = self.get_argument('arxivid', None)
reservetype = self.get_argument('reservetype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# if we're asked to geofence, then do so
# (unless the request came from INSIDE the building)
# FIXME: add exceptions for private network IPv4 addresses
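        # (one possible approach to that FIXME, sketched here only as a
        # comment: the ipaddress module exposes an is_private flag, so
        # RFC1918 and loopback requesters could be trusted up front)
        #
        #   if ipaddress.ip_address(user_ip.decode()).is_private:
        #       trustedip = True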
geolocked = False
# check the network as well
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.ipaddrs])
        except Exception:
trustedip = False
if self.geofence and user_ip != '127.0.0.1':
try:
geoip = self.geofence.city(user_ip)
if (geoip.country.iso_code in self.countries and
geoip.subdivisions.most_specific.iso_code
in self.regions):
LOGGER.info('geofencing ok: '
'reservation request '
'from inside allowed regions')
else:
LOGGER.warning(
'geofencing activated: '
                        'reservation request from %s '
'is outside allowed regions' %
('%s-%s' % (
geoip.country.iso_code,
geoip.subdivisions.most_specific.iso_code
))
)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
                    self.finish()
                    return
# fail deadly
except Exception as e:
LOGGER.exception('geofencing failed for IP %s, '
'blocking request.' % user_ip)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
                self.finish()
                return
#############################
## PROCESS THE RESERVATION ##
#############################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# if all things are satisfied, then process the reserve request
if (arxivid and
reservetype and
sessioninfo[0] and
((not geolocked) or trustedip) and
in_votetime):
arxivid = xhtml_escape(arxivid)
reservetype = xhtml_escape(reservetype)
LOGGER.info('user: %s, reserving: %s, on: %s' % (user_name,
reservetype,
arxivid))
if 'arXiv:' not in arxivid or reservetype not in ('reserve',
'release'):
message = ("Your paper reservation request "
"used invalid arguments "
"and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# first, check how many reservations this user has
user_reservations = arxivdb.get_user_reservations(
todays_utcdate,
user_name,
database=self.database
)
# make sure it's less than 5 or we're not adding another
# reservation
if len(user_reservations) < 5 or reservetype != 'reserve':
reserve_outcome = arxivdb.record_reservation(
arxivid,
user_name,
reservetype,
database=self.database
)
                    if reserve_outcome is False or reserve_outcome is None:
message = ("That article doesn't exist, "
"and your reservation "
"has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
if (reserve_outcome[0] == 1 and
reserve_outcome[1] == user_name):
message = ("Reservation successfully recorded for %s"
% arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
elif (reserve_outcome[0] == 1 and
reserve_outcome[1] != user_name):
message = ("Someeone else already reserved that paper!")
jsondict = {'status':'failed',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
elif (reserve_outcome[0] == 0):
message = ("Release successfully recorded for %s"
% arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'reserved':reserve_outcome[0]}}
else:
message = ("That article doesn't exist, "
"or your reservation "
"has been discarded because of a problem.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("You've reserved 5 articles already.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
elif ((not geolocked) or trustedip):
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
class VotingHandler(tornado.web.RequestHandler):
'''
This handles all requests for the voting page.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''This handles POST requests for vote submissions.
takes the following arguments:
arxivid: article to vote for
votetype: up / down
checks if an existing session is in play. if not, flashes a message
saying 'no dice' in a flash message
- checks if the user has more than five votes used for the utcdate of
the requested arxivid
- if they do, then deny vote
- if they don't, allow vote
if vote is allowed:
- changes the nvote column for arxivid
- adds the current user to the voters column
- returns the nvotes for the arxivid along with
success/failure
if vote is not allowed:
- sends back a 401 + error message, which the frontend JS turns into a
flash message
the frontend JS then:
- updates the vote total for this arxivid
- handles flash messages
- updates the vote button status
'''
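        # Example exchange (sketch; the arxiv ID below is made up): the
        # frontend POSTs arxivid=arXiv:1407.1234&votetype=up and, on
        # success, receives JSON like:
        #   {"status": "success",
        #    "message": "Vote successfully recorded for arXiv:1407.1234",
        #    "results": {"nvotes": 4}}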
arxivid = self.get_argument('arxivid', None)
votetype = self.get_argument('votetype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# TESTING
# user_ip = '131.111.184.18' # Cambridge UK
# user_ip = '71.168.183.215' # FIOS NJ
# user_ip = '70.192.88.245' # VZW NJ
# user_ip = '70.42.157.5' # VZW NY
# user_ip = '69.141.255.240' # Comcast PA
# user_ip = '128.112.25.36' # Princeton Univ, NJ
# if we're asked to geofence, then do so
# (unless the request came from INSIDE the building)
# FIXME: add exceptions for private network IPv4 addresses
geolocked = False
# check the network as well
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.ipaddrs])
        except Exception:
trustedip = False
if self.geofence and user_ip != '127.0.0.1':
try:
# check the geoip location
geoip = self.geofence.city(user_ip)
if (geoip.country.iso_code in self.countries and
geoip.subdivisions.most_specific.iso_code
in self.regions):
LOGGER.info('geofencing ok: '
'vote request from inside allowed regions')
else:
LOGGER.warning(
'geofencing activated: '
'vote request from %s '
'is outside allowed regions' %
('%s-%s' % (
geoip.country.iso_code,
geoip.subdivisions.most_specific.iso_code
))
)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
                    self.finish()
                    return
# fail deadly
except Exception as e:
LOGGER.exception('geofencing failed for IP %s, '
'blocking request.' % user_ip)
message = ("Sorry, you're trying to vote "
"from an IP address that is "
"blocked from voting.")
jsondict = {'status':'failed',
'message':message,
'results':None}
geolocked = True
self.write(jsondict)
                self.finish()
                return
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# if all things are satisfied, then process the vote request
if (arxivid and
votetype and
sessioninfo[0] and
(not geolocked or trustedip) and
in_votetime):
arxivid = xhtml_escape(arxivid)
votetype = xhtml_escape(votetype)
LOGGER.info('user: %s, voting: %s, on: %s' % (user_name,
votetype,
arxivid))
if 'arXiv:' not in arxivid or votetype not in ('up','down'):
message = ("Your vote request used invalid arguments"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# first, check how many votes this user has
user_votes = arxivdb.get_user_votes(todays_utcdate,
user_name,
database=self.database)
# make sure it's less than 5 or the votetype isn't up
if len(user_votes) < 5 or votetype != 'up':
vote_outcome = arxivdb.record_vote(arxivid,
user_name,
votetype,
database=self.database)
if vote_outcome is False:
message = ("That article doesn't exist, and your vote "
"has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Vote successfully recorded for %s" % arxivid)
jsondict = {'status':'success',
'message':message,
'results':{'nvotes':vote_outcome}}
self.write(jsondict)
self.finish()
else:
message = ("You've voted on 5 articles already.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
elif (not geolocked or trustedip):
message = ("Your vote request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
message = ("Your reservation request could not be authorized"
" and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
class EditHandler(tornado.web.RequestHandler):
'''This handles all requests for the editing function.
This allows users in the trustedip range to edit the arxiv listing for the
current day.
The allowable edits are:
- paper is local author
- paper is not local author
'''
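    # Example request this handler expects (sketch; the arxiv ID is made
    # up): a POST with arxivid=arXiv:1407.1234&edittype=local coming from
    # an IP inside the editips CIDR list, during the voting window.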
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def post(self):
'''
        This handles a POST request for a paper edit.
'''
arxivid = self.get_argument('arxivid', None)
edittype = self.get_argument('edittype', None)
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
sessioninfo = webdb.session_check(session_token,
database=self.database)
user_name = sessioninfo[2]
todays_utcdate = datetime.now(tz=utc).strftime('%Y-%m-%d')
user_ip = self.request.remote_ip
# check the network
try:
userip_addrobj = ipaddress.ip_address(user_ip.decode())
trustedip = any([(userip_addrobj in x) for x in self.editips])
        except Exception:
trustedip = False
######################
## PROCESS THE EDIT ##
######################
# check if we're in voting time-limits
timenow = datetime.now(tz=utc).timetz()
# if we are within the time limits, then allow the voting POST request
if (self.voting_start < timenow < self.voting_end):
in_votetime = True
else:
in_votetime = False
# editing only checks its cidr and if we're in vote mode
if (arxivid and edittype and sessioninfo[0] and
trustedip and in_votetime):
arxivid = xhtml_escape(arxivid)
edittype = xhtml_escape(edittype)
            LOGGER.info('user: %s, editing: %s, on: %s' % (user_name,
                                                           edittype,
                                                           arxivid))
            if 'arXiv:' not in arxivid or edittype not in ('local',
                                                           'notlocal'):
                message = ("Your paper edit request "
                           "used invalid arguments "
                           "and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
else:
# process the edit
pass
# if we're not allowed to edit, discard the request
else:
message = ("Your edit request could not be authorized "
"(probably because the voting window is over)"
"and has been discarded.")
jsondict = {'status':'failed',
'message':message,
'results':None}
self.write(jsondict)
self.finish()
class AboutHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee/about.
'''
    def initialize(self, database, signer):
        '''
        This sets up the database and the message signer.
        '''
        self.database = database
        self.signer = signer
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
            flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
                            new_user=new_user)
                return
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
        #######################
        # show the about page #
        #######################
self.render("about.html",
local_today=local_today,
user_name=user_name,
flash_message=flash_message,
new_user=new_user)
class ArchiveHandler(tornado.web.RequestHandler):
'''
This handles all paper archive requests.
url: /astroph-coffee/archive/YYYYMMDD
'''
def initialize(self,
database,
reserve_interval,
signer):
'''
Sets up the database.
'''
self.database = database
self.reserve_interval = reserve_interval
self.signer = signer
def get(self, archivedate):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message, self.signer)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
                            new_user=new_user)
                return
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
                                new_user=new_user)
                    return
##################################
# now handle the archive request #
##################################
if archivedate is not None:
archivedate = xhtml_escape(archivedate)
archivedate = re.match(ARCHIVEDATE_REGEX, archivedate)
if archivedate:
year, month, day = archivedate.groups()
listingdate = '%s-%s-%s' % (year, month, day)
# get the articles for today
(latestdate, local_articles,
voted_articles, other_articles, reserved_articles) = (
arxivdb.get_articles_for_listing(utcdate=listingdate,
database=self.database)
)
# if this date's papers aren't available, show the archive index
if (not local_articles and
not voted_articles and
not other_articles and
not reserved_articles):
flash_message = (
"<div data-alert class=\"alert-box radius\">"
"No papers for %s were found. "
"You've been redirected to the Astro-Coffee archive."
"<a href=\"#\" class=\"close\">×</a></div>"
) % listingdate
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
else:
# figure out the UTC date for this archive listing
archive_datestr = datetime(
hour=0,
minute=15,
second=0,
day=int(day),
month=int(month),
year=int(year),
tzinfo=utc
).strftime('%A, %b %d %Y')
# preprocess the local papers to highlight local author names
if len(local_articles) > 0:
for lind in range(len(local_articles)):
author_list = local_articles[lind][4]
author_list = author_list.split(': ')[-1].split(',')
local_indices = local_articles[lind][-2]
if local_indices and len(local_indices) > 0:
local_indices = [
int(x) for x in local_indices.split(',')
]
for li in local_indices:
author_list[li] = '<strong>%s</strong>' % (
author_list[li]
)
# update this article's local authors
local_articles[lind][4] = ', '.join(author_list)
# show the listing page
self.render("archivelisting.html",
user_name=user_name,
local_today=local_today,
todays_date=archive_datestr,
local_articles=local_articles,
voted_articles=voted_articles,
other_articles=other_articles,
reserved_articles=reserved_articles,
reserve_interval_days=self.reserve_interval,
flash_message=flash_message,
new_user=new_user)
else:
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
else:
(archive_dates, archive_npapers,
archive_nlocal, archive_nvoted) = arxivdb.get_archive_index(
database=self.database
)
paper_archives = group_arxiv_dates(archive_dates,
archive_npapers,
archive_nlocal,
archive_nvoted)
self.render("archive.html",
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
paper_archives=paper_archives,
local_today=local_today)
class LocalListHandler(tornado.web.RequestHandler):
'''
This handles all requests for /astroph-coffee/local-authors.
'''
def initialize(self, database, admincontact, adminemail):
'''
This sets up the database.
'''
self.database = database
self.admincontact = admincontact
self.adminemail = adminemail
def get(self):
'''
This handles GET requests.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
###############################
# show the local authors page #
###############################
authorlist = webdb.get_local_authors(database=self.database)
if authorlist:
self.render("local-authors.html",
local_today=local_today,
user_name=user_name,
flash_message=flash_message,
new_user=new_user,
authorlist=authorlist,
admincontact=self.admincontact,
adminemail=self.adminemail)
else:
LOGGER.error('could not get the author list!')
message = ("There was a database error "
"trying to look up local authors. "
"Please "
"<a href=\"/astroph-coffee/about\">"
"let us know</a> about this problem!")
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
class FTSHandler(tornado.web.RequestHandler):
'''
This handles all requests for searching.
GET returns a search page.
POST posts the AJAX request.
'''
def initialize(self,
database,
voting_start,
voting_end,
debug,
signer,
geofence,
countries,
regions):
'''
Sets up the database.
'''
self.database = database
self.voting_start = voting_start
self.voting_end = voting_end
self.debug = debug
self.signer = signer
self.geofence = geofence[0]
self.ipaddrs = geofence[1]
self.editips = geofence[2]
self.countries = countries
self.regions = regions
def get(self):
'''This handles GET requests for searching.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
#######################
## CONTENT RENDERING ##
#######################
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="initial",
search_results=None,
search_result_info='',
search_nmatches=0,
new_user=new_user)
def post(self):
'''This handles POST requests for searching.
        Renders the search.html template with search_page_type = 'results'
        and passes search_results to it from a run of the
        fulltextsearch.fts4_phrase_query_paginated function.
'''
# handle a redirect with an attached flash message
flash_message = self.get_argument('f', None)
if flash_message:
flashtext = msgdecode(flash_message)
LOGGER.warning('flash message: %s' % flashtext)
flashbox = (
'<div data-alert class="alert-box radius">%s'
'<a href="#" class="close">×</a></div>' %
flashtext
)
flash_message = flashbox
else:
flash_message = ''
local_today = datetime.now(tz=utc).strftime('%Y-%m-%d %H:%M %Z')
# first, get the session token
session_token = self.get_secure_cookie('coffee_session',
max_age_days=30)
ip_address = self.request.remote_ip
if 'User-Agent' in self.request.headers:
client_header = self.request.headers['User-Agent'] or 'none'
else:
client_header = 'none'
user_name = 'anonuser@%s' % ip_address
new_user = True
# check if this session_token corresponds to an existing user
if session_token:
sessioninfo = webdb.session_check(session_token,
database=self.database)
if sessioninfo[0]:
user_name = sessioninfo[2]
LOGGER.info('found session for %s, continuing with it' %
user_name)
new_user = False
elif sessioninfo[-1] != 'database_error':
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
else:
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
error_message=message,
local_today=local_today,
flash_message=flash_message,
new_user=new_user)
else:
if ('crawler' not in client_header.lower() and
'bot' not in client_header.lower()):
LOGGER.warning('unknown user, starting a new session for '
'%s, %s' % (ip_address, client_header))
sessionok, token = webdb.anon_session_initiate(
ip_address,
client_header,
database=self.database
)
if sessionok and token:
self.set_secure_cookie('coffee_session',
token,
httponly=True)
else:
LOGGER.error('could not set session cookie for %s, %s' %
(ip_address, client_header))
self.set_status(500)
message = ("There was a database error "
"trying to look up user credentials.")
LOGGER.error('database error while looking up session for '
'%s, %s' % (ip_address, client_header))
self.render("errorpage.html",
user_name=user_name,
local_today=local_today,
error_message=message,
flash_message=flash_message,
new_user=new_user)
#######################
## CONTENT RENDERING ##
#######################
# get the search query
searchquery = self.get_argument('searchquery',None)
if not searchquery or len(searchquery) == 0:
            search_result_info = ('Sorry, we couldn\'t understand your '
                                  'search query: <strong>%s</strong>' %
                                  squeeze(xhtml_escape(searchquery or '')))
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)
else:
searchquery = squeeze(xhtml_escape(searchquery))
if len(searchquery) > 0:
try:
# figure out the weights to apply
titleq_count = searchquery.count('title:')
abstractq_count = searchquery.count('abstract:')
authorq_count = searchquery.count('authors:')
author_weight = 1.0 + 1.0*authorq_count
abstract_weight = 3.0 + 1.0*abstractq_count
title_weight = 2.0 + 1.0*titleq_count
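                    # (added example) a query such as
                    # 'authors:"Kelly" abstract:quasar abstract:variability'
                    # gives author_weight = 2.0, abstract_weight = 5.0 and
                    # title_weight = 2.0, biasing relevance towards the
                    # fields the user asked about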
                    # turn any &quot; characters into " so we can do exact
                    # phrase matching
                    searchquery = searchquery.replace('&quot;','"')
ftsdict = fts.fts4_phrase_query_paginated(
searchquery,
['arxiv_id','day_serial','title',
'authors','comments','abstract',
'link','pdf','utcdate',
'nvotes',
'local_authors', 'local_author_indices'],
sortcol='relevance',
pagelimit=500,
database=self.database,
relevance_weights=[title_weight,
abstract_weight,
author_weight],
)
search_results = ftsdict['results']
all_nmatches = ftsdict['nmatches']
LOGGER.info('found %s objects matching %s' % (all_nmatches,
searchquery))
relevance_sticker = (
'<span data-tooltip aria-haspopup="true" '
'class="has-tip" title="Okapi BM25 relevance '
'weights: title = %.1f, '
'abstract = %.1f,'
' authors = %.1f, all others = 1.0">relevant</span>'
) % (title_weight, abstract_weight, author_weight)
if all_nmatches == 0:
search_nmatches = 0
search_result_info = (
'Sorry, <span class="nmatches">0</span> '
'matching items were found for: '
'<strong>%s</strong>' %
searchquery
)
elif all_nmatches == 1:
search_nmatches = 1
search_result_info = (
'Found only <span class="nmatches">1</span> '
'matching item for: '
'<strong>%s</strong>' % searchquery
)
elif 1 < all_nmatches < 501:
search_nmatches = len(ftsdict['results']['arxiv_id'])
search_result_info = (
'Found <span class="nmatches">%s</span> '
'matching items for: '
'<strong>%s</strong>' %
(search_nmatches,
searchquery)
)
else:
search_nmatches = len(ftsdict['results']['arxiv_id'])
search_result_info = (
'Found %s total matching '
'items for: <strong>%s</strong>. '
'Showing only the '
'top <span class="nmatches">%s</span> '
'%s '
'results below' %
(all_nmatches,
searchquery,
search_nmatches,
relevance_sticker))
self.render(
"search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user
)
# if the query fails on the backend, return nothing.
except Exception as e:
LOGGER.exception("search backend failed on searchquery: %s"
% searchquery)
search_result_info = ('Sorry, we couldn\'t understand your '
'search query: <strong>%s</strong>' %
searchquery)
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)
# this is if we don't understand the query
else:
search_result_info = ('Sorry, we couldn\'t understand your '
'search query: <strong>%s</strong>.' %
searchquery)
search_results = None
search_nmatches = 0
self.render("search.html",
user_name=user_name,
local_today=local_today,
flash_message=flash_message,
search_page_title="Search the Astro-Coffee archive",
search_page_type="results",
search_results=search_results,
search_nmatches=search_nmatches,
search_result_info=search_result_info,
new_user=new_user)<|fim▁end|> | |
<|file_name|>test_bottleparser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import pytest
from bottle import Bottle, debug, request, response
from webtest import TestApp
from webargs import ValidationError, fields
from webargs.bottleparser import BottleParser
hello_args = {
'name': fields.Str(missing='World', validate=lambda n: len(n) >= 3),
}
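# Note (added): 'missing' supplies the default when the argument is absent,
# and 'validate' rejects names shorter than 3 characters, surfacing as a
# 422 response (see test_abort_called_on_validation_error below).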
hello_multiple = {
'name': fields.List(fields.Str())
}
parser = BottleParser()
@pytest.fixture
def app():
app = Bottle()
@app.route('/echo', method=['GET', 'POST'])
def index():
return parser.parse(hello_args, request)
@app.route('/echomulti/', method=['GET', 'POST'])
def multi():
return parser.parse(hello_multiple, request)
debug(True)
return app
@pytest.fixture
def testapp(app):
return TestApp(app)
def test_parse_querystring_args(testapp):
assert testapp.get('/echo?name=Fred').json == {'name': 'Fred'}
def test_parse_querystring_multiple(testapp):
expected = {'name': ['steve', 'Loria']}
assert testapp.get('/echomulti/?name=steve&name=Loria').json == expected
def test_parse_form_multiple(testapp):
expected = {'name': ['steve', 'Loria']}
assert testapp.post('/echomulti/', {'name': ['steve', 'Loria']}).json == expected
def test_parse_form(testapp):
assert testapp.post('/echo', {'name': 'Joe'}).json == {'name': 'Joe'}
def test_parse_json(testapp):
assert testapp.post_json('/echo', {'name': 'Fred'}).json == {'name': 'Fred'}
def test_parse_json_default(testapp):
assert testapp.post_json('/echo', {}).json == {'name': 'World'}
def test_parsing_form_default(testapp):
assert testapp.post('/echo', {}).json == {'name': 'World'}
def test_abort_called_on_validation_error(testapp):
res = testapp.post('/echo', {'name': 'b'}, expect_errors=True)
assert res.status_code == 422
def test_validator_that_raises_validation_error(app):
def always_fail(value):
raise ValidationError('something went wrong')
args = {'text': fields.Str(validate=always_fail)}
@app.route('/validated', method=['POST'])
def validated_route():
parser.parse(args)
vtestapp = TestApp(app)
res = vtestapp.post_json('/validated', {'text': 'bar'}, expect_errors=True)
assert res.status_code == 422
def test_use_args_decorator(app, testapp):
@app.route('/foo/', method=['GET', 'POST'])
@parser.use_args({'myvalue': fields.Int()})
def echo2(args):
return args
assert testapp.post('/foo/', {'myvalue': 23}).json == {'myvalue': 23}
def test_use_args_with_validation(app, testapp):
@app.route('/foo/', method=['GET', 'POST'])
@parser.use_args({'myvalue': fields.Int()}, validate=lambda args: args['myvalue'] > 42)
def echo(args):
return args
result = testapp.post('/foo/', {'myvalue': 43}, expect_errors=True)
assert result.status_code == 200
result = testapp.post('/foo/', {'myvalue': 41}, expect_errors=True)
assert result.status_code == 422
def test_use_args_with_url_params(app, testapp):
@app.route('/foo/<name>')
@parser.use_args({'myvalue': fields.Int()})
def foo(args, name):
return args
assert testapp.get('/foo/Fred?myvalue=42').json == {'myvalue': 42}
def test_use_kwargs_decorator(app, testapp):
@app.route('/foo/', method=['GET', 'POST'])
@parser.use_kwargs({'myvalue': fields.Int()})
def echo2(myvalue):
return {'myvalue': myvalue}
assert testapp.post('/foo/', {'myvalue': 23}).json == {'myvalue': 23}
def test_use_kwargs_with_url_params(app, testapp):
@app.route('/foo/<name>')
@parser.use_kwargs({'myvalue': fields.Int()})
def foo(myvalue, name):
return {'myvalue': myvalue}
assert testapp.get('/foo/Fred?myvalue=42').json == {'myvalue': 42}
def test_parsing_headers(app, testapp):
@app.route('/echo2')
def echo2():
args = parser.parse(hello_args, request, locations=('headers',))
return args
res = testapp.get('/echo2', headers={'name': 'Fred'}).json
assert res == {'name': 'Fred'}
def test_parsing_cookies(app, testapp):
@app.route('/setcookie')
def setcookie():
response.set_cookie('name', 'Fred')
return {}
@app.route('/echocookie')
def echocookie():
args = parser.parse(hello_args, request, locations=('cookies',))
return args
testapp.get('/setcookie')
assert testapp.get('/echocookie').json == {'name': 'Fred'}
def test_arg_specific_locations(app, testapp):
testargs = {
'name': fields.Str(location='json'),
'age': fields.Int(location='querystring'),<|fim▁hole|>
@app.route('/echo', method=['POST'])
def echo():
args = parser.parse(testargs, request)
return args
resp = testapp.post_json('/echo?age=42', {'name': 'Fred'})
assert resp.json['age'] == 42
assert resp.json['name'] == 'Fred'<|fim▁end|> | } |
<|file_name|>test_systemd.py<|end_file_name|><|fim▁begin|># Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import socket
from unittest import mock
from oslotest import base as test_base
from oslo_service import systemd
class SystemdTestCase(test_base.BaseTestCase):
"""Test case for Systemd service readiness."""
def test__abstractify(self):
sock_name = '@fake_socket'<|fim▁hole|> @mock.patch.object(os, 'getenv', return_value='@fake_socket')
def _test__sd_notify(self, getenv_mock, unset_env=False):
self.ready = False
self.closed = False
class FakeSocket(object):
def __init__(self, family, type):
pass
def connect(fs, socket):
pass
def close(fs):
self.closed = True
def sendall(fs, data):
if data == b'READY=1':
self.ready = True
with mock.patch.object(socket, 'socket', new=FakeSocket):
if unset_env:
systemd.notify_once()
else:
systemd.notify()
self.assertTrue(self.ready)
self.assertTrue(self.closed)
def test_notify(self):
self._test__sd_notify()
def test_notify_once(self):
os.environ['NOTIFY_SOCKET'] = '@fake_socket'
self._test__sd_notify(unset_env=True)
self.assertRaises(KeyError, os.environ.__getitem__, 'NOTIFY_SOCKET')
@mock.patch("socket.socket")
def test_onready(self, sock_mock):
recv_results = [b'READY=1', '', socket.timeout]
expected_results = [0, 1, 2]
for recv, expected in zip(recv_results, expected_results):
if recv == socket.timeout:
sock_mock.return_value.recv.side_effect = recv
else:
sock_mock.return_value.recv.return_value = recv
actual = systemd.onready('@fake_socket', 1)
self.assertEqual(expected, actual)<|fim▁end|> | res = systemd._abstractify(sock_name)
self.assertEqual('\0{0}'.format(sock_name[1:]), res)
|
<|file_name|>common.py<|end_file_name|><|fim▁begin|>import collections
import mui4py.mui4py_mod as mui4py_mod
from mui4py.config import get_default_config
from mui4py.types import map_type, get_float_type_str, get_int_type_str, get_io_type_str
import re
import numpy as np
class CppClass(object):
def __init__(self, config=None, args=(), kwargs={}):
self._cpp_class_name = None
self._cpp_point_class_name = None
self.raw_point = None
self.raw = None
self.io_data_type = None
        # Convert args entries into Arg() objects
self.args = tuple([Arg(a) if not issubclass(a.__class__, Arg) else a for a in args])
self.namespace = ""
        # Filter None-valued entries to take C++ default values.
self.kwargs = {k: v for k, v in kwargs.items() if v is not None}
self.configured = False
self._ALLOWED_IO_TYPES = None
if config is None:
self.config = get_default_config()
else:<|fim▁hole|> self.signature = self._signature()
def _signature(self):
sig = self._split_class_name()
args_str = [str(a) for a in self.get_plain_args()]
kwargs_str = ["{}={}".format(k,v) for k,v in self.kwargs.items()]
if args_str:
sig += "_ARGS_" + "_".join(args_str)
if kwargs_str:
sig += "_KWARGS_" + "_".join(kwargs_str)
return sig
def _split_class_name(self, title=True):
tokens = re.findall('[A-Z][^A-Z]*', self.__class__.__name__)
tokens = [t.lower() for t in tokens]
if title:
tokens[0] = tokens[0].title()
return "_".join(tokens)
def get_plain_args(self):
return tuple([a.arg for a in self.args])
def get_plain_kwargs(self):
return
def configure(self, config, io_data_type=None, cpp_obj=None, onlycheck=False):
self.config = config
self.point_class_name = get_cpp_name("Point", config.dim,\
config.float_type, config.int_type)
self.raw_point = getattr(mui4py_mod, self.point_class_name)
self.io_data_type = map_type[io_data_type]
if self.io_data_type is not None and self.io_data_type not in self._ALLOWED_IO_TYPES:
raise Exception("Data type not supported by spatial sampler ''. Supported types : {float, np.float32, np.float64, etc.}")
if onlycheck:
self.io_data_type = None
self.raw = cpp_obj
self._cpp_class_name = get_cpp_name(self._split_class_name(), config.dim, config.float_type,
config.int_type, namespace=self.namespace, type_io=self.io_data_type)
if self.raw is None:
# Configure class arguments
for a in self.args:
a.configure(config, self.raw_point)
self.raw = getattr(mui4py_mod, self._cpp_class_name)(*self.get_plain_args(), **self.kwargs)
self.configured = True
class Arg(object):
def __init__(self, arg):
self.arg = arg
def configure(self, config, cpp_point):
pass
class _Point(Arg):
def __init__(self, point_rep):
super(_Point, self).__init__(None)
self.point_rep = point_rep
def configure(self, config, cpp_point):
self.arg = array2Point(self.point_rep, config, cpp_point)
def array2Point(arr, config, cpp_point):
arr_aux = arr
if not isinstance(arr, list) and\
not isinstance(arr, tuple) and\
not isinstance(arr, np.ndarray):
arr_aux = [arr]
    # TODO: Maybe check for point type?
if len(arr_aux) == config.dim:
return cpp_point(arr_aux)
else:
raise Exception("Size of point is different than uniface dimensions.")
def get_cpp_name(cname, dim, float_type, int_type, namespace="", type_io=None):
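    # (Added note) builds the mangled name of the wrapped C++ class: e.g.
    # cname="Point", dim=2 gives roughly "_Point2d_<float-tag>_<int-tag>",
    # where the exact tags come from the *_type_str helpers.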
s = ""
if namespace:
s += "_" + namespace
s += "_{}{}d_{}_{}".format(cname, dim, get_float_type_str(float_type),\
get_int_type_str(int_type))
if type_io is not None:
s += "_" + get_io_type_str(type_io)
return s<|fim▁end|> | self.config = config |
<|file_name|>simple_functions.py<|end_file_name|><|fim▁begin|>import math<|fim▁hole|> '''
Returns a list of even numbers in thelist
'''
return [x for x in thelist if x%2 == 0]
def is_perfect_square(x):
'''
Returns True if x is a perfect square, False otherwise
'''
thesqrt = int(math.sqrt(x))
return thesqrt * thesqrt == x<|fim▁end|> |
def even_numbers_only(thelist): |
<|file_name|>projectcli.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Name: ProjectCli.py
# Purpose:
# Author: Fabien Marteau <[email protected]>
# Created: 23/05/2008
#-----------------------------------------------------------------------------
# Copyright (2008) Armadeus Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#-----------------------------------------------------------------------------
# Revision list :
#
# Date By Changes
#
#-----------------------------------------------------------------------------
__doc__ = ""
__version__ = "1.0.0"
__author__ = "Fabien Marteau <[email protected]>"
import cmd,os
from periphondemand.bin.define import *
from periphondemand.bin.utils import wrapperxml, settings, error, basecli
from periphondemand.bin.utils import wrappersystem as sy
from periphondemand.bin.utils.display import Display
from periphondemand.bin.commandline import *
from periphondemand.bin.commandline.synthesiscli import SynthesisCli
from periphondemand.bin.commandline.simulationcli import SimulationCli
from periphondemand.bin.commandline.drivercli import DriverCli
from periphondemand.bin.utils.settings import Settings
from periphondemand.bin.utils.basecli import BaseCli
from periphondemand.bin.utils.error import Error
from periphondemand.bin.core.project import Project
from periphondemand.bin.core.component import Component
from periphondemand.bin.core.platform import Platform
from periphondemand.bin.core.library import Library
from periphondemand.bin.code.intercon import Intercon
from periphondemand.bin.code.vhdl.topvhdl import TopVHDL
from periphondemand.bin.toolchain.synthesis import Synthesis
from periphondemand.bin.toolchain.simulation import Simulation
from periphondemand.bin.toolchain.driver import Driver
settings = Settings()
display = Display()
class ProjectCli(BaseCli):
""" Project command line interface
"""
def __init__(self,parent=None):
BaseCli.__init__(self,parent)
if settings.active_project is None:
settings.active_project = Project("",void=1)
if settings.active_library is None:
settings.active_library = Library()
def do_synthesis(self,arg):
"""\
Usage : synthesis
synthesis commands
"""
try:
self.isProjectOpen()
self.isPlatformSelected()
except Error,e:
print e
return
cli = SynthesisCli(self)
cli.setPrompt("synthesis")
arg = str(arg)
if len(arg) > 0:
line = cli.precmd(arg)
cli.onecmd(line)
cli.postcmd(True, line)
else:
cli.cmdloop()
self.stdout.write("\n")
def do_simulation(self,line):
"""\
Usage : simulation
Simulation generation environment
"""
try:
self.isProjectOpen()
self.isPlatformSelected()
except Error,e:
print e
return
# test if only one toolchain for simulation in library
cli = SimulationCli(self)
cli.setPrompt("simulation")
line = str(line)
if len(line) > 0:
line = cli.precmd(line)
cli.onecmd(line)
cli.postcmd(True, line)
else:
cli.cmdloop()
self.stdout.write("\n")
def do_driver(self,line):
"""\
Usage : driver
Driver generation environment
"""
try:
self.isProjectOpen()
self.isPlatformSelected()
except Error,e:
print e
return
# test if only one toolchain for simulation in library
cli = DriverCli(self)
cli.setPrompt("driver")
line = str(line)
if len(line) > 0:
line = cli.precmd(line)
cli.onecmd(line)
cli.postcmd(True, line)
else:
cli.cmdloop()
self.stdout.write("\n")
def do_create(self,line):
"""\
Usage : create <projectname>
create new project
"""
try:
self.checkargs(line,"<projectname>")
except Error,e:
print e
return
try:
sy.check_name(line)
except Error,e:
print e
return 0
dirname = os.path.abspath(line)
if sy.dirExist(dirname):
print "Project "+line+" already exists"
return 0
else:
try:
settings.active_project = Project(dirname,void=0)
except Error,e:
print e
return
self.setPrompt("project",settings.active_project.getName())
print "Project "+settings.active_project.getName()+" created"
def complete_load(self,text,line,begidx,endidx):
""" complete load command with files under directory """
path = line.split(" ")[1]
if path.find("/") == -1: # sub
path = ""
elif text.split() == "": # sub/sub/
path = "/".join(path)+"/"
else: # sub/sub
path = "/".join(path.split("/")[0:-1]) + "/"
listdir = sy.listDirectory(path)
listfile = sy.listFileType(path,XMLEXT[1:])
listfile.extend(listdir)
return self.completelist(line,text,listfile)
def do_load(self,line):
"""\
        Usage : load <projectfilename>.xml
Load a project
"""
try:
self.checkargs(line,"<projectfilename>.xml")
except Error,e:
print e
return
if sy.dirExist(line):
head,projectname = os.path.split(line)
line = os.path.join(head,projectname,projectname+".xml")
if not sy.fileExist(line):
print Error("File doesn't exists")
return
try:
settings.active_project = Project(line)
except Error,e:
print e
return
except IOError,e:
print e
return
self.setPrompt("project:"+settings.active_project.getName())
print display
def complete_addinstance(self,text,line,begidx,endidx):
componentlist = []
try:
componentlist = self.completeargs(text,line,"<libraryname>.<componentname>.[componentversion] [newinstancename]")
except Exception,e:
print e
return componentlist
def do_addinstance(self,line):
"""\
Usage : addinstance <libraryname>.<componentname>.[componentversion] [newinstancename]
Add component in project
"""
try:
self.isProjectOpen()
self.isPlatformSelected()
self.checkargs(line,"<libraryname>.<componentname>.[componentversion] [newinstancename]")
except Error,e:
print display
print e
return
arg = line.split(' ')
subarg = arg[0].split(".")
try:
instancename= arg[1]
except IndexError:
instancename=None
try:
componentversion=subarg[2]
except IndexError:
componentversion=None
try:
if instancename != None:
sy.check_name(instancename)
if instancename== None and componentversion==None:
settings.active_project.addinstance(componentname=subarg[1],
libraryname=subarg[0])
elif instancename != None and componentversion==None:
settings.active_project.addinstance(componentname=subarg[1],
libraryname=subarg[0],
instancename=instancename)
elif instancename == None and componentversion!=None:
settings.active_project.addinstance(componentname=subarg[1],
libraryname=subarg[0],
componentversion=componentversion)
else:
settings.active_project.addinstance(componentname=subarg[1],
libraryname=subarg[0],
componentversion=componentversion,
instancename=instancename)
except Error,e:
print display
print e
return
print display
def complete_listcomponents(self,text,line,begidx,endidx):
componentlist = []
try:
componentlist = self.completeargs(text,line,"[libraryname]")
except Exception:
pass
return componentlist
def do_listcomponents(self,line):
"""\
Usage : listcomponents [libraryname]
List components available in the library
"""
if line.strip() == "":
return self.columnize(settings.active_library.listLibraries())
else:
return self.columnize(
settings.active_library.listComponents(line))
def listinstances(self):
try:
self.isProjectOpen()
return [comp.getInstanceName()\
for comp in settings.active_project.getInstancesList()]
except Error,e:
print e
return
def do_listinstances(self,line):
"""\
Usage : listinstances
List all project instances
"""
try:
self.isProjectOpen()
except Error,e:
print e
return
return self.columnize(self.listinstances())
def complete_selectplatform(self,text,line,begidx,endidx):
platformlist = []
try:
platformlist = self.completeargs(text,line,"<platformname>")
except Exception,e:
print e
return platformlist
def do_selectplatform(self,line):
"""\
Usage : selectplatform <platformname>
Select the platform to use
"""
try:
self.isProjectOpen()
self.checkargs(line,"<platformname>")
except Error,e:
print e
return
try:
settings.active_project.selectPlatform(line)
settings.active_project.saveProject()
except Error,e:
print display
print e
return
print display
def do_listplatforms(self,line):
"""\
Usage : listplatforms
List platform available
"""
try:
self.isProjectOpen()
except Error,e:
print e
return
try:
return self.columnize(settings.active_project.listAvailablePlatforms())
except AttributeError,e:
print e
def complete_listinterfaces(self,text,line,begidx,endidx):
pinlist = []
try:
pinlist = self.completeargs(text,line,"<instancename>")
except Exception,e:
print e
return pinlist
def do_listinterfaces(self,line=None):
"""\
Usage : listinterfaces
List instance interface
"""
try:
self.checkargs(line,"<instancename>")
self.isProjectOpen()
interfacelist= [interface.getName() for interface in settings.active_project.getInstance(line).getInterfacesList()]
except Error,e:
print display
print e
return
print display
return self.columnize(interfacelist)
def do_saveproject(self,line):
"""\
Usage : saveproject
Save project in the curent directory
"""
try:
self.isProjectOpen()
except Error,e:
print display
print e
return
print display
settings.active_project.saveProject()
def complete_connectpin(self,text,line,begidx,endidx):
pinlist = []
try:
pinlist = self.completeargs(text,line,"<instancename>.<interfacename>.<portname>.<pinnum> <instancename>.<interfacename>.<portname>.<pinnum>")
except Exception,e:
print e
return pinlist
def do_connectpin(self,line):
"""\
Usage : connectpin <instancename>.<interfacename>.<portname>.[pinnum] <instancename>.<interfacename>.<portname>.[pinnum]
Connect pin between instances
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>.<interfacename>.<portname>.[pinnum] <instancename>.<interfacename>.<portname>.[pinnum]")
except Error,e:
print display
print e
return
arg = line.split(' ')
source = arg[0].split('.')
dest = arg[-1].split('.')
if len(source) == 3:
source.append(0)
if len(dest) == 3:
dest.append(0)
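        # (added note) a 3-part name instance.interface.port leaves the pin
        # number implicit; it defaults to pin 0 here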
try:
settings.active_project.connectPin_cmd(\
settings.active_project.getInstance(
source[0]).getInterface(
source[1]).getPort(
source[2]).getPin(source[3]),\
settings.active_project.getInstance(
dest[0] ).getInterface(
dest [1]).getPort(dest[2]).getPin(dest[3]))
except Error, e:
print display
print e
return
print display
def complete_connectport(self,text,line,begidx,endidx):
portlist = []
try:
portlist = self.completeargs(text,line,"<instancename>.<interfacename>.<portname> <instancename>.<interfacename>.<portname>")
except Exception,e:
print e
return portlist
def do_connectport(self,line):
"""
Usage : connectport <instancename>.<interfacename>.<portname> <instancename>.<interfacename>.<portname>
Connect all pins of two same size ports.
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>.<interfacename>.<portname> <instancename>.<interfacename>.<portname>")
except Exception,e:
print display
print e
return
arg=line.split(' ')
source = arg[0].split('.')
dest = arg[-1].split('.')
if len(source) != 3:
print "source arguments error"
return
if len(dest) != 3:
print "Argument error"
return
try:
settings.active_project.connectPort(source[0],source[1],source[2],
dest[0],dest[1],dest[2])
except Error, e:
print display
print e
return
print display
def complete_connectbus(self,text,line,begidx,endidx):
buslist = []
try:
buslist = self.completeargs(text,line,"<masterinstancename>.<masterinterfacename> <slaveinstancename>.<slaveinterfacename>")
except Exception,e:
print e
return buslist
def do_connectbus(self,line):
"""\
Usage : connectbus <masterinstancename>.<masterinterfacename> <slaveinstancename>.<slaveinterfacename>
Connect slave to master bus
"""
try:
self.isProjectOpen()
self.checkargs(line,"<masterinstancename>.<masterinterfacename> <slaveinstancename>.<slaveinterfacename>")
except Exception,e:
print display
print e
return
arg=line.split(' ')
source = arg[0].split('.')
dest = arg[-1].split('.')
if len(source) != 2 or len(dest) != 2:
print "Argument error"
return
try:
settings.active_project.connectBus(source[0],source[1],dest[0],dest[1])
except Error, e:
print display
print e
return
print display
def do_autoconnectbus(self,line):
"""\
Usage : autoconnectbus
Autoconnect bus if only one master in project
"""
try:
self.isProjectOpen()
settings.active_project.autoConnectBus()
except Error,e:
print display
print e
return
print display
def complete_addbusclock(self,text,line,begidx,endidx):
clocklist = []
try:
clocklist = self.completeargs(text,line,"<instancesysconname>.<interfacename> <masterinstancename>.<masterinterfacename>")
except Exception,e:
print e
return clocklist
def do_addbusclock(self,line):
"""\
        Usage : addbusclock <instancesysconname>.<interfacename> <masterinstancename>.<masterinterfacename>
Specify the bus clock
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancesysconname>.<interfacename> <masterinstancename>.<masterinterfacename>")
except Error,e:
print display
print e
return
arg=line.split(' ')
source = arg[0].split('.')
dest = arg[-1].split('.')
if len(source) != 2 or len(dest) != 2:
print "Argument error"
return
try:
settings.active_project.connectClkDomain(source[0],dest[0],source[1],dest[1])
except Error, e:
print display
print e
return
print display
def complete_delpinconnection(self,text,line,begidx,endidx):
connectlist = []
try:
connectlist = self.completeargs(text,line,"<instancename>.<interfacename>.<portname>.<pinnum> <instancename>.<interfacename>.<portname>.<pinnum>")
except Exception,e:
print e
return connectlist
def do_delpinconnection(self,line):
"""\
Usage : delpinconnection <instancename>.<interfacename>.<portname>.[pinnum] [instancename].[interfacename].[portname].[pinnum]
Suppress a pin connection
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>.<interfacename>.<portname>.[pinnum] [instancename].[interfacename].[portname].[pinnum]")
except Error,e:
print display
print e
return
# get arguments
arg = line.split(' ')
# make source and destination tabular
source = arg[0].split('.')
dest = arg[-1].split('.')
# check if dest "instance.interface.port.pin" present,
# if not set it to [None] tabular
try:
dest = arg[1].split('.')
except IndexError:
dest = [None,None,None,None]
# check if pin num present, if not set it None
if len(source) == 3: # instead of 4
source.append(None)
if len(dest) == 3 :
dest.append(None)
try:
settings.active_project.deletePinConnection_cmd(source[0],source[1],source[2],source[3],
dest[0],dest[1],dest[2],dest[3])
except Error, e:
print display
print e
return
print display
print "Connection deleted"
# TODO
def complete_delbusconnection(self,text,line,begidx,endidx):
connectlist = []
try:
connectlist = self.completeargs(text,line,"<instancename>.<interfacename>.<portname>")
except Exception,e:
print e
return connectlist
# TODO
def do_delbusconnection(self,line):
"""\
Usage : delbusconnection <instancename>.<interfacebusname>
        Suppress a bus connection
"""
print "TODO"
def complete_delinstance(self,text,line,begidx,endidx):
componentlist = []
try:
componentlist = self.completeargs(text,line,"<instancename>")
except Exception,e:
print e
return componentlist
def do_delinstance(self,line):
"""\
Usage : delinstance <instancename>
Suppress a component from project
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>")
except Error,e:
print display
print e
return
try:
settings.active_project.delProjectInstance(line)
except Error,e:
print display
print e
return
print display
def do_check(self,line):
"""\
Usage : check
Check the project before code generation
"""
try:
self.isProjectOpen()
settings.active_project.check()
except Error,e:
print display
print e
print display
def complete_setaddr(self,text,line,begidx,endidx):
addrlist = []
try:
addrlist = self.completeargs(text,line,"<slaveinstancename>.<slaveinterfacename> <addressinhexa>")
except Exception,e:
print e
return addrlist
def do_setaddr(self,line):
"""\
Usage : setaddr <slaveinstancename>.<slaveinterfacename> <addressinhexa>
Set the base address of slave interface
"""
try:
self.isProjectOpen()
self.checkargs(line,"<slaveinstancename>.<slaveinterfacename> <addressinhexa>")
except Error,e:
print display
print e
return
arg = line.split(' ')
names = arg[0].split('.')
if len(names) < 2:
masterinterface = settings.active_project.getInstance(names[0]).getSlaveInterfaceList()
if len(masterinterface) != 1:
print display
print "Error, need a slave interface name"
return
names.append(masterinterface[0].getName())
try:
interfaceslave = settings.active_project.getInstance(names[0]).getInterface(names[1])
interfacemaster = interfaceslave.getMaster()
interfacemaster.allocMem.setAddressSlave(interfaceslave,arg[1])
except Error,e:
print display
print e
return
print display
print "Base address "+arg[1]+" set"
def do_listmasters(self,line):
"""\
        Usage : listmasters
        List master interfaces
"""
try:
self.isProjectOpen()
except Error,e:
print display
print e
return
for master in settings.active_project.getInterfaceMaster():
print master.parent.getInstanceName()+"."+master.getName()
print display
def complete_getmapping(self,text,line,begidx,endidx):
mappinglist = []
try:
mappinglist = self.completeargs(text,line,"<masterinstancename>.<masterinterfacename>")
except Exception,e:
print e
return mappinglist
def do_getmapping(self,line=None):
"""\
Usage : getmapping <masterinstancename>.<masterinterfacename>
Return mapping for a master interface
"""
try:
self.isProjectOpen()
self.checkargs(line,"<masterinstancename>.<masterinterfacename>")
except Error,e:
print display
print e
return
arg = line.split(' ')
names = arg[0].split('.')
try:
masterinterface = settings.active_project.getInstance(names[0]).getInterface(names[1])
print masterinterface.allocMem
except Error,e:
print display
print e
print display
def complete_printxml(self,text,line,begidx,endidx):
printlist = []
try:
printlist = self.completeargs(text,line,"<instancename>")
except Exception,e:
print e
return printlist
def do_printxml(self,line=None):
"""\
Usage : printxml <instancename>
Print instance in XML format
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>")
except Error,e:
print display
print e
return
print settings.active_project.getInstance(line)
print display
def complete_info(self,text,line,begidx,endidx):
infolist = []
try:
infolist = self.completeargs(text,line,"<instancename>")
except Exception,e:
print e
return infolist
def do_info(self,line=None):
"""\
Usage : info <instancename>
Print instance information
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>")
instance = settings.active_project.getInstance(line)
except Error,e:
print display
print e
return
print "Instance name :"+instance.getInstanceName()
print "Component name :"+instance.getName()
print "description : "+instance.getDescription().strip()
print "->Generics"
for generic in instance.getGenericsList():
print "%15s : "%generic.getName() + generic.getValue()
print "->Interfaces"
for interface in instance.getInterfacesList():
if interface.getBusName() != None:
if interface.getClass() == "slave":
print "%-15s "%interface.getName()+" Base address:"+hex(interface.getBaseInt())
elif interface.getClass() == "master":
print "%-15s :"%interface.getName()
for slave in interface.getSlavesList():
print " "*10 + "slave -> "+slave.getInstanceName()+"."+slave.getInterfaceName()
else:
print "%-15s :"%interface.getName()
for port in interface.getPortsList():
print " "*5+"%-15s"%port.getName()+" s"+port.getSize()
for pin in port.getPinsList():
print " "*8+"pin",
if pin.getNum()!= None:
print pin.getNum()+":",
elif pin.isAll():
print "all",
first = True
for connection in pin.getConnections():
if first is not True:
print " "*8+"|"+" "*5,
first = False
print "-> "+connection["instance_dest"]+"."+connection["interface_dest"]+"."+connection["port_dest"]+"."+connection["pin_dest"]
def complete_setgeneric(self,text,line,begidx,endidx):
genericlist = []
try:
genericlist = self.completeargs(text,line,"<instancename>.<genericname> <genericvalue>")
except Exception,e:
print e
return genericlist
def do_setgeneric(self,line=None):
"""\
Usage : setgeneric <instancename>.<genericname> <genericvalue>
Set generic parameter
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>.<genericname> <genericvalue>")
except Error,e:
print display
print e
return
args = line.split(" ")
names = args[0].split(".")
try:
instance = settings.active_project.getInstance(names[0])
generic = instance.getGeneric(names[1])
if generic.isPublic()=="true":
generic.setValue(args[1])
else:
raise Error("this generic can't be modified by user",0)
except Error,e:
print display
print e
return
print display
print "Done"
def do_description(self,line):
"""\
        Usage : description <some words for description>
        Set the project description
"""
settings.active_project.setDescription(line)
print display
print "Description set : "+line
return
def do_closeproject(self,line):
"""\
Usage : closeproject
Close the project
"""
try:
self.isProjectOpen()
except Error,e:
print display
print e
return
settings.active_project = None
print display
print "Project closed"
# Generate CODE
def complete_intercon(self,text,line,begidx,endidx):
interconlist = []
try:
interconlist = self.completeargs(text,line,"<masterinstancename>.<masterinterfacename>")
except Exception,e:
print e
return interconlist
############# FIXME: Move it in synthesiscli ? ######################-
#TODO: change name to generateintercon
def do_intercon(self,line=None):
"""\
Usage : intercon <masterinstancename>.<masterinterfacename>
Generate intercon for master given in argument
"""
try:
self.isProjectOpen()
self.checkargs(line,"<instancename>.<masterinterfacename>")
except Error,e:
print e
return
arg = line.split(' ')
names = arg[0].split('.')
if len(names) != 2:
print "Arguments error"
return
try:
settings.active_project.generateIntercon(names[0],names[1])
except Error,e:
print e
return
print display
#TODO : change name to generatetop
def do_top(self,line):
"""\
Usage : top
Generate top component
"""
try:
self.isProjectOpen()
settings.active_project.check()<|fim▁hole|> top.generate()
except Error,e:
print e
return
print display
print "Top generated with name : top_"+settings.active_project.getName()+".vhd"
#####################################################################
def do_report(self,line):
"""\
Usage : report
Generate a report of the project
"""
try:
self.isProjectOpen()
text = settings.active_project.generateReport()
except Error,e:
print display
print e
return
print display
print "report : "
print text
def isProjectOpen(self):
""" check if project is open, raise error if not
"""
if settings.active_project.isVoid() :
raise Error("No project open",0)
def do_listforce(self,line):
"""\
Usage : listforce
List all force configured for this project
"""
try:
for port in settings.active_project.getForcesList():
print "port "+str(port.getName())+" is forced to "+str(port.getForce())
except Error, e:
print display
print e
return
# TODO
def complete_setforce(self,text,line,begidx,endidx):
pinlist = []
try:
pinlist = self.completeargs(text,line,"<forcename> <forcestate>")
except Exception,e:
print e
return pinlist
def do_setforce(self, line):
"""\
        Usage : setforce <forcename> <forcestate>
        Set FPGA pin state to 'gnd' or 'vcc'. To unset, use the 'undef' value
"""
try:
self.isProjectOpen()
self.checkargs(line,"<forcename> <forcestate>")
except Error,e:
print display
print e
return
arg = line.split(' ')
portname = arg[-2]
state = arg[-1]
try:
settings.active_project.setForce(portname, state)
except Error, e:
print display
print e
return<|fim▁end|> | top = TopVHDL(settings.active_project) |
<|file_name|>api_op_DescribeSnapshots.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package ec2
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/internal/awsutil"
)
type DescribeSnapshotsInput struct {
_ struct{} `type:"structure"`
// Checks whether you have the required permissions for the action, without
// actually making the request, and provides an error response. If you have
// the required permissions, the error response is DryRunOperation. Otherwise,
// it is UnauthorizedOperation.
DryRun *bool `locationName:"dryRun" type:"boolean"`
// The filters.
//
// * description - A description of the snapshot.
//
// * encrypted - Indicates whether the snapshot is encrypted (true | false)
//
// * owner-alias - The owner alias, from an Amazon-maintained list (amazon).
// This is not the user-configured AWS account alias set using the IAM console.
// We recommend that you use the related parameter instead of this filter.
//
// * owner-id - The AWS account ID of the owner. We recommend that you use
// the related parameter instead of this filter.
//
// * progress - The progress of the snapshot, as a percentage (for example,
// 80%).
//
// * snapshot-id - The snapshot ID.
//
// * start-time - The time stamp when the snapshot was initiated.
//
// * status - The status of the snapshot (pending | completed | error).
//
// * tag:<key> - The key/value combination of a tag assigned to the resource.
// Use the tag key in the filter name and the tag value as the filter value.
// For example, to find all resources that have a tag with the key Owner
// and the value TeamA, specify tag:Owner for the filter name and TeamA for
// the filter value.
//
// * tag-key - The key of a tag assigned to the resource. Use this filter
// to find all resources assigned a tag with a specific key, regardless of
// the tag value.
//
// * volume-id - The ID of the volume the snapshot is for.
//
// * volume-size - The size of the volume, in GiB.
Filters []Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
// The maximum number of snapshot results returned by DescribeSnapshots in paginated
// output. When this parameter is used, DescribeSnapshots only returns MaxResults
// results in a single page along with a NextToken response element. The remaining
// results of the initial request can be seen by sending another DescribeSnapshots
// request with the returned NextToken value. This value can be between 5 and
// 1000; if MaxResults is given a value larger than 1000, only 1000 results
// are returned. If this parameter is not used, then DescribeSnapshots returns
// all results. You cannot specify this parameter and the snapshot IDs parameter
// in the same request.
MaxResults *int64 `type:"integer"`
// The NextToken value returned from a previous paginated DescribeSnapshots
// request where MaxResults was used and the results exceeded the value of that
// parameter. Pagination continues from the end of the previous results that
// returned the NextToken value. This value is null when there are no more results
// to return.
NextToken *string `type:"string"`
// Scopes the results to snapshots with the specified owners. You can specify
// a combination of AWS account IDs, self, and amazon.
OwnerIds []string `locationName:"Owner" locationNameList:"Owner" type:"list"`
// The IDs of the AWS accounts that can create volumes from the snapshot.
RestorableByUserIds []string `locationName:"RestorableBy" type:"list"`
// The snapshot IDs.
//
// Default: Describes the snapshots for which you have create volume permissions.
SnapshotIds []string `locationName:"SnapshotId" locationNameList:"SnapshotId" type:"list"`
}
// String returns the string representation
func (s DescribeSnapshotsInput) String() string {
return awsutil.Prettify(s)
}
type DescribeSnapshotsOutput struct {
_ struct{} `type:"structure"`
// The NextToken value to include in a future DescribeSnapshots request. When
// the results of a DescribeSnapshots request exceed MaxResults, this value
// can be used to retrieve the next page of results. This value is null when
// there are no more results to return.
NextToken *string `locationName:"nextToken" type:"string"`
// Information about the snapshots.
Snapshots []Snapshot `locationName:"snapshotSet" locationNameList:"item" type:"list"`
}
// String returns the string representation
func (s DescribeSnapshotsOutput) String() string {
return awsutil.Prettify(s)
}
const opDescribeSnapshots = "DescribeSnapshots"
// DescribeSnapshotsRequest returns a request value for making API operation for
// Amazon Elastic Compute Cloud.
//
// Describes the specified EBS snapshots available to you or all of the EBS
// snapshots available to you.
//
// The snapshots available to you include public snapshots, private snapshots
// that you own, and private snapshots owned by other AWS accounts for which
// you have explicit create volume permissions.
//
// The create volume permissions fall into the following categories:
//
// * public: The owner of the snapshot granted create volume permissions
// for the snapshot to the all group. All AWS accounts have create volume
// permissions for these snapshots.
//
// * explicit: The owner of the snapshot granted create volume permissions
// to a specific AWS account.
//
// * implicit: An AWS account has implicit create volume permissions for
// all snapshots it owns.
//
// The list of snapshots returned can be filtered by specifying snapshot IDs,
// snapshot owners, or AWS accounts with create volume permissions. If no options
// are specified, Amazon EC2 returns all snapshots for which you have create
// volume permissions.
//
// If you specify one or more snapshot IDs, only snapshots that have the specified
// IDs are returned. If you specify an invalid snapshot ID, an error is returned.
// If you specify a snapshot ID for which you do not have access, it is not
// included in the returned results.
//
// If you specify one or more snapshot owners using the OwnerIds option, only
// snapshots from the specified owners and for which you have access are returned.
// The results can include the AWS account IDs of the specified owners, amazon
// for snapshots owned by Amazon, or self for snapshots that you own.
//
// If you specify a list of restorable users, only snapshots with create snapshot
// permissions for those users are returned. You can specify AWS account IDs
// (if you own the snapshots), self for snapshots for which you own or have
// explicit permissions, or all for public snapshots.
//
// If you are describing a long list of snapshots, we recommend that you paginate
// the output to make the list more manageable. The MaxResults parameter sets
// the maximum number of results returned in a single page. If the list of results
// exceeds your MaxResults value, then that number of results is returned along
// with a NextToken value that can be passed to a subsequent DescribeSnapshots
// request to retrieve the remaining results.
//
// To get the state of fast snapshot restores for a snapshot, use DescribeFastSnapshotRestores.
//
// For more information about EBS snapshots, see Amazon EBS Snapshots (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSSnapshots.html)
// in the Amazon Elastic Compute Cloud User Guide.
//
// // Example sending a request using DescribeSnapshotsRequest.
// req := client.DescribeSnapshotsRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSnapshots
func (c *Client) DescribeSnapshotsRequest(input *DescribeSnapshotsInput) DescribeSnapshotsRequest {
op := &aws.Operation{
Name: opDescribeSnapshots,
HTTPMethod: "POST",
HTTPPath: "/",
Paginator: &aws.Paginator{
InputTokens: []string{"NextToken"},
OutputTokens: []string{"NextToken"},
LimitToken: "MaxResults",
TruncationToken: "",
},
}
if input == nil {
input = &DescribeSnapshotsInput{}
}
req := c.newRequest(op, input, &DescribeSnapshotsOutput{})
return DescribeSnapshotsRequest{Request: req, Input: input, Copy: c.DescribeSnapshotsRequest}
}
// DescribeSnapshotsRequest is the request type for the
// DescribeSnapshots API operation.
type DescribeSnapshotsRequest struct {
*aws.Request
Input *DescribeSnapshotsInput<|fim▁hole|>// Send marshals and sends the DescribeSnapshots API request.
func (r DescribeSnapshotsRequest) Send(ctx context.Context) (*DescribeSnapshotsResponse, error) {
r.Request.SetContext(ctx)
err := r.Request.Send()
if err != nil {
return nil, err
}
resp := &DescribeSnapshotsResponse{
DescribeSnapshotsOutput: r.Request.Data.(*DescribeSnapshotsOutput),
response: &aws.Response{Request: r.Request},
}
return resp, nil
}
// NewDescribeSnapshotsPaginator returns a paginator for DescribeSnapshots.
// Use the Next method to advance to the next page, and CurrentPage to get the
// current response page from the paginator. Next returns false when there are
// no more pages or an error was encountered.
//
// Note: This operation can generate multiple requests to a service.
//
// // Example iterating over pages.
// req := client.DescribeSnapshotsRequest(input)
// p := ec2.NewDescribeSnapshotsRequestPaginator(req)
//
// for p.Next(context.TODO()) {
// page := p.CurrentPage()
// }
//
// if err := p.Err(); err != nil {
// return err
// }
//
func NewDescribeSnapshotsPaginator(req DescribeSnapshotsRequest) DescribeSnapshotsPaginator {
return DescribeSnapshotsPaginator{
Pager: aws.Pager{
NewRequest: func(ctx context.Context) (*aws.Request, error) {
var inCpy *DescribeSnapshotsInput
if req.Input != nil {
tmp := *req.Input
inCpy = &tmp
}
newReq := req.Copy(inCpy)
newReq.SetContext(ctx)
return newReq.Request, nil
},
},
}
}
// DescribeSnapshotsPaginator is used to paginate the request. This can be done by
// calling Next and CurrentPage.
type DescribeSnapshotsPaginator struct {
aws.Pager
}
func (p *DescribeSnapshotsPaginator) CurrentPage() *DescribeSnapshotsOutput {
return p.Pager.CurrentPage().(*DescribeSnapshotsOutput)
}
// DescribeSnapshotsResponse is the response type for the
// DescribeSnapshots API operation.
type DescribeSnapshotsResponse struct {
*DescribeSnapshotsOutput
response *aws.Response
}
// SDKResponseMetdata returns the response metadata for the
// DescribeSnapshots request.
func (r *DescribeSnapshotsResponse) SDKResponseMetdata() *aws.Response {
return r.response
}<|fim▁end|> | Copy func(*DescribeSnapshotsInput) DescribeSnapshotsRequest
}
|
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
function Cat(name, color) {
this.name = name;
this.color = color;
}
Cat.prototype.age = 4;
var huesitos = new Cat('huesitos', 'amarillo claro');
display(huesitos.__proto__);
display(Cat.prototype);
display(huesitos.__proto__ === Cat.prototype);
<|fim▁hole|> Cat.prototype = { age: 5};
display(Cat.prototype);
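// Note: reassigning Cat.prototype above replaces the object that cats created
// from now on will delegate to; huesitos.__proto__ still points at the
// original prototype object, so huesitos.age is still 4.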
} ());<|fim▁end|> | |
<|file_name|>parser.ts<|end_file_name|><|fim▁begin|>import {ENV, DEBUG, uuid, unpad, underline, coerceInput} from "./utils"
import * as runtime from "./runtime"
import {eve} from "./app"
import {repeat} from "./utils"
class ParseError extends Error {
name: string = "Parse Error";
constructor(public message:string, public line:string, public lineIx?:number, public charIx:number = 0, public length:number = line && (line.length - charIx)) {
super(message);
}
toString() {
return unpad(6) `
${this.name}: ${this.message}
${this.lineIx !== undefined ? `On line ${this.lineIx + 1}:${this.charIx}` : ""}
${this.line}
${underline(this.charIx, this.length)}
`;
}
}
function readWhile(str:string, pattern:RegExp, startIx:number):string {
let endIx = startIx;
while(str[endIx] !== undefined && str[endIx].match(pattern)) endIx++;
return str.slice(startIx, endIx);
}
function readUntil(str:string, sentinel:string, startIx:number):string;
function readUntil(str:string, sentinel:string, startIx:number, unsatisfiedErr: Error):string|Error;
function readUntil(str:string, sentinel:string, startIx:number, unsatisfiedErr?: Error):any {
let endIx = str.indexOf(sentinel, startIx);
if(endIx === -1) {
if(unsatisfiedErr) return unsatisfiedErr;
return str.slice(startIx);
}
return str.slice(startIx, endIx);
}
function readUntilAny(str:string, sentinels:string[], startIx:number):string;
function readUntilAny(str:string, sentinels:string[], startIx:number, unsatisfiedErr: Error):string|Error;
function readUntilAny(str:string, sentinels:string[], startIx:number, unsatisfiedErr?: Error):any {
let endIx = -1;
for(let sentinel of sentinels) {
let ix = str.indexOf(sentinel, startIx);
if(ix === -1 || endIx !== -1 && ix > endIx) continue;
endIx = ix;
}
if(endIx === -1) {
if(unsatisfiedErr) return unsatisfiedErr;
return str.slice(startIx);
}
return str.slice(startIx, endIx);
}
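// Example (sketch): readWhile("  foo", /\s/, 0) -> "  ";
// readUntil("a,b,c", ",", 0) -> "a"; readUntilAny("a,b;c", [";", ","], 0) -> "a".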
//-----------------------------------------------------------------------------
// UI DSL Parser
//-----------------------------------------------------------------------------
export interface UIElem {
id?: string
children?: UIElem[]
embedded?: {} // Undefined or the restricted scope of the embedded child.
binding?: string
bindingKind?: string
attributes?: {}
events?: {[event:string]: {}}
}
// export function parseUI(str:string):UIElem {
// let root:UIElem = {};
// let errors = [];
// let lineIx = 0;
// let lines = str.split("\n");
// let stack:{indent: number, elem: UIElem}[] = [{indent: -2, elem: root}];
// // @FIXME: Chunk into element chunks instead of lines to enable in-argument continuation.
// for(let line of lines) {
// let charIx = 0;
// while(line[charIx] === " ") charIx++;
// let indent = charIx;
// if(line[charIx] === undefined) continue;
// let parent:UIElem;
// for(let stackIx = stack.length - 1; stackIx >= 0; stackIx--) {
// if(indent > stack[stackIx].indent) {
// parent = stack[stackIx].elem;
// break;
// } else stack.pop();
// }
// let keyword = readUntil(line, " ", charIx);
// charIx += keyword.length;
// if(keyword[0] === "~" || keyword[0] === "%") { // Handle binding
// charIx -= keyword.length - 1;
// let kind = keyword[0] === "~" ? "plan" : "query";
// if(!parent.binding) {
// parent.binding = line.slice(charIx);
// parent.bindingKind = kind;
// } else if(kind === parent.bindingKind) parent.binding += "\n" + line.slice(charIx);
// else {
// errors.push(new ParseError(`UI must be bound to a single type of query.`, line, lineIx));
// continue;
// }
// charIx = line.length;
// } else if(keyword[0] === "@") { // Handle event
// charIx -= keyword.length - 1;
// let err;
// while(line[charIx] === " ") charIx++;
// let lastIx = charIx;
// let eventRaw = readUntil(line, "{", charIx);
// charIx += eventRaw.length;
// let event = eventRaw.trim();
// if(!event) err = new ParseError(`UI event must specify a valid event name`, line, lineIx, lastIx, eventRaw.length);
// let state;
// [state, charIx] = getMapArgs(line, lineIx, charIx);
// if(state instanceof Error && !err) err = state;
// if(err) {
// errors.push(err);
// lineIx++;
// continue;
// }
// if(!parent.events) parent.events = {};
// parent.events[event] = state;
// } else if(keyword[0] === ">") { // Handle embed
// charIx -= keyword.length - 1;
// let err;
// while(line[charIx] === " ") charIx++;
// let lastIx = charIx;
// let embedIdRaw = readUntil(line, "{", charIx);
// charIx += embedIdRaw.length;
// let embedId = embedIdRaw.trim();
// if(!embedId) err = new ParseError(`UI embed must specify a valid element id`, line, lineIx, lastIx, embedIdRaw.length);<|fim▁hole|>// if(scope instanceof Error && !err) err = scope;
// if(err) {
// errors.push(err);
// lineIx++;
// continue;
// }
// let elem = {embedded: scope, id: embedId};
// if(!parent.children) parent.children = [];
// parent.children.push(elem);
// stack.push({indent, elem});
// } else { // Handle element
// let err;
// if(!keyword) err = new ParseError(`UI element must specify a valid tag name`, line, lineIx, charIx, 0);
// while(line[charIx] === " ") charIx++;
// let classesRaw = readUntil(line, "{", charIx);
// charIx += classesRaw.length;
// let classes = classesRaw.trim();
// let attributes;
// [attributes = {}, charIx] = getMapArgs(line, lineIx, charIx);
// if(attributes instanceof Error && !err) err = attributes;
// if(err) {
// errors.push(err);
// lineIx++;
// continue;
// }
// attributes["t"] = keyword;
// if(classes) attributes["c"] = classes;
// let elem:UIElem = {id: attributes["id"], attributes};
// if(!parent.children) parent.children = [];
// parent.children.push(elem);
// stack.push({indent, elem});
// }
// lineIx++;
// }
// if(errors.length) {
// for(let err of errors) {
// console.error(err);
// }
// }
// return root;
// }
//-----------------------------------------------------------------------------
// Eve DSL Parser
//-----------------------------------------------------------------------------
enum TOKEN_TYPE { EXPR, IDENTIFIER, KEYWORD, STRING, LITERAL };
export class Token {
static TYPE = TOKEN_TYPE;
static identifier(value:string, lineIx?: number, charIx?: number) {
return new Token(Token.TYPE.IDENTIFIER, value, lineIx, charIx);
}
static keyword(value:string, lineIx?: number, charIx?: number) {
return new Token(Token.TYPE.KEYWORD, value, lineIx, charIx);
}
static string(value:string, lineIx?: number, charIx?: number) {
return new Token(Token.TYPE.STRING, value, lineIx, charIx);
}
static literal(value:any, lineIx?: number, charIx?: number) {
return new Token(Token.TYPE.LITERAL, value, lineIx, charIx);
}
constructor(public type?: TOKEN_TYPE, public value?: any, public lineIx?: number, public charIx?: number) {}
toString() {
if(this.type === Token.TYPE.KEYWORD) return `:${this.value}`;
else if(this.type === Token.TYPE.STRING) return `"${this.value}"`;
else return this.value.toString();
}
}
export class Sexpr {
static list(value:(Token|Sexpr)[] = [], lineIx?: number, charIx?: number, syntax?: boolean) {
value = value.slice();
value.unshift(Token.identifier("list", lineIx, charIx ? charIx + 1 : undefined));
return new Sexpr(value, lineIx, charIx, syntax ? "list" : undefined);
}
static hash(value:(Token|Sexpr)[] = [], lineIx?: number, charIx?: number, syntax?: boolean) {
value = value.slice();
value.unshift(Token.identifier("hash", lineIx, charIx ? charIx + 1 : undefined));
return new Sexpr(value, lineIx, charIx, syntax ? "hash" : undefined);
}
static asSexprs(values:(Token|Sexpr)[]):Sexpr[] {
for(let raw of values) {
if(!(raw instanceof Sexpr)) throw new ParseError(`All top level entries must be expressions (got ${raw})`, undefined, raw.lineIx, raw.charIx);
else {
let op = raw.operator;
if(op.type !== Token.TYPE.IDENTIFIER)
throw new ParseError(`All expressions must begin with an identifier`, undefined, raw.lineIx, raw.charIx);
}
}
return <Sexpr[]>values;
}
public type = Token.TYPE.EXPR;
public value:(Token|Sexpr)[];
constructor(val?: (Token|Sexpr)[], public lineIx?: number, public charIx?: number, public syntax = "expr") {
if(val) this.value = val.slice();
}
toString() {
let content = this.value && this.value.map((token) => token.toString()).join(" ");
let argsContent = this.value && this.arguments.map((token) => token.toString()).join(" ");
if(this.syntax === "hash") return `{${argsContent}}`;
else if(this.syntax === "list") return `[${argsContent}]`;
else return `(${content})`;
}
push(val:Token|Sexpr) {
this.value = this.value || [];
return this.value.push(val);
}
nth(n, val?:Token|Sexpr) {
if(val) {
this.value = this.value || [];
return this.value[n] = val;
}
return this.value && this.value[n];
}
get operator() {
return this.value && this.value[0];
}
set operator(op: Token|Sexpr) {
this.value = this.value || [];
this.value[0] = op;
}
get arguments() {
return this.value && this.value.slice(1);
}
set arguments(args: (Token|Sexpr)[]) {
this.value = this.value || [];
this.value.length = 1;
this.value.push.apply(this.value, args);
}
get length() {
return this.value && this.value.length;
}
}
const TOKEN_TO_TYPE = {
"(": "expr",
")": "expr",
"[": "list",
"]": "list",
"{": "hash",
"}": "hash"
};
let hygienicSymbolCounter = 0;
export function readSexprs(text:string):Sexpr {
let root = Sexpr.list();
let token:Token;
let sexpr:Sexpr = root;
let sexprs:Sexpr[] = [root];
let lines = text.split("\n");
let lineIx = 0;
let mode:string;
for(let line of lines) {
let charIx = 0;
if(mode === "string") token.value += "\n";
while(charIx < line.length) {
if(mode === "string") {
if(line[charIx] === "\"" && line[charIx - 1] !== "\\") {
sexpr.push(token);
token = mode = undefined;
charIx++;
} else token.value += line[charIx++];
continue;
}
let padding = readWhile(line, /\s/, charIx);
charIx += padding.length;
if(padding.length) {
if(token) sexpr.push(token);
token = undefined;
}
if(charIx >= line.length) continue;
if(line[charIx] === ";") {
charIx = line.length;
} else if(line[charIx] === "\"") {
if(!sexpr.length) throw new ParseError(`Literal must be an argument in a sexpr.`, line, lineIx, charIx);
mode = "string";
token = Token.string("", lineIx, charIx);
charIx++;
} else if(line[charIx] === ":") {
if(!sexpr.length) throw new ParseError(`Literal must be an argument in a sexpr.`, line, lineIx, charIx);
let keyword = readUntilAny(line, [" ", ")", "]", "}"], ++charIx);
sexpr.push(Token.keyword(keyword, lineIx, charIx - 1));
charIx += keyword.length;
} else if(line[charIx] === "(" || line[charIx] === "[" || line[charIx] === "{") {
if(token) throw new ParseError(`Sexpr arguments must be space separated.`, line, lineIx, charIx);
let type = TOKEN_TO_TYPE[line[charIx]];
if(type === "hash") sexpr = Sexpr.hash(undefined, lineIx, charIx);
else if(type === "list") sexpr = Sexpr.list(undefined, lineIx, charIx);
else sexpr = new Sexpr(undefined, lineIx, charIx);
sexpr.syntax = type;
sexprs.push(sexpr);
charIx++;
} else if(line[charIx] === ")" || line[charIx] === "]" || line[charIx] === "}") {
let child = sexprs.pop();
let type = TOKEN_TO_TYPE[line[charIx]];
if(child.syntax !== type) throw new ParseError(`Must terminate ${child.syntax} before terminating ${type}`, line, lineIx, charIx);
sexpr = sexprs[sexprs.length - 1];
if(!sexpr) throw new ParseError(`Too many closing parens`, line, lineIx, charIx);
sexpr.push(child);
charIx++;
} else {
let literal = readUntilAny(line, [" ", ")", "]", "}"], charIx);
let length = literal.length;
literal = coerceInput(literal);
let type = typeof literal === "string" ? "identifier" : "literal";
if(!sexpr.length && type !== "identifier") throw new ParseError(`Expr must begin with identifier.`, line, lineIx, charIx);
if(type === "identifier") {
let dotIx = literal.indexOf(".");
if(dotIx !== -1) {
let child:Sexpr = new Sexpr([
Token.identifier("get", lineIx, charIx + 1),
Token.identifier(literal.slice(0, dotIx), lineIx, charIx + 3),
Token.string(literal.slice(dotIx + 1), lineIx, charIx + 5 + dotIx)
], lineIx, charIx);
sexpr.push(child);
} else sexpr.push(Token.identifier(literal, lineIx, charIx));
} else sexpr.push(Token.literal(literal, lineIx, charIx));
charIx += length;
}
}
lineIx++;
}
if(token) throw new ParseError(`Unterminated ${TOKEN_TYPE[token.type]} token`, lines[lineIx - 1], lineIx - 1);
let lastIx = lines.length - 1;
if(sexprs.length > 1) throw new ParseError(`Too few closing parens`, lines[lastIx], lastIx, lines[lastIx].length);
return root;
}
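// Example (sketch): readSexprs(`(select "user" :id id)`) should return a root
// (list ...) Sexpr whose single argument is the (select ...) expression, with
// "user" read as a string token, :id as a keyword, and id as an identifier.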
export function macroexpandDSL(sexpr:Sexpr):Sexpr {
// @TODO: Implement me.
let op = sexpr.operator;
if(op.value === "eav") {
throw new Error("@TODO: Implement me!");
} else if(op.value === "one-of") {
// (one-of (query ...body) (query ...body) ...) =>
// (union
// (def q1 (query ...body1))
// (def q2 (query (negate q1) ...body2)))
throw new Error("@TODO: Implement me!");
} else if(op.value === "negate") {
if(sexpr.length > 2) throw new ParseError(`Negate only takes a single body`, undefined, sexpr.lineIx, sexpr.charIx);
let select = macroexpandDSL(Sexpr.asSexprs(sexpr.arguments)[0]);
select.push(Token.keyword("$$negated"));
select.push(Token.literal(true));
return select;
} else if(["hash", "list", "get", "def", "query", "union", "select", "member", "project!", "insert!", "remove!", "load!"].indexOf(op.value) === -1) {
// (foo-bar :a 5) => (select "foo bar" :a 5)
let source = op;
source.type = Token.TYPE.STRING;
source.value = source.value.replace(/(.?)-(.)/g, "$1 $2");
let args = sexpr.arguments;
args.unshift(source);
sexpr.arguments = args;
sexpr.operator = Token.identifier("select");
}
return sexpr;
}
enum VALUE { NULL, SCALAR, SET, VIEW };
export type Artifacts = {changeset?: runtime.Diff, views: {[query:string]: runtime.Query|runtime.Union}};
type Variable = {name: string, type: VALUE, static?: boolean, value?: any, projection?: string, constraints: [string, string][]};
type VariableContext = Variable[];
export function parseDSL(text:string):Artifacts {
let artifacts:Artifacts = {views: {}};
let lines = text.split("\n");
let root = readSexprs(text);
for(let raw of Sexpr.asSexprs(root.arguments)) parseDSLSexpr(raw, artifacts);
return artifacts;
}
type SexprResult = {type:VALUE, value?:any, projected?:any, context?:any, mappings?:any, aggregated?:boolean};
function parseDSLSexpr(raw:Sexpr, artifacts:Artifacts, context?:VariableContext, parent?:runtime.Query|runtime.Union, resultVariable?:string):SexprResult {
if(parent instanceof runtime.Query) var query = parent;
else var union = <runtime.Union>parent;
let sexpr = macroexpandDSL(raw);
let op = sexpr.operator;
if(op.type !== Token.TYPE.IDENTIFIER)
throw new ParseError(`Evaluated sexpr must begin with an identifier ('${op}' is a ${Token.TYPE[op.type]})`, "", raw.lineIx, raw.charIx);
if(op.value === "list") {
let {$$body} = parseArguments(sexpr, undefined, "$$body");
return {type: VALUE.SCALAR, value: (<any>$$body).map((token, ix) => resolveTokenValue(`list item ${ix}`, token, context))};
}
if(op.value === "hash") {
let args = parseArguments(sexpr);
for(let arg in args) args[arg] = resolveTokenValue(`hash item ${arg}`, args[arg], context);
return {type: VALUE.SET, value: args};
}
if(op.value === "insert!") {
let changeset = artifacts.changeset || eve.diff();
for(let arg of sexpr.arguments) {
let table = arg.value[0];
let fact = {};
for(let ix = 1; ix < arg.value.length; ix += 2) {
let key = arg.value[ix];
let value = arg.value[ix+1];
fact[key.value] = value.value;
}
changeset.add(table.value, fact);
}
artifacts.changeset = changeset;
return;
}
if(op.value === "remove!") {
let changeset = artifacts.changeset || eve.diff();
for(let arg of sexpr.arguments) {
let table = arg.value[0];
let fact = {};
for(let ix = 1; ix < arg.value.length; ix += 2) {
let key = arg.value[ix];
let value = arg.value[ix+1];
fact[key.value] = value.value;
}
changeset.remove(table.value, fact);
}
artifacts.changeset = changeset;
return;
}
if(op.value === "load!") {
throw new Error("(load! ..) has not been implemented yet");
}
if(op.value === "query") {
let neueContext:VariableContext = [];
let {$$view, $$negated, $$body} = parseArguments(sexpr, undefined, "$$body");
let queryId = $$view ? resolveTokenValue("view", $$view, context, VALUE.SCALAR) : uuid();
let neue = new runtime.Query(eve, queryId);
neue["displayName"] = sexpr.toString();
if(DEBUG.instrumentQuery) instrumentQuery(neue, DEBUG.instrumentQuery);
artifacts.views[queryId] = neue;
let aggregated = false;
for(let raw of Sexpr.asSexprs(<any>$$body)) {
let state = parseDSLSexpr(raw, artifacts, neueContext, neue);
if(state && state.aggregated) aggregated = true;
}
let projectionMap = neue.projectionMap;
let projected = true;
if(!projectionMap) {
projectionMap = {};
projected = false;
for(let variable of neueContext) projectionMap[variable.name] = variable.value;
}
if(Object.keys(projectionMap).length) neue.project(projectionMap);
// Join subquery to parent.
if(parent) {
let select = new Sexpr([Token.identifier(query ? "select" : "member"), Token.string(queryId)], raw.lineIx, raw.charIx);
let groups = [];
for(let variable of neueContext) {
if(projected && !variable.projection) continue;
let field = variable.projection || variable.name;
select.push(Token.keyword(field));
if(query) select.push(Token.identifier(variable.name));
else select.push(Sexpr.list([Token.string(field)]));
if(context) {
for(let parentVar of context) {
if(parentVar.name === variable.name) groups.push(variable.value);
}
}
}
if($$negated) {
select.push(Token.keyword("$$negated"));
select.push($$negated);
}
if(groups.length && aggregated) neue.group(groups);
parseDSLSexpr(select, artifacts, context, parent);
}
return {value: queryId, type: VALUE.VIEW, projected, context: neueContext};
}
if(op.value === "union") {
let {$$view, $$body, $$negated} = parseArguments(sexpr, undefined, "$$body");
let unionId = $$view ? resolveTokenValue("view", $$view, context, VALUE.SCALAR) : uuid();
let neue = new runtime.Union(eve, unionId);
if(DEBUG.instrumentQuery) instrumentQuery(neue, DEBUG.instrumentQuery);
artifacts.views[unionId] = neue;
let mappings = {};
for(let raw of Sexpr.asSexprs(<any>$$body)) {
let child = macroexpandDSL(raw);
if(child.operator.value !== "query" && child.operator.value !== "union")
throw new ParseError("Unions may only contain queries", "", raw.lineIx, raw.charIx);
let res = parseDSLSexpr(child, artifacts, context, neue);
for(let variable of res.context) {
if(res.projected && !variable.projection) continue;
let field = variable.projection || variable.name;
if(!mappings[field]) mappings[field] = {};
mappings[field][variable.name] = true;
}
}
// Join subunion to parent
if(parent) {
let select = new Sexpr([Token.identifier(query ? "select" : "member"), Token.string(unionId)], raw.lineIx, raw.charIx);
for(let field in mappings) {
let mappingVariables = Object.keys(mappings[field]);
if(mappingVariables.length > 1)
throw new ParseError(
`All variables projected to a single union field must have the same name. Field '${field}' has ${mappingVariables.length} fields (${mappingVariables.join(", ")})`, "", raw.lineIx, raw.charIx);
select.push(Token.keyword(field));
select.push(Token.identifier(mappingVariables[0]));
}
console.log("union select", select.toString());
parseDSLSexpr(select, artifacts, context, parent);
}
return {type: VALUE.VIEW, value: unionId, mappings};
}
if(op.value === "member") {
if(!union) throw new ParseError(`Cannot add member to non-union parent`, "", raw.lineIx, raw.charIx);
let args = parseArguments(sexpr, ["$$view"]);
let {$$view, $$negated} = args;
let view = resolveTokenValue("view", $$view, context, VALUE.SCALAR);
if(view === undefined) throw new ParseError("Must specify a view to be unioned", "", raw.lineIx, raw.charIx);
let join = {};
for(let arg in args) {
if(arg === "$$view" || arg === "$$negated") continue;
join[arg] = resolveTokenValue("member field", args[arg], context);
}
if(runtime.QueryFunctions[view]) throw new ParseError(`Cannot union primitive view '${view}'`, "", raw.lineIx, raw.charIx);
union.union(view, join);
return;
}
if(!parent) throw new ParseError(`Non-query or union sexprs must be contained within a query or union`, "", raw.lineIx, raw.charIx);
if(op.value === "select") {
if(!query) throw new ParseError(`Cannot add select to non-query parent`, "", raw.lineIx, raw.charIx);
let selectId = uuid();
let $$view = getArgument(sexpr, "$$view", ["$$view"]);
let view = resolveTokenValue("view", $$view, context, VALUE.SCALAR);
if(view === undefined) throw new ParseError("Must specify a view to be selected", "", raw.lineIx, raw.charIx);
let primitive = runtime.QueryFunctions[view];
//@TODO: Move this to an eve table to allow user defined defaults
let args = parseArguments(sexpr, ["$$view"].concat(getDefaults(view)));
let {$$negated} = args;
let join = {};
for(let arg in args) {
let value = args[arg];
let variable;
if(arg === "$$view" || arg === "$$negated") continue;
if(value instanceof Token && value.type !== Token.TYPE.IDENTIFIER) {
join[arg] = args[arg].value;
continue;
}
if(value instanceof Sexpr) {
let result = parseDSLSexpr(value, artifacts, context, parent, `$$temp-${hygienicSymbolCounter++}-${arg}`);
if(!result || result.type === VALUE.NULL) throw new Error(`Cannot set parameter '${arg}' to null value '${value.toString()}'`);
if(result.type === VALUE.VIEW) {
let view = result.value;
let resultField = getResult(view);
if(!resultField) throw new Error(`Cannot set parameter '${arg}' to select without default result field`);
for(let curVar of context) {
for(let constraint of curVar.constraints) {
if(constraint[0] === view && constraint[1] === resultField) {
variable = curVar;
break;
}
}
}
}
} else variable = getDSLVariable(value.value, context);
if(variable) {
join[arg] = variable.value;
variable.constraints.push([view, arg]);
}
else if($$negated && $$negated.value)
throw new ParseError(`Cannot bind field in negated select to undefined variable '${value.value}'`, "", raw.lineIx, raw.charIx);
else context.push({name: value.value, type: VALUE.SCALAR, value: [selectId, arg], constraints: [[view, arg]]}); // @TODO: does this not need to add to the join map?
}
let resultField = getResult(view);
if(resultVariable && resultField && !join[resultField]) {
join[resultField] = [selectId, resultField];
context.push({name: resultVariable, type: VALUE.SCALAR, value: [selectId, resultField], constraints: [[view, resultField]]});
}
if(primitive) {
if($$negated) {
if(primitive.inverse) view = primitive.inverse;
else throw new ParseError(`Cannot invert primitive calculation '${view}'`, "", raw.lineIx, raw.charIx);
}
if(primitive.aggregate) query.aggregate(view, join, selectId);
else query.calculate(view, join, selectId);
} else if($$negated) query.deselect(view, join);
else query.select(view, join, selectId);
return {
type: VALUE.VIEW,
value: view,
aggregated: primitive && primitive.aggregate
};
}
if(op.value === "project!") {
let args = parseArguments(sexpr, ["$$view"]);
let {$$view, $$negated} = args;
let projectionMap = {};
for(let arg in args) {
let value = args[arg];
if(arg === "$$view" || arg === "$$negated") continue;
if(value.type !== Token.TYPE.IDENTIFIER) {
projectionMap[arg] = args[arg].value;
continue;
}
let variable = getDSLVariable(value.value, context);
if(variable) {
if(variable.static) projectionMap[arg] = variable.value;
else if(!$$view) {
variable.projection = arg;
projectionMap[arg] = variable.value;
} else projectionMap[arg] = [variable.name];
} else throw new ParseError(`Cannot bind projected field to undefined variable '${value.value}'`, "", raw.lineIx, raw.charIx);
}
let view = resolveTokenValue("view", $$view, context, VALUE.SCALAR);
if(view === undefined) {
if(query.projectionMap) throw new ParseError("Query can only self-project once", "", raw.lineIx, raw.charIx);
if($$negated && $$negated.value) throw new ParseError(`Cannot negate self-projection`, "", raw.lineIx, raw.charIx);
// Project self
query.project(projectionMap);
} else {
let union = <runtime.Union>artifacts.views[view] || new runtime.Union(eve, view);
if(DEBUG.instrumentQuery && !artifacts.views[view]) instrumentQuery(union, DEBUG.instrumentQuery);
artifacts.views[view] = union;
// if($$negated && $$negated.value) union.ununion(queryId, projectionMap);
if($$negated && $$negated.value)
throw new ParseError(`Union projections may not be negated in the current runtime`, "", raw.lineIx, raw.charIx);
else union.union(query.name, projectionMap);
}
return;
}
throw new ParseError(`Unknown DSL operator '${op.value}'`, "", raw.lineIx, raw.charIx);
}
function resolveTokenValue(name:string, token:Token|Sexpr, context:VariableContext, type?:VALUE) {
if(!token) return;
if(token instanceof Sexpr) return parseDSLSexpr(token, undefined, context);
if(token instanceof Token && token.type === Token.TYPE.IDENTIFIER) {
let variable = getDSLVariable(token.value, context, VALUE.SCALAR);
if(!variable) throw new Error(`Cannot bind ${name} to undefined variable '${token.value}'`);
if(!variable.static) throw new Error(`Cannot bind ${name} to dynamic variable '${token.value}'`);
return variable.value;
}
return token.value;
}
function getDSLVariable(name:string, context:VariableContext, type?:VALUE):Variable {
if(!context) return;
for(let variable of context) {
if(variable.name === name) {
if(variable.static === false) throw new Error(`Cannot statically look up dynamic variable '${name}'`);
if(type !== undefined && variable.type !== type)
throw new Error(`Expected variable '${name}' to have type '${type}', but instead has type '${variable.type}'`);
return variable;
}
}
}
function getDefaults(view:string):string[] {
return (runtime.QueryFunctions[view] && runtime.QueryFunctions[view].params) || [];
}
function getResult(view:string):string {
return runtime.QueryFunctions[view] && runtime.QueryFunctions[view].result;
}
export function getArgument(root:Sexpr, param:string, defaults?: string[]):Token|Sexpr {
let defaultIx = 0;
for(let ix = 1; ix < root.length; ix++) {
let cur = root.nth(ix);
if(cur.type === Token.TYPE.KEYWORD) {
if(cur.value === param) return root.nth(ix + 1);
ix++; // Skip over this keyword's value.
} else {
if(defaults && defaultIx < defaults.length) {
let keyword = defaults[defaultIx++];
if(keyword === param) return cur;
continue; // Positional arg bound to a different default; keep scanning.
}
throw new Error(`Param '${param}' not in sexpr ${root.toString()}`);
}
}
throw new Error(`Param '${param}' not in sexpr ${root.toString()}`);
}
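// Example (sketch): with defaults ["$$view"], getArgument on (select "foo" :a 1)
// returns the "foo" token for param "$$view" (bound positionally) and the
// token 1 for param "a" (bound by keyword).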
export function parseArguments(root:Sexpr, defaults?:string[], rest?:string):{[keyword:string]: Token|Sexpr} {
let args:any = {};
let defaultIx = 0;
let keyword;
let kwarg = false;
for(let raw of root.arguments) {
if(raw.type === Token.TYPE.KEYWORD) {
if(keyword) throw new Error(`Keywords may not be values '${raw}'`);
else keyword = raw.value;
} else if(keyword) {
if(args[keyword] === undefined) {
args[keyword] = raw;
} else {
if(!(args[keyword] instanceof Array)) args[keyword] = [args[keyword]];
args[keyword].push(raw);
}
keyword = undefined;
defaultIx = defaults ? defaults.length : 0;
kwarg = true;
} else if(defaults && defaultIx < defaults.length) {
args[defaults[defaultIx++]] = raw;
} else if(rest) {
args[rest] = args[rest] || [];
args[rest].push(raw);
} else {
if(kwarg) throw new Error("Cannot specify an arg after a kwarg");
else if(defaultIx) throw new Error(`Too many args, expected: ${defaults.length}, got: ${defaultIx + 1}`);
else throw new Error("Cannot specify an arg without default keys specified");
}
}
return args;
}
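// Example (sketch): for (select "foo" bar :a 1) with defaults ["$$view", "b"],
// parseArguments yields {$$view: "foo", b: bar, a: 1} -- positional args bind
// to the defaults in order, keyword args bind explicitly.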
declare var exports;
if(ENV === "browser") window["parser"] = exports;
export function instrumentQuery(q:any, instrument?:Function|boolean) {
let instrumentation:Function = <Function>instrument;
if(!instrument || instrument === true) instrumentation = (fn, args) => console.log("*", fn, ":", args);
let keys = [];
for(let key in q) keys.push(key);
keys.forEach((fn) => {
if(!q.constructor.prototype.hasOwnProperty(fn) || typeof q[fn] !== "function") return;
var old = q[fn];
q[fn] = function() {
instrumentation(fn, arguments);
return old.apply(this, arguments);
}
});
return q;
}
export function asDiff(ixer, artifacts:Artifacts) {
let views = artifacts.views;
let diff = ixer.diff();
for(let id in views) diff.merge(views[id].changeset(eve));
return diff;
}
export function applyAsDiffs(artifacts:Artifacts) {
let views = artifacts.views;
for(let id in views) eve.applyDiff(views[id].changeset(eve));
console.log("Applied diffs for:");
for(let id in views) console.log(" * ", views[id] instanceof runtime.Query ? "Query" : "Union", views[id].name);
return artifacts;
}
export function logArtifacts(artifacts:Artifacts) {
for(let view in artifacts.views) console.log(view, "\n", eve.find(view));
}<|fim▁end|> | // let scope;
// [scope = {}, charIx] = getMapArgs(line, lineIx, charIx); |
<|file_name|>controllers.js<|end_file_name|><|fim▁begin|>angular.module('perCapita.controllers', [])
.controller('AppCtrl', ['$scope', '$rootScope', '$ionicModal', '$timeout', '$localStorage', '$ionicPlatform', 'AuthService',
function ($scope, $rootScope, $ionicModal, $timeout, $localStorage, $ionicPlatform, AuthService) {
$scope.loginData = $localStorage.getObject('userinfo', '{}');
$scope.reservation = {};
$scope.registration = {};
$scope.loggedIn = false;
if (AuthService.isAuthenticated()) {
$scope.loggedIn = true;
$scope.username = AuthService.getUsername();
}
// Create the login modal that we will use later
$ionicModal.fromTemplateUrl('templates/login.html', {
scope: $scope
}).then(function (modal) {
$scope.modal = modal;
});
// Triggered in the login modal to close it
$scope.closeLogin = function () {
$scope.modal.hide();
};
// Open the login modal
$scope.login = function () {
$scope.modal.show();
};
// Perform the login action when the user submits the login form
$scope.doLogin = function () {
console.log('Doing login', $scope.loginData);
$localStorage.storeObject('userinfo', $scope.loginData);
AuthService.login($scope.loginData);
$scope.closeLogin();
};
$scope.logOut = function () {
AuthService.logout();
$scope.loggedIn = false;
$scope.username = '';
};
$rootScope.$on('login:Successful', function () {
$scope.loggedIn = AuthService.isAuthenticated();
$scope.username = AuthService.getUsername();
});
$ionicModal.fromTemplateUrl('templates/register.html', {
scope: $scope
}).then(function (modal) {
$scope.registerform = modal;
});
$scope.closeRegister = function () {
$scope.registerform.hide();
};
$scope.register = function () {
$scope.registerform.show();
};
$scope.doRegister = function () {
console.log('Doing registration', $scope.registration);
$scope.loginData.username = $scope.registration.username;
$scope.loginData.password = $scope.registration.password;
AuthService.register($scope.registration);
$timeout(function () {
$scope.closeRegister();
}, 1000);
};
$rootScope.$on('registration:Successful', function () {
$localStorage.storeObject('userinfo', $scope.loginData);
});
}])
.controller('FavoriteDetailsController', ['$scope', '$rootScope', '$state', '$stateParams', 'Favorites', function ($scope, $rootScope, $state, $stateParams, Favorites) {
$scope.showFavButton = false;
// Lookup favorites for a given user id
Favorites.findById({id: $stateParams.id})
.$promise.then(
function (response) {
$scope.city = response;
},
function (response) {
$scope.message = "Error: " + response.status + " " + response.statusText;
}
);
}])
.controller('HomeController', ['$scope', 'perCapitaService', '$stateParams', '$rootScope', 'Favorites', '$ionicPlatform', '$cordovaLocalNotification', '$cordovaToast', function ($scope, perCapitaService, $stateParams, $rootScope, Favorites, $ionicPlatform, $cordovaLocalNotification, $cordovaToast) {
$scope.showFavButton = $rootScope.currentUser;
$scope.controlsData = {skills: $rootScope.skills};
// Look up jobs data
$scope.doLookup = function () {
$rootScope.skills = $scope.controlsData.skills;
perCapitaService.lookup($scope.controlsData.skills);
};
// Post process the jobs data, by adding Indeeds link and calculating jobsPer1kPeople and jobsRank
$scope.updatePerCapitaData = function () {
$scope.cities = perCapitaService.response.data.docs;
var arrayLength = $scope.cities.length;
for (var i = 0; i < arrayLength; i++) {
var obj = $scope.cities[i];
obj.jobsPer1kPeople = Math.round(obj.totalResults / obj.population * 1000);
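// e.g. 500 openings in a city of 250,000 people -> Math.round(500 / 250000 * 1000) = 2 jobs per 1k people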
obj.url = "https://www.indeed.com/jobs?q=" + $scope.controlsData.skills + "&l=" + obj.city + ", " + obj.state;
}
// rank jobs
var sortedObjs;
if (perCapitaService.isSkills) {
sortedObjs = _.sortBy($scope.cities, 'totalResults').reverse();
} else {
sortedObjs = _.sortBy($scope.cities, 'jobsPer1kPeople').reverse();
}
$scope.cities.forEach(function (element) {
element.jobsRank = sortedObjs.indexOf(element) + 1;
});
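// e.g. the top city by the chosen metric (total openings for skill searches,
// per-capita openings otherwise) ends up with jobsRank 1.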
if (!$scope.$$phase) {
$scope.$apply();
}
$rootScope.cities = $scope.cities;
console.log("Loaded " + arrayLength + " results.")
};
perCapitaService.registerObserverCallback($scope.updatePerCapitaData);
$scope.addToFavorites = function () {
delete $scope.city._id;
delete $scope.city._rev;
$scope.city.customerId = $rootScope.currentUser.id;
Favorites.create($scope.city);
$ionicPlatform.ready(function () {
$cordovaLocalNotification.schedule({
id: 1,
title: "Added Favorite",
text: $scope.city.city
}).then(function () {
console.log('Added Favorite ' + $scope.city.city);
},
function () {
console.log('Failed to add Favorite ');
});
$cordovaToast
.show('Added Favorite ' + $scope.city.city, 'long', 'center')
.then(function (success) {
// success
}, function (error) {
// error
});
});
}
if ($stateParams.id) {
console.log("param " + $stateParams.id);
$scope.city = $rootScope.cities.filter(function (obj) {
return obj._id === $stateParams.id;
})[0];
console.log($scope.city);
} else {
$scope.doLookup();
}
}])
.controller('AboutController', ['$scope', function ($scope) {
}])
.controller('FavoritesController', ['$scope', '$rootScope', '$state', 'Favorites', '$ionicListDelegate', '$ionicPopup', function ($scope, $rootScope, $state, Favorites, $ionicListDelegate, $ionicPopup) {
$scope.shouldShowDelete = false;
/*$scope.$on('$stateChangeSuccess', function(event, toState, toParams, fromState, fromParams) {
console.log("State changed: ", toState);
if(toState.name === "app.favorites") $scope.refreshItems();
});*/
$scope.refreshItems = function () {
if ($rootScope.currentUser) {
Favorites.find({
filter: {
where: {
customerId: $rootScope.currentUser.id
}
}
}).$promise.then(
function (response) {
$scope.favorites = response;
console.log("Got favorites");
},
function (response) {
console.log(response);
});
}
else {
$scope.message = "You are not logged in";
}
}
$scope.refreshItems();
$scope.toggleDelete = function () {
$scope.shouldShowDelete = !$scope.shouldShowDelete;
console.log($scope.shouldShowDelete);
}
$scope.deleteFavorite = function (favoriteid) {
var confirmPopup = $ionicPopup.confirm({
title: '<h3>Confirm Delete</h3>',<|fim▁hole|> });
confirmPopup.then(function (res) {
if (res) {
console.log('Ok to delete');
Favorites.deleteById({id: favoriteid}).$promise.then(
function (response) {
$scope.favorites = $scope.favorites.filter(function (el) {
return el.id !== favoriteid;
});
$state.go($state.current, {}, {reload: false});
// $window.location.reload();
},
function (response) {
console.log(response);
$state.go($state.current, {}, {reload: false});
});
} else {
console.log('Canceled delete');
}
});
$scope.shouldShowDelete = false;
}
}])
;<|fim▁end|> | template: '<p>Are you sure you want to delete this item?</p>' |
<|file_name|>webmediaplayer_params.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "webkit/media/webmediaplayer_params.h"<|fim▁hole|>
namespace webkit_media {
WebMediaPlayerParams::WebMediaPlayerParams(
const scoped_refptr<media::AudioRendererSink>& audio_renderer_sink,
const scoped_refptr<media::GpuVideoDecoder::Factories>& gpu_factories,
const scoped_refptr<media::MediaLog>& media_log)
: audio_renderer_sink_(audio_renderer_sink),
gpu_factories_(gpu_factories),
media_log_(media_log) {
DCHECK(media_log_);
}
WebMediaPlayerParams::~WebMediaPlayerParams() {}
} // namespace webkit_media<|fim▁end|> |
#include "media/base/audio_renderer_sink.h"
#include "media/base/media_log.h" |
<|file_name|>Paginator.test.tsx<|end_file_name|><|fim▁begin|>import { render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import Paginator from '../Paginator'
import type { PaginatorProps } from '../Paginator'
const mockPageChange = jest.fn()
const testCases = [
// activePage, totalPages, renderedItems, activeItem
[3, 10, 10, 4],
[1, 1, 5],
[1, 2, 6],
[1, 3, 7],
[1, 4, 8],
[1, 5, 9],
[2, 5, 9],
[3, 5, 9],
[1, 6, 8],
[2, 6, 9],
[3, 6, 10],
[4, 6, 10],
[1, 7, 8],
[2, 7, 9],
[3, 7, 10],
[4, 7, 11],
[5, 7, 10],
[6, 7, 9],
[7, 7, 8],
[1, 8, 8],
[1, 9, 8],
[1, 10, 8],
[1, 20, 8],
[1, 100, 8],
[2, 100, 9],
[3, 100, 10],
[4, 100, 11],
[5, 100, 11],
[50, 100, 11],
[500, 1000, 11],
[5000, 10000, 11],
[97, 100, 11],
[98, 100, 10],
[99, 100, 9],
[100, 100, 8],
]
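// Each row reads: with page `activePage` of `totalPages` active, the paginator
// should render `renderedItems` items (page numbers plus whatever
// first/prev/next/last or ellipsis controls the component emits); the optional
// 4th column is the index of the item expected to carry the "active" class.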
const setup = ({ activePage, totalPages, onPageChange }: PaginatorProps) => {
const utils = render(
<Paginator
activePage={activePage}
totalPages={totalPages}
onPageChange={onPageChange}
/>,
)
return {
...utils,
}
}
describe('Paginator', () => {
it.each(testCases)(
'renders the inner elements correctly for active page %s of %s pages',
(activePage, totalPages, renderedItems) => {
setup({ activePage, totalPages, onPageChange: mockPageChange })
expect(screen.getAllByTestId('paginator-item').length).toBe(renderedItems)
},<|fim▁hole|> const [activePage, totalPages, , activeItem] = testCases[0]
setup({ activePage, totalPages, onPageChange: mockPageChange })
expect(screen.getAllByTestId('paginator-item')[activeItem]).toHaveClass(
'active',
)
})
it('calls onPageChange when clicking PaginatorItem', () => {
const [activePage, totalPages, , activeItem] = testCases[0]
setup({ activePage, totalPages, onPageChange: mockPageChange })
userEvent.click(screen.getAllByTestId('paginator-item')[activeItem])
expect(mockPageChange).toHaveBeenCalled()
})
})<|fim▁end|> | )
it('activePage has active prop', () => { |
<|file_name|>previewContentProvider.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
import { Logger } from '../logger';
import { MarkdownEngine } from '../markdownEngine';
import { MarkdownContributionProvider } from '../markdownExtensions';
import { ContentSecurityPolicyArbiter, MarkdownPreviewSecurityLevel } from '../security';
import { basename, dirname, isAbsolute, join } from '../util/path';
import { WebviewResourceProvider } from '../util/resources';
import { MarkdownPreviewConfiguration, MarkdownPreviewConfigurationManager } from './previewConfig';
const localize = nls.loadMessageBundle();
/**
* Strings used inside the markdown preview.
*
* Stored here and then injected in the preview so that they
* can be localized using our normal localization process.
*/
const previewStrings = {
cspAlertMessageText: localize(
'preview.securityMessage.text',
'Some content has been disabled in this document'),
cspAlertMessageTitle: localize(
'preview.securityMessage.title',
'Potentially unsafe or insecure content has been disabled in the Markdown preview. Change the Markdown preview security setting to allow insecure content or enable scripts'),
cspAlertMessageLabel: localize(
'preview.securityMessage.label',
'Content Disabled Security Warning')
};
function escapeAttribute(value: string | vscode.Uri): string {
return value.toString().replace(/"/g, '&quot;');
}
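// e.g. escapeAttribute('say "hi"') -> 'say &quot;hi&quot;', so interpolated
// values cannot terminate the surrounding attribute quotes early.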
export interface MarkdownContentProviderOutput {
html: string;
containingImages: { src: string }[];
}
export class MarkdownContentProvider {
constructor(
private readonly engine: MarkdownEngine,
private readonly context: vscode.ExtensionContext,
private readonly cspArbiter: ContentSecurityPolicyArbiter,
private readonly contributionProvider: MarkdownContributionProvider,
private readonly logger: Logger
) { }
public async provideTextDocumentContent(
markdownDocument: vscode.TextDocument,
resourceProvider: WebviewResourceProvider,
previewConfigurations: MarkdownPreviewConfigurationManager,
initialLine: number | undefined = undefined,
state?: any
): Promise<MarkdownContentProviderOutput> {
const sourceUri = markdownDocument.uri;
const config = previewConfigurations.loadAndCacheConfiguration(sourceUri);
const initialData = {
source: sourceUri.toString(),
fragment: state?.fragment || markdownDocument.uri.fragment || undefined,
line: initialLine,
lineCount: markdownDocument.lineCount,
scrollPreviewWithEditor: config.scrollPreviewWithEditor,
scrollEditorWithPreview: config.scrollEditorWithPreview,
doubleClickToSwitchToEditor: config.doubleClickToSwitchToEditor,
disableSecurityWarnings: this.cspArbiter.shouldDisableSecurityWarnings(),
webviewResourceRoot: resourceProvider.asWebviewUri(markdownDocument.uri).toString(),
};
this.logger.log('provideTextDocumentContent', initialData);
// Content Security Policy
const nonce = getNonce();
const csp = this.getCsp(resourceProvider, sourceUri, nonce);
const body = await this.engine.render(markdownDocument, resourceProvider);
const html = `<!DOCTYPE html>
<html style="${escapeAttribute(this.getSettingsOverrideStyles(config))}">
<head>
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
${csp}
<meta id="vscode-markdown-preview-data"
data-settings="${escapeAttribute(JSON.stringify(initialData))}"
data-strings="${escapeAttribute(JSON.stringify(previewStrings))}"
data-state="${escapeAttribute(JSON.stringify(state || {}))}">
<script src="${this.extensionResourcePath(resourceProvider, 'pre.js')}" nonce="${nonce}"></script>
${this.getStyles(resourceProvider, sourceUri, config, state)}
<base href="${resourceProvider.asWebviewUri(markdownDocument.uri)}">
</head>
<body class="vscode-body ${config.scrollBeyondLastLine ? 'scrollBeyondLastLine' : ''} ${config.wordWrap ? 'wordWrap' : ''} ${config.markEditorSelection ? 'showEditorSelection' : ''}">
${body.html}
<div class="code-line" data-line="${markdownDocument.lineCount}"></div>
${this.getScripts(resourceProvider, nonce)}
</body>
</html>`;
return {
html,
containingImages: body.containingImages,
};
}
public provideFileNotFoundContent(
resource: vscode.Uri,
): string {
const resourcePath = basename(resource.fsPath);
const body = localize('preview.notFound', '{0} cannot be found', resourcePath);
return `<!DOCTYPE html>
<html>
<body class="vscode-body">
${body}
</body>
</html>`;
}
private extensionResourcePath(resourceProvider: WebviewResourceProvider, mediaFile: string): string {
const webviewResource = resourceProvider.asWebviewUri(
vscode.Uri.joinPath(this.context.extensionUri, 'media', mediaFile));
return webviewResource.toString();
}
private fixHref(resourceProvider: WebviewResourceProvider, resource: vscode.Uri, href: string): string {
if (!href) {
return href;
}
if (href.startsWith('http:') || href.startsWith('https:') || href.startsWith('file:')) {
return href;
}
// Assume it must be a local file
if (isAbsolute(href)) {
return resourceProvider.asWebviewUri(vscode.Uri.file(href)).toString();
}
// Use a workspace relative path if there is a workspace
const root = vscode.workspace.getWorkspaceFolder(resource);
if (root) {
return resourceProvider.asWebviewUri(vscode.Uri.joinPath(root.uri, href)).toString();
}
// Otherwise look relative to the markdown file
return resourceProvider.asWebviewUri(vscode.Uri.file(join(dirname(resource.fsPath), href))).toString();
}
private computeCustomStyleSheetIncludes(resourceProvider: WebviewResourceProvider, resource: vscode.Uri, config: MarkdownPreviewConfiguration): string {
if (!Array.isArray(config.styles)) {
return '';
}
const out: string[] = [];
for (const style of config.styles) {
out.push(`<link rel="stylesheet" class="code-user-style" data-source="${escapeAttribute(style)}" href="${escapeAttribute(this.fixHref(resourceProvider, resource, style))}" type="text/css" media="screen">`);
}
return out.join('\n');
}
private getSettingsOverrideStyles(config: MarkdownPreviewConfiguration): string {
return [
config.fontFamily ? `--markdown-font-family: ${config.fontFamily};` : '',
isNaN(config.fontSize) ? '' : `--markdown-font-size: ${config.fontSize}px;`,
isNaN(config.lineHeight) ? '' : `--markdown-line-height: ${config.lineHeight};`,
].join(' ');
}
private getImageStabilizerStyles(state?: any) {
let ret = '<style>\n';
if (state && state.imageInfo) {
state.imageInfo.forEach((imgInfo: any) => {
ret += `#${imgInfo.id}.loading {
height: ${imgInfo.height}px;
width: ${imgInfo.width}px;
}\n`;
});
}
ret += '</style>\n';
return ret;
}
private getStyles(resourceProvider: WebviewResourceProvider, resource: vscode.Uri, config: MarkdownPreviewConfiguration, state?: any): string {
const baseStyles: string[] = [];
for (const resource of this.contributionProvider.contributions.previewStyles) {
baseStyles.push(`<link rel="stylesheet" type="text/css" href="${escapeAttribute(resourceProvider.asWebviewUri(resource))}">`);
}
return `${baseStyles.join('\n')}
${this.computeCustomStyleSheetIncludes(resourceProvider, resource, config)}
${this.getImageStabilizerStyles(state)}`;
}
private getScripts(resourceProvider: WebviewResourceProvider, nonce: string): string {
const out: string[] = [];
for (const resource of this.contributionProvider.contributions.previewScripts) {
out.push(`<script async
src="${escapeAttribute(resourceProvider.asWebviewUri(resource))}"
nonce="${nonce}"
charset="UTF-8"></script>`);
}
return out.join('\n');
}
private getCsp(
provider: WebviewResourceProvider,
resource: vscode.Uri,
nonce: string
): string {
const rule = provider.cspSource;
switch (this.cspArbiter.getSecurityLevelForResource(resource)) {
case MarkdownPreviewSecurityLevel.AllowInsecureContent:
return `<meta http-equiv="Content-Security-Policy" content="default-src 'none'; img-src 'self' ${rule} http: https: data:; media-src 'self' ${rule} http: https: data:; script-src 'nonce-${nonce}'; style-src 'self' ${rule} 'unsafe-inline' http: https: data:; font-src 'self' ${rule} http: https: data:;">`;
case MarkdownPreviewSecurityLevel.AllowInsecureLocalContent:
return `<meta http-equiv="Content-Security-Policy" content="default-src 'none'; img-src 'self' ${rule} https: data: http://localhost:* http://127.0.0.1:*; media-src 'self' ${rule} https: data: http://localhost:* http://127.0.0.1:*; script-src 'nonce-${nonce}'; style-src 'self' ${rule} 'unsafe-inline' https: data: http://localhost:* http://127.0.0.1:*; font-src 'self' ${rule} https: data: http://localhost:* http://127.0.0.1:*;">`;
case MarkdownPreviewSecurityLevel.AllowScriptsAndAllContent:
return '<meta http-equiv="Content-Security-Policy" content="">';
case MarkdownPreviewSecurityLevel.Strict:
default:
return `<meta http-equiv="Content-Security-Policy" content="default-src 'none'; img-src 'self' ${rule} https: data:; media-src 'self' ${rule} https: data:; script-src 'nonce-${nonce}'; style-src 'self' ${rule} 'unsafe-inline' https: data:; font-src 'self' ${rule} https: data:;">`;
}
}
}
function getNonce() {
let text = '';
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
for (let i = 0; i < 64; i++) {
text += possible.charAt(Math.floor(Math.random() * possible.length));
}<|fim▁hole|> return text;
}<|fim▁end|> | |
<|file_name|>fn_type.py<|end_file_name|><|fim▁begin|>from core_types import IncompatibleTypes, ImmutableT
class FnT(ImmutableT):
"""Type of a typed function"""
def __init__(self, input_types, return_type):
self.input_types = tuple(input_types)
self.return_type = return_type
self._hash = hash(self.input_types + (return_type,))
def __str__(self):
input_str = ", ".join(str(t) for t in self.input_types)
return "(%s)->%s" % (input_str, self.return_type)
def __repr__(self):
return str(self)
def __eq__(self, other):
return other.__class__ is FnT and \
self.return_type == other.return_type and \
len(self.input_types) == len(other.input_types) and \
all(t1 == t2 for (t1, t2) in
zip(self.input_types, other.input_types))
def combine(self, other):
if self == other:
return self
else:
raise IncompatibleTypes(self, other)
def __hash__(self):
return self._hash
_fn_type_cache = {}
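# Interning cache for FnT instances, keyed by (input_types, return_type), so
# structurally identical function types come back as the same object. Sketch
# (Int64/Bool stand in for whatever concrete types core_types provides):
#   make_fn_type((Int64,), Bool) is make_fn_type((Int64,), Bool)  # -> True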
def make_fn_type(input_types, return_type):
input_types = tuple(input_types)
key = input_types, return_type
if key in _fn_type_cache:
return _fn_type_cache[key]
else:
t = FnT(input_types, return_type)<|fim▁hole|><|fim▁end|> | _fn_type_cache[key] = t
return t |
<|file_name|>response_writer.js<|end_file_name|><|fim▁begin|>const ResponseMessage = require('../../messages').Response;
const through2 = require('through2');
const xtend = require('xtend');<|fim▁hole|>};
function encoder(Message, options) {
options = xtend(defaults, options || {});
return through2.obj(function(message, enc, callback) {
if (Message.verify(message)) {
if (options.ignore_invalid) {
return this.queue(message);
}
throw new Error('unhandled request');
}
return callback(null, Message.encodeDelimited(message).finish());
});
}
module.exports = function () {
return encoder(ResponseMessage);
};<|fim▁end|> |
var defaults = {
ignore_invalid: false |
<|file_name|>testReferenceReads1.js<|end_file_name|><|fim▁begin|>(function($) {
module("referenceReads1");
// Test case : Reference Reads 1
_asyncTest("Reference Reads 1", function()
{
expect(6);
GitanaTest.authenticateFullOAuth().then(function() {
var platform = this;
// create a few things
var repository = null;
this.createRepository().then(function() {
repository = this;
});
var domain = null;
this.createDomain().then(function() {
domain = this;
});
var registrar = null;
this.createRegistrar().then(function() {
registrar = this;
});
var stack = null;<|fim▁hole|> var team = null;
this.then(function() {
this.subchain(stack).createTeam("abc-" + new Date().getTime()).then(function() {
team = this;
});
});
this.then(function() {
var repositoryReference = "repository://" + platform.getId() + "/" + repository.getId();
var domainReference = "domain://" + platform.getId() + "/" + domain.getId();
var registrarReference = "registrar://" + platform.getId() + "/" + registrar.getId();
var stackReference = "stack://" + platform.getId() + "/" + stack.getId();
var teamReference = "team://stack/" + platform.getId() + "/" + stack.getId() + "/" + team.getKey();
var entries = [{
"ref": repositoryReference
}, {
"ref": domainReference
}, {
"ref": registrarReference
}, {
"ref": stackReference
}, {
"ref": teamReference
}];
this.referenceReads(entries, function(results) {
// check that they all came back
ok(results.length == entries.length, "Found the right number of results");
for (var i = 0; i < results.length; i++)
{
ok(results[i].entry, "Found entry #" + i);
}
// done
start();
});
});
});
});
}(jQuery) );<|fim▁end|> | this.createStack().then(function() {
stack = this;
});
|
<|file_name|>group.js<|end_file_name|><|fim▁begin|>var https = require('https');
var xml2js = require('xml2js');
var groups = {};
var host, port, auth, origin;
groups.getUserGroups = function(req, res) {
var options = {
rejectUnauthorized: false,
hostname: host,
port: port,
path: "/sap/opu/odata/UI2/PAGE_BUILDER_PERS/PageSets('%2FUI2%2FFiori2LaunchpadHome')/Pages?$expand=PageChipInstances/Chip/ChipBags/ChipProperties",
method: 'GET',
auth: auth,
agent: false
};
var parser = new xml2js.Parser();
https.get(options, function(response) {
var bodyChunks = [];
response.on('data', function(chunk) {
bodyChunks.push(chunk);
}).on('end', function() {
var body = Buffer.concat(bodyChunks);
var jsonResult = [];
console.log(body.toString());
//convert the XML response to JSON using the xml2js
parser.parseString(body, function (err, result) {
var groups = result.feed.entry;
var currentGroupProperties, currentGroupTiles;
if(groups){
for(var i=0; i<groups.length; i++){
currentGroupProperties = groups[i].content[0]['m:properties'][0];
currentGroupTiles = groups[i].link[3]['m:inline'][0].feed[0].entry;
var groupJson = {
id : currentGroupProperties['d:id'][0],
title : currentGroupProperties['d:id'][0]==='/UI2/Fiori2LaunchpadHome'? 'My Home' : currentGroupProperties['d:title'][0],
tiles: []
};
//iterate on current group tiles and add them the json
var tileProps, chip, curTile;
if(currentGroupTiles){
for(var k=0; k<currentGroupTiles.length; k++){
chip = currentGroupTiles[k].link[1]['m:inline'][0];
if(chip !== ""){ //Need to remove tiles that were built from a catalog chip which is no longer exists, they should be removed...(not appear in FLP)
tileProps = chip.entry[0].content[0]['m:properties'][0]; //currentGroupTiles[k].content[0]['m:properties'][0];
curTile = {
title: tileProps['d:title'][0],
configuration: parseConfiguration(tileProps['d:configuration'][0]),
url: tileProps['d:url'][0],
baseChipId: tileProps['d:baseChipId'][0],//identify the type of tile (e.g."X-SAP-UI2-CHIP:/UI2/DYNAMIC_APPLAUNCHER")
id: tileProps['d:id'][0]
};
curTile.isDoubleWidth = curTile.configuration.col > 1;
curTile.isDoubleHeight = curTile.configuration.row > 1;
curTile.icon = '/images/index/main/apps/NewsImage11.png';
curTile.refreshInterval = curTile.configuration['service_refresh_interval'];
curTile.realIcon = matchTileIcon(curTile.configuration['display_icon_url']);
curTile.navigationTargetUrl = curTile.configuration['navigation_target_url'];
curTile.serviceURL = curTile.configuration['service_url'];
//Try to build working app url
curTile.navUrl = undefined;
if(curTile.navigationTargetUrl){
if(curTile.navigationTargetUrl.indexOf('#')===-1){ //it doesn't contain semantic object + action
curTile.navUrl = origin + curTile.navigationTargetUrl;
}else{
curTile.navUrl = origin + '/sap/bc/ui5_ui5/ui2/ushell/shells/abap/FioriLaunchpad.html?' + curTile.navigationTargetUrl;
}
}
curTile.dynamicData = 0;
getDynamicData(curTile);
curTile.description = curTile.configuration['display_subtitle_text'];
curTile.displayInfoText = curTile.configuration['display_info_text'];
curTile.displayNumberUnit = curTile.configuration['display_number_unit'];
switch(curTile.baseChipId){
case "X-SAP-UI2-CHIP:/UI2/AR_SRVC_NEWS":
curTile.type = 0;
break;
case "X-SAP-UI2-CHIP:/UI2/DYNAMIC_APPLAUNCHER":
curTile.type = 1;
break;
default: //"X-SAP-UI2-CHIP:/UI2/STATIC_APPLAUNCHER":
curTile.type = 2;
}
groupJson.tiles.push(curTile);
}
}
}
if(groupJson.tiles.length === 0){
groupJson.tiles.push(getEmptyTile());
}
jsonResult.push(groupJson);
}
}
//Needs to be after the parsing completes
res.json({ //Set the response back
status: 'OK',
results:jsonResult
});
})
})
}).on('error', function(e) {
console.error(e);
var jsonResult = jsonResult || [];
        // workaround: fall back to mock launchpad data when the ABAP backend is not reachable
if(jsonResult.length === 0){
for(var i=0; i<6; i++){
var tile, tiles = [];
for(var k=0 ; k<i; k++){
tile = {
title: "TileTitle_" + k,
description: "TileDescription_" + k,
configuration: JSON.parse('{"row":"1","col":"1"}'), //Default value for regular tiles
url: "TODO tileURL",
baseChipId: "TODO",<|fim▁hole|> tile.isDoubleWidth = tile.configuration.row > 1,
tile.isDoubleHeight = tile.configuration.col > 1,
tiles.push(tile);
}
jsonResult.push({
id : "Group_" + i,
title : "GroupTitle_" + i,
tiles: tiles
});
}
}
res.json({ //Set the response back
status: 'OK',
results:jsonResult
});
});
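    // NOTE: tile configurations can be double-encoded JSON; the outer object may wrap the real settings in a "tileConfiguration" string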
var parseConfiguration = function(confString){
var res;
if(!confString){
res = {"row":"1","col":"1"};
}else{
res = JSON.parse(confString);
if(res.tileConfiguration){
res = JSON.parse(res.tileConfiguration);
}
}
return res;
};
var getEmptyTile = function(){
return {
title: '',
configuration: {"row":"1","col":"1"},
url: '',
icon: '/images/index/main/apps/plusSign.png',
type: -1
};
};
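    // Asynchronously fetch a dynamic tile's KPI value from its service_url and store it on the tile.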
var getDynamicData = function(curTile){
if(curTile.serviceURL){
options.path = curTile.serviceURL;
https.get(options, function(response) {
var bodyChunks = [];
response.on('data', function(chunk) {
bodyChunks.push(chunk);
});
response.on('end', function() {
var body = Buffer.concat(bodyChunks);
                    curTile.dynamicData = body.toString(); // 'this' inside the end handler is the response emitter, so target the tile directly
});
            })
}
};
var matchTileIcon = function(fontName){
        switch(fontName){
            case 'sap-icon://multi-select':
                return 'glyphicon glyphicon-list';
            case 'sap-icon://action-settings':
                return 'glyphicon glyphicon-cog';
            case 'sap-icon://appointment':
                return 'glyphicon glyphicon-calendar';
            case 'sap-icon://travel-itinerary':
                return 'glyphicon glyphicon-plane';
            case 'sap-icon://table-chart':
                return 'glyphicon glyphicon-th';
            default:
                return 'glyphicon glyphicon-eye-close';
        }
}
};
module.exports = function(opts) {
if(opts) {
if(opts.fiori) {
var url = opts.fiori.split('//')[1].split(':');
host = url[0];
port = url[1];
origin = opts.fiori;
}
if(opts.fioriAuth) {
auth = opts.fioriAuth;
}
}
return groups;
};<|fim▁end|> | id: "Tile_" + k,
isNews: (k%2)?true:false
}; |
<|file_name|>p10_sieve.py<|end_file_name|><|fim▁begin|>big = 2000000 # B = the number below which primes are summed
p = [True] * big # P = whether a number is prime, all are initially true and will later be falsified
print("running sieve...")
s = 0 # S = the sum of primes less than big which begins as 0
for a in range(2, big): # loop A over all divisors less than BIG
if p[a]: # if A is prime
s += a # then add A to S
        for b in range(a * a, big, a): # mark multiples of A, starting at A*A since smaller multiples were already flagged by smaller primes<|fim▁hole|>
print(s)<|fim▁end|> | p[b] = False # the multiple isn't prime |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2014 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>,
# 2008-2013.
#
# Version: 1.0
#
''' A view function is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
a redirect, a 404 or 500 error, an XML document, an image...
or anything else.'''
import traceback
import logging
import datetime
import omeroweb.webclient.views
from omero_version import build_year
from omero_version import omero_version
from django.template import loader as template_loader
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext as Context
from django.utils.translation import ugettext as _
from django.utils.encoding import smart_str
from forms import ForgottonPasswordForm, ExperimenterForm, GroupForm
from forms import GroupOwnerForm, MyAccountForm, ChangePassword
from forms import UploadPhotoForm, EmailForm
from omeroweb.http import HttpJPEGResponse
from omeroweb.webclient.decorators import login_required, render_response
from omeroweb.connector import Connector
logger = logging.getLogger(__name__)
##############################################################################
# decorators
class render_response_admin(omeroweb.webclient.decorators.render_response):
"""
Subclass for adding additional data to the 'context' dict passed to
templates
"""
def prepare_context(self, request, context, *args, **kwargs):
"""
We extend the webclient render_response to check if any groups are
created.
If not, add an appropriate message to the template context
"""
super(render_response_admin, self).prepare_context(request, context,
*args, **kwargs)
if 'conn' not in kwargs:
return
conn = kwargs['conn']
noGroupsCreated = conn.isAnythingCreated()
if noGroupsCreated:
msg = _('User must be in a group - You have not created any'
' groups yet. Click <a href="%s">here</a> to create a'
' group') % (reverse(viewname="wamanagegroupid",
args=["new"]))
context['ome']['message'] = msg
        context['ome']['email'] = request.session \
            .get('server_settings', {}) \
            .get('email', False)
##############################################################################
# utils
import omero
from omero.model import PermissionsI
def prepare_experimenter(conn, eid=None):
if eid is None:
eid = conn.getEventContext().userId
experimenter = conn.getObject("Experimenter", eid)
defaultGroup = experimenter.getDefaultGroup()
otherGroups = list(experimenter.getOtherGroups())
hasAvatar = conn.hasExperimenterPhoto()
isLdapUser = experimenter.isLdapUser()
return experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar
def otherGroupsInitialList(groups, excluded_names=("user", "guest"),
excluded_ids=list()):
formGroups = list()
for gr in groups:
flag = False
if gr.name in excluded_names:
flag = True
if gr.id in excluded_ids:
flag = True
if not flag:
formGroups.append(gr)
formGroups.sort(key=lambda x: x.getName().lower())
return formGroups
def ownedGroupsInitial(conn, excluded_names=("user", "guest", "system"),
excluded_ids=list()):
groupsList = list(conn.listOwnedGroups())
ownedGroups = list()
for gr in groupsList:
flag = False
if gr.name in excluded_names:
flag = True
if gr.id in excluded_ids:
flag = True
if not flag:
ownedGroups.append(gr)
ownedGroups.sort(key=lambda x: x.getName().lower())
return ownedGroups
# myphoto helpers
def attach_photo(conn, newFile):
if newFile.content_type.startswith("image"):
f = newFile.content_type.split("/")
format = f[1].upper()
else:
format = newFile.content_type
conn.uploadMyUserPhoto(smart_str(newFile.name), format, newFile.read())
# permission helpers
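# webadmin permission levels map onto OMERO permission strings:
#   0 = private (rw----), 1 = read-only (rwr---),
#   2 = read-annotate (rwra--), 3 = read-write (rwrw--)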
def setActualPermissions(permissions):
permissions = int(permissions)
if permissions == 0:
p = PermissionsI("rw----")
elif permissions == 1:
p = PermissionsI("rwr---")
elif permissions == 2:
p = PermissionsI("rwra--")
elif permissions == 3:
p = PermissionsI("rwrw--")
else:
p = PermissionsI()
return p
def getActualPermissions(group):
p = None
if group.details.getPermissions() is None:
raise AttributeError('Object has no permissions')
else:
p = group.details.getPermissions()
flag = None
if p.isGroupWrite():
flag = 3
elif p.isGroupAnnotate():
flag = 2
elif p.isGroupRead():
flag = 1
elif p.isUserRead():
flag = 0
return flag
# getters
def getSelectedGroups(conn, ids):
if ids is not None and len(ids) > 0:
return list(conn.getObjects("ExperimenterGroup", ids))
return list()
def getSelectedExperimenters(conn, ids):
if ids is not None and len(ids) > 0:
return list(conn.getObjects("Experimenter", ids))
return list()
def mergeLists(list1, list2):
if not list1 and not list2:
return list()
if not list1:
return list(list2)
if not list2:
return list(list1)
result = list()
result.extend(list1)
result.extend(list2)
return set(result)
@login_required()
@render_response()
def drivespace_json(request, query=None, groupId=None, userId=None, conn=None,
**kwargs):
"""
Returns a json list of {"label":<Name>, "data": <Value>, "groupId /
userId": <id>} for plotting disk usage by users or groups.
If 'query' is "groups" or "users", this is for an Admin to show all data
on server divided into groups or users.
Else, if groupId is not None, we return data for that group, split by user.
Else, if userId is not None, we return data for that user, split by group.
"""
diskUsage = []
# diskUsage.append({"label": "Free space", "data":conn.getFreeSpace()})
queryService = conn.getQueryService()
ctx = conn.SERVICE_OPTS.copy()
params = omero.sys.ParametersI()
params.theFilter = omero.sys.Filter()
def getBytes(ctx, eid=None):
bytesInGroup = 0
pixelsQuery = "select sum(cast( p.sizeX as double ) * p.sizeY * p.sizeZ * p.sizeT * p.sizeC * pt.bitSize / 8) " \
"from Pixels p join p.pixelsType as pt join p.image i left outer join i.fileset f " \
"join p.details.owner as owner " \
"where f is null"
filesQuery = "select sum(origFile.size) from OriginalFile as origFile " \
"join origFile.details.owner as owner"
if eid is not None:
params.add('eid', omero.rtypes.rlong(eid))
pixelsQuery = pixelsQuery + " and owner.id = (:eid)"
filesQuery = filesQuery + " where owner.id = (:eid)"
# Calculate disk usage via Pixels
result = queryService.projection(pixelsQuery, params, ctx)
if len(result) > 0 and len(result[0]) > 0:
bytesInGroup += result[0][0].val
# Now get Original File usage
result = queryService.projection(filesQuery, params, ctx)
if len(result) > 0 and len(result[0]) > 0:
bytesInGroup += result[0][0]._val
return bytesInGroup
sr = conn.getAdminService().getSecurityRoles()
if query == 'groups':
for g in conn.listGroups():
# ignore 'user' and 'guest' groups
if g.getId() in (sr.guestGroupId, sr.userGroupId):
continue
ctx.setOmeroGroup(g.getId())
b = getBytes(ctx)
if b > 0:
diskUsage.append({"label": g.getName(), "data": b,
"groupId": g.getId()})
elif query == 'users':
ctx.setOmeroGroup('-1')
for e in conn.getObjects("Experimenter"):
b = getBytes(ctx, e.getId())
if b > 0:
diskUsage.append({"label": e.getNameWithInitial(), "data": b,
"userId": e.getId()})
elif userId is not None:
eid = long(userId)
for g in conn.getOtherGroups(eid):
# ignore 'user' and 'guest' groups
if g.getId() in (sr.guestGroupId, sr.userGroupId):
continue
ctx.setOmeroGroup(g.getId())
b = getBytes(ctx, eid)
if b > 0:
diskUsage.append({"label": g.getName(), "data": b,
"groupId": g.getId()})
# users within a single group
elif groupId is not None:
ctx.setOmeroGroup(groupId)
for e in conn.getObjects("Experimenter"):
b = getBytes(ctx, e.getId())
if b > 0:
diskUsage.append({"label": e.getNameWithInitial(),
"data": b, "userId": e.getId()})
diskUsage.sort(key=lambda x: x['data'], reverse=True)
return diskUsage
##############################################################################
# views control
def forgotten_password(request, **kwargs):
request.session.modified = True
template = "webadmin/forgotten_password.html"
conn = None
error = None
def getGuestConnection(server_id):
return Connector(server_id, True).create_guest_connection('OMERO.web')
if request.method == 'POST':
form = ForgottonPasswordForm(data=request.POST.copy())
if form.is_valid():
server_id = form.cleaned_data['server']
try:
conn = getGuestConnection(server_id)
except Exception:
logger.error(traceback.format_exc())
error = "Internal server error, please contact administrator."
if conn is not None:
try:
req = omero.cmd.ResetPasswordRequest(
smart_str(form.cleaned_data['username']),
smart_str(form.cleaned_data['email']))
handle = conn.c.sf.submit(req)
try:
conn._waitOnCmd(handle)
finally:
handle.close()
error = "Password was reset. Check your mailbox."
form = None
except omero.CmdError, exp:
logger.error(exp.err)
try:
error = exp.err.parameters[
exp.err.parameters.keys()[0]]
except:
error = exp
else:
form = ForgottonPasswordForm()
context = {'error': error, 'form': form, 'build_year': build_year,
'omero_version': omero_version}
t = template_loader.get_template(template)
c = Context(request, context)
rsp = t.render(c)
return HttpResponse(rsp)
@login_required()
def index(request, **kwargs):
conn = None
try:
conn = kwargs["conn"]
except:
logger.error(traceback.format_exc())
if conn.isAdmin():
return HttpResponseRedirect(reverse("waexperimenters"))
else:
return HttpResponseRedirect(reverse("wamyaccount"))
@login_required()
def logout(request, **kwargs):
omeroweb.webclient.views.logout(request, **kwargs)
return HttpResponseRedirect(reverse("waindex"))
@login_required(isAdmin=True)
@render_response_admin()
def experimenters(request, conn=None, **kwargs):
template = "webadmin/experimenters.html"
experimenterList = list(conn.getObjects("Experimenter"))
can_modify_user = 'ModifyUser' in conn.getCurrentAdminPrivileges()
context = {'experimenterList': experimenterList,
'can_modify_user': can_modify_user}
context['template'] = template
return context
@login_required(isAdmin=True)
@render_response_admin()
def manage_experimenter(request, action, eid=None, conn=None, **kwargs):
template = "webadmin/experimenter_form.html"
groups = list(conn.getObjects("ExperimenterGroup"))
groups.sort(key=lambda x: x.getName().lower())
if action == 'new':
user_id = conn.getUserId()
user_privileges = conn.getCurrentAdminPrivileges()
# Only Full Admin can set 'Role' of new experimenter
user_full_admin = 'ReadSession' in user_privileges
can_modify_user = 'ModifyUser' in user_privileges
form = ExperimenterForm(
can_edit_role=user_full_admin,
can_modify_user=can_modify_user,
initial={'with_password': True,
'active': True,
'groups': otherGroupsInitialList(groups)})
admin_groups = [
conn.getAdminService().getSecurityRoles().systemGroupId]
context = {'form': form,
'admin_groups': admin_groups,
'can_modify_user': can_modify_user}
elif action == 'create':
if request.method != 'POST':
return HttpResponseRedirect(
reverse(viewname="wamanageexperimenterid", args=["new"]))
else:
name_check = conn.checkOmeName(request.POST.get('omename'))
email_check = conn.checkEmail(request.POST.get('email'))
my_groups = getSelectedGroups(
conn,
request.POST.getlist('other_groups'))
initial = {'with_password': True,
'my_groups': my_groups,
'groups': otherGroupsInitialList(groups)}
form = ExperimenterForm(
initial=initial, data=request.POST.copy(),
name_check=name_check, email_check=email_check)
if form.is_valid():
logger.debug("Create experimenter form:" +
str(form.cleaned_data))
omename = form.cleaned_data['omename']
firstName = form.cleaned_data['first_name']
middleName = form.cleaned_data['middle_name']
lastName = form.cleaned_data['last_name']
email = form.cleaned_data['email']
institution = form.cleaned_data['institution']
role = form.cleaned_data['role']
admin = role in ('administrator', 'restricted_administrator')
active = form.cleaned_data['active']
defaultGroup = form.cleaned_data['default_group']
otherGroups = form.cleaned_data['other_groups']
password = form.cleaned_data['password']
# default group
# if default group was not selected take first from the list.
if defaultGroup is None:
defaultGroup = otherGroups[0]
for g in groups:
if long(defaultGroup) == g.id:
dGroup = g
break
listOfOtherGroups = set()
# rest of groups
for g in groups:
for og in otherGroups:
# remove defaultGroup from otherGroups if contains
if long(og) == long(dGroup.id):
pass
elif long(og) == g.id:
listOfOtherGroups.add(g)
expId = conn.createExperimenter(
omename, firstName, lastName, email, admin, active,
dGroup, listOfOtherGroups, password, middleName,
institution)
# Update 'AdminPrivilege' config roles for user
conn.setConfigRoles(expId, form)
return HttpResponseRedirect(reverse("waexperimenters"))
context = {'form': form}
elif action == 'edit':
experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
prepare_experimenter(conn, eid)
try:
defaultGroupId = defaultGroup.id
except:
defaultGroupId = None
initial = {
'omename': experimenter.omeName,
'first_name': experimenter.firstName,
'middle_name': experimenter.middleName,
'last_name': experimenter.lastName,
'email': experimenter.email,
'institution': experimenter.institution,
'active': experimenter.isActive(),
'default_group': defaultGroupId,
'my_groups': otherGroups,
'other_groups': [g.id for g in otherGroups],
'groups': otherGroupsInitialList(groups)}
# Load 'AdminPrivilege' roles for 'initial'
delete_perms = []
write_perms = []
script_perms = []
privileges = conn.getAdminPrivileges(experimenter.id)
for privilege in privileges:
if privilege in ('DeleteOwned', 'DeleteFile', 'DeleteManagedRepo'):
delete_perms.append(privilege)
elif privilege in ('WriteOwned', 'WriteFile', 'WriteManagedRepo'):
write_perms.append(privilege)
elif privilege in ('WriteScriptRepo', 'DeleteScriptRepo'):
script_perms.append(privilege)
else:
initial[privilege] = True
# if ALL the Delete/Write permissions are found, Delete/Write is True
if set(delete_perms) == \
set(('DeleteOwned', 'DeleteFile', 'DeleteManagedRepo')):
initial['Delete'] = True
if set(write_perms) == \
set(('WriteOwned', 'WriteFile', 'WriteManagedRepo')):
initial['Write'] = True
if set(script_perms) == \
set(('WriteScriptRepo', 'DeleteScriptRepo')):
initial['Script'] = True
role = 'user'
if experimenter.isAdmin():
if 'ReadSession' in privileges:
role = 'administrator'
else:
role = 'restricted_administrator'
initial['role'] = role
        root_id = conn.getAdminService().getSecurityRoles().rootId
user_id = conn.getUserId()
user_privileges = conn.getCurrentAdminPrivileges()
experimenter_root = long(eid) == root_id
experimenter_me = long(eid) == user_id
user_full_admin = 'ReadSession' in user_privileges
can_modify_user = 'ModifyUser' in user_privileges
# Only Full Admin can edit 'Role' of experimenter
can_edit_role = user_full_admin and not (experimenter_me
or experimenter_root)
form = ExperimenterForm(
can_modify_user=can_modify_user,
can_edit_role=can_edit_role,
experimenter_me=experimenter_me,
experimenter_root=experimenter_root,
initial=initial)
password_form = ChangePassword()
admin_groups = [
conn.getAdminService().getSecurityRoles().systemGroupId]
context = {'form': form, 'eid': eid, 'ldapAuth': isLdapUser,
'can_modify_user': can_modify_user,
'password_form': password_form,
'admin_groups': admin_groups}
elif action == 'save':
experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
prepare_experimenter(conn, eid)
if request.method != 'POST':
return HttpResponseRedirect(
reverse(viewname="wamanageexperimenterid",
args=["edit", experimenter.id]))
else:
name_check = conn.checkOmeName(request.POST.get('omename'),
experimenter.omeName)
email_check = conn.checkEmail(request.POST.get('email'),
experimenter.email)
my_groups = getSelectedGroups(
conn,
request.POST.getlist('other_groups'))
initial = {'my_groups': my_groups,
'groups': otherGroupsInitialList(groups)}
form = ExperimenterForm(initial=initial, data=request.POST.copy(),
name_check=name_check,
email_check=email_check)
if form.is_valid():
logger.debug("Update experimenter form:" +
str(form.cleaned_data))
omename = form.cleaned_data['omename']
firstName = form.cleaned_data['first_name']
middleName = form.cleaned_data['middle_name']
lastName = form.cleaned_data['last_name']
email = form.cleaned_data['email']
institution = form.cleaned_data['institution']
role = form.cleaned_data['role']
admin = role in ('administrator', 'restricted_administrator')
active = form.cleaned_data['active']
rootId = conn.getAdminService().getSecurityRoles().rootId
# User can't disable themselves or 'root'
if experimenter.getId() in [conn.getUserId(), rootId]:
# disabled checkbox not in POST: do it manually
active = True
defaultGroup = form.cleaned_data['default_group']
otherGroups = form.cleaned_data['other_groups']
# default group
# if default group was not selected take first from the list.
if defaultGroup is None:
defaultGroup = otherGroups[0]
for g in groups:
if long(defaultGroup) == g.id:
dGroup = g
break
listOfOtherGroups = set()
# rest of groups
for g in groups:
for og in otherGroups:
# remove defaultGroup from otherGroups if contains
if long(og) == long(dGroup.id):
pass
elif long(og) == g.id:
listOfOtherGroups.add(g)
# Update 'AdminPrivilege' config roles for user
# If role is empty, roles section of form is disabled - ignore
# since disabled privileges will not show up in POST data
if role != '':
conn.setConfigRoles(long(eid), form)
conn.updateExperimenter(
experimenter, omename, firstName, lastName, email, admin,
active, dGroup, listOfOtherGroups, middleName,
institution)
return HttpResponseRedirect(reverse("waexperimenters"))
context = {'form': form, 'eid': eid, 'ldapAuth': isLdapUser}
# elif action == "delete":
# conn.deleteExperimenter()
# return HttpResponseRedirect(reverse("waexperimenters"))
else:
return HttpResponseRedirect(reverse("waexperimenters"))
context['template'] = template
return context
@login_required()
@render_response_admin()
def manage_password(request, eid, conn=None, **kwargs):
template = "webadmin/password.html"
error = None
if request.method == 'POST':
password_form = ChangePassword(data=request.POST.copy())
if not password_form.is_valid():
error = password_form.errors
else:
old_password = password_form.cleaned_data['old_password']
password = password_form.cleaned_data['password']
# if we're trying to change our own password...
if conn.getEventContext().userId == int(eid):
try:
conn.changeMyPassword(password, old_password)
except Exception, x:
error = x.message # E.g. old_password not valid
elif conn.isAdmin():
exp = conn.getObject("Experimenter", eid)
try:
conn.changeUserPassword(exp.omeName, password,
old_password)
except Exception, x:
error = x.message
else:
raise AttributeError("Can't change another user's password"
" unless you are an Admin")
context = {'error': error, 'password_form': password_form, 'eid': eid}
context['template'] = template
return context
@login_required(isAdmin=True)
@render_response_admin()
def groups(request, conn=None, **kwargs):
template = "webadmin/groups.html"
groups = conn.getObjects("ExperimenterGroup")
can_modify_group = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
context = {'groups': groups, 'can_modify_group': can_modify_group}
context['template'] = template
return context
@login_required(isAdmin=True)
@render_response_admin()
def manage_group(request, action, gid=None, conn=None, **kwargs):
template = "webadmin/group_form.html"
msgs = []
experimenters = list(conn.getObjects("Experimenter"))
experimenters.sort(key=lambda x: x.getLastName().lower())
def getEditFormContext():
group = conn.getObject("ExperimenterGroup", gid)
ownerIds = [e.id for e in group.getOwners()]
memberIds = [m.id for m in group.getMembers()]
permissions = getActualPermissions(group)
can_modify_group = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
system_groups = [
conn.getAdminService().getSecurityRoles().systemGroupId,
conn.getAdminService().getSecurityRoles().userGroupId,
conn.getAdminService().getSecurityRoles().guestGroupId]
group_is_current_or_system = (
(conn.getEventContext().groupId == long(gid)) or
(long(gid) in system_groups))
form = GroupForm(initial={
'name': group.name,
'description': group.description,
'permissions': permissions,
'owners': ownerIds,
'members': memberIds,
'experimenters': experimenters},
can_modify_group=can_modify_group,
group_is_current_or_system=group_is_current_or_system)
admins = [conn.getAdminService().getSecurityRoles().rootId]
if long(gid) in system_groups:
# prevent removing 'root' or yourself from group if it's a system
# group
admins.append(conn.getUserId())
return {'form': form, 'gid': gid, 'permissions': permissions,
'admins': admins, 'can_modify_group': can_modify_group}
if action == 'new':
can_modify_group = 'ModifyGroup' in conn.getCurrentAdminPrivileges()
form = GroupForm(initial={'experimenters': experimenters,
'permissions': 0})
context = {'form': form, 'can_modify_group': can_modify_group}
elif action == 'create':
if request.method != 'POST':
return HttpResponseRedirect(reverse(viewname="wamanagegroupid",
args=["new"]))
else:
name_check = conn.checkGroupName(request.POST.get('name'))
form = GroupForm(initial={'experimenters': experimenters},
data=request.POST.copy(), name_check=name_check)
if form.is_valid():
logger.debug("Create group form:" + str(form.cleaned_data))
name = form.cleaned_data['name']
description = form.cleaned_data['description']
owners = form.cleaned_data['owners']
members = form.cleaned_data['members']
permissions = form.cleaned_data['permissions']
perm = setActualPermissions(permissions)
listOfOwners = getSelectedExperimenters(conn, owners)
gid = conn.createGroup(name, perm, listOfOwners, description)
new_members = getSelectedExperimenters(
conn, mergeLists(members, owners))
group = conn.getObject("ExperimenterGroup", gid)
conn.setMembersOfGroup(group, new_members)
return HttpResponseRedirect(reverse("wagroups"))
context = {'form': form}
elif action == 'edit':
context = getEditFormContext()
elif action == 'save':
group = conn.getObject("ExperimenterGroup", gid)
if request.method != 'POST':
return HttpResponseRedirect(reverse(viewname="wamanagegroupid",
args=["edit", group.id]))
else:
permissions = getActualPermissions(group)
name_check = conn.checkGroupName(request.POST.get('name'),
group.name)
form = GroupForm(initial={'experimenters': experimenters},
data=request.POST.copy(), name_check=name_check)
context = {'form': form, 'gid': gid, 'permissions': permissions}
if form.is_valid():
logger.debug("Update group form:" + str(form.cleaned_data))
name = form.cleaned_data['name']
description = form.cleaned_data['description']
owners = form.cleaned_data['owners']
permissions = form.cleaned_data['permissions']
members = form.cleaned_data['members']
listOfOwners = getSelectedExperimenters(conn, owners)
                if getActualPermissions(group) != int(permissions):
perm = setActualPermissions(permissions)
else:
perm = None
context = getEditFormContext()
context['ome'] = {}
try:
msgs = conn.updateGroup(group, name, perm, listOfOwners,
description)
except omero.SecurityViolation, ex:
if ex.message.startswith('Cannot change permissions'):
msgs.append("Downgrade to private group not currently"
" possible")
else:
msgs.append(ex.message)
new_members = getSelectedExperimenters(
conn, mergeLists(members, owners))
removalFails = conn.setMembersOfGroup(group, new_members)
if len(removalFails) == 0 and len(msgs) == 0:
return HttpResponseRedirect(reverse("wagroups"))
# If we've failed to remove user...
# prepare error messages
for e in removalFails:
url = reverse("wamanageexperimenterid",
args=["edit", e.id])
msgs.append("Can't remove user <a href='%s'>%s</a> from"
" their only group" % (url, e.getFullName()))
# refresh the form and add messages
context = getEditFormContext()
else:
return HttpResponseRedirect(reverse("wagroups"))
context['userId'] = conn.getEventContext().userId
context['template'] = template
if len(msgs) > 0:
context['ome'] = {}
context['ome']['message'] = "<br>".join(msgs)
return context
@login_required(isGroupOwner=True)
@render_response_admin()
def manage_group_owner(request, action, gid, conn=None, **kwargs):
template = "webadmin/group_form_owner.html"
group = conn.getObject("ExperimenterGroup", gid)
experimenters = list(conn.getObjects("Experimenter"))
userId = conn.getEventContext().userId
def getEditFormContext():
group = conn.getObject("ExperimenterGroup", gid)
memberIds = [m.id for m in group.getMembers()]
ownerIds = [e.id for e in group.getOwners()]
permissions = getActualPermissions(group)
form = GroupOwnerForm(initial={'permissions': permissions,
'members': memberIds,
'owners': ownerIds,
'experimenters': experimenters})
context = {'form': form, 'gid': gid, 'permissions': permissions,
"group": group}
experimenterDefaultIds = list()
for e in experimenters:
            if (e.id != userId and e.getDefaultGroup() is not None and
e.getDefaultGroup().id == group.id):
experimenterDefaultIds.append(str(e.id))
context['experimenterDefaultGroups'] = ",".join(experimenterDefaultIds)
context['ownerIds'] = (",".join(str(x) for x in ownerIds
if x != userId))
return context
msgs = []
if action == 'edit':
context = getEditFormContext()
elif action == "save":
if request.method != 'POST':
return HttpResponseRedirect(
reverse(viewname="wamanagegroupownerid",
args=["edit", group.id]))
else:
form = GroupOwnerForm(data=request.POST.copy(),
initial={'experimenters': experimenters})
if form.is_valid():
members = form.cleaned_data['members']
owners = form.cleaned_data['owners']
permissions = form.cleaned_data['permissions']
listOfOwners = getSelectedExperimenters(conn, owners)
conn.setOwnersOfGroup(group, listOfOwners)
new_members = getSelectedExperimenters(conn, members)
removalFails = conn.setMembersOfGroup(group, new_members)
permissions = int(permissions)
if getActualPermissions(group) != permissions:
perm = setActualPermissions(permissions)
try:
msg = conn.updatePermissions(group, perm)
if msg is not None:
msgs.append(msg)
except omero.SecurityViolation, ex:
if ex.message.startswith('Cannot change permissions'):
msgs.append("Downgrade to private group not"
" currently possible")
else:
msgs.append(ex.message)
if len(removalFails) == 0 and len(msgs) == 0:
return HttpResponseRedirect(reverse("wamyaccount"))
# If we've failed to remove user...
# prepare error messages
for e in removalFails:
url = reverse("wamanageexperimenterid",
args=["edit", e.id])
msgs.append("Can't remove user <a href='%s'>%s</a> from"
" their only group" % (url, e.getFullName()))
# refresh the form and add messages
context = getEditFormContext()
else:
context = {'gid': gid, 'form': form}
else:
return HttpResponseRedirect(reverse("wamyaccount"))
context['userId'] = userId
context['template'] = template
if len(msgs) > 0:
context['ome'] = {}
context['ome']['message'] = "<br>".join(msgs)
return context
@login_required()
@render_response_admin()
def my_account(request, action=None, conn=None, **kwargs):
template = "webadmin/myaccount.html"
experimenter, defaultGroup, otherGroups, isLdapUser, hasAvatar = \
prepare_experimenter(conn)
try:
defaultGroupId = defaultGroup.id
except:
defaultGroupId = None
ownedGroups = ownedGroupsInitial(conn)
password_form = ChangePassword()
form = None
if action == "save":
if request.method != 'POST':
return HttpResponseRedirect(reverse(viewname="wamyaccount",
args=["edit"]))
else:
email_check = conn.checkEmail(request.POST.get('email'),
experimenter.email)
form = MyAccountForm(data=request.POST.copy(),
initial={'groups': otherGroups},
email_check=email_check)
if form.is_valid():
firstName = form.cleaned_data['first_name']
middleName = form.cleaned_data['middle_name']
lastName = form.cleaned_data['last_name']
email = form.cleaned_data['email']
institution = form.cleaned_data['institution']
defaultGroupId = form.cleaned_data['default_group']
conn.updateMyAccount(
experimenter, firstName, lastName, email, defaultGroupId,
middleName, institution)
return HttpResponseRedirect(reverse("wamyaccount"))
else:
form = MyAccountForm(initial={
'omename': experimenter.omeName,
'first_name': experimenter.firstName,
'middle_name': experimenter.middleName,
'last_name': experimenter.lastName,
'email': experimenter.email,
'institution': experimenter.institution,
'default_group': defaultGroupId,
'groups': otherGroups})
context = {'form': form, 'ldapAuth': isLdapUser,
'experimenter': experimenter, 'ownedGroups': ownedGroups,
'password_form': password_form}
context['freeSpace'] = conn.getFreeSpace()
context['template'] = template
return context
@login_required()
def myphoto(request, conn=None, **kwargs):
photo = conn.getExperimenterPhoto()
return HttpJPEGResponse(photo)
@login_required()
@render_response_admin()
def manage_avatar(request, action=None, conn=None, **kwargs):
template = "webadmin/avatar.html"
edit_mode = False
photo_size = None
form_file = UploadPhotoForm()
if action == "upload":
if request.method == 'POST':
form_file = UploadPhotoForm(request.POST, request.FILES)
if form_file.is_valid():
attach_photo(conn, request.FILES['photo'])
return HttpResponseRedirect(
reverse(viewname="wamanageavatar",
args=[conn.getEventContext().userId]))
elif action == "crop":
x1 = long(request.POST.get('x1'))
x2 = long(request.POST.get('x2'))
y1 = long(request.POST.get('y1'))
y2 = long(request.POST.get('y2'))
box = (x1, y1, x2, y2)
conn.cropExperimenterPhoto(box)
return HttpResponseRedirect(reverse("wamyaccount"))
elif action == "editphoto":
photo_size = conn.getExperimenterPhotoSize()
if photo_size is not None:
edit_mode = True
elif action == "deletephoto":
conn.deleteExperimenterPhoto()
return HttpResponseRedirect(reverse("wamyaccount"))
photo_size = conn.getExperimenterPhotoSize()
context = {'form_file': form_file, 'edit_mode': edit_mode,
'photo_size': photo_size}
context['template'] = template
return context
@login_required()
@render_response_admin()
def stats(request, conn=None, **kwargs):
template = "webadmin/statistics.html"
freeSpace = conn.getFreeSpace()
context = {'template': template, 'freeSpace': freeSpace}
return context<|fim▁hole|>
# @login_required()
# def load_drivespace(request, conn=None, **kwargs):
# offset = request.POST.get('offset', 0)
# rv = usersData(conn, offset)
# return HttpJsonResponse(rv)
@login_required(isAdmin=True)
@render_response_admin()
def email(request, conn=None, **kwargs):
"""
View to gather recipients, subject and message for sending email
announcements
"""
# Check that the appropriate web settings are available
    if (not request.session.get('server_settings', {})
            .get('email', False)):
return {'template': 'webadmin/noemail.html'}
context = {'template': 'webadmin/email.html'}
# Get experimenters and groups.
experimenter_list = list(conn.getObjects("Experimenter"))
group_list = list(conn.getObjects("ExperimenterGroup"))
# Sort experimenters and groups
experimenter_list.sort(key=lambda x: x.getFirstName().lower())
group_list.sort(key=lambda x: x.getName().lower())
if request.method == 'POST': # If the form has been submitted...
        # bind the POSTed data to the EmailForm defined in forms.py
form = EmailForm(experimenter_list, group_list, conn, request,
data=request.POST.copy())
if form.is_valid(): # All validation rules pass
subject = form.cleaned_data['subject']
message = form.cleaned_data['message']
experimenters = form.cleaned_data['experimenters']
groups = form.cleaned_data['groups']
everyone = form.cleaned_data['everyone']
inactive = form.cleaned_data['inactive']
req = omero.cmd.SendEmailRequest(subject=subject, body=message,
groupIds=groups,
userIds=experimenters,
everyone=everyone,
inactive=inactive)
handle = conn.c.sf.submit(req)
if handle is not None:
request.session.modified = True
request.session['callback'][str(handle)] = {
'job_type': 'send_email',
'status': 'in progress', 'error': 0,
'start_time': datetime.datetime.now()}
form = EmailForm(experimenter_list, group_list, conn, request)
context['non_field_errors'] = ("Email sent."
"Check status in activities.")
else:
context['non_field_errors'] = "Email wasn't sent."
else:
form = EmailForm(experimenter_list, group_list, conn, request)
context['form'] = form
return context
# Problem where render_response_admin was not populating required
# admin details:
# Explanation is that the CBV FormView returns an http response so the
# decorator render_response_admin simply bails out and returns this
# I think maybe the render_response decorator should not be adding context
# because it fails in situations like this, better to insert that context
# using a template tag when required<|fim▁end|> | |
<|file_name|>serializer.test.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import { shallow, mount } from 'enzyme'
import { Editor, EditorState, convertFromRaw } from 'draft-js'
import { Serlizer } from './serializer'
import { getContentState } from './testUtils'
describe('Editor Serializer', () => {
it('serialize ContentState should get RawContentState', () => {
    const rawState: any = getContentState('hello world')
const editor = mount(
<Editor editorState={
EditorState.createWithContent(convertFromRaw(rawState))
}
onChange={ () => { } }
>
</Editor>
)
const contentState = editor.prop('editorState').getCurrentContent()
const s = Serlizer.serialize(contentState)
expect(JSON.stringify(rawState)).toEqual(JSON.stringify(s))
})
<|fim▁hole|> const editor = mount(
<Editor
onChange={ () => { } }
editorState={ EditorState.createWithContent(c) }>
</Editor >
)
expect(editor.text()).toEqual(text)
})
})<|fim▁end|> | it('<Editor/> should get right textContent', () => {
const text = '你好啊 今天开心吗,BUG又少了吗'
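    // the non-ASCII sample above (roughly: "Hello, are you happy today? Are there fewer bugs?") exercises unicode round-tripping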
const s: any = getContentState(text)
const c = Serlizer.deserialize(s) |
<|file_name|>ABC083B.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
)
func main() {
sc := bufio.NewScanner(os.Stdin)
sc.Split(bufio.ScanWords)
n := nextInt(sc)<|fim▁hole|> answer := 0
for i := 1; i <= n; i++ {
sum := 0
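		// digit sum of i: format it in base 10 and convert each digit rune back to an int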
for _, s := range fmt.Sprintf("%d", i) {
x, _ := strconv.Atoi(string(s))
sum = sum + x
}
if a <= sum && sum <= b {
answer = answer + i
}
}
fmt.Println(answer)
}
// ----------
func nextString(sc *bufio.Scanner) string {
sc.Scan()
return sc.Text()
}
func nextNumber(sc *bufio.Scanner) float64 {
sc.Scan()
f, err := strconv.ParseFloat(sc.Text(), 32)
if err != nil {
panic(err)
}
return f
}
func nextInt(sc *bufio.Scanner) int {
sc.Scan()
n, err := strconv.Atoi(sc.Text())
if err != nil {
panic(err)
}
return n
}
func printArray(xs []int) {
fmt.Println(strings.Trim(fmt.Sprint(xs), "[]"))
}
func debugPrintf(format string, a ...interface{}) {
fmt.Fprintf(os.Stderr, format, a...)
}<|fim▁end|> | a := nextInt(sc)
b := nextInt(sc)
|
<|file_name|>hash.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::hash::SipHasher;
use core::hash::Hasher;
use core::hash::Hash;
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {<|fim▁hole|> // }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
type T = i32;
type H = SipHasher; // H: hash::Hasher
#[test]
fn hash_test1() {
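        // SipHasher::new() uses fixed default keys, so these digests are deterministic across runs.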
let mut state: H = <H>::new();
let finish: u64 = state.finish();
assert_eq!(finish, 0x1e924b9d737700d7);
let array: [T; 10] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9
];
array.hash::<H>(&mut state);
let finish: u64 = state.finish();
assert_eq!(finish, 0x118e42795349089a);
}
}<|fim▁end|> | // Ord::cmp(&&self[..], &&other[..])
// } |
<|file_name|>_context.py<|end_file_name|><|fim▁begin|><|fim▁hole|># This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
class TaxingContext(object):
def __init__(self, customer_tax_group=None, customer_tax_number=None, location=None):
self.customer_tax_group = customer_tax_group
self.customer_tax_number = customer_tax_number
self.country_code = getattr(location, "country_code", None) or getattr(location, "country", None)
self.region_code = getattr(location, "region_code", None)
self.postal_code = getattr(location, "postal_code", None)
self.location = location<|fim▁end|> | # This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
# |
<|file_name|>CoreConfigParser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import with_statement
from os.path import exists
from os.path import join
CONF_VERSION = 1
########################################################################
class ConfigParser:
#----------------------------------------------------------------------
def __init__(self, configdir):
"""Constructor"""
self.configdir = configdir
self.config = {}
if self.checkVersion():
self.readConfig()
#----------------------------------------------------------------------
def checkVersion(self):
if not exists(join(self.configdir, "pyload.conf")):
return False
f = open(join(self.configdir, "pyload.conf"), "rb")
v = f.readline()
f.close()
v = v[v.find(":")+1:].strip()
if int(v) < CONF_VERSION:
return False
return True
#----------------------------------------------------------------------
def readConfig(self):
"""reads the config file"""
self.config = self.parseConfig(join(self.configdir, "pyload.conf"))
#----------------------------------------------------------------------
def parseConfig(self, config):
"""parses a given configfile"""
f = open(config)
config = f.read()
config = config.split("\n")[1:]
conf = {}
section, option, value, typ, desc = "","","","",""
listmode = False
for line in config:
line = line.rpartition("#") # removes comments
if line[1]:
line = line[0]
else:
line = line[2]
line = line.strip()
try:
if line == "":
continue
elif line.endswith(":"):<|fim▁hole|> desc = desc.replace('"', "").strip()
conf[section] = { "desc" : desc }
else:
if listmode:
if line.endswith("]"):
listmode = False
line = line.replace("]","")
value += [self.cast(typ, x.strip()) for x in line.split(",") if x]
if not listmode:
conf[section][option] = { "desc" : desc,
"type" : typ,
"value" : value}
else:
content, none, value = line.partition("=")
content, none, desc = content.partition(":")
desc = desc.replace('"', "").strip()
typ, option = content.split()
value = value.strip()
if value.startswith("["):
if value.endswith("]"):
listmode = False
value = value[:-1]
else:
listmode = True
value = [self.cast(typ, x.strip()) for x in value[1:].split(",") if x]
else:
value = self.cast(typ, value)
if not listmode:
conf[section][option] = { "desc" : desc,
"type" : typ,
"value" : value}
except:
pass
f.close()
return conf
#----------------------------------------------------------------------
def cast(self, typ, value):
"""cast value to given format"""
if type(value) not in (str, unicode):
return value
if typ == "int":
return int(value)
elif typ == "bool":
return True if value.lower() in ("1","true", "on", "an","yes") else False
else:
return value
#----------------------------------------------------------------------
def get(self, section, option):
"""get value"""
return self.config[section][option]["value"]
#----------------------------------------------------------------------
def __getitem__(self, section):
"""provides dictonary like access: c['section']['option']"""
return Section(self, section)
########################################################################
class Section:
"""provides dictionary like access for configparser"""
#----------------------------------------------------------------------
def __init__(self, parser, section):
"""Constructor"""
self.parser = parser
self.section = section
#----------------------------------------------------------------------
def __getitem__(self, item):
"""getitem"""
return self.parser.get(self.section, item)<|fim▁end|> | section, none, desc = line[:-1].partition('-')
section = section.strip() |
<|file_name|>0005_cephpool_percent_used.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals<|fim▁hole|>from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ceph', '0004_rm_models_based_on_storageobj'),
]
operations = [
migrations.AddField(
model_name='cephpool',
name='percent_used',
field=models.FloatField(default=None, editable=False, blank=True),
preserve_default=True,
),
]<|fim▁end|> | |
<|file_name|>outputDialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
The output tab for the main toolbar
@author: Chris Scott
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import shutil
import subprocess
import copy
import logging
import math
import functools
import datetime
import time
import numpy as np
from PySide2 import QtGui, QtCore, QtWidgets
from PIL import Image
from ..visutils import utilities
from ..visutils import threading_vis
from ..visutils.utilities import iconPath
from . import genericForm
from ..plotting import rdf
from ..algebra import _vectors as vectors_c
from ..plotting import plotDialog
from . import utils
import six
from six.moves import range
class OutputDialog(QtWidgets.QDialog):
def __init__(self, parent, mainWindow, width, index):
super(OutputDialog, self).__init__(parent)
self.parent = parent
self.rendererWindow = parent
self.mainToolbar = parent
self.mainWindow = mainWindow
self.width = width
self.setWindowTitle("Output - Render window %d" % index)
self.setModal(0)
# size
self.resize(QtCore.QSize(350, 600))
# layout
outputTabLayout = QtWidgets.QVBoxLayout(self)
outputTabLayout.setContentsMargins(0, 0, 0, 0)
outputTabLayout.setSpacing(0)
outputTabLayout.setAlignment(QtCore.Qt.AlignTop)
# add tab bar
self.outputTypeTabBar = QtWidgets.QTabWidget(self)
self.outputTypeTabBar.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
# add tabs to tab bar
# image tab
imageTabWidget = QtWidgets.QWidget()
imageTabLayout = QtWidgets.QVBoxLayout(imageTabWidget)
imageTabLayout.setContentsMargins(0, 0, 0, 0)
self.imageTab = ImageTab(self, self.mainWindow, self.width)
imageTabLayout.addWidget(self.imageTab)
self.outputTypeTabBar.addTab(imageTabWidget, "Image")
# file tab
fileTabWidget = QtWidgets.QWidget()
fileTabLayout = QtWidgets.QVBoxLayout(fileTabWidget)
fileTabLayout.setContentsMargins(0, 0, 0, 0)
self.fileTab = FileTab(self, self.mainWindow, self.width)
fileTabLayout.addWidget(self.fileTab)
self.outputTypeTabBar.addTab(fileTabWidget, "File")
# plot tab
self.plotTab = PlotTab(self.mainWindow, self.rendererWindow, parent=self)
self.outputTypeTabBar.addTab(self.plotTab, "Plot")
# add tab bar to layout
outputTabLayout.addWidget(self.outputTypeTabBar)
class ScalarsHistogramOptionsForm(genericForm.GenericForm):
"""
Main options form for scalars histograms
"""
def __init__(self, parent, mainWindow, rendererWindow):
super(ScalarsHistogramOptionsForm, self).__init__(parent, 0, "Histogram plot options")
self.mainWindow = mainWindow
self.rendererWindow = rendererWindow
# current number of scalar plots
self.numScalarsPlots = 0
# current plots
self.currentPlots = {}
# add combo box
self.scalarsCombo = QtWidgets.QComboBox()
self.scalarsCombo.currentIndexChanged[int].connect(self.scalarsComboChanged)
self.newRow().addWidget(self.scalarsCombo)
# add stacked widget
self.stackedWidget = QtWidgets.QStackedWidget()
self.newRow().addWidget(self.stackedWidget)
self.logger = logging.getLogger(__name__ + ".ScalarsHistogramOptionsForm")
def scalarsComboChanged(self, index):
"""
Scalars combo changed
"""
self.stackedWidget.setCurrentIndex(index)
def removeScalarPlotOptions(self):
"""
Remove scalar plot options
"""
self.logger.debug("Removing scalar plot options")
for scalarsID in list(self.currentPlots.keys()):
self.logger.debug(" Removing: '%s'", scalarsID)
form = self.currentPlots.pop(scalarsID)
self.stackedWidget.removeWidget(form)
form.deleteLater()
self.scalarsCombo.removeItem(0)
self.numScalarsPlots = 0
def addAtomPropertyPlotOptions(self):
"""
Add atom property plot options
"""
self.logger.debug("Adding atom property plot options")
# get current pipeline page
pp = self.rendererWindow.getCurrentPipelinePage()
ppindex = pp.pipelineIndex
lattice = pp.inputState
# add charge plot option
scalarsArray = lattice.charge
if np.min(scalarsArray) == 0 == np.max(scalarsArray):
self.logger.debug(" Skipping charge: all zero")
else:
self.logger.debug(" Adding charge plot")
scalarsName = "Charge"
scalarsID = "%s (%d)" % (scalarsName, ppindex)
self.addScalarPlotOptions(scalarsID, scalarsName, scalarsArray)
def addScalarPlotOptions(self, scalarsID, scalarsName, scalarsArray):
"""
Add plot for scalar 'name'
"""
# don't add duplicates (should never happen anyway)
if scalarsID in self.currentPlots:
return
# don't add empty arrays
if not len(scalarsArray):
return
self.logger.debug("Adding scalar plot option: '%s'", scalarsID)
# create form
form = GenericHistogramPlotForm(self, scalarsID, scalarsName, scalarsArray)
# add to stacked widget
self.stackedWidget.addWidget(form)
# add to combo box<|fim▁hole|>
# store in dict
self.currentPlots[scalarsID] = form
# number of plots
self.numScalarsPlots += 1
def refreshScalarPlotOptions(self):
"""
Refresh plot options
* Called after pipeline page has run filters/single filter has run
* loops over all filter lists under pipeline page, adding plots for all scalars
* plots named after pipeline index and filter list index
* also called when renderWindow pipeline index changes
* also called when filter lists are cleared, etc...?
"""
self.logger.debug("Refreshing plot options")
# remove old options
self.removeScalarPlotOptions()
# get current pipeline page
pp = self.rendererWindow.getCurrentPipelinePage()
ppindex = pp.pipelineIndex
# get filter lists
filterLists = pp.filterLists
# first add atom properties (KE, PE, charge)
self.addAtomPropertyPlotOptions()
# loop over filter lists, adding scalars
self.logger.debug("Looping over filter lists (%d)", len(filterLists))
for filterList in filterLists:
# make unique name for pipeline page/filter list combo
findex = filterList.tab
filterListID = "%d-%d" % (ppindex, findex)
self.logger.debug("Filter list %d; id '%s'", filterList.tab, filterListID)
# loop over scalars in scalarsDict on filterer
for scalarsName, scalarsArray in six.iteritems(filterList.filterer.scalarsDict):
# make unique id
scalarsID = "%s (%s)" % (scalarsName, filterListID)
# add
self.addScalarPlotOptions(scalarsID, scalarsName, scalarsArray)
# loop over scalars in latticeScalarsDict on filterer
latticeScalarKeys = list(filterList.pipelinePage.inputState.scalarsDict.keys())
for key in latticeScalarKeys:
if key in filterList.filterer.latticeScalarsDict:
scalarsArray = filterList.filterer.latticeScalarsDict[key]
self.logger.debug("Using Filterer scalars for '%s'", key)
else:
scalarsArray = filterList.pipelinePage.inputState.scalarsDict[key]
self.logger.debug("Using Lattice scalars for '%s'", key)
# make unique id
scalarsName = key
scalarsID = "%s (%s)" % (scalarsName, filterListID)
# add
self.addScalarPlotOptions(scalarsID, scalarsName, scalarsArray)
# add cluster size/volume distributions too
if len(filterList.filterer.clusterList):
clusterSizes = []
clusterVolumes = []
haveVolumes = True
for c in filterList.filterer.clusterList:
# cluster sizes
clusterSizes.append(len(c))
# cluster volumes
vol = c.getVolume()
if vol is not None:
clusterVolumes.append(vol)
else:
haveVolumes = False
# plot cluster size
scalarsID = "Cluster size (%s)" % filterListID
self.addScalarPlotOptions(scalarsID, "Cluster size", np.asarray(clusterSizes, dtype=np.float64))
if haveVolumes:
# plot volumes
scalarsID = "Cluster volume (%s)" % filterListID
self.addScalarPlotOptions(scalarsID, "Cluster volume", np.asarray(clusterVolumes, dtype=np.float64))
# hide if no plots, otherwise show
if self.numScalarsPlots > 0:
self.show()
else:
self.hide()
class PlotTab(QtWidgets.QWidget):
"""
Plot tab
"""
def __init__(self, mainWindow, rendererWindow, parent=None):
super(PlotTab, self).__init__(parent)
self.mainWindow = mainWindow
self.rendererWindow = rendererWindow
# layout
self.layout = QtWidgets.QVBoxLayout(self)
self.layout.setAlignment(QtCore.Qt.AlignTop)
self.layout.setSpacing(0)
# rdf
row = self.newRow()
self.rdfForm = RDFForm(self, self.mainWindow)
row.addWidget(self.rdfForm)
# scalars histograms
self.scalarsForm = ScalarsHistogramOptionsForm(self, mainWindow, rendererWindow)
row = self.newRow()
row.addWidget(self.scalarsForm)
self.layout.addStretch(1)
# logging
self.logger = logging.getLogger(__name__ + ".PlotTab")
def newRow(self):
"""
New row
"""
row = genericForm.FormRow()
self.layout.addWidget(row)
return row
class GenericHistogramPlotForm(genericForm.GenericForm):
"""
Plot options for a histogram of scalar values
"""
def __init__(self, parent, scalarsID, scalarsName, scalarsArray):
super(GenericHistogramPlotForm, self).__init__(parent, 0, "%s plot options" % scalarsID)
self.parent = parent
self.scalarsID = scalarsID
self.scalarsName = scalarsName
self.scalarsArray = scalarsArray
self.logger = logging.getLogger(__name__ + ".GenericHistogramPlotForm")
# scalar stats
self.scalarMin = np.min(scalarsArray)
self.scalarMax = np.max(scalarsArray)
self.scalarMean = np.mean(scalarsArray)
self.scalarSTD = np.std(scalarsArray)
self.scalarSE = self.scalarSTD / math.sqrt(len(scalarsArray))
# default
self.useNumBins = True
self.numBins = 10
self.binWidth = 1.0
self.showAsFraction = False
# stats labels
row = self.newRow()
row.addWidget(QtWidgets.QLabel("Min: %f" % self.scalarMin))
row = self.newRow()
row.addWidget(QtWidgets.QLabel("Max: %f" % self.scalarMax))
row = self.newRow()
row.addWidget(QtWidgets.QLabel("Mean: %f" % self.scalarMean))
row = self.newRow()
row.addWidget(QtWidgets.QLabel("STD: %f; SE: %f" % (self.scalarSTD, self.scalarSE)))
# num bins/bin width combo
binCombo = QtWidgets.QComboBox()
binCombo.addItem("Number of bins:")
binCombo.addItem("Bin width:")
binCombo.currentIndexChanged[int].connect(self.binComboChanged)
# bin stack
self.binStack = QtWidgets.QStackedWidget()
# number of bins spin
numBinsSpin = QtWidgets.QSpinBox()
numBinsSpin.setMinimum(2)
numBinsSpin.setMaximum(999)
numBinsSpin.setSingleStep(1)
numBinsSpin.setValue(self.numBins)
numBinsSpin.valueChanged.connect(self.numBinsChanged)
self.binStack.addWidget(numBinsSpin)
# bin width spin
binWidthSpin = QtWidgets.QDoubleSpinBox()
binWidthSpin.setMinimum(0.01)
binWidthSpin.setMaximum(99.99)
binWidthSpin.setSingleStep(0.1)
binWidthSpin.setValue(self.binWidth)
binWidthSpin.valueChanged.connect(self.binWidthChanged)
self.binStack.addWidget(binWidthSpin)
binCombo.setCurrentIndex(1)
# row
row = self.newRow()
row.addWidget(binCombo)
row.addWidget(self.binStack)
# show as fraction option
showAsFractionCheck = QtWidgets.QCheckBox("Show as fraction")
showAsFractionCheck.setCheckState(QtCore.Qt.Unchecked)
showAsFractionCheck.stateChanged.connect(self.showAsFractionChanged)
row = self.newRow()
row.addWidget(showAsFractionCheck)
# plot button
plotButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/office-chart-bar.png")), "Plot")
plotButton.clicked.connect(self.makePlot)
row = self.newRow()
row.addWidget(plotButton)
# show
self.show()
def binWidthChanged(self, val):
"""
Bin width changed
"""
self.binWidth = val
def binComboChanged(self, index):
"""
Bin combo changed
"""
if index == 0:
self.useNumBins = True
self.binStack.setCurrentIndex(index)
elif index == 1:
self.useNumBins = False
self.binStack.setCurrentIndex(index)
else:
self.logger.error("Bin combo index error (%d)", index)
def showAsFractionChanged(self, checkState):
"""
Show as fraction changed
"""
if checkState == QtCore.Qt.Unchecked:
self.showAsFraction = False
else:
self.showAsFraction = True
def numBinsChanged(self, val):
"""
Number of bins changed
"""
self.numBins = val
def makePlot(self):
"""
Do the plot
"""
self.logger.debug("Plotting '%s'", self.scalarsID)
if self.scalarMax == self.scalarMin:
self.logger.error("Max val == min val; not plotting histogram")
return
scalars = self.scalarsArray
minVal = self.scalarMin
maxVal = self.scalarMax
# number of bins
if self.useNumBins:
numBins = self.numBins
else:
binWidth = self.binWidth
# min
tmp = math.floor(minVal / binWidth)
assert tmp * binWidth <= minVal and (tmp + 1) * binWidth > minVal
minVal = tmp * binWidth
# max
maxVal = minVal
while maxVal < self.scalarMax:
maxVal += binWidth
# num bins
numBins = math.ceil((maxVal - minVal) / binWidth)
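            # e.g. (illustrative): binWidth=0.5 with scalar range [1.3, 2.4]
            # gives minVal -> 1.0, maxVal -> 2.5 and numBins -> 3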
# settings dict
settingsDict = {}
settingsDict["xlabel"] = self.scalarsName
# make plot dialog
if self.showAsFraction:
# compute histogram
hist, binEdges = np.histogram(scalars, numBins, range=(minVal, maxVal))
# make fraction
fracHist = hist / float(len(scalars))
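            # e.g. counts [2, 5, 3] over 10 samples -> fractions [0.2, 0.5, 0.3]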
# bin width
binWidth = (maxVal - minVal) / numBins
# y label
settingsDict["ylabel"] = "Fraction"
# bar plot
dlg = plotDialog.PlotDialog(self, self.parent.mainWindow, "%s histogram" % self.scalarsID, "bar",
(binEdges[:-1], fracHist), {"width": binWidth}, settingsDict=settingsDict)
else:
# y label
settingsDict["ylabel"] = "Number"
# histogram plot
dlg = plotDialog.PlotDialog(self, self.parent.mainWindow, "%s histogram" % self.scalarsID, "hist",
(scalars, numBins), {"range": (minVal, maxVal)}, settingsDict=settingsDict)
# show dialog
dlg.show()
class RDFForm(genericForm.GenericForm):
"""
RDF output form.
"""
def __init__(self, parent, mainWindow):
super(RDFForm, self).__init__(parent, 0, "RDF plot options")
self.parent = parent
self.mainWindow = mainWindow
self.rendererWindow = self.parent.rendererWindow
self.logger = logging.getLogger(__name__ + ".RDFForm")
# defaults
self.spec1 = "ALL"
self.spec2 = "ALL"
self.binMin = 2.0
self.binMax = 10.0
self.binWidth = 0.1
# bond type
label = QtWidgets.QLabel("Bond type:")
row = self.newRow()
row.addWidget(label)
self.spec1Combo = QtWidgets.QComboBox()
self.spec1Combo.addItem("ALL")
self.spec1Combo.currentIndexChanged[str].connect(self.spec1Changed)
row.addWidget(self.spec1Combo)
label = QtWidgets.QLabel(" - ")
row.addWidget(label)
self.spec2Combo = QtWidgets.QComboBox()
self.spec2Combo.addItem("ALL")
self.spec2Combo.currentIndexChanged[str].connect(self.spec2Changed)
row.addWidget(self.spec2Combo)
# bin range
label = QtWidgets.QLabel("Bin range:")
row = self.newRow()
row.addWidget(label)
binMinSpin = QtWidgets.QDoubleSpinBox()
binMinSpin.setMinimum(0.0)
binMinSpin.setMaximum(500.0)
binMinSpin.setSingleStep(1.0)
binMinSpin.setValue(self.binMin)
binMinSpin.valueChanged.connect(self.binMinChanged)
row.addWidget(binMinSpin)
label = QtWidgets.QLabel(" - ")
row.addWidget(label)
binMaxSpin = QtWidgets.QDoubleSpinBox()
binMaxSpin.setMinimum(0.0)
binMaxSpin.setMaximum(500.0)
binMaxSpin.setSingleStep(1.0)
binMaxSpin.setValue(self.binMax)
binMaxSpin.valueChanged.connect(self.binMaxChanged)
row.addWidget(binMaxSpin)
# num bins
label = QtWidgets.QLabel("Bin width:")
row = self.newRow()
row.addWidget(label)
binWidthSpin = QtWidgets.QDoubleSpinBox()
binWidthSpin.setMinimum(0.01)
binWidthSpin.setMaximum(1.00)
binWidthSpin.setSingleStep(0.1)
binWidthSpin.setValue(self.binWidth)
binWidthSpin.valueChanged.connect(self.binWidthChanged)
row.addWidget(binWidthSpin)
# plot button
plotButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/office-chart-bar.png")), "Plot")
plotButton.clicked.connect(self.plotRDF)
row = self.newRow()
row.addWidget(plotButton)
# show
self.show()
def refresh(self):
"""
        Should be called whenever a new input is loaded.
        Refreshes the combo boxes with the input's species list.
"""
# lattice
specieList = self.rendererWindow.getCurrentInputState().specieList
# store current so can try to reselect
spec1CurrentText = str(self.spec1Combo.currentText())
spec2CurrentText = str(self.spec2Combo.currentText())
# clear and rebuild combo box
self.spec1Combo.clear()
self.spec2Combo.clear()
self.spec1Combo.addItem("ALL")
self.spec2Combo.addItem("ALL")
count = 1
match1 = False
match2 = False
for sym in specieList:
self.spec1Combo.addItem(sym)
self.spec2Combo.addItem(sym)
if sym == spec1CurrentText:
self.spec1Combo.setCurrentIndex(count)
match1 = True
if sym == spec2CurrentText:
self.spec2Combo.setCurrentIndex(count)
match2 = True
count += 1
if not match1:
self.spec1Combo.setCurrentIndex(0)
if not match2:
self.spec2Combo.setCurrentIndex(0)
def plotRDF(self):
"""
Plot RDF.
"""
self.logger.info("Plotting RDF for visible atoms")
# lattice and pipeline page
inputLattice = self.rendererWindow.getCurrentInputState()
pp = self.rendererWindow.getCurrentPipelinePage()
# check system size
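        # (with periodic boundaries the RDF is only meaningful out to half the
        # box length in each periodic direction: the minimum-image convention)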
warnDims = []
if pp.PBC[0] and self.binMax > inputLattice.cellDims[0] / 2.0:
warnDims.append("x")
if pp.PBC[1] and self.binMax > inputLattice.cellDims[1] / 2.0:
warnDims.append("y")
if pp.PBC[2] and self.binMax > inputLattice.cellDims[2] / 2.0:
warnDims.append("z")
if len(warnDims):
msg = "The maximum radius you have requested is greater than half the box length"
msg += " in the %s direction(s)!" % ", ".join(warnDims)
self.mainWindow.displayError(msg)
return
# first gather vis atoms
visibleAtoms = self.rendererWindow.gatherVisibleAtoms()
if not len(visibleAtoms):
self.mainWindow.displayWarning("No visible atoms: cannot calculate RDF")
return
# then determine species
if self.spec1 == "ALL":
spec1Index = -1
else:
spec1Index = inputLattice.getSpecieIndex(self.spec1)
if self.spec2 == "ALL":
spec2Index = -1
else:
spec2Index = inputLattice.getSpecieIndex(self.spec2)
        # rdf calculator
rdfCalculator = rdf.RDFCalculator()
# show progress dialog
progDiag = utils.showProgressDialog("Calculating RDF", "Calculating RDF...", self)
try:
# then calculate
xn, rdfArray = rdfCalculator.calculateRDF(visibleAtoms, inputLattice, self.binMin, self.binMax,
self.binWidth, spec1Index, spec2Index)
finally:
utils.cancelProgressDialog(progDiag)
# prepare to plot
settingsDict = {}
settingsDict["title"] = "Radial distribution function"
settingsDict["xlabel"] = "Bond length (Angstroms)"
settingsDict["ylabel"] = "g(r) (%s - %s)" % (self.spec1, self.spec2)
# show plot dialog
dialog = plotDialog.PlotDialog(self, self.mainWindow, "Radial distribution function ",
"plot", (xn, rdfArray), {"linewidth": 2, "label": None},
settingsDict=settingsDict)
dialog.show()
def binWidthChanged(self, val):
"""
        Bin width changed.
"""
self.binWidth = val
def binMinChanged(self, val):
"""
Bin min changed.
"""
self.binMin = val
def binMaxChanged(self, val):
"""
Bin max changed.
"""
self.binMax = val
def spec1Changed(self, text):
"""
Spec 1 changed.
"""
self.spec1 = str(text)
def spec2Changed(self, text):
"""
Spec 2 changed.
"""
self.spec2 = str(text)
class FileTab(QtWidgets.QWidget):
"""
File output tab.
"""
def __init__(self, parent, mainWindow, width):
super(FileTab, self).__init__(parent)
self.parent = parent
self.rendererWindow = parent.rendererWindow
self.mainWindow = mainWindow
self.width = width
# initial values
self.outputFileType = "LATTICE"
self.writeFullLattice = True
# layout
mainLayout = QtWidgets.QVBoxLayout(self)
mainLayout.setAlignment(QtCore.Qt.AlignTop)
# name group
fileNameGroup = genericForm.GenericForm(self, 0, "Output file options")
fileNameGroup.show()
# file type
outputTypeCombo = QtWidgets.QComboBox()
outputTypeCombo.addItem("LATTICE")
# outputTypeCombo.addItem("LBOMD REF")
# outputTypeCombo.addItem("LBOMD XYZ")
# outputTypeCombo.addItem("LBOMD FAILSAFE")
outputTypeCombo.currentIndexChanged[str].connect(self.outputTypeChanged)
label = QtWidgets.QLabel("File type: ")
row = fileNameGroup.newRow()
row.addWidget(label)
row.addWidget(outputTypeCombo)
# option to write full lattice
fullLatticeCheck = QtWidgets.QCheckBox("Write full lattice (not just visible)")
fullLatticeCheck.setCheckState(QtCore.Qt.Checked)
fullLatticeCheck.stateChanged.connect(self.fullLatticeCheckChanged)
row = fileNameGroup.newRow()
row.addWidget(fullLatticeCheck)
# file name, save image button
row = fileNameGroup.newRow()
label = QtWidgets.QLabel("File name: ")
self.outputFileName = QtWidgets.QLineEdit("lattice.dat")
self.outputFileName.setFixedWidth(120)
saveFileButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/document-save.png")), "")
saveFileButton.setToolTip("Save to file")
saveFileButton.clicked.connect(self.saveToFile)
row.addWidget(label)
row.addWidget(self.outputFileName)
row.addWidget(saveFileButton)
# dialog
row = fileNameGroup.newRow()
saveFileDialogButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath('oxygen/document-open.png')), "Save to file")
saveFileDialogButton.setToolTip("Save to file")
saveFileDialogButton.setCheckable(0)
saveFileDialogButton.setFixedWidth(150)
saveFileDialogButton.clicked.connect(self.saveToFileDialog)
row.addWidget(saveFileDialogButton)
# overwrite
self.overwriteCheck = QtWidgets.QCheckBox("Overwrite")
row = fileNameGroup.newRow()
row.addWidget(self.overwriteCheck)
mainLayout.addWidget(fileNameGroup)
def fullLatticeCheckChanged(self, val):
"""
Full lattice check changed.
"""
if val == QtCore.Qt.Unchecked:
self.writeFullLattice = False
else:
self.writeFullLattice = True
def saveToFile(self):
"""
Save current system to file.
"""
filename = str(self.outputFileName.text())
if not len(filename):
return
if os.path.exists(filename) and not self.overwriteCheck.isChecked():
self.mainWindow.displayWarning("File already exists: not overwriting")
return
# lattice object
lattice = self.rendererWindow.getCurrentInputState()
# gather vis atoms if required
if self.writeFullLattice:
visibleAtoms = None
else:
visibleAtoms = self.rendererWindow.gatherVisibleAtoms()
# write Lattice
lattice.writeLattice(filename, visibleAtoms=visibleAtoms)
def saveToFileDialog(self):
"""
Open dialog.
"""
        # getSaveFileName returns a (filename, selected filter) tuple
        filename = QtWidgets.QFileDialog.getSaveFileName(self, 'Save File', '.')[0]
if len(filename):
self.outputFileName.setText(str(filename))
self.saveToFile()
def outputTypeChanged(self, fileType):
"""
Output type changed.
"""
self.outputFileType = str(fileType)
class ImageTab(QtWidgets.QWidget):
def __init__(self, parent, mainWindow, width):
super(ImageTab, self).__init__(parent)
self.logger = logging.getLogger(__name__ + ".ImageTab")
self.parent = parent
self.mainWindow = mainWindow
self.width = width
self.rendererWindow = self.parent.rendererWindow
# initial values
self.renderType = "VTK"
self.imageFormat = "jpg"
# self.overlayImage = False
imageTabLayout = QtWidgets.QVBoxLayout(self)
# imageTabLayout.setContentsMargins(0, 0, 0, 0)
# imageTabLayout.setSpacing(0)
imageTabLayout.setAlignment(QtCore.Qt.AlignTop)
# Add the generic image options at the top
group = QtWidgets.QGroupBox("Image options")
group.setAlignment(QtCore.Qt.AlignHCenter)
groupLayout = QtWidgets.QVBoxLayout(group)
groupLayout.setContentsMargins(0, 0, 0, 0)
groupLayout.setSpacing(0)
# render type (povray or vtk)
renderTypeButtonGroup = QtWidgets.QButtonGroup(self)
renderTypeButtonGroup.setExclusive(1)
renderTypeButtonGroup.buttonClicked[int].connect(self.setRenderType)
self.POVButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("other/pov-icon.png")), "POV-Ray")
self.POVButton.setCheckable(1)
self.POVButton.setChecked(0)
self.VTKButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("other/vtk-icon.png")), "VTK")
self.VTKButton.setCheckable(1)
self.VTKButton.setChecked(1)
renderTypeButtonGroup.addButton(self.VTKButton)
renderTypeButtonGroup.addButton(self.POVButton)
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setAlignment(QtCore.Qt.AlignTop)
rowLayout.addWidget(self.VTKButton)
rowLayout.addWidget(self.POVButton)
groupLayout.addWidget(row)
# image format
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
imageFormatButtonGroup = QtWidgets.QButtonGroup(self)
imageFormatButtonGroup.setExclusive(1)
imageFormatButtonGroup.buttonClicked[int].connect(self.setImageFormat)
self.JPEGCheck = QtWidgets.QCheckBox("JPEG")
self.JPEGCheck.setChecked(1)
self.PNGCheck = QtWidgets.QCheckBox("PNG")
self.TIFFCheck = QtWidgets.QCheckBox("TIFF")
imageFormatButtonGroup.addButton(self.JPEGCheck)
imageFormatButtonGroup.addButton(self.PNGCheck)
imageFormatButtonGroup.addButton(self.TIFFCheck)
rowLayout.addWidget(self.JPEGCheck)
rowLayout.addWidget(self.PNGCheck)
rowLayout.addWidget(self.TIFFCheck)
groupLayout.addWidget(row)
# additional (POV-Ray) options
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
groupLayout.addWidget(row)
imageTabLayout.addWidget(group)
# tab bar for different types of image output
self.imageTabBar = QtWidgets.QTabWidget(self)
self.imageTabBar.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
# add tabs to tab bar
singleImageTabWidget = QtWidgets.QWidget()
singleImageTabLayout = QtWidgets.QVBoxLayout(singleImageTabWidget)
singleImageTabLayout.setContentsMargins(0, 0, 0, 0)
self.singleImageTab = SingleImageTab(self, self.mainWindow, self.width)
singleImageTabLayout.addWidget(self.singleImageTab)
self.imageTabBar.addTab(singleImageTabWidget, "Single")
imageSequenceTabWidget = QtWidgets.QWidget()
imageSequenceTabLayout = QtWidgets.QVBoxLayout(imageSequenceTabWidget)
imageSequenceTabLayout.setContentsMargins(0, 0, 0, 0)
self.imageSequenceTab = ImageSequenceTab(self, self.mainWindow, self.width)
imageSequenceTabLayout.addWidget(self.imageSequenceTab)
self.imageTabBar.addTab(imageSequenceTabWidget, "Sequence")
imageRotateTabWidget = QtWidgets.QWidget()
imageRotateTabLayout = QtWidgets.QVBoxLayout(imageRotateTabWidget)
imageRotateTabLayout.setContentsMargins(0, 0, 0, 0)
self.imageRotateTab = ImageRotateTab(self, self.mainWindow, self.width)
imageRotateTabLayout.addWidget(self.imageRotateTab)
self.imageTabBar.addTab(imageRotateTabWidget, "Rotate")
imageTabLayout.addWidget(self.imageTabBar)
def setImageFormat(self, val):
"""
Set the image format.
"""
if self.JPEGCheck.isChecked():
self.imageFormat = "jpg"
elif self.PNGCheck.isChecked():
self.imageFormat = "png"
elif self.TIFFCheck.isChecked():
self.imageFormat = "tif"
def setRenderType(self, val):
"""
Set current render type
"""
if self.POVButton.isChecked():
settings = self.mainWindow.preferences.povrayForm
if not utilities.checkForExe(settings.pathToPovray):
self.POVButton.setChecked(0)
self.VTKButton.setChecked(1)
utilities.warnExeNotFound(self, "%s (POV-Ray)" % (settings.pathToPovray,))
else:
self.renderType = "POV"
self.imageFormat = "png"
self.PNGCheck.setChecked(1)
elif self.VTKButton.isChecked():
self.renderType = "VTK"
self.imageFormat = "jpg"
self.JPEGCheck.setChecked(1)
def createMovieLogger(self, level, message):
"""
Log message for create movie object
"""
logger = logging.getLogger(__name__ + ".MovieGenerator")
method = getattr(logger, level, None)
if method is not None:
method(message)
def createMovie(self, saveDir, inputText, createMovieBox, prefix=None):
"""
Create movie.
"""
settings = self.mainWindow.preferences.ffmpegForm
ffmpeg = utilities.checkForExe(settings.pathToFFmpeg)
if not ffmpeg:
utilities.warnExeNotFound(self, "%s (FFmpeg)" % (settings.pathToFFmpeg,))
return 2
# settings
settings = self.mainWindow.preferences.ffmpegForm
framerate = createMovieBox.framerate
bitrate = settings.bitrate
if prefix is None:
outputprefix = createMovieBox.prefix
else:
outputprefix = prefix
outputprefix = os.path.join(saveDir, outputprefix)
outputsuffix = createMovieBox.suffix
self.logger.info("Creating movie file: %s.%s", outputprefix, outputsuffix)
# movie generator object
generator = MovieGenerator()
generator.log.connect(self.createMovieLogger)
generator.allDone.connect(generator.deleteLater)
# runnable for sending to thread pool
runnable = threading_vis.GenericRunnable(generator, args=(ffmpeg, framerate, inputText, self.imageFormat,
bitrate, outputprefix, outputsuffix))
runnable.setAutoDelete(False)
# add to thread pool
QtCore.QThreadPool.globalInstance().start(runnable)
# generator.run(ffmpeg, framerate, inputText, self.imageFormat, bitrate, outputprefix, outputsuffix)
class MovieGenerator(QtCore.QObject):
"""
Call ffmpeg to generate a movie
"""
log = QtCore.Signal(str, str)
allDone = QtCore.Signal()
def __init__(self):
super(MovieGenerator, self).__init__()
def run(self, ffmpeg, framerate, saveText, imageFormat, bitrate, outputPrefix, outputSuffix):
"""
Create movie
"""
ffmpegTime = time.time()
try:
if outputSuffix == "mp4":
# determine image size
firstFile = "%s.%s" % (saveText, imageFormat)
firstFile = firstFile % 0
self.log.emit("debug", "Checking first file size: '%s'" % firstFile)
im = Image.open(firstFile)
width, height = im.size
self.log.emit("debug", "Image size: %s x %s" % (width, height))
# h264 requires width and height be divisible by 2
newWidth = width - 1 if width % 2 else width
newHeight = height - 1 if height % 2 else height
if newWidth != width:
self.log.emit("debug", "Resizing image width: %d -> %d" % (width, newWidth))
if newHeight != height:
self.log.emit("debug", "Resizing image height: %d -> %d" % (height, newHeight))
# construct command; scale if required
if newWidth == width and newHeight == height:
# no scaling required
command = "'%s' -r %d -y -i %s.%s -c:v h264 -r %d -b:v %dk '%s.%s'" % (ffmpeg, framerate, saveText,
imageFormat, 25, bitrate,
outputPrefix, outputSuffix)
else:
# scaling required
command = "'%s' -r %d -y -i %s.%s -vf scale=%d:%d -c:v h264 -r %d -b:v %dk '%s.%s'" % (ffmpeg, framerate, saveText,
imageFormat, newWidth, newHeight,
25, bitrate, outputPrefix,
outputSuffix)
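                # e.g. (illustrative values): 'ffmpeg' -r 10 -y -i seq/image%04d.jpg
                #     -c:v h264 -r 25 -b:v 10000k 'seq/movie.mp4'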
# run command
self.log.emit("debug", 'Command: "%s"' % command)
process = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, stderr = process.communicate()
status = process.poll()
else:
command = "'%s' -r %d -y -i %s.%s -r %d -b:v %dk '%s.%s'" % (ffmpeg, framerate, saveText,
imageFormat, 25, bitrate,
outputPrefix, outputSuffix)
self.log.emit("debug", 'Command: "%s"' % command)
process = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, stderr = process.communicate()
status = process.poll()
if status:
self.log.emit("error", "FFmpeg failed (%d)" % status)
self.log.emit("error", output.decode('utf-8'))
self.log.emit("error", stderr.decode('utf-8'))
finally:
ffmpegTime = time.time() - ffmpegTime
self.log.emit("debug", "FFmpeg time taken: %f s" % ffmpegTime)
self.allDone.emit()
class SingleImageTab(QtWidgets.QWidget):
def __init__(self, parent, mainWindow, width):
super(SingleImageTab, self).__init__(parent)
self.parent = parent
self.mainWindow = mainWindow
self.width = width
self.rendererWindow = self.parent.rendererWindow
# initial values
self.overwriteImage = 0
self.openImage = 1
# layout
mainLayout = QtWidgets.QVBoxLayout(self)
# mainLayout.setContentsMargins(0, 0, 0, 0)
# mainLayout.setSpacing(0)
mainLayout.setAlignment(QtCore.Qt.AlignTop)
# file name, save image button
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
label = QtWidgets.QLabel("File name")
self.imageFileName = QtWidgets.QLineEdit("image")
self.imageFileName.setFixedWidth(120)
saveImageButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/document-save.png")), "")
saveImageButton.setToolTip("Save image")
saveImageButton.clicked.connect(functools.partial(self.saveSingleImage, True))
rowLayout.addWidget(label)
rowLayout.addWidget(self.imageFileName)
rowLayout.addWidget(saveImageButton)
mainLayout.addWidget(row)
# dialog
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
saveImageDialogButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath('oxygen/document-open.png')), "Save image")
saveImageDialogButton.setToolTip("Save image")
saveImageDialogButton.setCheckable(0)
saveImageDialogButton.setFixedWidth(150)
saveImageDialogButton.clicked.connect(self.saveSingleImageDialog)
rowLayout.addWidget(saveImageDialogButton)
mainLayout.addWidget(row)
# options
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
self.overwriteCheck = QtWidgets.QCheckBox("Overwrite")
self.overwriteCheck.stateChanged[int].connect(self.overwriteCheckChanged)
self.openImageCheck = QtWidgets.QCheckBox("Open image")
self.openImageCheck.setChecked(True)
self.openImageCheck.stateChanged[int].connect(self.openImageCheckChanged)
rowLayout.addWidget(self.overwriteCheck)
rowLayout.addWidget(self.openImageCheck)
mainLayout.addWidget(row)
def saveSingleImageDialog(self):
"""
Open dialog to get save file name
"""
        # getSaveFileName returns a (filename, selected filter) tuple
        filename = QtWidgets.QFileDialog.getSaveFileName(self, 'Save File', '.')[0]
if len(filename):
self.imageFileName.setText(str(filename))
self.saveSingleImage(showProgress=True)
def saveSingleImage(self, showProgress=False):
"""
Screen capture.
"""
if self.parent.renderType == "POV":
settings = self.mainWindow.preferences.povrayForm
povray = utilities.checkForExe(settings.pathToPovray)
if not povray:
utilities.warnExeNotFound(self, "%s (POV-Ray)" % (settings.pathToPovray,))
return
else:
povray = ""
filename = str(self.imageFileName.text())
if not len(filename):
return
# show progress dialog
if showProgress and self.parent.renderType == "POV":
progress = QtWidgets.QProgressDialog(parent=self)
progress.setWindowModality(QtCore.Qt.WindowModal)
progress.setWindowTitle("Busy")
progress.setLabelText("Running POV-Ray...")
progress.setRange(0, 0)
progress.setMinimumDuration(0)
QtWidgets.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
progress.show()
filename = self.rendererWindow.renderer.saveImage(self.parent.renderType, self.parent.imageFormat,
filename, self.overwriteImage, povray=povray)
# hide progress dialog
if showProgress and self.parent.renderType == "POV":
QtWidgets.QApplication.restoreOverrideCursor()
progress.cancel()
if filename is None:
print("SAVE IMAGE FAILED")
return
# open image viewer
if self.openImage:
dirname = os.path.dirname(filename)
if not dirname:
dirname = os.getcwd()
self.mainWindow.imageViewer.changeDir(dirname)
self.mainWindow.imageViewer.showImage(filename)
self.mainWindow.imageViewer.hide()
self.mainWindow.imageViewer.show()
def openImageCheckChanged(self, val):
"""
Open image
"""
if self.openImageCheck.isChecked():
self.openImage = 1
else:
self.openImage = 0
def overwriteCheckChanged(self, val):
"""
Overwrite file
"""
if self.overwriteCheck.isChecked():
self.overwriteImage = 1
else:
self.overwriteImage = 0
class CreateMovieBox(QtWidgets.QGroupBox):
"""
Create movie settings
"""
def __init__(self, parent=None):
super(CreateMovieBox, self).__init__(parent)
self.setTitle("Create movie")
self.setCheckable(True)
self.setChecked(True)
self.setAlignment(QtCore.Qt.AlignCenter)
# defaults
self.framerate = 10
self.prefix = "movie"
self.suffix = "mp4"
# layout
self.contentLayout = QtWidgets.QVBoxLayout(self)
self.contentLayout.setContentsMargins(0, 0, 0, 0)
self.contentLayout.setSpacing(0)
# framerate
rowLayout = self.newRow()
label = QtWidgets.QLabel("Framerate:")
rowLayout.addWidget(label)
framerateSpin = QtWidgets.QSpinBox()
framerateSpin.setMinimum(1)
framerateSpin.setMaximum(10000)
framerateSpin.setValue(self.framerate)
framerateSpin.valueChanged.connect(self.framerateChanged)
rowLayout.addWidget(framerateSpin)
label = QtWidgets.QLabel(" fps")
rowLayout.addWidget(label)
# file prefix
rowLayout = self.newRow()
label = QtWidgets.QLabel("File prefix:")
rowLayout.addWidget(label)
prefixLineEdit = QtWidgets.QLineEdit(self.prefix)
prefixLineEdit.setFixedWidth(130)
prefixLineEdit.textChanged.connect(self.prefixChanged)
rowLayout.addWidget(prefixLineEdit)
# container
rowLayout = self.newRow()
label = QtWidgets.QLabel("Container:")
rowLayout.addWidget(label)
containerCombo = QtWidgets.QComboBox()
containerCombo.addItem("mp4")
containerCombo.addItem("flv")
containerCombo.addItem("mpg")
containerCombo.addItem("avi")
# containerCombo.addItem("mov")
containerCombo.currentIndexChanged[str].connect(self.suffixChanged)
rowLayout.addWidget(containerCombo)
def suffixChanged(self, text):
"""
Suffix changed
"""
self.suffix = str(text)
def framerateChanged(self, val):
"""
Framerate changed.
"""
self.framerate = val
def prefixChanged(self, text):
"""
Prefix changed.
"""
self.prefix = str(text)
def newRow(self, align=None):
"""
New row
"""
row = genericForm.FormRow(align=align)
self.contentLayout.addWidget(row)
return row
class ImageSequenceTab(QtWidgets.QWidget):
def __init__(self, parent, mainWindow, width):
super(ImageSequenceTab, self).__init__(parent)
self.parent = parent
self.mainWindow = mainWindow
self.width = width
self.rendererWindow = self.parent.rendererWindow
self.logger = logging.getLogger(__name__ + ".ImageSequenceTab")
# initial values
self.numberFormats = ["%04d", "%d"]
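        # e.g. "%04d" matches zero-padded file names like "dump0001.dat",
        # while "%d" matches unpadded names like "dump1.dat"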
self.numberFormat = self.numberFormats[0]
self.minIndex = 0
self.maxIndex = -1
self.interval = 1
self.fileprefixText = "guess"
self.overwrite = False
self.flickerFlag = False
self.rotateAfter = False
# self.createMovie = 1
# layout
mainLayout = QtWidgets.QVBoxLayout(self)
# mainLayout.setContentsMargins(0, 0, 0, 0)
# mainLayout.setSpacing(0)
mainLayout.setAlignment(QtCore.Qt.AlignTop)
# output directory
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
label = QtWidgets.QLabel("Output folder")
self.outputFolder = QtWidgets.QLineEdit("sequencer")
self.outputFolder.setFixedWidth(120)
rowLayout.addWidget(label)
rowLayout.addWidget(self.outputFolder)
mainLayout.addWidget(row)
# file prefix
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
label = QtWidgets.QLabel("File prefix")
self.fileprefix = QtWidgets.QLineEdit(self.fileprefixText)
self.fileprefix.setFixedWidth(120)
self.fileprefix.textChanged[str].connect(self.fileprefixChanged)
resetPrefixButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/edit-find.png")), "")
resetPrefixButton.setStatusTip("Set prefix to input file")
resetPrefixButton.setToolTip("Set prefix to input file")
resetPrefixButton.clicked.connect(self.resetPrefix)
rowLayout.addWidget(label)
rowLayout.addWidget(self.fileprefix)
rowLayout.addWidget(resetPrefixButton)
mainLayout.addWidget(row)
group = QtWidgets.QGroupBox("Numbering")
group.setAlignment(QtCore.Qt.AlignHCenter)
groupLayout = QtWidgets.QVBoxLayout(group)
groupLayout.setContentsMargins(0, 0, 0, 0)
groupLayout.setSpacing(0)
# numbering format
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
# label = QtGui.QLabel("Number format")
self.numberFormatCombo = QtWidgets.QComboBox()
self.numberFormatCombo.addItems(self.numberFormats)
self.numberFormatCombo.currentIndexChanged[str].connect(self.numberFormatChanged)
# rowLayout.addWidget(label)
rowLayout.addWidget(self.numberFormatCombo)
groupLayout.addWidget(row)
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
self.minIndexSpinBox = QtWidgets.QSpinBox()
self.minIndexSpinBox.setMinimum(0)
self.minIndexSpinBox.setMaximum(99999)
self.minIndexSpinBox.setValue(self.minIndex)
self.minIndexSpinBox.valueChanged[int].connect(self.minIndexChanged)
label = QtWidgets.QLabel("to")
self.maxIndexSpinBox = QtWidgets.QSpinBox()
self.maxIndexSpinBox.setMinimum(-1)
self.maxIndexSpinBox.setMaximum(99999)
self.maxIndexSpinBox.setValue(self.maxIndex)
self.maxIndexSpinBox.valueChanged[int].connect(self.maxIndexChanged)
self.maxIndexSpinBox.setToolTip("The max index (inclusive; if less than min index do all we can find)")
label2 = QtWidgets.QLabel("by")
self.intervalSpinBox = QtWidgets.QSpinBox()
self.intervalSpinBox.setMinimum(1)
self.intervalSpinBox.setMaximum(99999)
self.intervalSpinBox.setValue(self.interval)
self.intervalSpinBox.valueChanged[int].connect(self.intervalChanged)
rowLayout.addWidget(self.minIndexSpinBox)
rowLayout.addWidget(label)
rowLayout.addWidget(self.maxIndexSpinBox)
rowLayout.addWidget(label2)
rowLayout.addWidget(self.intervalSpinBox)
groupLayout.addWidget(row)
mainLayout.addWidget(group)
# first file
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
label = QtWidgets.QLabel("First file:")
self.firstFileLabel = QtWidgets.QLabel("")
self.setFirstFileLabel()
rowLayout.addWidget(label)
rowLayout.addWidget(self.firstFileLabel)
mainLayout.addWidget(row)
# overwrite check box
# row = QtGui.QWidget(self)
# rowLayout = QtGui.QHBoxLayout(row)
# # rowLayout.setSpacing(0)
# rowLayout.setContentsMargins(0, 0, 0, 0)
# rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
# self.overwriteCheck = QtGui.QCheckBox("Overwrite")
# self.overwriteCheck.stateChanged[int].connect(self.overwriteCheckChanged)
# rowLayout.addWidget(self.overwriteCheck)
# mainLayout.addWidget(row)
# eliminate flicker check
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
self.flickerCheck = QtWidgets.QCheckBox("Eliminate flicker")
self.flickerCheck.stateChanged[int].connect(self.flickerCheckChanged)
rowLayout.addWidget(self.flickerCheck)
# mainLayout.addWidget(row)
# rotate at end
# row = QtGui.QWidget(self)
# rowLayout = QtGui.QHBoxLayout(row)
# rowLayout.setContentsMargins(0, 0, 0, 0)
# rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
rowLayout.addStretch()
self.rotateAfterCheck = QtWidgets.QCheckBox("Rotate at end")
self.rotateAfterCheck.stateChanged[int].connect(self.rotateAfterCheckChanged)
rowLayout.addWidget(self.rotateAfterCheck)
mainLayout.addWidget(row)
# link to other renderer combo
self.linkedRenderWindowIndex = None
self.linkedRendererCombo = QtWidgets.QComboBox()
self.linkedRendererCombo.currentIndexChanged[str].connect(self.linkedRendererChanged)
row = QtWidgets.QHBoxLayout()
row.setContentsMargins(0, 0, 0, 0)
row.setAlignment(QtCore.Qt.AlignHCenter)
row.addWidget(QtWidgets.QLabel("Linked render window:"))
row.addWidget(self.linkedRendererCombo)
mainLayout.addLayout(row)
# populate
self.linkedRendererCombo.addItem("<Off>")
myrwi = self.rendererWindow.rendererIndex
rws = [str(rw.rendererIndex) for rw in self.mainWindow.rendererWindows if rw.rendererIndex != myrwi]
self.linkedRendererCombo.addItems(rws)
# create movie box
self.createMovieBox = CreateMovieBox(self)
mainLayout.addWidget(self.createMovieBox)
# start button
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
startSequencerButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/go-last.png")), "START")
startSequencerButton.setStatusTip("Start sequencer")
startSequencerButton.setToolTip("Start sequencer")
startSequencerButton.clicked.connect(self.startSequencer)
rowLayout.addWidget(startSequencerButton)
mainLayout.addWidget(row)
def refreshLinkedRenderers(self):
"""
Refresh the linked renderers combo
"""
self.logger.debug("Refreshing linked renderer options")
# clear
self.linkedRendererCombo.clear()
self.linkedRenderWindowIndex = None
# populate
self.linkedRendererCombo.addItem("<Off>")
myrwi = self.rendererWindow.rendererIndex
rws = [str(rw.rendererIndex) for rw in self.mainWindow.rendererWindows if rw.rendererIndex != myrwi]
assert len(self.mainWindow.rendererWindows) == len(rws) + 1
self.linkedRendererCombo.addItems(rws)
def linkedRendererChanged(self, currentText):
"""
Linked renderer changed
"""
if self.linkedRendererCombo.currentIndex() > 0:
index = int(currentText)
rw2 = None
for rwIndex, rw in enumerate(self.mainWindow.rendererWindows):
if rw.rendererIndex == index:
rw2 = rw
break
if rw2 is None:
self.logger.error("Cannot find linked render window (%d)", index)
self.linkedRenderWindowIndex = None
return
# do some checks
if rw2.currentPipelineIndex == self.rendererWindow.currentPipelineIndex:
if rw2.vtkRenWinInteract.size().height() == self.rendererWindow.vtkRenWinInteract.size().height():
self.linkedRenderWindowIndex = rwIndex
return rw2
else:
self.logger.error("Cannot select linked render window %d; heights do not match", index)
self.linkedRendererCombo.setCurrentIndex(0)
else:
self.logger.error("Cannote select linked render window %d; cannot handle different pipelines yet (ask me...)", index)
self.linkedRendererCombo.setCurrentIndex(0)
else:
self.linkedRenderWindowIndex = None
def rotateAfterCheckChanged(self, state):
"""
Rotate after sequencer changed
"""
if state == QtCore.Qt.Unchecked:
self.rotateAfter = False
else:
self.rotateAfter = True
def resetPrefix(self):
"""
Reset the prefix to the one from
the input page
"""
pp = self.rendererWindow.getCurrentPipelinePage()
if pp is None:
filename = ""
else:
filename = pp.filename
guess = self.guessFilePrefix(filename)
self.fileprefix.setText(guess)
def guessFilePrefix(self, filename):
"""
Guess the file prefix
"""
count = 0
lim = None
for i in range(len(filename)):
if filename[i] == ".":
break
try:
int(filename[i])
if lim is None:
lim = count
except ValueError:
lim = None
count += 1
if lim is None:
array = os.path.splitext(filename)
if array[1] == '.gz' or array[1] == '.bz2':
array = os.path.splitext(array[0])
filename = array[0]
else:
filename = filename[:lim]
return filename
def startSequencer(self):
"""
Start the sequencer
"""
self.runSequencer()
def runSequencer(self):
"""
Run the sequencer
"""
self.logger.info("Running sequencer")
if self.parent.renderType == "POV":
settings = self.mainWindow.preferences.povrayForm
povray = utilities.checkForExe(settings.pathToPovray)
if not povray:
utilities.warnExeNotFound(self, "%s (POV-Ray)" % (settings.pathToPovray,))
return
else:
povray = ""
self.setFirstFileLabel()
# get pipeline page
pipelinePage = self.rendererWindow.getCurrentPipelinePage()
# check this is not a generated system
if pipelinePage.fileFormat is None:
self.logger.error("Cannot sequence a generated file")
self.mainWindow.displayError("Cannot sequence a generated file")
return
# formatted string
fileText = "%s%s%s" % (str(self.fileprefix.text()), self.numberFormat, pipelinePage.extension)
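        # e.g. prefix "dump", number format "%04d" and extension ".dat" give
        # fileText = "dump%04d.dat", so fileText % 12 -> "dump0012.dat"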
# check abspath (for sftp)
abspath = pipelinePage.abspath
sftpBrowser = None
if pipelinePage.fromSFTP:
self.logger.debug("Sequencing SFTP file: '%s'", abspath)
array = abspath.split(":")
sftpHost = array[0]
# handle case where ":"'s are in the file path
sftpFile = ":".join(array[1:])
self.logger.debug("Host: '%s'; path: '%s'", sftpHost, sftpFile)
sysDiag = self.mainWindow.systemsDialog
sftpDlg = sysDiag.load_system_form.sftp_browser
match = False
for i in range(sftpDlg.stackedWidget.count()):
w = sftpDlg.stackedWidget.widget(i)
if w.connectionID == sftpHost:
match = True
break
if not match:
self.logger.error("Could not find SFTP browser for '%s'", sftpHost)
return
# browser
sftpBrowser = w
# check first file exists
if sftpBrowser is None:
firstFileExists = utilities.checkForFile(str(self.firstFileLabel.text()))
else:
rp = os.path.join(os.path.dirname(sftpFile), str(self.firstFileLabel.text()))
self.logger.debug("Checking first file exists (SFTP): '%s'", rp)
firstFileExists = bool(sftpBrowser.checkPathExists(rp)) or bool(sftpBrowser.checkPathExists(rp+".gz")) or bool(sftpBrowser.checkPathExists(rp+".bz2"))
if not firstFileExists:
self.warnFileNotPresent(str(self.firstFileLabel.text()), tag="first")
return
# check last file exists
if self.maxIndex > self.minIndex:
lastFile = fileText % self.maxIndex
if sftpBrowser is None:
lastFileExists = utilities.checkForFile(lastFile)
else:
rp = os.path.join(os.path.dirname(sftpFile), lastFile)
self.logger.debug("Checking last file exists (SFTP): '%s'", rp)
lastFileExists = bool(sftpBrowser.checkPathExists(rp)) or bool(sftpBrowser.checkPathExists(rp+".gz")) or bool(sftpBrowser.checkPathExists(rp+".bz2"))
if not lastFileExists:
self.warnFileNotPresent(lastFile, tag="last")
return
maxIndex = self.maxIndex
else:
# find greatest file
self.logger.info("Auto-detecting last sequencer file")
lastIndex = self.minIndex
lastFile = fileText % lastIndex
if sftpBrowser is None:
def _checkForLastFile(fn):
return utilities.checkForFile(fn)
else:
def _checkForLastFile(fn):
                    rp = os.path.join(os.path.dirname(sftpFile), fn)
return bool(sftpBrowser.checkPathExists(rp)) or bool(sftpBrowser.checkPathExists(rp+".gz")) or bool(sftpBrowser.checkPathExists(rp+".bz2"))
while _checkForLastFile(lastFile):
lastIndex += 1
lastFile = fileText % lastIndex
lastIndex -= 1
lastFile = fileText % lastIndex
maxIndex = lastIndex
self.logger.info("Last file detected as: '%s'", lastFile)
# store current input state
origInput = copy.deepcopy(self.rendererWindow.getCurrentInputState())
# pipeline index
pipelineIndex = self.rendererWindow.currentPipelineIndex
# systems dialog
systemsDialog = self.mainWindow.systemsDialog
loadPage = systemsDialog.load_system_form
# reader
readerForm = loadPage.readerForm
reader = readerForm.latticeReader
self.logger.debug(" Reader: %s %s", str(readerForm), str(reader))
# directory
saveDir = str(self.outputFolder.text())
saveDir += "-%s" % datetime.datetime.now().strftime("%y%m%d-%H%M%S")
if os.path.exists(saveDir):
if self.overwrite:
shutil.rmtree(saveDir)
else:
count = 0
while os.path.exists(saveDir):
count += 1
saveDir = "%s.%d" % (str(self.outputFolder.text()), count)
os.mkdir(saveDir)
saveText = os.path.join(saveDir, "%s%s" % (str(self.fileprefix.text()), self.numberFormat))
# check if linked
rw2 = None
if self.linkedRenderWindowIndex is not None:
# make sure still ok to use this index
rw2 = self.linkedRendererChanged(self.linkedRendererCombo.currentText())
if rw2 is not None:
saveText2 = saveText + "_2"
# progress dialog
NSteps = int((maxIndex - self.minIndex) / self.interval) + 1
progDialog = QtWidgets.QProgressDialog("Running sequencer...", "Cancel", self.minIndex, NSteps)
progDialog.setWindowModality(QtCore.Qt.WindowModal)
progDialog.setWindowTitle("Progress")
progDialog.setValue(self.minIndex)
progDialog.show()
QtWidgets.QApplication.processEvents()
# loop over files
status = 0
previousPos = None
try:
count = 0
for i in range(self.minIndex, maxIndex + self.interval, self.interval):
if sftpBrowser is None:
currentFile = fileText % i
self.logger.info("Current file: '%s'", currentFile)
else:
# we have to copy current file locally and use that, then delete it afterwards
basename = fileText % i
remoteFile = os.path.join(os.path.dirname(sftpFile), basename)
currentFile = os.path.join(self.mainWindow.tmpDirectory, basename)
# check exists
remoteFileTest = remoteFile
fileExists = bool(sftpBrowser.checkPathExists(remoteFileTest))
if not fileExists:
# check gzip
remoteFileTest = remoteFile + ".gz"
fileExists = bool(sftpBrowser.checkPathExists(remoteFileTest))
if fileExists:
currentFile += ".gz"
else:
# check bzip
remoteFileTest = remoteFile + ".bz2"
fileExists = bool(sftpBrowser.checkPathExists(remoteFileTest))
if fileExists:
currentFile += ".bz2"
else:
self.logger.error("SFTP sequencer file does not exist: '%s'", remoteFile)
return
remoteFile = remoteFileTest
# copy locally
self.logger.debug("Copying file for sequencer: '%s' to '%s'", remoteFile, currentFile)
                    # copy the file locally (along with its roulette file, if one exists)
sftpBrowser.copySystem(remoteFile, currentFile)
# read in state
status, state = reader.readFile(currentFile, pipelinePage.fileFormat, rouletteIndex=i-1, linkedLattice=pipelinePage.linkedLattice)
if status:
self.logger.error("Sequencer read file failed with status: %d" % status)
break
# eliminate flicker across PBCs
if self.flickerFlag:
self.eliminateFlicker(state, previousPos, pipelinePage)
previousPos = copy.deepcopy(state.pos)
# set PBCs the same
state.PBC[:] = origInput.PBC[:]
# attempt to read any scalars/vectors files
for vectorsName, vectorsFile in six.iteritems(origInput.vectorsFiles):
self.logger.debug("Sequencer checking vectors file: '%s'", vectorsFile)
vdn, vbn = os.path.split(vectorsFile)
# guess prefix
guessvfn = self.guessFilePrefix(vbn)
if guessvfn != vbn:
ext = "." + vbn.split(".")[-1]
if ext == vbn:
ext = ""
vfn = "%s%s%s" % (guessvfn, self.numberFormat, ext)
if len(vdn):
vfn = os.path.join(vdn, vfn)
vfn = vfn % i
self.logger.debug("Looking for vectors file: '%s' (%s)", vfn, os.path.exists(vfn))
if os.path.exists(vfn):
# read vectors file
ok = True
with open(vfn) as f:
vectors = []
try:
for line in f:
array = line.split()
array[0] = float(array[0])
array[1] = float(array[1])
array[2] = float(array[2])
vectors.append(array)
except:
self.logger.error("Error reading vector file")
ok = False
if ok and len(vectors) != state.NAtoms:
self.logger.error("The vector data is the wrong length")
ok = False
if ok:
# convert to numpy array
vectors = np.asarray(vectors, dtype=np.float64)
assert vectors.shape[0] == state.NAtoms and vectors.shape[1] == 3
state.vectorsDict[vectorsName] = vectors
state.vectorsFiles[vectorsName] = vfn
self.logger.debug("Added vectors data (%s) to sequencer lattice", vectorsName)
# set input state on current pipeline
pipelinePage.inputState = state
# exit if cancelled
if progDialog.wasCanceled():
if sftpBrowser is not None:
os.unlink(currentFile)
return
# now apply all filters
pipelinePage.runAllFilterLists(sequencer=True)
# exit if cancelled
if progDialog.wasCanceled():
if sftpBrowser is not None:
os.unlink(currentFile)
return
saveName = saveText % count
self.logger.info(" Saving image: '%s'", saveName)
# now save image
filename = self.rendererWindow.renderer.saveImage(self.parent.renderType, self.parent.imageFormat, saveName, 1, povray=povray)
# linked image
if rw2 is not None:
saveName2 = saveText2 % count
self.logger.info(" Saving linked image: '%s'", saveName2)
filename2 = rw2.renderer.saveImage(self.parent.renderType, self.parent.imageFormat, saveName2, 1, povray=povray)
# merge the files
mergeFn = os.path.join(saveDir, "merge%d.%s" % (i, self.parent.imageFormat))
self.logger.debug("Merging the files together: '%s'", mergeFn)
# read images
im1 = Image.open(filename)
im2 = Image.open(filename2)
assert im1.size[1] == im2.size[1], "Image sizes do not match: %r != %r" % (im1.size, im2.size)
# new empty image
newSize = (im1.size[0] + im2.size[0], im1.size[1])
newIm = Image.new('RGB', newSize)
# paste images
newIm.paste(im1, (0, 0))
newIm.paste(im2, (im1.size[0], 0))
# save
newIm.save(mergeFn)
# increment output counter
count += 1
# exit if cancelled
if progDialog.wasCanceled():
if sftpBrowser is not None:
os.unlink(currentFile)
return
# delete local copy of file (SFTP)
if sftpBrowser is not None:
os.unlink(currentFile)
# update progress
progDialog.setValue(count)
QtWidgets.QApplication.processEvents()
# create movie
if not status and self.createMovieBox.isChecked():
# show wait cursor
# QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
try:
self.parent.createMovie(saveDir, saveText, self.createMovieBox)
if rw2 is not None:
self.parent.createMovie(saveDir, os.path.join(saveDir, "merge%d"), self.createMovieBox, prefix="merged")
finally:
# set cursor to normal
# QtGui.QApplication.restoreOverrideCursor()
pass
# rotate?
if self.rotateAfter:
self.logger.debug("Running rotator after sequencer...")
self.parent.imageRotateTab.startRotator()
finally:
self.logger.debug("Reloading original input")
# reload original input
pipelinePage.inputState = origInput
pipelinePage.postInputLoaded()
# run filter list if didn't auto run
if origInput.NAtoms > self.mainWindow.preferences.renderingForm.maxAtomsAutoRun:
pipelinePage.runAllFilterLists()
# close progress dialog
progDialog.close()
def eliminateFlicker(self, state, previousPos, pipelinePage):
"""
Attempt to eliminate flicker across PBCs
"""
if previousPos is None:
return
pbc = pipelinePage.PBC
if not pbc[0] and not pbc[1] and not pbc[2]:
return
logger = self.logger
logger.debug("Attempting to eliminate PBC flicker")
if len(previousPos) < len(state.pos):
            # pos arrays are flat (length 3N), so use integer division by 3
            prevNAtoms = len(previousPos) // 3
            count = vectors_c.eliminatePBCFlicker(prevNAtoms, state.pos, previousPos, state.cellDims, pbc)
else:
count = vectors_c.eliminatePBCFlicker(state.NAtoms, state.pos, previousPos, state.cellDims, pbc)
logger.debug("Modified: %d", count)
def warnFileNotPresent(self, filename, tag="first"):
"""
Warn the first file is not present.
"""
# QtGui.QMessageBox.warning(self, "Warning", "Could not locate %s file in sequence: %s" % (tag, filename))
message = "Could not locate %s file in sequence: %s" % (tag, filename)
msgBox = QtWidgets.QMessageBox(self)
msgBox.setText(message)
msgBox.setWindowFlags(msgBox.windowFlags() | QtCore.Qt.WindowStaysOnTopHint)
msgBox.setStandardButtons(QtWidgets.QMessageBox.Ok)
msgBox.setIcon(QtWidgets.QMessageBox.Warning)
msgBox.exec_()
def flickerCheckChanged(self, state):
"""
Flicker check changed
"""
if state == QtCore.Qt.Unchecked:
self.flickerFlag = False
else:
self.flickerFlag = True
def overwriteCheckChanged(self, val):
"""
Overwrite check changed
"""
if self.overwriteCheck.isChecked():
self.overwrite = 1
else:
self.overwrite = 0
def fileprefixChanged(self, text):
"""
File prefix has changed
"""
self.fileprefixText = str(text)
self.setFirstFileLabel()
def setFirstFileLabel(self):
"""
Set the first file label
"""
pp = self.rendererWindow.getCurrentPipelinePage()
if pp is None:
ext = ""
else:
ext = pp.extension
text = "%s%s%s" % (self.fileprefix.text(), self.numberFormat, ext)
foundFormat = False
testfn = text % self.minIndex
if not (os.path.isfile(testfn) or os.path.isfile(testfn+'.gz') or os.path.isfile(testfn+'.bz2')):
self.logger.debug("First file does not exist; checking other number formats")
for i, nfmt in enumerate(self.numberFormats):
if nfmt == self.numberFormat:
continue
testText = "%s%s%s" % (self.fileprefix.text(), nfmt, ext)
testfn = testText % self.minIndex
if os.path.isfile(testfn) or os.path.isfile(testfn+'.gz') or os.path.isfile(testfn+'.bz2'):
foundFormat = True
break
if foundFormat:
self.logger.debug("Found suitable number format: '%s'", nfmt)
self.numberFormatCombo.setCurrentIndex(i)
if not foundFormat:
self.firstFileLabel.setText(text % (self.minIndex,))
def minIndexChanged(self, val):
"""
Minimum index changed
"""
self.minIndex = val
self.setFirstFileLabel()
def maxIndexChanged(self, val):
"""
Maximum index changed
"""
self.maxIndex = val
def intervalChanged(self, val):
"""
Interval changed
"""
self.interval = val
def numberFormatChanged(self, text):
"""
Change number format
"""
self.numberFormat = str(text)
self.setFirstFileLabel()
################################################################################
class ImageRotateTab(QtWidgets.QWidget):
def __init__(self, parent, mainWindow, width):
super(ImageRotateTab, self).__init__(parent)
self.parent = parent
self.mainWindow = mainWindow
self.width = width
self.rendererWindow = self.parent.rendererWindow
# initial values
self.fileprefixText = "rotate"
self.overwrite = 0
self.degreesPerRotation = 5.0
self.logger = logging.getLogger(__name__+".ImageRotateTab")
# layout
mainLayout = QtWidgets.QVBoxLayout(self)
# mainLayout.setContentsMargins(0, 0, 0, 0)
# mainLayout.setSpacing(0)
mainLayout.setAlignment(QtCore.Qt.AlignTop)
# output directory
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
label = QtWidgets.QLabel("Output folder")
self.outputFolder = QtWidgets.QLineEdit("rotate")
self.outputFolder.setFixedWidth(120)
rowLayout.addWidget(label)
rowLayout.addWidget(self.outputFolder)
mainLayout.addWidget(row)
# file prefix
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
label = QtWidgets.QLabel("File prefix")
self.fileprefix = QtWidgets.QLineEdit(self.fileprefixText)
self.fileprefix.setFixedWidth(120)
self.fileprefix.textChanged[str].connect(self.fileprefixChanged)
rowLayout.addWidget(label)
rowLayout.addWidget(self.fileprefix)
mainLayout.addWidget(row)
# degrees per rotation
label = QtWidgets.QLabel("Degrees per rotation")
degPerRotSpinBox = QtWidgets.QSpinBox(self)
degPerRotSpinBox.setMinimum(1)
degPerRotSpinBox.setMaximum(360)
degPerRotSpinBox.setValue(self.degreesPerRotation)
degPerRotSpinBox.valueChanged.connect(self.degPerRotChanged)
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
rowLayout.addWidget(label)
rowLayout.addWidget(degPerRotSpinBox)
mainLayout.addWidget(row)
# overwrite check box
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
self.overwriteCheck = QtWidgets.QCheckBox("Overwrite")
self.overwriteCheck.stateChanged[int].connect(self.overwriteCheckChanged)
rowLayout.addWidget(self.overwriteCheck)
mainLayout.addWidget(row)
# create movie box
self.createMovieBox = CreateMovieBox(self)
mainLayout.addWidget(self.createMovieBox)
# start button
row = QtWidgets.QWidget(self)
rowLayout = QtWidgets.QHBoxLayout(row)
# rowLayout.setSpacing(0)
rowLayout.setContentsMargins(0, 0, 0, 0)
rowLayout.setAlignment(QtCore.Qt.AlignHCenter)
startRotatorButton = QtWidgets.QPushButton(QtGui.QIcon(iconPath("oxygen/go-last.png")), "START")
startRotatorButton.setToolTip("Start sequencer")
startRotatorButton.clicked.connect(self.startRotator)
rowLayout.addWidget(startRotatorButton)
mainLayout.addWidget(row)
def startRotator(self):
"""
Start the rotator.
"""
if self.parent.renderType == "POV":
settings = self.mainWindow.preferences.povrayForm
povray = utilities.checkForExe(settings.pathToPovray)
if not povray:
utilities.warnExeNotFound(self, "%s (POV-Ray)" % (settings.pathToPovray,))
return
else:
povray = ""
self.logger.debug("Running rotator")
# directory
saveDir = str(self.outputFolder.text())
saveDir += "-%s" % datetime.datetime.now().strftime("%y%m%d-%H%M%S")
if os.path.exists(saveDir):
if self.overwrite:
shutil.rmtree(saveDir)
else:
count = 0
while os.path.exists(saveDir):
count += 1
saveDir = "%s.%d" % (str(self.outputFolder.text()), count)
os.mkdir(saveDir)
# file name prefix
fileprefix = os.path.join(saveDir, str(self.fileprefix.text()))
# send to renderer
status = self.rendererWindow.renderer.rotateAndSaveImage(self.parent.renderType, self.parent.imageFormat, fileprefix,
1, self.degreesPerRotation, povray=povray)
# movie?
if status:
print("ERROR: rotate failed")
else:
# create movie
if self.createMovieBox.isChecked():
# show wait cursor
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
try:
saveText = os.path.join(saveDir, "%s%s" % (str(self.fileprefix.text()), "%d"))
self.parent.createMovie(saveDir, saveText, self.createMovieBox)
finally:
# set cursor to normal
QtWidgets.QApplication.restoreOverrideCursor()
def degPerRotChanged(self, val):
"""
Degrees per rotation changed.
"""
self.degreesPerRotation = val
def overwriteCheckChanged(self, val):
"""
Overwrite check changed
"""
if self.overwriteCheck.isChecked():
self.overwrite = 1
else:
self.overwrite = 0
def fileprefixChanged(self, text):
"""
File prefix has changed
"""
        self.fileprefixText = str(text)
# File: strain.py
#Copyright (C) 2013 Alex Nitz
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This module contains functions for reading, generating, and segmenting strain
data.
"""
import copy
import logging, numpy
import pycbc.noise
import pycbc.types
from pycbc.types import TimeSeries, zeros
from pycbc.types import Array, FrequencySeries, complex_same_precision_as
from pycbc.types import MultiDetOptionAppendAction, MultiDetOptionAction
from pycbc.types import MultiDetOptionActionSpecial
from pycbc.types import required_opts, required_opts_multi_ifo
from pycbc.types import ensure_one_opt, ensure_one_opt_multi_ifo
from pycbc.types import copy_opts_for_single_ifo
from pycbc.inject import InjectionSet, SGBurstInjectionSet
from pycbc.filter import resample_to_delta_t, highpass, make_frequency_series
from pycbc.filter.zpk import filter_zpk
from pycbc.waveform.spa_tmplt import spa_distance
import pycbc.psd
import pycbc.fft
import pycbc.events
import pycbc.frame
import pycbc.filter
from scipy.signal import kaiserord
def next_power_of_2(n):
"""Return the smallest integer power of 2 larger than the argument.
Parameters
----------
n : int
A positive integer.
Returns
-------
m : int
Smallest integer power of 2 larger than n.
"""
return 1 << n.bit_length()
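# Illustrative check of next_power_of_2 (not part of the module API): the
# result is strictly larger than the argument, so an input that is already a
# power of 2 is doubled.
#
#     >>> next_power_of_2(6)
#     8
#     >>> next_power_of_2(8)
#     16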
def detect_loud_glitches(strain, psd_duration=4., psd_stride=2.,
psd_avg_method='median', low_freq_cutoff=30.,
threshold=50., cluster_window=5., corrupt_time=4.,
high_freq_cutoff=None, output_intermediates=False):
"""Automatic identification of loud transients for gating purposes.
This function first estimates the PSD of the input time series using the
FindChirp Welch method. Then it whitens the time series using that
estimate. Finally, it computes the magnitude of the whitened series,
thresholds it and applies the FindChirp clustering over time to the
surviving samples.
Parameters
----------
strain : TimeSeries
Input strain time series to detect glitches over.
psd_duration : {float, 4}
Duration of the segments for PSD estimation in seconds.
psd_stride : {float, 2}
Separation between PSD estimation segments in seconds.
psd_avg_method : {string, 'median'}
Method for averaging PSD estimation segments.
low_freq_cutoff : {float, 30}
Minimum frequency to include in the whitened strain.
threshold : {float, 50}
Minimum magnitude of whitened strain for considering a transient to
be present.
cluster_window : {float, 5}
Length of time window to cluster surviving samples over, in seconds.
corrupt_time : {float, 4}
Amount of time to be discarded at the beginning and end of the input
time series.
    high_freq_cutoff : {float, None}
Maximum frequency to include in the whitened strain. If given, the
input series is downsampled accordingly. If omitted, the Nyquist
frequency is used.
output_intermediates : {bool, False}
Save intermediate time series for debugging.
"""
# don't waste time trying to optimize a single FFT
pycbc.fft.fftw.set_measure_level(0)
if high_freq_cutoff:
strain = resample_to_delta_t(strain, 0.5 / high_freq_cutoff,
method='ldas')
else:
strain = strain.copy()
# taper strain
corrupt_length = int(corrupt_time * strain.sample_rate)
w = numpy.arange(corrupt_length) / float(corrupt_length)
strain[0:corrupt_length] *= pycbc.types.Array(w, dtype=strain.dtype)
strain[(len(strain) - corrupt_length):] *= \
pycbc.types.Array(w[::-1], dtype=strain.dtype)
if output_intermediates:
strain.save_to_wav('strain_conditioned.wav')
# zero-pad strain to a power-of-2 length
strain_pad_length = next_power_of_2(len(strain))
pad_start = int(strain_pad_length / 2 - len(strain) / 2)
pad_end = pad_start + len(strain)
pad_epoch = strain.start_time - pad_start / float(strain.sample_rate)
strain_pad = pycbc.types.TimeSeries(
pycbc.types.zeros(strain_pad_length, dtype=strain.dtype),
delta_t=strain.delta_t, copy=False, epoch=pad_epoch)
strain_pad[pad_start:pad_end] = strain[:]
# estimate the PSD
psd = pycbc.psd.welch(strain[corrupt_length:(len(strain)-corrupt_length)],
seg_len=int(psd_duration * strain.sample_rate),
seg_stride=int(psd_stride * strain.sample_rate),
avg_method=psd_avg_method,
require_exact_data_fit=False)
psd = pycbc.psd.interpolate(psd, 1. / strain_pad.duration)
psd = pycbc.psd.inverse_spectrum_truncation(
psd, int(psd_duration * strain.sample_rate),
low_frequency_cutoff=low_freq_cutoff,
trunc_method='hann')
kmin = int(low_freq_cutoff / psd.delta_f)
psd[0:kmin] = numpy.inf
if high_freq_cutoff:
kmax = int(high_freq_cutoff / psd.delta_f)
psd[kmax:] = numpy.inf
# whiten
strain_tilde = strain_pad.to_frequencyseries()
if high_freq_cutoff:
norm = high_freq_cutoff - low_freq_cutoff
else:
norm = strain.sample_rate / 2. - low_freq_cutoff
strain_tilde *= (psd * norm) ** (-0.5)
strain_pad = strain_tilde.to_timeseries()
if output_intermediates:
strain_pad[pad_start:pad_end].save_to_wav('strain_whitened.wav')
mag = abs(strain_pad[pad_start:pad_end])
if output_intermediates:
mag.save('strain_whitened_mag.npy')
mag = mag.numpy()
# remove strain corrupted by filters at the ends
mag[0:corrupt_length] = 0
mag[-1:-corrupt_length-1:-1] = 0
# find peaks and their times
indices = numpy.where(mag > threshold)[0]
cluster_idx = pycbc.events.findchirp_cluster_over_window(
indices, numpy.array(mag[indices]),
int(cluster_window*strain.sample_rate))
times = [idx * strain.delta_t + strain.start_time \
for idx in indices[cluster_idx]]
pycbc.fft.fftw.set_measure_level(pycbc.fft.fftw._default_measurelvl)
return times
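# Usage sketch for detect_loud_glitches (illustrative only; `strain` is
# assumed to be a conditioned pycbc TimeSeries a few minutes long). The
# returned times can be fed straight into gate_data() below:
#
#     glitch_times = detect_loud_glitches(strain, threshold=50.,
#                                         low_freq_cutoff=30.)
#     gate_params = [(gt, 0.25, 0.25) for gt in glitch_times]
#     strain = gate_data(strain, gate_params)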
def from_cli(opt, dyn_range_fac=1, precision='single',
inj_filter_rejector=None):
"""Parses the CLI options related to strain data reading and conditioning.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes (gps-start-time, gps-end-time, strain-high-pass,
pad-data, sample-rate, (frame-cache or frame-files), channel-name,
fake-strain, fake-strain-seed, fake-strain-from-file, gating_file).
dyn_range_fac : {float, 1}, optional
A large constant to reduce the dynamic range of the strain.
precision : string
Precision of the returned strain ('single' or 'double').
inj_filter_rejector : InjFilterRejector instance; optional, default=None
If given send the InjFilterRejector instance to the inject module so
that it can store a reduced representation of injections if
necessary.
Returns
-------
strain : TimeSeries
The time series containing the conditioned strain data.
"""
gating_info = {}
if opt.frame_cache or opt.frame_files or opt.frame_type or opt.hdf_store:
if opt.frame_cache:
frame_source = opt.frame_cache
if opt.frame_files:
frame_source = opt.frame_files
logging.info("Reading Frames")
if hasattr(opt, 'frame_sieve') and opt.frame_sieve:
sieve = opt.frame_sieve
else:
sieve = None
if opt.frame_type:
strain = pycbc.frame.query_and_read_frame(
opt.frame_type, opt.channel_name,
start_time=opt.gps_start_time-opt.pad_data,
end_time=opt.gps_end_time+opt.pad_data,
sieve=sieve)
elif opt.frame_files or opt.frame_cache:
strain = pycbc.frame.read_frame(
frame_source, opt.channel_name,
start_time=opt.gps_start_time-opt.pad_data,
end_time=opt.gps_end_time+opt.pad_data,
sieve=sieve)
elif opt.hdf_store:
strain = pycbc.frame.read_store(opt.hdf_store, opt.channel_name,
opt.gps_start_time - opt.pad_data,
opt.gps_end_time + opt.pad_data)
if opt.zpk_z and opt.zpk_p and opt.zpk_k:
logging.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
logging.info("Applying zpk filter")
z = numpy.array(opt.zpk_z)
p = numpy.array(opt.zpk_p)
k = float(opt.zpk_k)
strain = filter_zpk(strain.astype(numpy.float64), z, p, k)
if opt.normalize_strain:
logging.info("Dividing strain by constant")
l = opt.normalize_strain
strain = strain / l
if opt.injection_file:
logging.info("Applying injections")
injector = InjectionSet(opt.injection_file)
injections = \
injector.apply(strain, opt.channel_name[0:2],
distance_scale=opt.injection_scale_factor,
inj_filter_rejector=inj_filter_rejector)
if opt.sgburst_injection_file:
logging.info("Applying sine-Gaussian burst injections")
injector = SGBurstInjectionSet(opt.sgburst_injection_file)
injector.apply(strain, opt.channel_name[0:2],
distance_scale=opt.injection_scale_factor)
if opt.strain_high_pass:
logging.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
if precision == 'single':
logging.info("Converting to float32")
strain = (strain * dyn_range_fac).astype(pycbc.types.float32)
elif precision == "double":
logging.info("Converting to float64")
strain = (strain * dyn_range_fac).astype(pycbc.types.float64)
else:
raise ValueError("Unrecognized precision {}".format(precision))
if opt.sample_rate:
logging.info("Resampling data")
strain = resample_to_delta_t(strain,
1.0 / opt.sample_rate,
method='ldas')
if opt.gating_file is not None:
logging.info("Gating times contained in gating file")
gate_params = numpy.loadtxt(opt.gating_file)
if len(gate_params.shape) == 1:
gate_params = [gate_params]
strain = gate_data(strain, gate_params)
gating_info['file'] = \
[gp for gp in gate_params \
if (gp[0] + gp[1] + gp[2] >= strain.start_time) \
and (gp[0] - gp[1] - gp[2] <= strain.end_time)]
if opt.autogating_threshold is not None:
gating_info['auto'] = []
for _ in range(opt.autogating_max_iterations):
glitch_times = detect_loud_glitches(
strain, threshold=opt.autogating_threshold,
cluster_window=opt.autogating_cluster,
low_freq_cutoff=opt.strain_high_pass,
corrupt_time=opt.pad_data + opt.autogating_pad)
gate_params = [[gt, opt.autogating_width, opt.autogating_taper]
for gt in glitch_times]
gating_info['auto'] += gate_params
strain = gate_data(strain, gate_params)
if len(glitch_times) > 0:
logging.info('Autogating at %s',
', '.join(['%.3f' % gt
for gt in glitch_times]))
else:
break
if opt.strain_high_pass:
logging.info("Highpass Filtering")
strain = highpass(strain, frequency=opt.strain_high_pass)
if hasattr(opt, 'witness_frame_type') and opt.witness_frame_type:
stilde = strain.to_frequencyseries()
import h5py
        tf_file = h5py.File(opt.witness_tf_file, 'r')
for key in tf_file:
witness = pycbc.frame.query_and_read_frame(opt.witness_frame_type, str(key),
start_time=strain.start_time, end_time=strain.end_time)
witness = (witness * dyn_range_fac).astype(strain.dtype)
tf = pycbc.types.load_frequencyseries(opt.witness_tf_file, group=key)
tf = tf.astype(stilde.dtype)
flen = int(opt.witness_filter_length * strain.sample_rate)
tf = pycbc.psd.interpolate(tf, stilde.delta_f)
tf_time = tf.to_timeseries()
window = Array(numpy.hanning(flen*2), dtype=strain.dtype)
tf_time[0:flen] *= window[flen:]
tf_time[len(tf_time)-flen:] *= window[0:flen]
tf = tf_time.to_frequencyseries()
kmax = min(len(tf), len(stilde)-1)
stilde[:kmax] -= tf[:kmax] * witness.to_frequencyseries()[:kmax]
strain = stilde.to_timeseries()
if opt.pad_data:
logging.info("Remove Padding")
start = opt.pad_data * strain.sample_rate
end = len(strain) - strain.sample_rate * opt.pad_data
strain = strain[start:end]
if opt.fake_strain or opt.fake_strain_from_file:
logging.info("Generating Fake Strain")
if not opt.low_frequency_cutoff:
raise ValueError('Please provide low frequency cutoff to '
'generate a fake strain')
duration = opt.gps_end_time - opt.gps_start_time
tlen = duration * opt.sample_rate
pdf = 1.0/128
        plen = int(opt.sample_rate / pdf) // 2 + 1
if opt.fake_strain_from_file:
logging.info("Reading ASD from file")
strain_psd = pycbc.psd.from_txt(opt.fake_strain_from_file, plen, pdf,
opt.low_frequency_cutoff, is_asd_file=True)
elif opt.fake_strain != 'zeroNoise':
logging.info("Making PSD for strain")
strain_psd = pycbc.psd.from_string(opt.fake_strain, plen, pdf,
opt.low_frequency_cutoff)
if opt.fake_strain == 'zeroNoise':
logging.info("Making zero-noise time series")
strain = TimeSeries(pycbc.types.zeros(tlen),
delta_t=1.0/opt.sample_rate,
epoch=opt.gps_start_time)
else:
logging.info("Making colored noise")
from pycbc.noise.reproduceable import colored_noise<|fim▁hole|> lowfreq = opt.low_frequency_cutoff / 2.
strain = colored_noise(strain_psd, opt.gps_start_time,
opt.gps_end_time,
seed=opt.fake_strain_seed,
low_frequency_cutoff=lowfreq)
strain = resample_to_delta_t(strain, 1.0/opt.sample_rate)
if not opt.channel_name and (opt.injection_file \
or opt.sgburst_injection_file):
raise ValueError('Please provide channel names with the format '
'ifo:channel (e.g. H1:CALIB-STRAIN) to inject '
'simulated signals into fake strain')
if opt.injection_file:
logging.info("Applying injections")
injector = InjectionSet(opt.injection_file)
injections = \
injector.apply(strain, opt.channel_name[0:2],
distance_scale=opt.injection_scale_factor,
inj_filter_rejector=inj_filter_rejector)
if opt.sgburst_injection_file:
logging.info("Applying sine-Gaussian burst injections")
injector = SGBurstInjectionSet(opt.sgburst_injection_file)
injector.apply(strain, opt.channel_name[0:2],
distance_scale=opt.injection_scale_factor)
if precision == 'single':
logging.info("Converting to float32")
strain = (dyn_range_fac * strain).astype(pycbc.types.float32)
elif precision == 'double':
logging.info("Converting to float64")
strain = (dyn_range_fac * strain).astype(pycbc.types.float64)
else:
raise ValueError("Unrecognized precision {}".format(precision))
if opt.taper_data:
logging.info("Tapering data")
# Use auto-gating stuff for this, a one-sided gate is a taper
pd_taper_window = opt.taper_data
gate_params = [(strain.start_time, 0., pd_taper_window)]
gate_params.append( (strain.end_time, 0.,
pd_taper_window) )
gate_data(strain, gate_params)
if opt.injection_file:
strain.injections = injections
strain.gating_info = gating_info
return strain
def from_cli_single_ifo(opt, ifo, inj_filter_rejector=None, **kwargs):
"""
Get the strain for a single ifo when using the multi-detector CLI
"""
single_det_opt = copy_opts_for_single_ifo(opt, ifo)
return from_cli(single_det_opt,
inj_filter_rejector=inj_filter_rejector, **kwargs)
def from_cli_multi_ifos(opt, ifos, inj_filter_rejector_dict=None, **kwargs):
"""
Get the strain for all ifos when using the multi-detector CLI
"""
strain = {}
if inj_filter_rejector_dict is None:
inj_filter_rejector_dict = {ifo: None for ifo in ifos}
for ifo in ifos:
strain[ifo] = from_cli_single_ifo(opt, ifo,
inj_filter_rejector_dict[ifo], **kwargs)
return strain
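# Sketch of the multi-detector entry point (assumes `opt` was produced by a
# parser configured with insert_strain_option_group_multi_ifo below):
#
#     strain_dict = from_cli_multi_ifos(opt, ['H1', 'L1'])
#     h1_strain = strain_dict['H1']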
def insert_strain_option_group(parser, gps_times=True):
""" Add strain-related options to the optparser object.
Adds the options used to call the pycbc.strain.from_cli function to an
optparser as an OptionGroup. This should be used if you
want to use these options in your code.
Parameters
-----------
parser : object
OptionParser instance.
gps_times : bool, optional
Include ``--gps-start-time`` and ``--gps-end-time`` options. Default
is True.
"""
data_reading_group = parser.add_argument_group("Options for obtaining h(t)",
"These options are used for generating h(t) either by "
"reading from a file or by generating it. This is only "
"needed if the PSD is to be estimated from the data, ie. "
" if the --psd-estimation option is given.")
# Required options
if gps_times:
data_reading_group.add_argument("--gps-start-time",
help="The gps start time of the data "
"(integer seconds)", type=int)
data_reading_group.add_argument("--gps-end-time",
help="The gps end time of the data "
" (integer seconds)", type=int)
data_reading_group.add_argument("--strain-high-pass", type=float,
help="High pass frequency")
data_reading_group.add_argument("--pad-data", default=8,
help="Extra padding to remove highpass corruption "
"(integer seconds)", type=int)
data_reading_group.add_argument("--taper-data",
help="Taper ends of data to zero using the supplied length as a "
"window (integer seconds)", type=int, default=0)
data_reading_group.add_argument("--sample-rate", type=int,
help="The sample rate to use for h(t) generation (integer Hz).")
data_reading_group.add_argument("--channel-name", type=str,
help="The channel containing the gravitational strain data")
#Read from cache file
data_reading_group.add_argument("--frame-cache", type=str, nargs="+",
help="Cache file containing the frame locations.")
#Read from frame files
data_reading_group.add_argument("--frame-files",
type=str, nargs="+",
help="list of frame files")
#Read from hdf store file
data_reading_group.add_argument("--hdf-store",
type=str,
help="Store of time series data in hdf format")
#Use datafind to get frame files
data_reading_group.add_argument("--frame-type",
type=str,
help="(optional), replaces frame-files. Use datafind "
"to get the needed frame file(s) of this type.")
#Filter frame files by URL
data_reading_group.add_argument("--frame-sieve",
type=str,
help="(optional), Only use frame files where the "
"URL matches the regular expression given.")
#Generate gaussian noise with given psd
data_reading_group.add_argument("--fake-strain",
help="Name of model PSD for generating fake gaussian noise.",
choices=pycbc.psd.get_lalsim_psd_list() + ['zeroNoise'])
data_reading_group.add_argument("--fake-strain-seed", type=int, default=0,
help="Seed value for the generation of fake colored"
" gaussian noise")
data_reading_group.add_argument("--fake-strain-from-file",
help="File containing ASD for generating fake noise from it.")
#optional
data_reading_group.add_argument("--injection-file", type=str,
help="(optional) Injection file used to add "
"waveforms into the strain")
data_reading_group.add_argument("--sgburst-injection-file", type=str,
help="(optional) Injection file used to add "
"sine-Gaussian burst waveforms into the strain")
data_reading_group.add_argument("--injection-scale-factor", type=float,
default=1, help="Divide injections by this factor "
"before injecting into the data.")
data_reading_group.add_argument("--gating-file", type=str,
help="(optional) Text file of gating segments to apply."
" Format of each line is (all times in secs):"
" gps_time zeros_half_width pad_half_width")
data_reading_group.add_argument('--autogating-threshold', type=float,
metavar='SIGMA',
help='If given, find and gate glitches '
'producing a deviation larger than '
'SIGMA in the whitened strain time '
'series.')
data_reading_group.add_argument('--autogating-max-iterations', type=int,
metavar='SIGMA', default=1,
help='If given, iteratively apply '
'autogating')
data_reading_group.add_argument('--autogating-cluster', type=float,
metavar='SECONDS', default=5.,
help='Length of clustering window for '
'detecting glitches for autogating.')
data_reading_group.add_argument('--autogating-width', type=float,
metavar='SECONDS', default=0.25,
help='Half-width of the gating window.')
data_reading_group.add_argument('--autogating-taper', type=float,
metavar='SECONDS', default=0.25,
help='Taper the strain before and after '
'each gating window over a duration '
'of SECONDS.')
data_reading_group.add_argument('--autogating-pad', type=float,
metavar='SECONDS', default=16,
help='Ignore the given length of whitened '
'strain at the ends of a segment, to '
'avoid filters ringing.')
data_reading_group.add_argument("--normalize-strain", type=float,
help="(optional) Divide frame data by constant.")
data_reading_group.add_argument("--zpk-z", type=float, nargs="+",
help="(optional) Zero-pole-gain (zpk) filter strain. "
"A list of zeros for transfer function")
data_reading_group.add_argument("--zpk-p", type=float, nargs="+",
help="(optional) Zero-pole-gain (zpk) filter strain. "
"A list of poles for transfer function")
data_reading_group.add_argument("--zpk-k", type=float,
help="(optional) Zero-pole-gain (zpk) filter strain. "
"Transfer function gain")
# Options to apply to subtract noise from a witness channel and known
# transfer function.
data_reading_group.add_argument("--witness-frame-type", type=str,
help="(optional), frame type which will be use to query the"
"witness channel data.")
data_reading_group.add_argument("--witness-tf-file", type=str,
help="an hdf file containing the transfer"
" functions and the associated channel names")
data_reading_group.add_argument("--witness-filter-length", type=float,
help="filter length in seconds for the transfer function")
return data_reading_group
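# Minimal end-to-end sketch tying the helpers in this module together
# (assumes an argparse-based executable; names are illustrative):
#
#     import argparse
#     parser = argparse.ArgumentParser()
#     insert_strain_option_group(parser)
#     opts = parser.parse_args()
#     verify_strain_options(opts, parser)
#     strain = from_cli(opts, dyn_range_fac=pycbc.DYN_RANGE_FAC)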
# FIXME: This repeats almost all of the options above. Any nice way of reducing
# this?
def insert_strain_option_group_multi_ifo(parser, gps_times=True):
"""
Adds the options used to call the pycbc.strain.from_cli function to an
optparser as an OptionGroup. This should be used if you
want to use these options in your code.
Parameters
-----------
parser : object
OptionParser instance.
gps_times : bool, optional
Include ``--gps-start-time`` and ``--gps-end-time`` options. Default
is True.
"""
data_reading_group_multi = parser.add_argument_group("Options for obtaining"
" h(t)",
"These options are used for generating h(t) either by "
"reading from a file or by generating it. This is only "
"needed if the PSD is to be estimated from the data, ie. "
"if the --psd-estimation option is given. This group "
"supports reading from multiple ifos simultaneously.")
# Required options
if gps_times:
data_reading_group_multi.add_argument(
"--gps-start-time", nargs='+', action=MultiDetOptionAction,
metavar='IFO:TIME', type=int,
help="The gps start time of the data (integer seconds)")
data_reading_group_multi.add_argument(
"--gps-end-time", nargs='+', action=MultiDetOptionAction,
metavar='IFO:TIME', type=int,
help="The gps end time of the data (integer seconds)")
data_reading_group_multi.add_argument("--strain-high-pass", nargs='+',
action=MultiDetOptionAction,
type=float, metavar='IFO:FREQUENCY',
help="High pass frequency")
data_reading_group_multi.add_argument("--pad-data", nargs='+', default=8,
action=MultiDetOptionAction,
type=int, metavar='IFO:LENGTH',
help="Extra padding to remove highpass corruption "
"(integer seconds)")
data_reading_group_multi.add_argument("--taper-data", nargs='+',
action=MultiDetOptionAction,
type=int, default=0, metavar='IFO:LENGTH',
help="Taper ends of data to zero using the "
"supplied length as a window (integer seconds)")
data_reading_group_multi.add_argument("--sample-rate", type=int, nargs='+',
action=MultiDetOptionAction, metavar='IFO:RATE',
help="The sample rate to use for h(t) generation "
" (integer Hz).")
data_reading_group_multi.add_argument("--channel-name", type=str, nargs='+',
action=MultiDetOptionActionSpecial,
metavar='IFO:CHANNEL',
help="The channel containing the gravitational "
"strain data")
#Read from cache file
data_reading_group_multi.add_argument("--frame-cache", type=str, nargs="+",
action=MultiDetOptionAppendAction,
metavar='IFO:FRAME_CACHE',
help="Cache file containing the frame locations.")
#Read from frame files
data_reading_group_multi.add_argument("--frame-files", type=str, nargs="+",
action=MultiDetOptionAppendAction,
metavar='IFO:FRAME_FILES',
help="list of frame files")
#Read from hdf store file
data_reading_group_multi.add_argument("--hdf-store", type=str, nargs='+',
action=MultiDetOptionAction,
metavar='IFO:HDF_STORE_FILE',
help="Store of time series data in hdf format")
# Use datafind to get frame files
data_reading_group_multi.add_argument("--frame-type", type=str, nargs="+",
action=MultiDetOptionAction,
metavar='IFO:FRAME_TYPE',
help="(optional) Replaces frame-files. "
"Use datafind to get the needed frame "
"file(s) of this type.")
#Filter frame files by URL
data_reading_group_multi.add_argument("--frame-sieve", type=str, nargs="+",
action=MultiDetOptionAction,
metavar='IFO:FRAME_SIEVE',
help="(optional), Only use frame files where the "
"URL matches the regular expression given.")
#Generate gaussian noise with given psd
data_reading_group_multi.add_argument("--fake-strain", type=str, nargs="+",
action=MultiDetOptionAction, metavar='IFO:CHOICE',
help="Name of model PSD for generating fake "
"gaussian noise. Choose from %s or zeroNoise" \
%((', ').join(pycbc.psd.get_lalsim_psd_list()),) )
data_reading_group_multi.add_argument("--fake-strain-seed", type=int,
default=0, nargs="+", action=MultiDetOptionAction,
metavar='IFO:SEED',
help="Seed value for the generation of fake "
"colored gaussian noise")
data_reading_group_multi.add_argument("--fake-strain-from-file", nargs="+",
action=MultiDetOptionAction, metavar='IFO:FILE',
help="File containing ASD for generating fake "
"noise from it.")
#optional
data_reading_group_multi.add_argument("--injection-file", type=str,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:FILE',
help="(optional) Injection file used to add "
"waveforms into the strain")
data_reading_group_multi.add_argument("--sgburst-injection-file", type=str,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:FILE',
help="(optional) Injection file used to add "
"sine-Gaussian burst waveforms into the strain")
data_reading_group_multi.add_argument("--injection-scale-factor",
type=float, nargs="+", action=MultiDetOptionAction,
metavar="IFO:VAL", default=1.,
help="Multiple injections by this factor "
"before injecting into the data.")
data_reading_group_multi.add_argument("--gating-file", type=str,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:FILE',
help="(optional) Text file of gating segments to apply."
" Format of each line is (all times in secs):"
" gps_time zeros_half_width pad_half_width")
data_reading_group_multi.add_argument('--autogating-threshold', type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:SIGMA',
help='If given, find and gate glitches '
'producing a deviation larger than '
'SIGMA in the whitened strain time '
'series.')
data_reading_group_multi.add_argument('--autogating-max-iterations', type=int,
metavar='SIGMA', default=1,
help='If given, iteratively apply '
'autogating')
data_reading_group_multi.add_argument('--autogating-cluster', type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:SECONDS', default=5.,
help='Length of clustering window for '
'detecting glitches for autogating.')
data_reading_group_multi.add_argument('--autogating-width', type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:SECONDS', default=0.25,
help='Half-width of the gating window.')
data_reading_group_multi.add_argument('--autogating-taper', type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:SECONDS', default=0.25,
help='Taper the strain before and after '
'each gating window over a duration '
'of SECONDS.')
data_reading_group_multi.add_argument('--autogating-pad', type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:SECONDS', default=16,
help='Ignore the given length of whitened '
'strain at the ends of a segment, to '
'avoid filters ringing.')
data_reading_group_multi.add_argument("--normalize-strain", type=float,
nargs="+", action=MultiDetOptionAction,
metavar='IFO:VALUE',
help="(optional) Divide frame data by constant.")
data_reading_group_multi.add_argument("--zpk-z", type=float,
nargs="+", action=MultiDetOptionAppendAction,
metavar='IFO:VALUE',
help="(optional) Zero-pole-gain (zpk) filter strain. "
"A list of zeros for transfer function")
data_reading_group_multi.add_argument("--zpk-p", type=float,
nargs="+", action=MultiDetOptionAppendAction,
metavar='IFO:VALUE',
help="(optional) Zero-pole-gain (zpk) filter strain. "
"A list of poles for transfer function")
data_reading_group_multi.add_argument("--zpk-k", type=float,
nargs="+", action=MultiDetOptionAppendAction,
metavar='IFO:VALUE',
help="(optional) Zero-pole-gain (zpk) filter strain. "
"Transfer function gain")
return data_reading_group_multi
ensure_one_opt_groups = []
ensure_one_opt_groups.append(['--frame-cache','--fake-strain',
'--fake-strain-from-file',
'--frame-files', '--frame-type',
'--hdf-store'])
required_opts_list = ['--gps-start-time', '--gps-end-time',
'--strain-high-pass', '--pad-data', '--sample-rate',
'--channel-name']
def verify_strain_options(opts, parser):
"""Sanity check provided strain arguments.
Parses the strain data CLI options and verifies that they are consistent
and reasonable.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes (gps-start-time, gps-end-time, strain-high-pass,
pad-data, sample-rate, frame-cache, channel-name, fake-strain,
fake-strain-seed).
parser : object
OptionParser instance.
"""
for opt_group in ensure_one_opt_groups:
ensure_one_opt(opts, parser, opt_group)
required_opts(opts, parser, required_opts_list)
def verify_strain_options_multi_ifo(opts, parser, ifos):
"""Sanity check provided strain arguments.
Parses the strain data CLI options and verifies that they are consistent
and reasonable.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes (gps-start-time, gps-end-time, strain-high-pass,
pad-data, sample-rate, frame-cache, channel-name, fake-strain,
fake-strain-seed).
parser : object
OptionParser instance.
ifos : list of strings
List of ifos for which to verify options for
"""
for ifo in ifos:
for opt_group in ensure_one_opt_groups:
ensure_one_opt_multi_ifo(opts, parser, ifo, opt_group)
required_opts_multi_ifo(opts, parser, ifo, required_opts_list)
def gate_data(data, gate_params):
"""Apply a set of gating windows to a time series.
Each gating window is
defined by a central time, a given duration (centered on the given
time) to zero out, and a given duration of smooth tapering on each side of
the window. The window function used for tapering is a Tukey window.
Parameters
----------
data : TimeSeries
The time series to be gated.
gate_params : list
List of parameters for the gating windows. Each element should be a
list or tuple with 3 elements: the central time of the gating window,
the half-duration of the portion to zero out, and the duration of the
Tukey tapering on each side. All times in seconds. The total duration
of the data affected by one gating window is thus twice the second
parameter plus twice the third parameter.
Returns
-------
data: TimeSeries
The gated time series.
"""
def inverted_tukey(M, n_pad):
midlen = M - 2*n_pad
if midlen < 0:
raise ValueError("No zeros left after applying padding.")
padarr = 0.5*(1.+numpy.cos(numpy.pi*numpy.arange(n_pad)/n_pad))
return numpy.concatenate((padarr,numpy.zeros(midlen),padarr[::-1]))
sample_rate = 1./data.delta_t
temp = data.data
for glitch_time, glitch_width, pad_width in gate_params:
t_start = glitch_time - glitch_width - pad_width - data.start_time
t_end = glitch_time + glitch_width + pad_width - data.start_time
if t_start > data.duration or t_end < 0.:
continue # Skip gate segments that don't overlap
win_samples = int(2*sample_rate*(glitch_width+pad_width))
pad_samples = int(sample_rate*pad_width)
window = inverted_tukey(win_samples, pad_samples)
offset = int(t_start * sample_rate)
idx1 = max(0, -offset)
idx2 = min(len(window), len(data)-offset)
temp[idx1+offset:idx2+offset] *= window[idx1:idx2]
return data
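# Worked example of a single gating window (illustrative): zero out 0.5 s of
# data centred on GPS time 1126259462.0 (half-width 0.25 s) with an extra
# 0.25 s Tukey taper on each side, so 1.0 s of data is affected in total:
#
#     ts = gate_data(ts, [(1126259462.0, 0.25, 0.25)])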
class StrainSegments(object):
""" Class for managing manipulation of strain data for the purpose of
matched filtering. This includes methods for segmenting and
conditioning.
"""
def __init__(self, strain, segment_length=None, segment_start_pad=0,
segment_end_pad=0, trigger_start=None, trigger_end=None,
filter_inj_only=False, injection_window=None,
allow_zero_padding=False):
""" Determine how to chop up the strain data into smaller segments
for analysis.
"""
self._fourier_segments = None
self.strain = strain
self.delta_t = strain.delta_t
self.sample_rate = strain.sample_rate
if segment_length:
seg_len = segment_length
else:
seg_len = strain.duration
self.delta_f = 1.0 / seg_len
self.time_len = seg_len * self.sample_rate
self.freq_len = self.time_len // 2 + 1
seg_end_pad = segment_end_pad
seg_start_pad = segment_start_pad
if not trigger_start:
trigger_start = int(strain.start_time) + segment_start_pad
else:
if not allow_zero_padding:
min_start_time = int(strain.start_time) + segment_start_pad
else:
min_start_time = int(strain.start_time)
if trigger_start < min_start_time:
err_msg = "Trigger start time must be within analysable "
err_msg += "window. Asked to start from %d " %(trigger_start)
err_msg += "but can only analyse from %d." %(min_start_time)
raise ValueError(err_msg)
if not trigger_end:
trigger_end = int(strain.end_time) - segment_end_pad
else:
if not allow_zero_padding:
max_end_time = int(strain.end_time) - segment_end_pad
else:
max_end_time = int(strain.end_time)
if trigger_end > max_end_time:
err_msg = "Trigger end time must be within analysable "
err_msg += "window. Asked to end at %d " %(trigger_end)
err_msg += "but can only analyse to %d." %(max_end_time)
raise ValueError(err_msg)
throwaway_size = seg_start_pad + seg_end_pad
seg_width = seg_len - throwaway_size
# The amount of time we can actually analyze given the
# amount of padding that is needed
analyzable = trigger_end - trigger_start
data_start = (trigger_start - segment_start_pad) - \
int(strain.start_time)
data_end = trigger_end + segment_end_pad - int(strain.start_time)
data_dur = data_end - data_start
data_start = data_start * strain.sample_rate
data_end = data_end * strain.sample_rate
#number of segments we need to analyze this data
num_segs = int(numpy.ceil(float(analyzable) / float(seg_width)))
# The offset we will use between segments
seg_offset = int(numpy.ceil(analyzable / float(num_segs)))
self.segment_slices = []
self.analyze_slices = []
# Determine how to chop up the strain into smaller segments
for nseg in range(num_segs-1):
# boundaries for time slices into the strain
seg_start = int(data_start + (nseg*seg_offset) * strain.sample_rate)
seg_end = int(seg_start + seg_len * strain.sample_rate)
seg_slice = slice(seg_start, seg_end)
self.segment_slices.append(seg_slice)
# boundaries for the analyzable portion of the segment
ana_start = int(seg_start_pad * strain.sample_rate)
ana_end = int(ana_start + seg_offset * strain.sample_rate)
ana_slice = slice(ana_start, ana_end)
self.analyze_slices.append(ana_slice)
# The last segment takes up any integer boundary slop
seg_end = int(data_end)
seg_start = int(seg_end - seg_len * strain.sample_rate)
seg_slice = slice(seg_start, seg_end)
self.segment_slices.append(seg_slice)
remaining = (data_dur - ((num_segs - 1) * seg_offset + seg_start_pad))
ana_start = int((seg_len - remaining) * strain.sample_rate)
ana_end = int((seg_len - seg_end_pad) * strain.sample_rate)
ana_slice = slice(ana_start, ana_end)
self.analyze_slices.append(ana_slice)
self.full_segment_slices = copy.deepcopy(self.segment_slices)
#Remove segments that are outside trig start and end
segment_slices_red = []
analyze_slices_red = []
trig_start_idx = (trigger_start - int(strain.start_time)) * strain.sample_rate
trig_end_idx = (trigger_end - int(strain.start_time)) * strain.sample_rate
if filter_inj_only and hasattr(strain, 'injections'):
end_times = strain.injections.end_times()
end_times = [time for time in end_times if float(time) < trigger_end and float(time) > trigger_start]
inj_idx = [(float(time) - float(strain.start_time)) * strain.sample_rate for time in end_times]
for seg, ana in zip(self.segment_slices, self.analyze_slices):
start = ana.start
stop = ana.stop
cum_start = start + seg.start
cum_end = stop + seg.start
# adjust first segment
if trig_start_idx > cum_start:
start += (trig_start_idx - cum_start)
# adjust last segment
if trig_end_idx < cum_end:
stop -= (cum_end - trig_end_idx)
if filter_inj_only and hasattr(strain, 'injections'):
analyze_this = False
inj_window = strain.sample_rate * 8
for inj_id in inj_idx:
if inj_id < (cum_end + inj_window) and \
inj_id > (cum_start - inj_window):
analyze_this = True
if not analyze_this:
continue
if start < stop:
segment_slices_red.append(seg)
analyze_slices_red.append(slice(start, stop))
self.segment_slices = segment_slices_red
self.analyze_slices = analyze_slices_red
def fourier_segments(self):
""" Return a list of the FFT'd segments.
Return the list of FrequencySeries. Additional properties are
added that describe the strain segment. The property 'analyze'
        is a slice corresponding to the portion of the time domain equivalent
of the segment to analyze for triggers. The value 'cumulative_index'
indexes from the beginning of the original strain series.
"""
if not self._fourier_segments:
self._fourier_segments = []
for seg_slice, ana in zip(self.segment_slices, self.analyze_slices):
if seg_slice.start >= 0 and seg_slice.stop <= len(self.strain):
freq_seg = make_frequency_series(self.strain[seg_slice])
# Assume that we cannot have a case where we both zero-pad on
# both sides
elif seg_slice.start < 0:
strain_chunk = self.strain[:seg_slice.stop]
strain_chunk.prepend_zeros(-seg_slice.start)
freq_seg = make_frequency_series(strain_chunk)
elif seg_slice.stop > len(self.strain):
strain_chunk = self.strain[seg_slice.start:]
strain_chunk.append_zeros(seg_slice.stop - len(self.strain))
freq_seg = make_frequency_series(strain_chunk)
freq_seg.analyze = ana
freq_seg.cumulative_index = seg_slice.start + ana.start
freq_seg.seg_slice = seg_slice
self._fourier_segments.append(freq_seg)
return self._fourier_segments
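    # Usage sketch (illustrative): segment a conditioned strain series and
    # iterate over the FFT'd segments; `process` is a hypothetical callback.
    #
    #     segs = StrainSegments(strain, segment_length=256,
    #                           segment_start_pad=64, segment_end_pad=16)
    #     for fseg in segs.fourier_segments():
    #         process(fseg, fseg.analyze, fseg.cumulative_index)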
@classmethod
def from_cli(cls, opt, strain):
"""Calculate the segmentation of the strain data for analysis from
the command line options.
"""
return cls(strain, segment_length=opt.segment_length,
segment_start_pad=opt.segment_start_pad,
segment_end_pad=opt.segment_end_pad,
trigger_start=opt.trig_start_time,
trigger_end=opt.trig_end_time,
filter_inj_only=opt.filter_inj_only,
injection_window=opt.injection_window,
allow_zero_padding=opt.allow_zero_padding)
@classmethod
def insert_segment_option_group(cls, parser):
segment_group = parser.add_argument_group(
"Options for segmenting the strain",
"These options are used to determine how to "
"segment the strain into smaller chunks, "
"and for determining the portion of each to "
"analyze for triggers. ")
segment_group.add_argument("--trig-start-time", type=int, default=0,
help="(optional) The gps time to start recording triggers")
segment_group.add_argument("--trig-end-time", type=int, default=0,
help="(optional) The gps time to stop recording triggers")
segment_group.add_argument("--segment-length", type=int,
help="The length of each strain segment in seconds.")
segment_group.add_argument("--segment-start-pad", type=int,
help="The time in seconds to ignore of the "
"beginning of each segment in seconds. ")
segment_group.add_argument("--segment-end-pad", type=int,
help="The time in seconds to ignore at the "
"end of each segment in seconds.")
segment_group.add_argument("--allow-zero-padding", action='store_true',
help="Allow for zero padding of data to "
"analyze requested times, if needed.")
# Injection optimization options
segment_group.add_argument("--filter-inj-only", action='store_true',
help="Analyze only segments that contain an injection.")
segment_group.add_argument("--injection-window", default=None,
type=float, help="""If using --filter-inj-only then
only search for injections within +/- injection
                    window of the injection's end time. This is useful
to speed up a coherent search or a search where we
initially filter at lower sample rate, and then
filter at full rate where needed. NOTE: Reverts to
full analysis if two injections are in the same
segment.""")
@classmethod
def from_cli_single_ifo(cls, opt, strain, ifo):
"""Calculate the segmentation of the strain data for analysis from
the command line options.
"""
return cls(strain, segment_length=opt.segment_length[ifo],
segment_start_pad=opt.segment_start_pad[ifo],
segment_end_pad=opt.segment_end_pad[ifo],
trigger_start=opt.trig_start_time[ifo],
trigger_end=opt.trig_end_time[ifo],
filter_inj_only=opt.filter_inj_only,
allow_zero_padding=opt.allow_zero_padding)
@classmethod
def from_cli_multi_ifos(cls, opt, strain_dict, ifos):
"""Calculate the segmentation of the strain data for analysis from
the command line options.
"""
strain_segments = {}
for ifo in ifos:
strain_segments[ifo] = cls.from_cli_single_ifo(
opt, strain_dict[ifo], ifo)
return strain_segments
@classmethod
def insert_segment_option_group_multi_ifo(cls, parser):
segment_group = parser.add_argument_group(
"Options for segmenting the strain",
"These options are used to determine how to "
"segment the strain into smaller chunks, "
"and for determining the portion of each to "
"analyze for triggers. ")
segment_group.add_argument("--trig-start-time", type=int, default=0,
nargs='+', action=MultiDetOptionAction, metavar='IFO:TIME',
help="(optional) The gps time to start recording triggers")
segment_group.add_argument("--trig-end-time", type=int, default=0,
nargs='+', action=MultiDetOptionAction, metavar='IFO:TIME',
help="(optional) The gps time to stop recording triggers")
segment_group.add_argument("--segment-length", type=int,
nargs='+', action=MultiDetOptionAction,
metavar='IFO:LENGTH',
help="The length of each strain segment in seconds.")
segment_group.add_argument("--segment-start-pad", type=int,
nargs='+', action=MultiDetOptionAction, metavar='IFO:TIME',
help="The time in seconds to ignore of the "
"beginning of each segment in seconds. ")
segment_group.add_argument("--segment-end-pad", type=int,
nargs='+', action=MultiDetOptionAction, metavar='IFO:TIME',
help="The time in seconds to ignore at the "
"end of each segment in seconds.")
segment_group.add_argument("--allow-zero-padding", action='store_true',
help="Allow for zero padding of data to analyze "
"requested times, if needed.")
segment_group.add_argument("--filter-inj-only", action='store_true',
help="Analyze only segments that contain "
"an injection.")
required_opts_list = ['--segment-length',
'--segment-start-pad',
'--segment-end-pad',
]
@classmethod
def verify_segment_options(cls, opt, parser):
required_opts(opt, parser, cls.required_opts_list)
@classmethod
def verify_segment_options_multi_ifo(cls, opt, parser, ifos):
for ifo in ifos:
required_opts_multi_ifo(opt, parser, ifo, cls.required_opts_list)
class StrainBuffer(pycbc.frame.DataBuffer):
def __init__(self, frame_src, channel_name, start_time,
max_buffer=512,
sample_rate=4096,
low_frequency_cutoff=20,
highpass_frequency=15.0,
highpass_reduction=200.0,
highpass_bandwidth=5.0,
psd_samples=30,
psd_segment_length=4,
psd_inverse_length=3.5,
trim_padding=0.25,
autogating_threshold=None,
autogating_cluster=None,
autogating_pad=None,
autogating_width=None,
autogating_taper=None,
state_channel=None,
data_quality_channel=None,
dyn_range_fac=pycbc.DYN_RANGE_FAC,
psd_abort_difference=None,
psd_recalculate_difference=None,
force_update_cache=True,
increment_update_cache=None,
analyze_flags=None,
data_quality_flags=None,
dq_padding=0):
""" Class to produce overwhitened strain incrementally
Parameters
----------
        frame_src: str or list of strings
            Strings that indicate where to read frame files from. This can be
            a list of frame files, a glob, etc.
channel_name: str
Name of the channel to read from the frame files
start_time:
Time to start reading from.
max_buffer: {int, 512}, Optional
Length of the buffer in seconds
        sample_rate: {int, 4096}, Optional
Rate in Hz to sample the data.
low_frequency_cutoff: {float, 20}, Optional
The low frequency cutoff to use for inverse spectrum truncation
highpass_frequency: {float, 15}, Optional
The frequency to apply a highpass filter at before downsampling.
highpass_reduction: {float, 200}, Optional
The amount of reduction to apply to the low frequencies.
highpass_bandwidth: {float, 5}, Optional
The width of the transition region for the highpass filter.
psd_samples: {int, 30}, Optional
The number of sample to use for psd estimation
psd_segment_length: {float, 4}, Optional
The number of seconds in each psd sample.
psd_inverse_length: {float, 3.5}, Optional
The length in seconds for fourier transform of the inverse of the
PSD to be truncated to.
trim_padding: {float, 0.25}, Optional
Amount of padding in seconds to give for truncated the overwhitened
data stream.
autogating_threshold: float, Optional
Sigma deviation required to cause autogating of data.
If None, no autogating is performed.
autogating_cluster: float, Optional
Seconds to cluster possible gating locations.
autogating_pad: float, Optional
Seconds of corrupted whitened strain to ignore when generating a gate.
autogating_width: float, Optional
Half-duration of the zeroed-out portion of autogates.
autogating_taper: float, Optional
Duration of taper on either side of the gating window in seconds.
state_channel: {str, None}, Optional
Channel to use for state information about the strain
data_quality_channel: {str, None}, Optional
Channel to use for data quality information about the strain
dyn_range_fac: {float, pycbc.DYN_RANGE_FAC}, Optional
Scale factor to apply to strain
psd_abort_difference: {float, None}, Optional
The relative change in the inspiral range from the previous PSD
estimate to trigger the data to be considered invalid.
psd_recalculate_difference: {float, None}, Optional
            The relative change in the inspiral range from the previous PSD
            estimate to trigger a re-estimation of the PSD.
force_update_cache: {boolean, True}, Optional
Re-check the filesystem for frame files on every attempt to
read more data.
analyze_flags: list of strs
The flags that must be on to mark the current data as valid for
*any* use.
data_quality_flags: list of strs
The flags used to determine if to keep triggers.
dq_padding: {float, 0}, optional
Extra seconds to consider invalid before/after times with bad DQ.
increment_update_cache: {str, None}, Optional
Pattern to look for frame files in a GPS dependent directory. This
            is an alternative to the forced update of the frame cache, and
            attempts to predict the next frame file name without probing the
filesystem.
"""
super(StrainBuffer, self).__init__(frame_src, channel_name, start_time,
max_buffer=max_buffer,
force_update_cache=force_update_cache,
increment_update_cache=increment_update_cache)
self.low_frequency_cutoff = low_frequency_cutoff
# Set up status buffers
self.analyze_flags = analyze_flags
self.data_quality_flags = data_quality_flags
self.state = None
self.dq = None
self.dq_padding = dq_padding
# State channel
if state_channel is not None:
valid_mask = pycbc.frame.flag_names_to_bitmask(self.analyze_flags)
logging.info('State channel %s interpreted as bitmask %s = good',
state_channel, bin(valid_mask))
self.state = pycbc.frame.StatusBuffer(
frame_src,
state_channel, start_time,
max_buffer=max_buffer,
valid_mask=valid_mask,
force_update_cache=force_update_cache,
increment_update_cache=increment_update_cache)
# low latency dq channel
if data_quality_channel is not None:
sb_kwargs = dict(max_buffer=max_buffer,
force_update_cache=force_update_cache,
increment_update_cache=increment_update_cache)
if len(self.data_quality_flags) == 1 \
and self.data_quality_flags[0] == 'veto_nonzero':
sb_kwargs['valid_on_zero'] = True
logging.info('DQ channel %s interpreted as zero = good',
data_quality_channel)
else:
                valid_mask = pycbc.frame.flag_names_to_bitmask(
                    self.data_quality_flags)
                sb_kwargs['valid_mask'] = valid_mask
                logging.info('DQ channel %s interpreted as bitmask %s = good',
                             data_quality_channel, bin(valid_mask))
self.dq = pycbc.frame.StatusBuffer(frame_src, data_quality_channel,
start_time, **sb_kwargs)
self.highpass_frequency = highpass_frequency
self.highpass_reduction = highpass_reduction
self.highpass_bandwidth = highpass_bandwidth
self.autogating_threshold = autogating_threshold
self.autogating_cluster = autogating_cluster
self.autogating_pad = autogating_pad
self.autogating_width = autogating_width
self.autogating_taper = autogating_taper
self.gate_params = []
self.sample_rate = sample_rate
self.dyn_range_fac = dyn_range_fac
self.psd_abort_difference = psd_abort_difference
self.psd_recalculate_difference = psd_recalculate_difference
self.psd_segment_length = psd_segment_length
self.psd_samples = psd_samples
self.psd_inverse_length = psd_inverse_length
self.psd = None
self.psds = {}
strain_len = int(sample_rate * self.raw_buffer.delta_t * len(self.raw_buffer))
self.strain = TimeSeries(zeros(strain_len, dtype=numpy.float32),
delta_t=1.0/self.sample_rate,
epoch=start_time-max_buffer)
# Determine the total number of corrupted samples for highpass
# and PSD over whitening
highpass_samples, self.beta = kaiserord(self.highpass_reduction,
self.highpass_bandwidth / self.raw_buffer.sample_rate * 2 * numpy.pi)
self.highpass_samples = int(highpass_samples / 2)
resample_corruption = 10 # If using the ldas method
self.factor = int(1.0 / self.raw_buffer.delta_t / self.sample_rate)
self.corruption = self.highpass_samples // self.factor + resample_corruption
self.psd_corruption = self.psd_inverse_length * self.sample_rate
self.total_corruption = self.corruption + self.psd_corruption
# Determine how much padding is needed after removing the parts
# associated with PSD over whitening and highpass filtering
self.trim_padding = int(trim_padding * self.sample_rate)
if self.trim_padding > self.total_corruption:
self.trim_padding = self.total_corruption
self.psd_duration = (psd_samples - 1) // 2 * psd_segment_length
self.reduced_pad = int(self.total_corruption - self.trim_padding)
self.segments = {}
# time to ignore output of frame (for initial buffering)
self.add_hard_count()
self.taper_immediate_strain = True
@property
def start_time(self):
""" Return the start time of the current valid segment of data """
return self.end_time - self.blocksize
@property
def end_time(self):
""" Return the end time of the current valid segment of data """
return float(self.strain.start_time + (len(self.strain) - self.total_corruption) / self.sample_rate)
def add_hard_count(self):
""" Reset the countdown timer, so that we don't analyze data long enough
to generate a new PSD.
"""
self.wait_duration = int(numpy.ceil(self.total_corruption / self.sample_rate + self.psd_duration))
self.invalidate_psd()
def invalidate_psd(self):
""" Make the current PSD invalid. A new one will be generated when
it is next required """
self.psd = None
self.psds = {}
def recalculate_psd(self):
""" Recalculate the psd
"""
seg_len = int(self.sample_rate * self.psd_segment_length)
e = len(self.strain)
s = e - (self.psd_samples + 1) * seg_len // 2
psd = pycbc.psd.welch(self.strain[s:e], seg_len=seg_len, seg_stride=seg_len//2)
psd.dist = spa_distance(psd, 1.4, 1.4, self.low_frequency_cutoff) * pycbc.DYN_RANGE_FAC
# If the new psd is similar to the old one, don't replace it
if self.psd and self.psd_recalculate_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist < self.psd_recalculate_difference:
logging.info("Skipping recalculation of %s PSD, %s-%s",
self.detector, self.psd.dist, psd.dist)
return True
# If the new psd is *really* different than the old one, return an error
if self.psd and self.psd_abort_difference:
if abs(self.psd.dist - psd.dist) / self.psd.dist > self.psd_abort_difference:
logging.info("%s PSD is CRAZY, aborting!!!!, %s-%s",
self.detector, self.psd.dist, psd.dist)
self.psd = psd
self.psds = {}
return False
        # If the new estimate replaces the current one, invalidate the interpolated PSDs
self.psd = psd
self.psds = {}
logging.info("Recalculating %s PSD, %s", self.detector, psd.dist)
return True
def overwhitened_data(self, delta_f):
""" Return overwhitened data
Parameters
----------
delta_f: float
The sample step to generate overwhitened frequency domain data for
Returns
-------
htilde: FrequencySeries
            Overwhitened strain data
"""
# we haven't already computed htilde for this delta_f
if delta_f not in self.segments:
buffer_length = int(1.0 / delta_f)
e = len(self.strain)
s = int(e - buffer_length * self.sample_rate - self.reduced_pad * 2)
fseries = make_frequency_series(self.strain[s:e])
# we haven't calculated a resample psd for this delta_f
if delta_f not in self.psds:
psdt = pycbc.psd.interpolate(self.psd, fseries.delta_f)
psdt = pycbc.psd.inverse_spectrum_truncation(psdt,
int(self.sample_rate * self.psd_inverse_length),
low_frequency_cutoff=self.low_frequency_cutoff)
psdt._delta_f = fseries.delta_f
psd = pycbc.psd.interpolate(self.psd, delta_f)
psd = pycbc.psd.inverse_spectrum_truncation(psd,
int(self.sample_rate * self.psd_inverse_length),
low_frequency_cutoff=self.low_frequency_cutoff)
psd.psdt = psdt
self.psds[delta_f] = psd
psd = self.psds[delta_f]
fseries /= psd.psdt
# trim ends of strain
if self.reduced_pad != 0:
overwhite = TimeSeries(zeros(e-s, dtype=self.strain.dtype),
delta_t=self.strain.delta_t)
pycbc.fft.ifft(fseries, overwhite)
overwhite2 = overwhite[self.reduced_pad:len(overwhite)-self.reduced_pad]
taper_window = self.trim_padding / 2.0 / overwhite.sample_rate
gate_params = [(overwhite2.start_time, 0., taper_window),
(overwhite2.end_time, 0., taper_window)]
gate_data(overwhite2, gate_params)
                fseries_trimmed = FrequencySeries(zeros(len(overwhite2) // 2 + 1,
                                                  dtype=fseries.dtype), delta_f=delta_f)
pycbc.fft.fft(overwhite2, fseries_trimmed)
fseries_trimmed.start_time = fseries.start_time + self.reduced_pad * self.strain.delta_t
else:
fseries_trimmed = fseries
fseries_trimmed.psd = psd
self.segments[delta_f] = fseries_trimmed
stilde = self.segments[delta_f]
return stilde
def near_hwinj(self):
"""Check that the current set of triggers could be influenced by
a hardware injection.
"""
if not self.state:
return False
if not self.state.is_extent_valid(self.start_time, self.blocksize, pycbc.frame.NO_HWINJ):
return True
return False
def null_advance_strain(self, blocksize):
""" Advance and insert zeros
Parameters
----------
blocksize: int
The number of seconds to attempt to read from the channel
"""
sample_step = int(blocksize * self.sample_rate)
csize = sample_step + self.corruption * 2
self.strain.roll(-sample_step)
# We should roll this off at some point too...
self.strain[len(self.strain) - csize + self.corruption:] = 0
self.strain.start_time += blocksize
# The next time we need strain will need to be tapered
self.taper_immediate_strain = True
def advance(self, blocksize, timeout=10):
"""Advanced buffer blocksize seconds.
Add blocksize seconds more to the buffer, push blocksize seconds
from the beginning.
Parameters
----------
blocksize: int
The number of seconds to attempt to read from the channel
Returns
-------
status: boolean
Returns True if this block is analyzable.
"""
ts = super(StrainBuffer, self).attempt_advance(blocksize, timeout=timeout)
self.blocksize = blocksize
self.gate_params = []
# We have given up so there is no time series
if ts is None:
logging.info("%s frame is late, giving up", self.detector)
self.null_advance_strain(blocksize)
if self.state:
self.state.null_advance(blocksize)
if self.dq:
self.dq.null_advance(blocksize)
return False
# We collected some data so we are closer to being able to analyze data
self.wait_duration -= blocksize
# If the data we got was invalid, reset the counter on how much to collect
# This behavior corresponds to how we handle CAT1 vetoes
if self.state and self.state.advance(blocksize) is False:
self.add_hard_count()
self.null_advance_strain(blocksize)
if self.dq:
self.dq.null_advance(blocksize)
logging.info("%s time has invalid data, resetting buffer",
self.detector)
return False
# Also advance the dq vector in lockstep
if self.dq:
self.dq.advance(blocksize)
self.segments = {}
# only condition with the needed raw data so we can continuously add
# to the existing result
# Precondition
sample_step = int(blocksize * self.sample_rate)
csize = sample_step + self.corruption * 2
start = len(self.raw_buffer) - csize * self.factor
strain = self.raw_buffer[start:]
strain = pycbc.filter.highpass_fir(strain, self.highpass_frequency,
self.highpass_samples,
beta=self.beta)
strain = (strain * self.dyn_range_fac).astype(numpy.float32)
strain = pycbc.filter.resample_to_delta_t(strain,
1.0/self.sample_rate, method='ldas')
# remove corruption at beginning
strain = strain[self.corruption:]
# taper beginning if needed
if self.taper_immediate_strain:
logging.info("Tapering start of %s strain block", self.detector)
strain = gate_data(
strain, [(strain.start_time, 0., self.autogating_taper)])
self.taper_immediate_strain = False
# Stitch into continuous stream
self.strain.roll(-sample_step)
self.strain[len(self.strain) - csize + self.corruption:] = strain[:]
self.strain.start_time += blocksize
# apply gating if needed
if self.autogating_threshold is not None:
glitch_times = detect_loud_glitches(
strain[:-self.corruption],
psd_duration=2., psd_stride=1.,
threshold=self.autogating_threshold,
cluster_window=self.autogating_cluster,
low_freq_cutoff=self.highpass_frequency,
corrupt_time=self.autogating_pad)
if len(glitch_times) > 0:
logging.info('Autogating %s at %s', self.detector,
', '.join(['%.3f' % gt for gt in glitch_times]))
self.gate_params = \
[(gt, self.autogating_width, self.autogating_taper)
for gt in glitch_times]
self.strain = gate_data(self.strain, self.gate_params)
        if self.psd is None and self.wait_duration <= 0:
self.recalculate_psd()
return self.wait_duration <= 0
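    # Sketch of a live analysis loop built on this class (channel name and
    # block size are illustrative, not a definitive pipeline):
    #
    #     buf = StrainBuffer(frame_files, 'H1:GDS-CALIB_STRAIN', start_time)
    #     while True:
    #         if buf.advance(8):                      # 8 s blocks
    #             stilde = buf.overwhitened_data(delta_f=1.0/512)
    #             ...                                 # filter stilde here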
@classmethod
def from_cli(cls, ifo, args, maxlen):
"""Initialize a StrainBuffer object (data reader) for a particular
detector.
"""
state_channel = analyze_flags = None
if args.state_channel and ifo in args.state_channel \
and args.analyze_flags and ifo in args.analyze_flags:
state_channel = ':'.join([ifo, args.state_channel[ifo]])
analyze_flags = args.analyze_flags[ifo].split(',')
dq_channel = dq_flags = None
if args.data_quality_channel and ifo in args.data_quality_channel \
and args.data_quality_flags and ifo in args.data_quality_flags:
dq_channel = ':'.join([ifo, args.data_quality_channel[ifo]])
dq_flags = args.data_quality_flags[ifo].split(',')
if args.frame_type:
frame_src = pycbc.frame.frame_paths(args.frame_type[ifo],
args.start_time,
args.end_time)
else:
frame_src = [args.frame_src[ifo]]
strain_channel = ':'.join([ifo, args.channel_name[ifo]])
return cls(frame_src, strain_channel,
args.start_time, max_buffer=maxlen * 2,
state_channel=state_channel,
data_quality_channel=dq_channel,
sample_rate=args.sample_rate,
low_frequency_cutoff=args.low_frequency_cutoff,
highpass_frequency=args.highpass_frequency,
highpass_reduction=args.highpass_reduction,
highpass_bandwidth=args.highpass_bandwidth,
psd_samples=args.psd_samples,
trim_padding=args.trim_padding,
psd_segment_length=args.psd_segment_length,
psd_inverse_length=args.psd_inverse_length,
autogating_threshold=args.autogating_threshold,
autogating_cluster=args.autogating_cluster,
autogating_pad=args.autogating_pad,
autogating_width=args.autogating_width,
autogating_taper=args.autogating_taper,
psd_abort_difference=args.psd_abort_difference,
psd_recalculate_difference=args.psd_recalculate_difference,
force_update_cache=args.force_update_cache,
increment_update_cache=args.increment_update_cache[ifo],
analyze_flags=analyze_flags,
data_quality_flags=dq_flags,
dq_padding=args.data_quality_padding)<|fim▁end|> | |
<|file_name|>pyserial_driver.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Swift Navigation Inc.
# Contact: Mark Fine <[email protected]>
#
# This source is subject to the license found in the file 'LICENSE' which must
# be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
from .base_driver import BaseDriver
import serial
import serial.tools.list_ports
class PySerialDriver(BaseDriver):
"""
PySerialDriver
The :class:`PySerialDriver` class reads SBP messages from a serial port
    using the pyserial driver. This is mostly redundant, as the Serial object's
    read and write methods can be used directly.
Parameters
----------
port : string
URI to port to read SBP messages from. Accepts the following types
of URLs:
- rfc2217://<host>:<port>[/<option>[/<option>]]
- socket://<host>:<port>[/<option>[/<option>]]
- loop://[<option>[/<option>]]
and device names, such as /dev/ttyUSB0 (Linux) and COM3 (Windows). See<|fim▁hole|>
"""
def __init__(self, port, baud=115200):
try:
handle = serial.serial_for_url(port)
handle.baudrate = baud
handle.timeout = 1
super(PySerialDriver, self).__init__(handle)
except (OSError, serial.SerialException) as e:
print
print "Error opening serial device '%s':" % port
print e
print
print "The following serial devices were detected:"
print
for (name, desc, _) in serial.tools.list_ports.comports():
if desc[0:4] == "ttyS":
continue
if name == desc:
print "\t%s" % name
else:
print "\t%s (%s)" % (name, desc)
print
raise SystemExit
def read(self, size):
"""
Read wrapper.
Parameters
----------
size : int
Number of bytes to read.
"""
try:
return self.handle.read(size)
except (OSError, serial.SerialException):
print
print "Piksi disconnected"
print
raise IOError
def write(self, s):
"""
Write wrapper.
Parameters
----------
s : bytes
Bytes to write
"""
try:
return self.handle.write(s)
except (OSError, serial.SerialException):
print
print "Piksi disconnected"
print
raise IOError<|fim▁end|> | http://pyserial.sourceforge.net/pyserial_api.html#urls for more details.
baud : int
Baud rate of serial port (defaults to 115200) |
<|file_name|>Fx-catalog-collapsible-menu.js<|end_file_name|><|fim▁begin|>define([
"jquery",<|fim▁hole|> "fx-cat-br/widgets/Fx-widgets-commons",
'text!fx-cat-br/json/fx-catalog-collapsible-menu-config.json',
"lib/bootstrap"
], function ($, W_Commons, conf) {
var o = { },
defaultOptions = {
widget: {
lang: 'EN'
},
events: {
SELECT: 'fx.catalog.module.select'
}
};
var cache = {},
w_Commons, $collapse;
function Fx_Catalog_Collapsible_Menu() {
w_Commons = new W_Commons();
}
Fx_Catalog_Collapsible_Menu.prototype.init = function (options) {
//Merge options
$.extend(o, defaultOptions);
$.extend(o, options);
};
Fx_Catalog_Collapsible_Menu.prototype.render = function (options) {
$.extend(o, options);
cache.json = JSON.parse(conf);
this.initStructure();
this.renderMenu(cache.json);
};
Fx_Catalog_Collapsible_Menu.prototype.initStructure = function () {
o.collapseId = "fx-collapse-" + w_Commons.getFenixUniqueId();
$collapse = $('<div class="panel-group" id="accordion"></div>');
$collapse.attr("id", o.collapseId);
$(o.container).append($collapse);
};
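    // Expected config shape (a sketch inferred from the parsing below; the
    // actual keys come from fx-catalog-collapsible-menu-config.json):
    //   { "panels": [ { "title":   { "EN": "..." },
    //                   "modules": [ { "id": "...", "module": "...",
    //                                  "icon": "...", "label": { "EN": "..." } } ] } ] }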
Fx_Catalog_Collapsible_Menu.prototype.renderMenu = function (json) {
var self = this;
if (json.hasOwnProperty("panels")) {
var panels = json.panels;
for (var i = 0; i < panels.length; i++) {
                $collapse.append(self.buildPanel(panels[i]));
}
            $(o.container).append($collapse);
} else {
throw new Error("Fx_Catalog_Collapsible_Menu: no 'panels' attribute in config JSON.")
}
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanel = function (panel) {
var self = this,
id = "fx-collapse-panel-" + w_Commons.getFenixUniqueId();
var $p = $(document.createElement("DIV"));
$p.addClass("panel");
$p.addClass("panel-default");
$p.append(self.buildPanelHeader(panel, id));
$p.append(self.buildPanelBody(panel, id));
return $p;
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelHeader = function (panel, id) {
//Init header
var $header = $('<div class="panel-heading"></div>'),
$title = $('<h4 class="panel-title fx-menu-category-title"></h4>'),
$a = $('<a data-toggle="collapse"></a>'),
$info = $('<div class="fx-catalog-modular-menu-category-info"></div>'),
$plus = $('<div class="fx-catalog-modular-menu-category-plus"></div>');
$a.attr("data-parent", "#" + o.collapseId);
$a.attr("href", "#" + id);
if (panel.hasOwnProperty("title")) {
$a.html(panel["title"][o.widget.lang]);
}
return $header.append($title.append($a.append($plus)).append($info));
};
Fx_Catalog_Collapsible_Menu.prototype.buildPanelBody = function (panel, id) {
//Init panel body
var $bodyContainer = $("<div class='panel-collapse collapse'></div>");
$bodyContainer.attr("id", id);
var $body = $('<div class="panel-body"></div>');
if (panel.hasOwnProperty("modules")) {
var modules = panel["modules"];
for (var j = 0; j < modules.length; j++) {
var $module = $("<div></div>"),
$btn = $('<button type="button" class="btn btn-default btn-block"></button>');
$btn.on('click', {module: modules[j] }, function (e) {
var $btn = $(this);
if ($btn.is(':disabled') === false) {
$btn.attr("disabled", "disabled");
w_Commons.raiseCustomEvent(o.container, o.events.SELECT, e.data.module)
}
});
if (modules[j].hasOwnProperty("id")) {
$btn.attr("id", modules[j].id);
}
if (modules[j].hasOwnProperty("module")) {
$btn.attr("data-module", modules[j].module);
}
                //Keep it before the label so the icon appears on the left side
if (modules[j].hasOwnProperty("icon")) {
$btn.append($('<span class="' + modules[j].icon + '"></span>'));
}
if (modules[j].hasOwnProperty("label")) {
$btn.append(modules[j].label[o.widget.lang]);
}
if (modules[j].hasOwnProperty("popover")) {
/* console.log(modules[j]["popover"])
var keys = Object.keys(modules[j]["popover"]);
for (var k = 0; k < keys.length; k++ ){
$btn.attr(keys[k], modules[j]["popover"][keys[k]])
}*/
}
$module.append($btn);
                $body.append($module);
}
}
return $bodyContainer.append($body);
};
Fx_Catalog_Collapsible_Menu.prototype.disable = function (module) {
$(o.container).find("[data-module='" + module + "']").attr("disabled", "disabled");
};
Fx_Catalog_Collapsible_Menu.prototype.activate = function (module) {
$(o.container).find("[data-module='" + module + "']").removeAttr("disabled");
};
return Fx_Catalog_Collapsible_Menu;
});<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for TaskTracker project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!ojo^0p3t2kj096an0ep+uise$z$)0qrhjbz-9621w-7takmmt'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True<|fim▁hole|>
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'bootstrap3',
'homepage.apps.HomepageConfig',
'tasks.apps.TasksConfig',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'TaskTracker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'TaskTracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Prague'
USE_I18N = True
USE_L10N = False
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Date and datetime default formatting
DATE_FORMAT = 'd. m. Y'
DATETIME_FORMAT = 'd. m. Y H:i'
# Login URL
LOGIN_URL = 'homepage-login'<|fim▁end|> | |
<|file_name|>problem_1449.py<|end_file_name|><|fim▁begin|>"""1449. Form Largest Integer With Digits That Add up to Target
https://leetcode.com/problems/form-largest-integer-with-digits-that-add-up-to-target/
"""
from functools import lru_cache
from typing import List
class Solution:
def largest_number(self, cost: List[int], target: int) -> str:
def compare(a: str, b: str) -> bool:
return a > b if len(a) == len(b) else len(a) > len(b)
@lru_cache(None)
def dfs(x: int) -> str:
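            # largest digit string whose digit costs sum to exactly x;
            # '0' is the sentinel for "unreachable"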
if x == 0:
return ''
res = '0'
for i in range(len(cost)):
if cost[i] <= x:
ret = dfs(x - cost[i])
if ret != '0':<|fim▁hole|>
return dfs(target)
def largest_number3(self, cost: List[int], target: int) -> str:
def gt(a: str, b: str) -> bool:
return a > b if len(a) == len(b) else len(a) > len(b)
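        # dp[i] holds the largest digit string of total cost exactly i,
        # with '0' as the "unreachable" sentinel (bottom-up variant of dfs)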
dp = [''] * (target + 1)
for i in range(1, target + 1):
dp[i] = '0'
for j in range(9):
if cost[j] <= i:
ret = dp[i - cost[j]]
if ret != '0':
ret = str(j + 1) + ret
if gt(ret, dp[i]):
dp[i] = ret
return dp[target]
def largest_number2(self, cost: List[int], target: int) -> str:
def get_digits() -> int:
for i in range(1, len(cost) + 1):
for j in range(1, target + 1):
dp[i][j] = dp[i - 1][j]
if cost[i - 1] == j:
dp[i][j] = max(dp[i][j], 1)
elif cost[i - 1] < j and dp[i][j - cost[i - 1]] != 0:
dp[i][j] = max(dp[i][j], 1 + dp[i][j - cost[i - 1]])
return dp[len(cost)][target]
dp = [[0] * (target + 1) for _ in range(len(cost) + 1)]
digits = get_digits()
if digits <= 0:
return '0'
ans = ''
for num in range(len(cost), 0, -1):
c = cost[num - 1]
while target >= c and dp[-1][target] == 1 + dp[-1][target - c]:
if target == c:
return ans + str(num)
elif dp[-1][target - c] != 0:
ans += str(num)
target -= c
else:
break
return ans
if __name__ == '__main__':
sol = Solution()
print(sol.largest_number2([4, 3, 2, 5, 6, 7, 2, 5, 5], 9))
print(sol.largest_number3([4, 3, 2, 5, 6, 7, 2, 5, 5], 9))<|fim▁end|> | ret = str(i + 1) + ret
if compare(ret, res):
res = ret
return res |
<|file_name|>simplepool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""simple thread pool
@author: dn13([email protected])
@author: Fibrizof([email protected])
"""
import threading
import Queue
import new
class WorkerPoolError( Exception ):
pass
class Task(threading.Thread):
def __init__(self, queue, result_queue):
threading.Thread.__init__(self)
self.queue = queue
self.result_queue = result_queue
self.running = True
def cancel(self):
self.running = False
self.queue.put(None)
def run(self):
while self.running:
call = self.queue.get()
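                # a None item is the shutdown sentinel posted by cancel()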
if call:
try:
                    result = call()
                    self.result_queue.put(result)
except:
pass
self.queue.task_done()<|fim▁hole|>
def __init__( self, threadnum ):
self.threadnum = threadnum
self.q = Queue.Queue()
self.result_q = Queue.Queue()
self.ts = [ Task(self.q, self.result_q) for i in range(threadnum) ]
self._registfunctions = {}
self.is_in_join = False
for t in self.ts :
t.setDaemon(True)
t.start()
def __del__(self):
try:
            # cancel() is called twice on purpose: the first pass sets every
            # thread's running flag to False, the second makes sure each blocked
            # get() receives one more queue item so the threads can exit.
            # Lazily kept as one repeated call rather than two separate methods.
for t in self.ts:
t.cancel()
for t in self.ts:
t.cancel()
except:
pass
def __call__( self, work ):
if not self.is_in_join:
self.q.put( work )
else:
raise WorkerPoolError, 'Pool has been joined'
def join( self ):
self.is_in_join = True
self.q.join()
self.is_in_join = False
return
def runwithpool( self, _old ):
def _new( *args, **kwargs ):
self.q.put( lambda : _old( *args, **kwargs ) )
return _new
def registtopool( self, _old ):
if _old.__name__ in self._registfunctions :
raise WorkerPoolError, 'function name exists'
self._registfunctions[_old.__name__] = _old
return _old
def get_all_result(self):
result_list = []
while True:
try:
result_list.append(self.result_q.get_nowait())
except Exception as e:
if 0 == self.result_q.qsize():
break
else:
continue
return result_list
def __getattr__( self, name ):
if name in self._registfunctions :
return self._registfunctions[name]
raise AttributeError, '%s not found' % name
if __name__ == '__main__' :
import thread
p = WorkerPool(5)
@p.runwithpool
def foo( a ):
print 'foo>', thread.get_ident(), '>', a
return
@p.registtopool
def bar( b ):
print 'bar>', thread.get_ident(), '>', b
for i in range(10):
foo(i)
p.bar(i+100)
p( lambda : bar(200) )
p.join()<|fim▁end|> |
class WorkerPool( object ): |
<|file_name|>test_app.py<|end_file_name|><|fim▁begin|>from bottle import route, default_app
app = default_app()
data = {
"id": 78874,
"seriesName": "Firefly",
"aliases": [
"Serenity"
],
"banner": "graphical/78874-g3.jpg",
"seriesId": "7097",
"status": "Ended",
"firstAired": "2002-09-20",
"network": "FOX (US)",
"networkId": "",
"runtime": "45",
"genre": [
"Drama",
"Science-Fiction"
],
"overview": "In the far-distant future, Captain Malcolm \"Mal\" Reynolds is a renegade former brown-coat sergeant, now turned smuggler & rogue, "
"who is the commander of a small spacecraft, with a loyal hand-picked crew made up of the first mate, Zoe Warren; the pilot Hoban \"Wash\" Washburn; "
"the gung-ho grunt Jayne Cobb; the engineer Kaylee Frye; the fugitives Dr. Simon Tam and his psychic sister River. "
"Together, they travel the far reaches of space in search of food, money, and anything to live on.",
"lastUpdated": 1486759680,
"airsDayOfWeek": "",
"airsTime": "",
"rating": "TV-14",
"imdbId": "tt0303461",
"zap2itId": "EP00524463",
"added": "",
"addedBy": None,
"siteRating": 9.5,<|fim▁hole|>
@route('/api')
def api():
return data<|fim▁end|> | "siteRatingCount": 472,
}
|
<|file_name|>test_component_padmonitor.py<|end_file_name|><|fim▁begin|># -*- Mode: Python; test-case-name: flumotion.test.test_feedcomponent010 -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
import gst
from twisted.internet import defer, reactor
from twisted.trial import unittest
from flumotion.common import testsuite
from flumotion.component import padmonitor
attr = testsuite.attr
class TestPadMonitor(testsuite.TestCase):
slow = True
def _run_pipeline(self, pipeline):
pipeline.set_state(gst.STATE_PLAYING)
pipeline.get_bus().poll(gst.MESSAGE_EOS, -1)
pipeline.set_state(gst.STATE_NULL)
def testPadMonitorActivation(self):
pipeline = gst.parse_launch(
'fakesrc num-buffers=1 ! identity name=id ! fakesink')
identity = pipeline.get_by_name('id')
srcpad = identity.get_pad('src')
monitor = padmonitor.PadMonitor(srcpad, "identity-source",
lambda name: None,
lambda name: None)
self.assertEquals(monitor.isActive(), False)
self._run_pipeline(pipeline)
# Now give the reactor a chance to process the callFromThread()
d = defer.Deferred()
def finishTest():
self.assertEquals(monitor.isActive(), True)
monitor.detach()
d.callback(True)
reactor.callLater(0.1, finishTest)
return d<|fim▁hole|> padmonitor.PadMonitor.PAD_MONITOR_PROBE_INTERVAL = 0.2
padmonitor.PadMonitor.PAD_MONITOR_CHECK_INTERVAL = 0.5
pipeline = gst.parse_launch(
'fakesrc num-buffers=1 ! identity name=id ! fakesink')
identity = pipeline.get_by_name('id')
srcpad = identity.get_pad('src')
# Now give the reactor a chance to process the callFromThread()
def finished():
monitor.detach()
d.callback(True)
def hasInactivated(name):
# We can't detach the monitor from this callback safely, so do
# it from a reactor.callLater()
reactor.callLater(0, finished)
def hasActivated():
self.assertEquals(monitor.isActive(), True)
# Now, we don't send any more data, and after our 0.5 second
# timeout we should go inactive. Pass our test if that happens.
# Otherwise trial will time out.
monitor = padmonitor.PadMonitor(srcpad, "identity-source",
lambda name: None,
hasInactivated)
self.assertEquals(monitor.isActive(), False)
self._run_pipeline(pipeline)
d = defer.Deferred()
reactor.callLater(0.2, hasActivated)
return d
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
def testPadMonitorTimeout(self): |
<|file_name|>script_watcher.py<|end_file_name|><|fim▁begin|>"""
script_watcher.py: Reload watched script upon changes.
Copyright (C) 2015 Isaac Weaver
Author: Isaac Weaver <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
bl_info = {
"name": "Script Watcher",
"author": "Isaac Weaver",
"version": (0, 5),
"blender": (2, 75, 0),
"location": "Properties > Scene > Script Watcher",
"description": "Reloads an external script on edits.",
"warning": "Still in beta stage.",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Development/Script_Watcher",
"tracker_url": "https://github.com/wisaac407/blender-script-watcher/issues/new",
"category": "Development",
}
import os, sys
import io
import traceback
import types
import bpy
from bpy.app.handlers import persistent
@persistent
def load_handler(dummy):
try:
if (bpy.context.scene.sw_settings.running and bpy.context.scene.sw_settings.auto_watch_on_startup):
bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
bpy.ops.wm.sw_watch_start('EXEC_DEFAULT')
else:
bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
except:
print("Exception on startup check!")
def add_scrollback(ctx, text, text_type):
for line in text:
bpy.ops.console.scrollback_append(ctx, text=line.replace('\t', ' '),
type=text_type)
class SplitIO(io.StringIO):
"""Feed the input stream into another stream."""
PREFIX = '[Script Watcher]: '
_can_prefix = True
def __init__(self, stream):
io.StringIO.__init__(self)
self.stream = stream
def write(self, s):
# Make sure we prefix our string before we do anything else with it.
if self._can_prefix:
s = self.PREFIX + s
        # only prefix the next write if this one ended with a newline.
self._can_prefix = s.endswith('\n')
# Make sure to call the super classes write method.
io.StringIO.write(self, s)
# When we are written to, we also write to the secondary stream.
self.stream.write(s)
# Define the script watching operator.
class WatchScriptOperator(bpy.types.Operator):
"""Watches the script for changes, reloads the script if any changes occur."""
bl_idname = "wm.sw_watch_start"
bl_label = "Watch Script"
_timer = None
_running = False
_times = None
filepath = None
def get_paths(self):
"""Find all the python paths surrounding the given filepath."""
dirname = os.path.dirname(self.filepath)
paths = []
filepaths = []
for root, dirs, files in os.walk(dirname, topdown=True):
if '__init__.py' in files:
paths.append(root)
for f in files:
filepaths.append(os.path.join(root, f))
else:
dirs[:] = [] # No __init__ so we stop walking this dir.
# If we just have one (non __init__) file then return just that file.
return paths, filepaths or [self.filepath]
def get_mod_name(self):
"""Return the module name and the root path of the givin python file path."""
dir, mod = os.path.split(self.filepath)
# Module is a package.
if mod == '__init__.py':<|fim▁hole|> mod = os.path.basename(dir)
dir = os.path.dirname(dir)
# Module is a single file.
else:
mod = os.path.splitext(mod)[0]
return mod, dir
def remove_cached_mods(self):
"""Remove all the script modules from the system cache."""
paths, files = self.get_paths()
for mod_name, mod in list(sys.modules.items()):
if hasattr(mod, '__file__') and os.path.dirname(mod.__file__) in paths:
del sys.modules[mod_name]
def _reload_script_module(self):
print('Reloading script:', self.filepath)
self.remove_cached_mods()
try:
f = open(self.filepath)
paths, files = self.get_paths()
# Get the module name and the root module path.
mod_name, mod_root = self.get_mod_name()
# Create the module and setup the basic properties.
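            # Naming it '__main__' lets the watched script's own
            # `if __name__ == '__main__':` block run when exec'd below.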
mod = types.ModuleType('__main__')
mod.__file__ = self.filepath
mod.__path__ = paths
mod.__package__ = mod_name
# Add the module to the system module cache.
sys.modules[mod_name] = mod
            # Finally, execute the module.
exec(compile(f.read(), self.filepath, 'exec'), mod.__dict__)
except IOError:
print('Could not open script file.')
except:
sys.stderr.write("There was an error when running the script:\n" + traceback.format_exc())
else:
f.close()
def reload_script(self, context):
"""Reload this script while printing the output to blenders python console."""
# Setup stdout and stderr.
stdout = SplitIO(sys.stdout)
stderr = SplitIO(sys.stderr)
sys.stdout = stdout
sys.stderr = stderr
# Run the script.
self._reload_script_module()
        # Go back to the beginning so we can read the streams.
stdout.seek(0)
stderr.seek(0)
# Don't use readlines because that leaves trailing new lines.
output = stdout.read().split('\n')
output_err = stderr.read().split('\n')
if self.use_py_console:
# Print the output to the consoles.
for area in context.screen.areas:
if area.type == "CONSOLE":
ctx = context.copy()
ctx.update({"area": area})
# Actually print the output.
if output:
add_scrollback(ctx, output, 'OUTPUT')
if output_err:
add_scrollback(ctx, output_err, 'ERROR')
# Cleanup
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
def modal(self, context, event):
if not context.scene.sw_settings.running:
self.cancel(context)
return {'CANCELLED'}
if context.scene.sw_settings.reload:
context.scene.sw_settings.reload = False
self.reload_script(context)
return {'PASS_THROUGH'}
if event.type == 'TIMER':
for path in self._times:
cur_time = os.stat(path).st_mtime
if cur_time != self._times[path]:
self._times[path] = cur_time
self.reload_script(context)
return {'PASS_THROUGH'}
def execute(self, context):
if context.scene.sw_settings.running:
return {'CANCELLED'}
# Grab the settings and store them as local variables.
self.filepath = bpy.path.abspath(context.scene.sw_settings.filepath)
self.use_py_console = context.scene.sw_settings.use_py_console
        # If it's not a file, doesn't exist, or permission is denied, we don't proceed.
if not os.path.isfile(self.filepath):
self.report({'ERROR'}, 'Unable to open script.')
return {'CANCELLED'}
# Setup the times dict to keep track of when all the files where last edited.
dirs, files = self.get_paths()
self._times = dict((path, os.stat(path).st_mtime) for path in files) # Where we store the times of all the paths.
self._times[files[0]] = 0 # We set one of the times to 0 so the script will be loaded on startup.
# Setup the event timer.
wm = context.window_manager
self._timer = wm.event_timer_add(0.1, context.window)
wm.modal_handler_add(self)
context.scene.sw_settings.running = True
return {'RUNNING_MODAL'}
def cancel(self, context):
wm = context.window_manager
wm.event_timer_remove(self._timer)
self.remove_cached_mods()
context.scene.sw_settings.running = False
class CancelScriptWatcher(bpy.types.Operator):
"""Stop watching the current script."""
bl_idname = "wm.sw_watch_end"
bl_label = "Stop Watching"
def execute(self, context):
# Setting the running flag to false will cause the modal to cancel itself.
context.scene.sw_settings.running = False
return {'FINISHED'}
class ReloadScriptWatcher(bpy.types.Operator):
"""Reload the current script."""
bl_idname = "wm.sw_reload"
bl_label = "Reload Script"
def execute(self, context):
# Setting the reload flag to true will cause the modal to cancel itself.
context.scene.sw_settings.reload = True
return {'FINISHED'}
# Create the UI for the operator. NEEDS FINISHING!!
class ScriptWatcherPanel(bpy.types.Panel):
"""UI for the script watcher."""
bl_label = "Script Watcher"
bl_idname = "SCENE_PT_script_watcher"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "scene"
def draw(self, context):
layout = self.layout
running = context.scene.sw_settings.running
col = layout.column()
col.prop(context.scene.sw_settings, 'filepath')
col.prop(context.scene.sw_settings, 'use_py_console')
col.prop(context.scene.sw_settings, 'auto_watch_on_startup')
col.operator('wm.sw_watch_start', icon='VISIBLE_IPO_ON')
col.enabled = not running
if running:
row = layout.row(align=True)
row.operator('wm.sw_watch_end', icon='CANCEL')
row.operator('wm.sw_reload', icon='FILE_REFRESH')
class ScriptWatcherSettings(bpy.types.PropertyGroup):
"""All the script watcher settings."""
running = bpy.props.BoolProperty(default=False)
reload = bpy.props.BoolProperty(default=False)
filepath = bpy.props.StringProperty(
name = 'Script',
description = 'Script file to watch for changes.',
subtype = 'FILE_PATH'
)
use_py_console = bpy.props.BoolProperty(
name = 'Use py console',
        description = 'Use the built-in Blender Python console for program output (e.g. print statements and error messages)',
default = False
)
auto_watch_on_startup = bpy.props.BoolProperty(
name = 'Watch on startup',
description = 'Watch script automatically on new .blend load',
default = False
)
def register():
bpy.utils.register_class(WatchScriptOperator)
bpy.utils.register_class(ScriptWatcherPanel)
bpy.utils.register_class(CancelScriptWatcher)
bpy.utils.register_class(ReloadScriptWatcher)
bpy.utils.register_class(ScriptWatcherSettings)
bpy.types.Scene.sw_settings = \
bpy.props.PointerProperty(type=ScriptWatcherSettings)
bpy.app.handlers.load_post.append(load_handler)
def unregister():
bpy.utils.unregister_class(WatchScriptOperator)
bpy.utils.unregister_class(ScriptWatcherPanel)
bpy.utils.unregister_class(CancelScriptWatcher)
bpy.utils.unregister_class(ReloadScriptWatcher)
bpy.utils.unregister_class(ScriptWatcherSettings)
bpy.app.handlers.load_post.remove(load_handler)
del bpy.types.Scene.sw_settings
if __name__ == "__main__":
register()<|fim▁end|> | |
<|file_name|>querycondition.cpp<|end_file_name|><|fim▁begin|>#include "querycondition.h"
queryCondition::queryCondition(const QString &fieldName, const conditionOperator condition, const QStringList &values, QObject *parent) : QObject(parent)
{
_fieldName = fieldName;
_condition = condition;<|fim▁hole|>queryCondition::~queryCondition()
{
}
QString queryCondition::fieldName() const
{
return _fieldName;
}
queryCondition::conditionOperator queryCondition::condition() const
{
return _condition;
}
QStringList queryCondition::values() const
{
return _values;
}<|fim▁end|> | _values = values;
}
|
<|file_name|>test_course_topic.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt<|fim▁hole|>
class TestCourseTopic(unittest.TestCase):
pass<|fim▁end|> |
import unittest
|
<|file_name|>test_images.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.image import base
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
class BasicOperationsImagesAdminTest(base.BaseV2ImageAdminTest):
@decorators.related_bug('1420008')
@decorators.idempotent_id('646a6eaa-135f-4493-a0af-12583021224e')
def test_create_image_owner_param(self):
# NOTE: Create image with owner different from tenant owner by
# using "owner" parameter requires an admin privileges.
random_id = data_utils.rand_uuid_hex()
image = self.admin_client.create_image(
container_format='bare', disk_format='raw', owner=random_id)
self.addCleanup(self.admin_client.delete_image, image['id'])
image_info = self.admin_client.show_image(image['id'])
self.assertEqual(random_id, image_info['owner'])
<|fim▁hole|> @decorators.idempotent_id('525ba546-10ef-4aad-bba1-1858095ce553')
def test_update_image_owner_param(self):
random_id_1 = data_utils.rand_uuid_hex()
image = self.admin_client.create_image(
container_format='bare', disk_format='raw', owner=random_id_1)
self.addCleanup(self.admin_client.delete_image, image['id'])
created_image_info = self.admin_client.show_image(image['id'])
random_id_2 = data_utils.rand_uuid_hex()
self.admin_client.update_image(
image['id'], [dict(replace="/owner", value=random_id_2)])
updated_image_info = self.admin_client.show_image(image['id'])
self.assertEqual(random_id_2, updated_image_info['owner'])
self.assertNotEqual(created_image_info['owner'],
updated_image_info['owner'])<|fim▁end|> | @decorators.related_bug('1420008') |
<|file_name|>test_pyramid_sendgrid_webhooks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pyramid_sendgrid_webhooks
----------------------------------
Tests for `pyramid_sendgrid_webhooks` module.
"""
from __future__ import unicode_literals
import unittest
import pyramid_sendgrid_webhooks as psw
from pyramid_sendgrid_webhooks import events, errors
class EventGrabber(object):
""" Grabs events as they're dispatched """
def __init__(self):
self.events = []
self.last = None
def __call__(self, event):
self.events.append(event)
self.last = event
def simple_app(global_config, **settings):
from pyramid.config import Configurator
config = Configurator(settings=settings)
config.include('pyramid_sendgrid_webhooks', WebhookTestBase._PREFIX)
config.registry.grabber = EventGrabber()
config.add_subscriber(config.registry.grabber, events.BaseWebhookEvent)
return config.make_wsgi_app()
class WebhookTestBase(unittest.TestCase):
_PREFIX = '/webhook'
_PATH = _PREFIX + '/receive'
def setUp(self):
from pyramid import testing
self.request = testing.DummyRequest()
self.config = testing.setUp(request=self.request)
def tearDown(self):
from pyramid import testing
testing.tearDown()
def _createGrabber(self, event_cls=events.BaseWebhookEvent):
grabber = EventGrabber()
self.config.add_subscriber(grabber, event_cls)
return grabber
def _createRequest(self, event_body):
if not isinstance(event_body, list):
event_body = [event_body]
self.request.json_body = event_body
return self.request
def _createApp(self, event_cls=events.BaseWebhookEvent):
from webtest.app import TestApp
app = TestApp(simple_app({}))
app.grabber = app.app.registry.grabber
return app
class TestBaseEvents(WebhookTestBase):
def _makeOne(self, event_type='bounce', category='category'):
return {
'asm_group_id': 1,
'category': category,
'cert_error': '0',
'email': '[email protected]',
'event': event_type,
'ip': '127.0.0.1',
'reason': '500 No Such User',
'smtp-id': '<[email protected]>',
'status': '5.0.0',<|fim▁hole|> }
def _create_dt(self):
import datetime
return datetime.datetime(2009, 8, 11, 0, 0)
def test_event_parsed(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 1)
def test_event_parsed_from_request(self):
app = self._createApp()
grabber = app.grabber
app.post_json(self._PATH, [self._makeOne()])
self.assertEqual(len(grabber.events), 1)
def test_multiple_events_parsed_from_request(self, n=3):
app = self._createApp()
grabber = app.grabber
app.post_json(self._PATH, [self._makeOne()] * n)
self.assertEqual(len(grabber.events), n)
def test_specific_event_caught(self):
grabber = self._createGrabber(events.BounceEvent)
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 1)
def test_unspecified_event_ignored(self):
grabber = self._createGrabber(events.DeferredEvent)
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(len(grabber.events), 0)
def test_timestamp_parsed(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual(grabber.last.dt, self._create_dt())
def test_unique_arguments_extracted(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertDictEqual(grabber.last.unique_arguments, {
'unique_arg_key': 'unique_arg_value',
})
def test_correct_subclass(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertIsInstance(grabber.last, events.BounceEvent)
def test_unknown_event_raises_exception(self):
request = self._createRequest(self._makeOne(event_type='UNKNOWN'))
self.assertRaises(
errors.UnknownEventError, psw.receive_events, request)
def test_single_category_is_list_wrapped(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne())
psw.receive_events(request)
self.assertEqual([grabber.last.category], grabber.last.categories)
def test_multiple_categories_are_unchanged(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne(category=['c1', 'c2']))
psw.receive_events(request)
self.assertEqual(grabber.last.category, grabber.last.categories)
def test_empty_categories_is_empty_list(self):
grabber = self._createGrabber()
request = self._createRequest(self._makeOne(category=None))
psw.receive_events(request)
self.assertEqual(grabber.last.categories, [])
class TestDeliveryEvents(WebhookTestBase):
def _makeOne(self):
return {
'asm_group_id': 1,
'category': ['category1', 'category2'],
'cert_error': '0',
'email': '[email protected]',
'event': 'bounce',
'ip': '127.0.0.1',
'reason': '500 No Such User',
'smtp-id': '<[email protected]>',
'status': '5.0.0',
'timestamp': 1249948800,
'tls': '1',
'type': 'bounce',
'unique_arg_key': 'unique_arg_value',
}
class TestEngagementEvents(WebhookTestBase):
def _makeOne(self):
return {
'asm_group_id': 1,
'category': ['category1', 'category2'],
'email': '[email protected]',
'event': 'click',
'ip': '255.255.255.255',
'timestamp': 1249948800,
'unique_arg_key': 'unique_arg_value',
'url': 'http://yourdomain.com/blog/news.html',
'useragent': 'Example Useragent',
}
if __name__ == '__main__':
import sys
sys.exit(unittest.main())<|fim▁end|> | 'timestamp': 1249948800,
'tls': '1',
'type': 'bounce',
'unique_arg_key': 'unique_arg_value', |
<|file_name|>simulator.go<|end_file_name|><|fim▁begin|>package numato
import (
"bytes"
"fmt"
"strconv"
"strings"
)
// Simulator controls a dummy Numato device.
// It only deals with the input/output of the numato.Numato object and does not
// handle all valid inputs to a real Numato.
type Simulator struct {
relays, GPIOs, ADCs uint8
state map[portType][]bool<|fim▁hole|> pending []byte
}
// OpenSimulator returns a Simulator and a Numato object under its control.
func OpenSimulator(relays, GPIOs, ADCs uint8) (*Simulator, *Numato) {
sim := &Simulator{
relays, GPIOs, ADCs,
map[portType][]bool{
Relay: make([]bool, relays),
GPIO: make([]bool, GPIOs),
},
bytes.Buffer{},
[]byte{},
}
dummy := &Numato{sim}
return sim, dummy
}
// Read can be used to receive responses from the Simulator.
func (sim *Simulator) Read(b []byte) (int, error) {
return sim.buf.Read(b)
}
// Write acts as a dummy serial port and processes any completed command.
// Incomplete commands will be buffered and handled once a '\r' is written.
func (sim *Simulator) Write(b []byte) (int, error) {
commands := bytes.Split(b, []byte("\r"))
commands[0] = append(sim.pending, commands[0]...)
for i := 0; i < len(commands)-1; i++ {
sim.process(commands[i])
}
sim.pending = commands[len(commands)-1]
return sim.buf.Write(b)
}
func (sim *Simulator) process(cmd []byte) {
// Simulate the echo behaviour
sim.buf.Write(cmd)
sim.buf.Write([]byte("\r"))
components := strings.Split(string(cmd), " ")
if len(components) != 3 {
return
}
num, err := strconv.Atoi(components[2])
if err != nil {
return
}
p := Port{
portType(components[0]),
num,
}
s := state(components[1])
switch s {
case On:
fallthrough
case Off:
sim.Set(p, s)
case read:
on, err := sim.IsOn(p)
if err != nil {
break
}
status := "on"
if !on {
status = "off"
}
sim.buf.Write([]byte(fmt.Sprintf("\n\r%s\n\r", status)))
default:
// an error happened
}
sim.buf.Write([]byte("\n\r> "))
}
// Close is a noop.
func (sim *Simulator) Close() error {
return nil
}
// Off turns the simulated port off.
func (sim *Simulator) Off(p Port) {
sim.Set(p, Off)
}
// On turns the simulated port on.
func (sim *Simulator) On(p Port) {
sim.Set(p, On)
}
// Set sets a port to the profided state.
func (sim *Simulator) Set(p Port, s state) error {
set, ok := sim.state[p.Class]
if !ok {
panic("invalid type")
}
if p.Number >= len(set) {
panic("port out of range")
}
set[p.Number] = s == On
return nil
}
// IsOn reads the status of the port as seen by the simulator.
func (sim *Simulator) IsOn(p Port) (bool, error) {
set, ok := sim.state[p.Class]
if !ok {
panic("invalid type")
}
if p.Number >= len(set) {
panic("port out of range")
}
return set[p.Number], nil
}<|fim▁end|> |
buf bytes.Buffer |
<|file_name|>ExampleInventoryRead.java<|end_file_name|><|fim▁begin|>// file ExampleInventoryRead
// $Id: ExampleInventoryRead.java,v 1.10 2009/01/09 22:10:13 mark Exp $
package je.gettingStarted;
import java.io.File;
import java.io.IOException;
import com.sleepycat.bind.EntryBinding;
import com.sleepycat.bind.serial.SerialBinding;
import com.sleepycat.bind.tuple.TupleBinding;
import com.sleepycat.je.Cursor;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.LockMode;
import com.sleepycat.je.OperationStatus;
import com.sleepycat.je.SecondaryCursor;
public class ExampleInventoryRead {
private static File myDbEnvPath =
new File("/tmp/JEDB");
// Encapsulates the database environment and databases.
private static MyDbEnv myDbEnv = new MyDbEnv();
private static TupleBinding inventoryBinding;
private static EntryBinding vendorBinding;
// The item to locate if the -s switch is used
private static String locateItem;
private static void usage() {
System.out.println("ExampleInventoryRead [-h <env directory>]" +
"[-s <item to locate>]");
System.exit(-1);
}
public static void main(String args[]) {
ExampleInventoryRead eir = new ExampleInventoryRead();
try {
eir.run(args);
} catch (DatabaseException dbe) {
System.err.println("ExampleInventoryRead: " + dbe.toString());
dbe.printStackTrace();
} finally {
myDbEnv.close();
}
System.out.println("All done.");
}
private void run(String args[])
throws DatabaseException {
// Parse the arguments list
parseArgs(args);
myDbEnv.setup(myDbEnvPath, // path to the environment home
true); // is this environment read-only?
// Setup our bindings.
inventoryBinding = new InventoryBinding();
vendorBinding =
new SerialBinding(myDbEnv.getClassCatalog(),<|fim▁hole|> if (locateItem != null) {
showItem();
} else {
showAllInventory();
}
}
private void showItem() throws DatabaseException {
SecondaryCursor secCursor = null;
try {
// searchKey is the key that we want to find in the
// secondary db.
DatabaseEntry searchKey =
new DatabaseEntry(locateItem.getBytes("UTF-8"));
// foundKey and foundData are populated from the primary
// entry that is associated with the secondary db key.
DatabaseEntry foundKey = new DatabaseEntry();
DatabaseEntry foundData = new DatabaseEntry();
// open a secondary cursor
secCursor =
myDbEnv.getNameIndexDB().openSecondaryCursor(null, null);
// Search for the secondary database entry.
OperationStatus retVal =
secCursor.getSearchKey(searchKey, foundKey,
foundData, LockMode.DEFAULT);
// Display the entry, if one is found. Repeat until no more
// secondary duplicate entries are found
while(retVal == OperationStatus.SUCCESS) {
Inventory theInventory =
(Inventory)inventoryBinding.entryToObject(foundData);
displayInventoryRecord(foundKey, theInventory);
retVal = secCursor.getNextDup(searchKey, foundKey,
foundData, LockMode.DEFAULT);
}
} catch (Exception e) {
System.err.println("Error on inventory secondary cursor:");
System.err.println(e.toString());
e.printStackTrace();
} finally {
if (secCursor != null) {
secCursor.close();
}
}
}
private void showAllInventory()
throws DatabaseException {
// Get a cursor
Cursor cursor = myDbEnv.getInventoryDB().openCursor(null, null);
// DatabaseEntry objects used for reading records
DatabaseEntry foundKey = new DatabaseEntry();
DatabaseEntry foundData = new DatabaseEntry();
try { // always want to make sure the cursor gets closed
while (cursor.getNext(foundKey, foundData,
LockMode.DEFAULT) == OperationStatus.SUCCESS) {
Inventory theInventory =
(Inventory)inventoryBinding.entryToObject(foundData);
displayInventoryRecord(foundKey, theInventory);
}
} catch (Exception e) {
System.err.println("Error on inventory cursor:");
System.err.println(e.toString());
e.printStackTrace();
} finally {
cursor.close();
}
}
private void displayInventoryRecord(DatabaseEntry theKey,
Inventory theInventory)
throws DatabaseException {
DatabaseEntry searchKey = null;
try {
String theSKU = new String(theKey.getData(), "UTF-8");
System.out.println(theSKU + ":");
System.out.println("\t " + theInventory.getItemName());
System.out.println("\t " + theInventory.getCategory());
System.out.println("\t " + theInventory.getVendor());
System.out.println("\t\tNumber in stock: " +
theInventory.getVendorInventory());
System.out.println("\t\tPrice per unit: " +
theInventory.getVendorPrice());
System.out.println("\t\tContact: ");
searchKey =
new DatabaseEntry(theInventory.getVendor().getBytes("UTF-8"));
} catch (IOException willNeverOccur) {}
DatabaseEntry foundVendor = new DatabaseEntry();
if (myDbEnv.getVendorDB().get(null, searchKey, foundVendor,
LockMode.DEFAULT) != OperationStatus.SUCCESS) {
System.out.println("Could not find vendor: " +
theInventory.getVendor() + ".");
System.exit(-1);
} else {
Vendor theVendor =
(Vendor)vendorBinding.entryToObject(foundVendor);
System.out.println("\t\t " + theVendor.getAddress());
System.out.println("\t\t " + theVendor.getCity() + ", " +
theVendor.getState() + " " + theVendor.getZipcode());
System.out.println("\t\t Business Phone: " +
theVendor.getBusinessPhoneNumber());
System.out.println("\t\t Sales Rep: " +
theVendor.getRepName());
System.out.println("\t\t " +
theVendor.getRepPhoneNumber());
}
}
protected ExampleInventoryRead() {}
private static void parseArgs(String args[]) {
for(int i = 0; i < args.length; ++i) {
if (args[i].startsWith("-")) {
switch(args[i].charAt(1)) {
case 'h':
myDbEnvPath = new File(args[++i]);
break;
case 's':
locateItem = new String(args[++i]);
break;
default:
usage();
}
}
}
}
}<|fim▁end|> | Vendor.class);
|
<|file_name|>CompiledMethod_i386.cpp<|end_file_name|><|fim▁begin|>/*
*
*
* Copyright 1990-2006 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version
* 2 only, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details (a copy is<|fim▁hole|> *
* You should have received a copy of the GNU General Public License
* version 2 along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
* Clara, CA 95054 or visit www.sun.com if you need additional
* information or have any questions.
*/
# include "incls/_precompiled.incl"
# include "incls/_CompiledMethod_i386.cpp.incl"
#if ENABLE_COMPILER
#if !defined(PRODUCT) || ENABLE_TTY_TRACE
int DisassemblerEnv::code_offset(address target) {
int result = target - _code->entry();
if (result < 0 || result > _code->size()) {
return -1;
}
return result;
}
void DisassemblerEnv::comment_on_immediate(unsigned char* pc, int value) {
static char temp[30];
int code_offset = pc - _code->entry();
RelocationReader stream(_code);
switch (stream.kind_at(code_offset)) {
case Relocation::oop_type:
case Relocation::rom_oop_type:
_oop = (OopDesc*)value;
break;
case Relocation::compiler_stub_type:
_comment = " {Compiler stub}";
break;
}
}
void DisassemblerEnv::print_comment(Stream* st) {
if (_comment != NULL) {
st->print(_comment);
} else {
_oop.print_value_on(st);
}
}
void CompiledMethod::print_code_on(Stream* st, jint start, jint end) {
// Warning this is not safe for garbage collection
address pc = entry() + start;
while (*pc != 0x00 && pc < entry() + end) {
DisassemblerEnv env(this);
address instruction_start = pc;
st->print(" %4d: ", instruction_start - entry());
pc = disasm(instruction_start, &env);
st->print("%s", env.buffer());
if (env.has_comment()) {
st->print(" // ");
env.print_comment(st);
}
st->cr();
}
}
void CompiledMethod::print_code_on(Stream* st) {
// Warning this is not safe for garbage collection
address pc = entry();
while (*pc != 0x00) {
DisassemblerEnv env(this);
address instruction_start = pc;
print_comment_for(instruction_start - entry(), st);
st->print(" %4d: ", instruction_start - entry());
pc = disasm(instruction_start, &env);
st->print("%s", env.buffer());
if (env.has_comment()) {
st->print(" // ");
env.print_comment(st);
}
st->cr();
}
}
#endif // !PRODUCT
#endif // COMPILER
#if ENABLE_ROM_GENERATOR
// generate a map of all the field types in this object
int CompiledMethod::generate_fieldmap(TypeArray* field_map) {
SHOULD_NOT_REACH_HERE();
return 0;
}
#endif /* #if ENABLE_ROM_GENERATOR*/<|fim▁end|> | * included at /legal/license.txt). |
<|file_name|>test_config.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from io import BytesIO
import mock
import pytest
from twitter.common.contextutil import temporary_dir<|fim▁hole|>from apache.aurora.client.config import get_config as get_aurora_config
from apache.aurora.client.config import PRODUCTION_DEPRECATED_WARNING
from apache.aurora.config import AuroraConfig
from apache.aurora.config.loader import AuroraConfigLoader
from apache.aurora.config.schema.base import (
MB,
Announcer,
HealthCheckConfig,
Job,
Resources,
Task,
UpdateConfig
)
from apache.thermos.config.schema_base import Process
MESOS_CONFIG_BASE = """
HELLO_WORLD = Job(
name = 'hello_world',
role = 'john_doe',
cluster = 'test-cluster',
environment = 'test',
%(announce)s
task = Task(
name = 'main',
processes = [Process(name = 'hello_world', cmdline = '%(cmdline)s')],
resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
)
)
jobs = [HELLO_WORLD]
"""
MESOS_CONFIG_WITH_INCLUDE = """
%s
include(%s)
"""
MESOS_CONFIG_WITH_ANNOUNCE_1 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http"),'}
MESOS_CONFIG_WITH_ANNOUNCE_2 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': '''announce = Announcer(
primary_port = "http",
portmap = {"aurora": "http"}),
'''}
MESOS_CONFIG_WITH_INVALID_STATS = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http", stats_port="blah"),'}
MESOS_CONFIG_WITHOUT_ANNOUNCE = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': ''
}
def test_get_config_announces():
for good_config in (
MESOS_CONFIG_WITH_ANNOUNCE_1,
MESOS_CONFIG_WITH_ANNOUNCE_2,
MESOS_CONFIG_WITHOUT_ANNOUNCE):
bio = BytesIO(good_config)
get_aurora_config('hello_world', bio).job()
def test_get_config_with_broken_subscopes():
bad_config = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{hello[{{thermos.ports[http]}}]}}',
'announce': '',
}
bio = BytesIO(bad_config)
with pytest.raises(AuroraConfig.InvalidConfig) as cm:
get_aurora_config('hello_world', bio).job()
assert 'Unexpected unbound refs' in str(cm.value.message)
def test_get_config_select():
bio = BytesIO(MESOS_CONFIG_WITHOUT_ANNOUNCE)
get_aurora_config(
'hello_world',
bio,
select_env='test',
select_role='john_doe',
select_cluster='test-cluster').job()
bio.seek(0)
with pytest.raises(ValueError) as cm:
get_aurora_config(
'hello_world',
bio,
select_env='staging42',
select_role='moua',
select_cluster='test-cluster').job()
assert 'test-cluster/john_doe/test/hello_world' in str(cm.value.message)
def test_include():
with temporary_dir() as dir:
hello_mesos_fname = "hello_world.mesos"
hello_mesos_path = os.path.join(dir, hello_mesos_fname)
with open(os.path.join(dir, hello_mesos_path), "wb") as hello_world_mesos:
hello_world_mesos.write(MESOS_CONFIG_WITHOUT_ANNOUNCE)
hello_world_mesos.flush()
hello_include_fname_path = os.path.join(dir, "hello_include_fname.mesos")
with open(hello_include_fname_path, "wb+") as hello_include_fname_fp:
hello_include_fname_fp.write(MESOS_CONFIG_WITH_INCLUDE %
("", """'%s'""" % hello_mesos_fname))
hello_include_fname_fp.flush()
get_aurora_config('hello_world', hello_include_fname_path)
hello_include_fname_fp.seek(0)
with pytest.raises(AuroraConfigLoader.InvalidConfigError):
get_aurora_config('hello_world', hello_include_fname_fp)
def test_dedicated_portmap():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
config._validate_announce_configuration(AuroraConfig(base_job))
config._validate_announce_configuration(
AuroraConfig(base_job(constraints={'dedicated': 'mesos-team'})))
config._validate_announce_configuration(
AuroraConfig(base_job(constraints={'dedicated': 'mesos-team'},
announce=Announcer(portmap={'http': 80}))))
with pytest.raises(ValueError):
config._validate_announce_configuration(
AuroraConfig(base_job(announce=Announcer(portmap={'http': 80}))))
with pytest.raises(ValueError):
config._validate_announce_configuration(
AuroraConfig(base_job(announce=Announcer(portmap={'http': 80}),
constraints={'foo': 'bar'})))
def test_update_config_passes_with_default_values():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_passes_with_max_consecutive_failures_zero():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
health_check_config=HealthCheckConfig(max_consecutive_failures=0),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_fails_with_max_consecutive_failures_negative():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
update_config=UpdateConfig(watch_secs=26),
health_check_config=HealthCheckConfig(max_consecutive_failures=-1),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
with pytest.raises(SystemExit):
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_passes_with_min_consecutive_successes_zero():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
health_check_config=HealthCheckConfig(min_consecutive_successes=0),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_fails_with_min_consecutive_successes_negative():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
health_check_config=HealthCheckConfig(min_consecutive_successes=-1),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
with pytest.raises(SystemExit):
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_passes_with_watch_secs_zero():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
update_config=UpdateConfig(watch_secs=0),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
config._validate_update_config(AuroraConfig(base_job))
def test_update_config_fails_watch_secs_negative():
base_job = Job(
name='hello_world', role='john_doe', cluster='test-cluster',
update_config=UpdateConfig(watch_secs=-1),
task=Task(name='main', processes=[],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))
with pytest.raises(SystemExit):
config._validate_update_config(AuroraConfig(base_job))
def test_validate_deprecated_config_adds_warning_for_production():
job = Job(name='hello_world', role='john_doe', cluster='test-cluster', environment='test',
task=Task(name='main', processes=[Process(cmdline='echo {{_unbound_}}', name='eco')],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)), production='true')
with mock.patch('apache.aurora.client.config.deprecation_warning') as mock_warning:
config._validate_deprecated_config(AuroraConfig(job))
mock_warning.assert_called_once_with(PRODUCTION_DEPRECATED_WARNING)
def test_validate_deprecated_config_adds_no_warning_when_tier_is_set():
job = Job(name='hello_world', role='john_doe', cluster='test-cluster', environment='test',
task=Task(name='main', processes=[Process(cmdline='echo {{_unbound_}}', name='eco')],
resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)),
production='true', tier='preferred')
with mock.patch('apache.aurora.client.config.deprecation_warning') as mock_warning:
config._validate_deprecated_config(AuroraConfig(job))
assert mock_warning.call_count == 0<|fim▁end|> |
from apache.aurora.client import config |
<|file_name|>control.py<|end_file_name|><|fim▁begin|>##### CONTROL AND NAVIGATION ###################################################
# update the applications' title bar
def UpdateCaption(page=0, force=False):
global CurrentCaption, CurrentOSDCaption, CurrentOSDPage, CurrentOSDStatus
global CurrentOSDComment
if (page == CurrentCaption) and not(force):
return
CurrentCaption = page
caption = __title__
if DocumentTitle:
caption += " - " + DocumentTitle
if page < 1:
CurrentOSDCaption = ""
CurrentOSDPage = ""
CurrentOSDStatus = ""
CurrentOSDComment = ""
Platform.SetWindowTitle(caption)
return
CurrentOSDPage = "%d/%d" % (page, PageCount)
caption = "%s (%s)" % (caption, CurrentOSDPage)
title = GetPageProp(page, 'title') or GetPageProp(page, '_title')
if title:
caption += ": %s" % title
CurrentOSDCaption = title
else:
CurrentOSDCaption = ""
status = []
if GetPageProp(page, 'skip', False):
status.append("skipped: yes")
if not GetPageProp(page, ('overview', '_overview'), True):
status.append("on overview page: no")
CurrentOSDStatus = ", ".join(status)
CurrentOSDComment = GetPageProp(page, 'comment')
Platform.SetWindowTitle(caption)
# get next/previous page
def GetNextPage(page, direction):
checked_pages = set()
while True:
checked_pages.add(page)
page = GetPageProp(page,
('prev' if (direction < 0) else 'next'),
page + direction)
if page in checked_pages:
return 0 # we looped around completely and found nothing
if Wrap:
if page < 1: page = PageCount
if page > PageCount: page = 1
else:
if page < 1 or page > PageCount:
return 0 # start or end of presentation
if not GetPageProp(page, 'skip', False):
return page
# pre-load the following page into Pnext/Tnext
def PreloadNextPage(page):
global Pnext, Tnext
if (page < 1) or (page > PageCount):
Pnext = 0
return 0
if page == Pnext:
return 1
RenderPage(page, Tnext)
Pnext = page
return 1
# perform box fading; the fade animation time is mapped through func()
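# e.g. BoxFade(lambda t: t) fades the highlight boxes in, while
# BoxFade(lambda t: 1.0 - t) fades them out (see the call sites in TransitionTo)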
def BoxFade(func):
t0 = Platform.GetTicks()
while BoxFadeDuration > 0:
if Platform.CheckAnimationCancelEvent(): break
t = (Platform.GetTicks() - t0) * 1.0 / BoxFadeDuration
if t >= 1.0: break
DrawCurrentPage(func(t))
DrawCurrentPage(func(1.0))
return 0
# reset the timer
def ResetTimer():
global StartTime, PageEnterTime
if TimeTracking and not(FirstPage):
print("--- timer was reset here ---")
StartTime = Platform.GetTicks()
PageEnterTime = 0
# start video playback
def PlayVideo(video):
global MPlayerProcess, VideoPlaying, NextPageAfterVideo
if not video: return
StopMPlayer()
if Platform.use_omxplayer:
opts = ["omxplayer"]
else:
opts = [MPlayerPath, "-quiet", "-slave", \
"-monitorpixelaspect", "1:1", \
"-vo", "gl", \
"-autosync", "100"]
try:
opts += ["-wid", str(Platform.GetWindowID())]
except KeyError:
if Fullscreen:
opts.append("-fs")
else:
print("Sorry, but Impressive only supports video on your operating system if fullscreen", file=sys.stderr)
print("mode is used.", file=sys.stderr)
VideoPlaying = False
MPlayerProcess = None
return
if not isinstance(video, list):
video = [video]
NextPageAfterVideo = False
try:
MPlayerProcess = Popen(opts + video, stdin=subprocess.PIPE)
if Platform.use_omxplayer:
gl.Clear(gl.COLOR_BUFFER_BIT)
Platform.SwapBuffers()
if Fullscreen and (os.name == 'nt'):
# very ugly Win32-specific hack: in -wid embedding mode,
# video display only works if we briefly minimize and restore
# the window ... and that's the good case: in -fs, keyboard
# focus is messed up and we don't get any input!
if Win32FullscreenVideoHackTiming[0] > 0:
time.sleep(Win32FullscreenVideoHackTiming[0])
win32gui.ShowWindow(Platform.GetWindowID(), 6) # SW_MINIMIZE
if Win32FullscreenVideoHackTiming[1] > 0:
time.sleep(Win32FullscreenVideoHackTiming[1])
win32gui.ShowWindow(Platform.GetWindowID(), 9) # SW_RESTORE
VideoPlaying = True
except OSError:
MPlayerProcess = None
# called each time a page is entered, AFTER the transition, BEFORE entering box-fade mode
def PreparePage():
global SpotRadius, SpotRadiusBase
global BoxFadeDarkness, BoxFadeDarknessBase
global BoxZoomDarkness, BoxZoomDarknessBase
override = GetPageProp(Pcurrent, 'radius')
if override:
SpotRadius = override
SpotRadiusBase = override
GenerateSpotMesh()
override = GetPageProp(Pcurrent, 'darkness')
if override is not None:
BoxFadeDarkness = override * 0.01
BoxFadeDarknessBase = override * 0.01
override = GetPageProp(Pcurrent, 'zoomdarkness')
if override is not None:
BoxZoomDarkness = override * 0.01
BoxZoomDarknessBase = override * 0.01
# called each time a page is entered, AFTER the transition, AFTER entering box-fade mode
def PageEntered(update_time=True):
global PageEnterTime, PageTimeout, MPlayerProcess, IsZoomed, WantStatus
if update_time:
PageEnterTime = Platform.GetTicks() - StartTime
IsZoomed = 0 # no, we don't have a pre-zoomed image right now
WantStatus = False # don't show status unless it's changed interactively
PageTimeout = AutoAdvanceTime if AutoAdvanceEnabled else 0
shown = GetPageProp(Pcurrent, '_shown', 0)
try:
os.chdir(os.path.dirname(GetPageProp(Pcurrent, '_file')))
except OSError:
pass
if not(shown) or Wrap:
PageTimeout = GetPageProp(Pcurrent, 'timeout', PageTimeout)
if GetPageProp(Pcurrent, '_video'):
PlayVideo(GetPageProp(Pcurrent, '_file'))
if not(shown) or GetPageProp(Pcurrent, 'always', False):
if not GetPageProp(Pcurrent, '_video'):
<|fim▁hole|> video = GetPageProp(Pcurrent, 'video')
sound = GetPageProp(Pcurrent, 'sound')
PlayVideo(video)
if sound and not(video):
StopMPlayer()
try:
MPlayerProcess = Popen(
[MPlayerPath, "-quiet", "-really-quiet", "-novideo", sound],
stdin=subprocess.PIPE)
except OSError:
MPlayerProcess = None
SafeCall(GetPageProp(Pcurrent, 'OnEnterOnce'))
SafeCall(GetPageProp(Pcurrent, 'OnEnter'))
if PageTimeout:
Platform.ScheduleEvent("$page-timeout", PageTimeout)
SetPageProp(Pcurrent, '_shown', shown + 1)
# called each time a page is left
def PageLeft(overview=False):
global FirstPage, LastPage, WantStatus, PageLeaveTime
PageLeaveTime = Platform.GetTicks() - StartTime
WantStatus = False
if not overview:
if GetTristatePageProp(Pcurrent, 'reset'):
ResetTimer()
FirstPage = False
LastPage = Pcurrent
if GetPageProp(Pcurrent, '_shown', 0) == 1:
SafeCall(GetPageProp(Pcurrent, 'OnLeaveOnce'))
SafeCall(GetPageProp(Pcurrent, 'OnLeave'))
if TimeTracking:
t1 = Platform.GetTicks() - StartTime
dt = (t1 - PageEnterTime + 500) // 1000
if overview:
p = "over"
else:
p = "%4d" % Pcurrent
print("%s%9s%9s%9s" % (p, FormatTime(dt),
FormatTime(PageEnterTime // 1000),
FormatTime(t1 // 1000)))
# create an instance of a transition class
def InstantiateTransition(trans_class):
if isinstance(trans_class, basestring):
index = dict((c.__name__.lower(), c) for c in AllTransitions)
try:
trans_class = index[trans_class.lower()]
except KeyError:
print("Error: invalid transition '{}', ignoring".format(trans_class), file=sys.stderr)
return None
elif not(isinstance(trans_class, type) and issubclass(trans_class, Transition)):
print("Error: invalid transition '{!r}', ignoring".format(trans_class), file=sys.stderr)
return None
try:
return trans_class()
except GLInvalidShaderError:
return None
except GLShaderCompileError:
print("Note: all %s transitions will be disabled" % trans_class.__name__, file=sys.stderr)
return None
# perform a transition to a specified page
def TransitionTo(page, allow_transition=True, notify_page_left=True):
global Pcurrent, Pnext, Tcurrent, Tnext
global PageCount, Marking, Tracing, Panning
global TransitionRunning, TransitionPhase
global TransitionDone
TransitionDone = False
# first, stop video and kill the auto-timer
if VideoPlaying:
StopMPlayer()
Platform.ScheduleEvent("$page-timeout", 0)
# invalid page? go away
if not PreloadNextPage(page):
if QuitAtEnd:
LeaveZoomMode(allow_transition)
if FadeInOut:
EnterFadeMode()
PageLeft()
Quit()
return 0
# leave zoom mode now, if enabled
LeaveZoomMode(allow_transition)
# notify that the page has been left
if notify_page_left:
PageLeft()
if TransitionDone:
return 1 # nested call to TransitionTo() detected -> abort here
# box fade-out
if GetPageProp(Pcurrent, 'boxes') or Tracing:
skip = BoxFade(lambda t: 1.0 - t)
else:
skip = 0
# some housekeeping
Marking = False
Tracing = False
UpdateCaption(page)
# check if the transition is valid
tpage = max(Pcurrent, Pnext)
trans = None
if allow_transition:
trans = GetPageProp(tpage, 'transition', GetPageProp(tpage, '_transition'))
if trans is not None:
transtime = GetPageProp(tpage, 'transtime', TransitionDuration)
try:
dummy = trans.__class__
except AttributeError:
# ah, gotcha! the transition is not yet instantiated!
trans = InstantiateTransition(trans)
# cache the instantiated transition under whichever key supplied it
tkey = 'transition' if 'transition' in PageProps.get(tpage, {}) else '_transition'
PageProps[tpage][tkey] = trans
if trans is None:
transtime = 0
# backward motion? then swap page buffers now
backward = (Pnext < Pcurrent)
if Wrap and (min(Pcurrent, Pnext) == 1) and (max(Pcurrent, Pnext) == PageCount):
backward = not(backward) # special case: last<->first in wrap mode
if backward:
Pcurrent, Pnext = (Pnext, Pcurrent)
Tcurrent, Tnext = (Tnext, Tcurrent)
# transition animation
if not(skip) and transtime:
transtime = 1.0 / transtime
TransitionRunning = True
trans.start()
t0 = Platform.GetTicks()
while not(VideoPlaying):
if Platform.CheckAnimationCancelEvent():
skip = 1
break
t = (Platform.GetTicks() - t0) * transtime
if t >= 1.0: break
TransitionPhase = t
if backward: t = 1.0 - t
gl.Clear(gl.COLOR_BUFFER_BIT)
trans.render(t)
DrawOverlays(t)
Platform.SwapBuffers()
TransitionRunning = False
# forward motion => swap page buffers now
if not backward:
Pcurrent, Pnext = (Pnext, Pcurrent)
Tcurrent, Tnext = (Tnext, Tcurrent)
# prepare the page's changeable metadata
PreparePage()
# box fade-in
if not(skip) and GetPageProp(Pcurrent, 'boxes'): BoxFade(lambda t: t)
# finally update the screen and preload the next page
DrawCurrentPage()
PageEntered()
if TransitionDone:
return 1
if not PreloadNextPage(GetNextPage(Pcurrent, 1)):
PreloadNextPage(GetNextPage(Pcurrent, -1))
TransitionDone = True
return 1
# zoom mode animation
def ZoomAnimation(targetx, targety, func, duration_override=None):
global ZoomX0, ZoomY0, ZoomArea
t0 = Platform.GetTicks()
if duration_override is None:
duration = ZoomDuration
else:
duration = duration_override
while duration > 0:
if Platform.CheckAnimationCancelEvent(): break
t = (Platform.GetTicks() - t0) * 1.0 / duration
if t >= 1.0: break
t = func(t)
dark = (t if BoxZoom else 1.0)
t = (2.0 - t) * t
ZoomX0 = targetx * t
ZoomY0 = targety * t
ZoomArea = 1.0 - (1.0 - 1.0 / ViewZoomFactor) * t
DrawCurrentPage(dark=dark)
t = func(1.0)
ZoomX0 = targetx * t
ZoomY0 = targety * t
ZoomArea = 1.0 - (1.0 - 1.0 / ViewZoomFactor) * t
GenerateSpotMesh()
DrawCurrentPage(dark=(t if BoxZoom else 1.0))
# re-render zoomed page image
def ReRenderZoom(factor):
global ResZoomFactor, IsZoomed, HighResZoomFailed
ResZoomFactor = min(factor, MaxZoomFactor)
if (IsZoomed >= ResZoomFactor) or (ResZoomFactor < 1.1) or HighResZoomFailed:
return
gl.BindTexture(gl.TEXTURE_2D, Tcurrent)
while gl.GetError():
pass # clear all OpenGL errors
gl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGB, int(ResZoomFactor * TexWidth), int(ResZoomFactor * TexHeight), 0, gl.RGB, gl.UNSIGNED_BYTE, PageImage(Pcurrent, True))
if gl.GetError():
print("I'm sorry, but your graphics card is not capable of rendering presentations", file=sys.stderr)
print("in this resolution. Either the texture memory is exhausted, or there is no", file=sys.stderr)
print("support for large textures (%dx%d). Please try to run Impressive in a" % (TexWidth, TexHeight), file=sys.stderr)
print("smaller resolution using the -g command-line option.", file=sys.stderr)
HighResZoomFailed = True
return
DrawCurrentPage()
IsZoomed = ResZoomFactor
# enter zoom mode
def EnterZoomMode(factor, targetx, targety):
global ZoomMode, ViewZoomFactor
ViewZoomFactor = factor
ZoomAnimation(targetx, targety, lambda t: t)
ZoomMode = True
ReRenderZoom(factor)
# leave zoom mode (if enabled)
def LeaveZoomMode(allow_transition=True):
global ZoomMode, BoxZoom, Panning, ViewZoomFactor, ResZoomFactor
global ZoomArea, ZoomX0, ZoomY0
if not ZoomMode: return
ZoomAnimation(ZoomX0, ZoomY0, lambda t: 1.0 - t, (None if allow_transition else 0))
ZoomMode = False
BoxZoom = False
Panning = False
ViewZoomFactor = 1
ResZoomFactor = 1
ZoomArea = 1.0
ZoomX0 = 0.0
ZoomY0 = 0.0
# change zoom factor in zoom mode
def ChangeZoom(target_factor, mousepos):
global ZoomMode, ViewZoomFactor, ZoomArea, ZoomX0, ZoomY0
px, py = MouseToScreen(mousepos)
log_zf = log(ViewZoomFactor)
dlog = log(target_factor) - log_zf
t0 = Platform.GetTicks()
dt = -1
while dt < WheelZoomDuration:
dt = Platform.GetTicks() - t0
rel = min(1.0, float(dt) / WheelZoomDuration) if WheelZoomDuration else 1.0
factor = exp(log_zf + rel * dlog)
if factor < 1.001: factor = 1.0
ZoomArea = 1.0 / factor
ZoomX0 = max(0.0, min(1.0 - ZoomArea, px - mousepos[0] * ZoomArea / ScreenWidth))
ZoomY0 = max(0.0, min(1.0 - ZoomArea, py - mousepos[1] * ZoomArea / ScreenHeight))
DrawCurrentPage()
ViewZoomFactor = factor
ZoomMode = (factor > 1.0)
# check whether a box mark is too small
def BoxTooSmall():
return ((abs(MarkUL[0] - MarkLR[0]) * ScreenWidth) < MinBoxSize) \
or ((abs(MarkUL[1] - MarkLR[1]) * ScreenHeight) < MinBoxSize)
# increment/decrement spot radius
def IncrementSpotSize(delta):
global SpotRadius
if not Tracing:
return
SpotRadius = max(SpotRadius + delta, 8)
GenerateSpotMesh()
DrawCurrentPage()
# post-initialize the page transitions
def PrepareTransitions():
Unspecified = 0xAFFED00F
# STEP 1: randomly assign transitions where the user didn't specify them
cnt = sum([1 for page in range(1, PageCount + 1) \
if GetPageProp(page, 'transition', Unspecified) == Unspecified])
newtrans = ((cnt // len(AvailableTransitions) + 1) * AvailableTransitions)[:cnt]
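# e.g. with cnt == 5 and AvailableTransitions == [A, B, C] this builds
# [A, B, C, A, B]: the pool repeated enough times, truncated to cnt, so each
# transition occurs roughly equally often before the shuffle below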
random.shuffle(newtrans)
for page in range(1, PageCount + 1):
if GetPageProp(page, 'transition', Unspecified) == Unspecified:
SetPageProp(page, '_transition', newtrans.pop())
# STEP 2: instantiate transitions
for page in PageProps:
for key in ('transition', '_transition'):
if not key in PageProps[page]:
continue
trans = PageProps[page][key]
if trans is not None:
PageProps[page][key] = InstantiateTransition(trans)
# update timer values and screen timer
def TimerTick():
global CurrentTime, ProgressBarPos
redraw = False
newtime = (Platform.GetTicks() - StartTime) * 0.001
if EstimatedDuration:
newpos = int(ScreenWidth * newtime / EstimatedDuration)
if newpos != ProgressBarPos:
redraw = True
ProgressBarPos = newpos
newtime = int(newtime)
if TimeDisplay and (CurrentTime != newtime):
redraw = True
if PageTimeout and AutoAdvanceProgress:
redraw = True
CurrentTime = newtime
return redraw
# enables time tracking mode (if not already done so)
def EnableTimeTracking(force=False):
global TimeTracking
if force or (TimeDisplay and not(TimeTracking) and not(ShowClock) and FirstPage):
print("Time tracking mode enabled.", file=sys.stderr)
TimeTracking = True
print("page duration enter leave")
print("---- -------- -------- --------")
# set cursor visibility
def SetCursor(visible):
global CursorVisible
CursorVisible = visible
if not(CursorImage) and (MouseHideDelay != 1):
Platform.SetMouseVisible(visible)
# handle a shortcut key event: store it (if shifted) or return the
# page number to navigate to (if not)
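# e.g. pressing Shift+'a' ("*shift+a") binds shortcut 'a' to the current page;
# pressing 'a' ("*a") later returns that page's number so the caller can jump there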
def HandleShortcutKey(key, current=0):
if not(key) or (key[0] != '*'):
return None
shift = key.startswith('*shift+')
if shift:
key = key[7:]
else:
key = key[1:]
if (len(key) == 1) or ((key >= "f1") and (key <= "f9")):
# Note: F10..F12 are implicitly included due to lexicographic sorting
page = None
for check_page, props in PageProps.items():
if props.get('shortcut') == key:
page = check_page
break
if shift:
if page:
DelPageProp(page, 'shortcut')
SetPageProp(current, 'shortcut', key)
elif page and (page != current):
return page
return None<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 by Stacy Prowell. All rights reserved.
//
// Licensed under the BSD 2-Clause license. See the file LICENSE
// that is part of this distribution. This file may not be copied,
// modified, or distributed except according to those terms.
extern crate linenoise;
extern crate getopts;
extern crate num;
extern crate relision;
use getopts::Options;
/// The REPL.
fn repl() {
let history_filename = relision::get_config_dir() + "/repl.history";
linenoise::history_load(&history_filename);
loop {
let val = linenoise::input("e> ");
match val {
None => {
linenoise::history_save(&history_filename);
break;
}
Some(input) => {
println!("{}", input);
linenoise::history_add(&input);
if input == "clear" {
linenoise::clear_screen();
}
}
} // Match.
} // REPL loop.
}
/// Print the command line help. First print the prototype for using the
/// command, and then print help about using the switches.
/// progname: The program name.
/// switches: The allowed command line switch data structure.
fn print_usage(progname: &str, switches: Options) {
let prototype = format!("Usage: {} [switches...] [elision files...]", progname);
print!("{}", switches.usage(&prototype));
}
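// For illustration, `relision -h` would print something like:
//   Usage: relision [switches...] [elision files...]
//   Options:
//       -h, --help      Print this command line help.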
/// Entry point when run from the prompt.
fn main() {
println!("Running on {}.", relision::get_platform());
println!("Configuration stored at: {}.", relision::get_config_dir());
// Get the command line arguments.
let args = std::env::args().collect::<Vec<String>>();
let me = args[0].clone();
// Specify the switches this wrapper takes.
let mut switches = getopts::Options::new();
switches.optflag("h", "help", "Print this command line help.");
// Now process all command line switches. The "tail" removes the program
// name.<|fim▁hole|> let matches = match switches.parse(args) {
Ok(mat) => mat,
Err(fail) => {
println!("ERROR parsing command line arguments:");
println!(" {}", fail.to_string());
return;
}
};
if matches.opt_present("h") {
print_usage(&me, switches);
return;
}
// Now run the REPL.
repl();
}<|fim▁end|> | |
<|file_name|>cinder_service_check.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
# Technically maas_common isn't third-party but our own thing, but hacking
# considers it third-party<|fim▁hole|>from maas_common import print_output
from maas_common import status_err
from maas_common import status_ok
import requests
from requests import exceptions as exc
# NOTE(mancdaz): until https://review.openstack.org/#/c/111051/
# lands, there is no way to pass a custom (local) endpoint to
# cinderclient. Only way to test local is direct http. :sadface:
def check(auth_ref, args):
keystone = get_keystone_client(auth_ref)
auth_token = keystone.auth_token
VOLUME_ENDPOINT = (
'{protocol}://{hostname}:8776/v1/{tenant}'.format(
protocol=args.protocol,
hostname=args.hostname,
tenant=keystone.tenant_id)
)
s = requests.Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# We cannot do /os-services?host=X as cinder returns a hostname of
# X@lvm for cinder-volume binary
r = s.get('%s/os-services' % VOLUME_ENDPOINT, verify=False, timeout=5)
except (exc.ConnectionError,
exc.HTTPError,
exc.Timeout) as e:
metric_bool('client_success', False, m_name='maas_cinder')
status_err(str(e), m_name='maas_cinder')
if not r.ok:
metric_bool('client_success', False, m_name='maas_cinder')
status_err(
'Could not get response from Cinder API',
m_name='cinder'
)
else:
metric_bool('client_success', True, m_name='maas_cinder')
services = r.json()['services']
# We need to match against a host of X and X@lvm (or whatever backend)
if args.host:
backend = ''.join((args.host, '@'))
services = [service for service in services
if (service['host'].startswith(backend) or
service['host'] == args.host)]
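# e.g. --host cinder1 keeps both 'cinder1' (cinder-scheduler) and
# 'cinder1@lvm' (cinder-volume with an LVM backend) in the list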
if len(services) == 0:
status_err(
'No host(s) found in the service list',
m_name='maas_cinder'
)
status_ok(m_name='maas_cinder')
if args.host:
for service in services:
service_is_up = True
name = '%s_status' % service['binary']
if service['status'] == 'enabled' and service['state'] != 'up':
service_is_up = False
if '@' in service['host']:
[host, backend] = service['host'].split('@')
name = '%s-%s_status' % (service['binary'], backend)
metric_bool(name, service_is_up)
else:
for service in services:
service_is_up = True
if service['status'] == 'enabled' and service['state'] != 'up':
service_is_up = False
name = '%s_on_host_%s' % (service['binary'], service['host'])
metric_bool(name, service_is_up)
def main(args):
auth_ref = get_auth_ref()
check(auth_ref, args)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Check Cinder API against"
" local or remote address")
parser.add_argument('hostname',
type=str,
help='Cinder API hostname or IP address')
parser.add_argument('--host',
type=str,
help='Only return metrics for the specified host')
parser.add_argument('--telegraf-output',
action='store_true',
default=False,
help='Set the output format to telegraf')
parser.add_argument('--protocol',
type=str,
default='http',
help='Protocol to use for cinder client')
args = parser.parse_args()
with print_output(print_telegraf=args.telegraf_output):
main(args)<|fim▁end|> | from maas_common import get_auth_ref
from maas_common import get_keystone_client
from maas_common import metric_bool |
<|file_name|>QueryReadStore.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2008, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
if(!dojo._hasResource["dojox.data.QueryReadStore"]){
dojo._hasResource["dojox.data.QueryReadStore"]=true;
dojo.provide("dojox.data.QueryReadStore");
dojo.require("dojo.string");
dojo.require("dojo.data.util.sorter");
dojo.declare("dojox.data.QueryReadStore",null,{url:"",requestMethod:"get",_className:"dojox.data.QueryReadStore",_items:[],_lastServerQuery:null,_numRows:-1,lastRequestHash:null,doClientPaging:false,doClientSorting:false,_itemsByIdentity:null,_identifier:null,_features:{"dojo.data.api.Read":true,"dojo.data.api.Identity":true},_labelAttr:"label",constructor:function(_1){
dojo.mixin(this,_1);
},getValue:function(_2,_3,_4){
this._assertIsItem(_2);
if(!dojo.isString(_3)){
throw new Error(this._className+".getValue(): Invalid attribute, string expected!");
}
if(!this.hasAttribute(_2,_3)){
if(_4){
return _4;
}
}
return _2.i[_3];
},getValues:function(_5,_6){
this._assertIsItem(_5);
var _7=[];
if(this.hasAttribute(_5,_6)){
_7.push(_5.i[_6]);
}
return _7;
},getAttributes:function(_8){
this._assertIsItem(_8);
var _9=[];
for(var i in _8.i){
_9.push(i);
}
return _9;
},hasAttribute:function(_b,_c){
return this.isItem(_b)&&typeof _b.i[_c]!="undefined";
},containsValue:function(_d,_e,_f){
var _10=this.getValues(_d,_e);
var len=_10.length;
for(var i=0;i<len;i++){
if(_10[i]==_f){
return true;
}
}
return false;
},isItem:function(_13){
if(_13){
return typeof _13.r!="undefined"&&_13.r==this;
}
return false;
},isItemLoaded:function(_14){
return this.isItem(_14);
},loadItem:function(_15){
if(this.isItemLoaded(_15.item)){
return;
}
},fetch:function(_16){
_16=_16||{};
if(!_16.store){
_16.store=this;
}
var _17=this;
var _18=function(_19,_1a){
if(_1a.onError){
var _1b=_1a.scope||dojo.global;
_1a.onError.call(_1b,_19,_1a);
}
};
var _1c=function(_1d,_1e,_1f){
var _20=_1e.abort||null;
var _21=false;
var _22=_1e.start?_1e.start:0;
if(_17.doClientPaging==false){
_22=0;
}
var _23=_1e.count?(_22+_1e.count):_1d.length;
_1e.abort=function(){
_21=true;
if(_20){
_20.call(_1e);
}
};
var _24=_1e.scope||dojo.global;
if(!_1e.store){
_1e.store=_17;
}
if(_1e.onBegin){
_1e.onBegin.call(_24,_1f,_1e);
}
if(_1e.sort&&_17.doClientSorting){
_1d.sort(dojo.data.util.sorter.createSortFunction(_1e.sort,_17));
}
if(_1e.onItem){
for(var i=_22;(i<_1d.length)&&(i<_23);++i){
var _26=_1d[i];
if(!_21){
_1e.onItem.call(_24,_26,_1e);
}
}
}
if(_1e.onComplete&&!_21){
var _27=null;
if(!_1e.onItem){
_27=_1d.slice(_22,_23);
}
_1e.onComplete.call(_24,_27,_1e);
}
};
this._fetchItems(_16,_1c,_18);
return _16;
},getFeatures:function(){
return this._features;
},close:function(_28){
},getLabel:function(_29){
if(this._labelAttr&&this.isItem(_29)){
return this.getValue(_29,this._labelAttr);
}
return undefined;
},getLabelAttributes:function(_2a){
if(this._labelAttr){
return [this._labelAttr];
}
return null;
},_xhrFetchHandler:function(_2b,_2c,_2d,_2e){
_2b=this._filterResponse(_2b);
if(_2b.label){
this._labelAttr=_2b.label;
}
var _2f=_2b.numRows||-1;
this._items=[];
dojo.forEach(_2b.items,function(e){
this._items.push({i:e,r:this});
},this);
var _31=_2b.identifier;
this._itemsByIdentity={};
if(_31){
this._identifier=_31;
var i;
for(i=0;i<this._items.length;++i){
var _33=this._items[i].i;
var _34=_33[_31];
if(!this._itemsByIdentity[_34]){
this._itemsByIdentity[_34]=_33;
}else{
throw new Error(this._className+": The json data as specified by: ["+this.url+"] is malformed. Items within the list have identifier: ["+_31+"]. Value collided: ["+_34+"]");
}
}
}else{
this._identifier=Number;
for(i=0;i<this._items.length;++i){
this._items[i].n=i;
}
}
_2f=this._numRows=(_2f===-1)?this._items.length:_2f;
_2d(this._items,_2c,_2f);
this._numRows=_2f;
},_fetchItems:function(_35,_36,_37){
var _38=_35.serverQuery||_35.query||{};
if(!this.doClientPaging){
_38.start=_35.start||0;
if(_35.count){
_38.count=_35.count;
}
}
if(!this.doClientSorting){
if(_35.sort){
var _39=_35.sort[0];
if(_39&&_39.attribute){
var _3a=_39.attribute;
if(_39.descending){
_3a="-"+_3a;
}
_38.sort=_3a;
}
}
}
if(this.doClientPaging&&this._lastServerQuery!==null&&dojo.toJson(_38)==dojo.toJson(this._lastServerQuery)){
this._numRows=(this._numRows===-1)?this._items.length:this._numRows;<|fim▁hole|>_36(this._items,_35,this._numRows);
}else{
var _3b=this.requestMethod.toLowerCase()=="post"?dojo.xhrPost:dojo.xhrGet;
var _3c=_3b({url:this.url,handleAs:"json-comment-optional",content:_38});
_3c.addCallback(dojo.hitch(this,function(_3d){
this._xhrFetchHandler(_3d,_35,_36,_37);
}));
_3c.addErrback(function(_3e){
_37(_3e,_35);
});
this.lastRequestHash=new Date().getTime()+"-"+String(Math.random()).substring(2);
this._lastServerQuery=dojo.mixin({},_38);
}
},_filterResponse:function(_3f){
return _3f;
},_assertIsItem:function(_40){
if(!this.isItem(_40)){
throw new Error(this._className+": Invalid item argument.");
}
},_assertIsAttribute:function(_41){
if(typeof _41!=="string"){
throw new Error(this._className+": Invalid attribute argument ('"+_41+"').");
}
},fetchItemByIdentity:function(_42){
if(this._itemsByIdentity){
var _43=this._itemsByIdentity[_42.identity];
if(!(_43===undefined)){
if(_42.onItem){
var _44=_42.scope?_42.scope:dojo.global;
_42.onItem.call(_44,{i:_43,r:this});
}
return;
}
}
var _45=function(_46,_47){
var _48=_42.scope?_42.scope:dojo.global;
if(_42.onError){
_42.onError.call(_48,_46);
}
};
var _49=function(_4a,_4b){
var _4c=_42.scope?_42.scope:dojo.global;
try{
var _4d=null;
if(_4a&&_4a.length==1){
_4d=_4a[0];
}
if(_42.onItem){
_42.onItem.call(_4c,_4d);
}
}
catch(error){
if(_42.onError){
_42.onError.call(_4c,error);
}
}
};
var _4e={serverQuery:{id:_42.identity}};
this._fetchItems(_4e,_49,_45);
},getIdentity:function(_4f){
var _50=null;
if(this._identifier===Number){
_50=_4f.n;
}else{
_50=_4f.i[this._identifier];
}
return _50;
},getIdentityAttributes:function(_51){
return [this._identifier];
}});
}<|fim▁end|> | |
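// A minimal usage sketch (hypothetical URL and fields, assuming the server
// returns JSON shaped like {identifier, label, numRows, items: [...]} as
// _xhrFetchHandler expects):
//
//   var store = new dojox.data.QueryReadStore({
//       url: "/items.json",       // hypothetical endpoint
//       doClientPaging: true,     // slice results locally instead of re-querying
//       requestMethod: "get"
//   });
//   store.fetch({
//       query: { name: "foo" },
//       start: 0, count: 10,
//       onComplete: function (items, request) { /* items.length <= 10 */ }
//   });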
<|file_name|>SoftwareRenderer.js<|end_file_name|><|fim▁begin|>/**
* @author mrdoob / http://mrdoob.com/
* @author ryg / http://farbrausch.de/~fg
* @author mraleph / http://mrale.ph/
* @author daoshengmu / http://dsmu.me/
*/
THREE.SoftwareRenderer = function ( parameters ) {
console.log( 'THREE.SoftwareRenderer', THREE.REVISION );
parameters = parameters || {};
var canvas = parameters.canvas !== undefined
? parameters.canvas
: document.createElement( 'canvas' );
var context = canvas.getContext( '2d', {
alpha: parameters.alpha === true
} );
var alpha = parameters.alpha;
var shaders = {};
var textures = {};
var canvasWidth, canvasHeight;
var canvasWBlocks, canvasHBlocks;
var viewportXScale, viewportYScale, viewportZScale;
var viewportXOffs, viewportYOffs, viewportZOffs;
var clearColor = new THREE.Color( 0x000000 );
var imagedata, data, zbuffer;
var numBlocks, blockMaxZ, blockFlags;
var BLOCK_ISCLEAR = ( 1 << 0 );
var BLOCK_NEEDCLEAR = ( 1 << 1 );
var subpixelBits = 4;
var subpixelBias = ( 1 << subpixelBits ) - 1;
var blockShift = 3;
var blockSize = 1 << blockShift;
var maxZVal = ( 1 << 24 ); // Note: You want to size this so you don't get overflows.
var lineMode = false;
var lookVector = new THREE.Vector3( 0, 0, 1 );
var crossVector = new THREE.Vector3();
var rectx1 = Infinity, recty1 = Infinity;
var rectx2 = 0, recty2 = 0;
var prevrectx1 = Infinity, prevrecty1 = Infinity;
var prevrectx2 = 0, prevrecty2 = 0;
var projector = new THREE.Projector();
var spriteV1 = new THREE.Vector4();
var spriteV2 = new THREE.Vector4();
var spriteV3 = new THREE.Vector4();
var spriteUV1 = new THREE.Vector2();
var spriteUV2 = new THREE.Vector2();
var spriteUV3 = new THREE.Vector2();
var mpVPool = [];
var mpVPoolCount = 0;
var mpNPool = [];
var mpNPoolCount = 0;
var mpUVPool = [];
var mpUVPoolCount = 0;
this.domElement = canvas;
this.autoClear = true;
// WebGLRenderer compatibility
this.supportsVertexTextures = function () {};
this.setFaceCulling = function () {};
this.setClearColor = function ( color ) {
clearColor.set( color );
clearColorBuffer( clearColor );
};
this.setPixelRatio = function () {};
this.setSize = function ( width, height ) {
canvasWBlocks = Math.floor( width / blockSize );
canvasHBlocks = Math.floor( height / blockSize );
canvasWidth = canvasWBlocks * blockSize;
canvasHeight = canvasHBlocks * blockSize;
var fixScale = 1 << subpixelBits;
viewportXScale = fixScale * canvasWidth / 2;
viewportYScale = - fixScale * canvasHeight / 2;
viewportZScale = maxZVal / 2;
viewportXOffs = fixScale * canvasWidth / 2 + 0.5;
viewportYOffs = fixScale * canvasHeight / 2 + 0.5;
viewportZOffs = maxZVal / 2 + 0.5;
canvas.width = canvasWidth;
canvas.height = canvasHeight;
context.fillStyle = alpha ? "rgba(0, 0, 0, 0)" : clearColor.getStyle();
context.fillRect( 0, 0, canvasWidth, canvasHeight );
imagedata = context.getImageData( 0, 0, canvasWidth, canvasHeight );
data = imagedata.data;
zbuffer = new Int32Array( data.length / 4 );
numBlocks = canvasWBlocks * canvasHBlocks;
blockMaxZ = new Int32Array( numBlocks );
blockFlags = new Uint8Array( numBlocks );
for ( var i = 0, l = zbuffer.length; i < l; i ++ ) {
zbuffer[ i ] = maxZVal;
}
for ( var i = 0; i < numBlocks; i ++ ) {
blockFlags[ i ] = BLOCK_ISCLEAR;
}
clearColorBuffer( clearColor );
};
this.setSize( canvas.width, canvas.height );
this.clear = function () {
rectx1 = Infinity;
recty1 = Infinity;
rectx2 = 0;
recty2 = 0;
mpVPoolCount = 0;
mpNPoolCount = 0;
mpUVPoolCount = 0;
for ( var i = 0; i < numBlocks; i ++ ) {
blockMaxZ[ i ] = maxZVal;
blockFlags[ i ] = ( blockFlags[ i ] & BLOCK_ISCLEAR ) ? BLOCK_ISCLEAR : BLOCK_NEEDCLEAR;
}
};
this.render = function ( scene, camera ) {
// TODO: Check why autoClear can't be false.
this.clear();
var background = scene.background;
if ( background && background.isColor ) {
clearColorBuffer( background );
}
var renderData = projector.projectScene( scene, camera, false, false );
var elements = renderData.elements;
for ( var e = 0, el = elements.length; e < el; e ++ ) {
var element = elements[ e ];
var material = element.material;
var shader = getMaterialShader( material );
if ( ! shader ) continue;
if ( element instanceof THREE.RenderableFace ) {
if ( ! element.uvs ) {
drawTriangle(
element.v1.positionScreen,
element.v2.positionScreen,
element.v3.positionScreen,
null, null, null,
shader, element, material
);
} else {
drawTriangle(
element.v1.positionScreen,
element.v2.positionScreen,
element.v3.positionScreen,
element.uvs[ 0 ], element.uvs[ 1 ], element.uvs[ 2 ],
shader, element, material
);
}
} else if ( element instanceof THREE.RenderableSprite ) {
var scaleX = element.scale.x * 0.5;
var scaleY = element.scale.y * 0.5;
spriteV1.copy( element );
spriteV1.x -= scaleX;
spriteV1.y += scaleY;
spriteV2.copy( element );
spriteV2.x -= scaleX;
spriteV2.y -= scaleY;
spriteV3.copy( element );
spriteV3.x += scaleX;
spriteV3.y += scaleY;
if ( material.map ) {
spriteUV1.set( 0, 1 );
spriteUV2.set( 0, 0 );
spriteUV3.set( 1, 1 );
drawTriangle(
spriteV1, spriteV2, spriteV3,
spriteUV1, spriteUV2, spriteUV3,
shader, element, material
);
} else {
drawTriangle(
spriteV1, spriteV2, spriteV3,
null, null, null,
shader, element, material
);
}
spriteV1.copy( element );
spriteV1.x += scaleX;
spriteV1.y += scaleY;
spriteV2.copy( element );
spriteV2.x -= scaleX;
spriteV2.y -= scaleY;
spriteV3.copy( element );
spriteV3.x += scaleX;
spriteV3.y -= scaleY;
if ( material.map ) {
spriteUV1.set( 1, 1 );
spriteUV2.set( 0, 0 );
spriteUV3.set( 1, 0 );
drawTriangle(
spriteV1, spriteV2, spriteV3,
spriteUV1, spriteUV2, spriteUV3,
shader, element, material
);
} else {
drawTriangle(
spriteV1, spriteV2, spriteV3,
null, null, null,
shader, element, material
);
}
} else if ( element instanceof THREE.RenderableLine ) {
var shader = getMaterialShader( material );
drawLine(
element.v1.positionScreen,
element.v2.positionScreen,
element.vertexColors[ 0 ],
element.vertexColors[ 1 ],
shader,
material
);
}
}
finishClear();
var x = Math.min( rectx1, prevrectx1 );
var y = Math.min( recty1, prevrecty1 );
var width = Math.max( rectx2, prevrectx2 ) - x;
var height = Math.max( recty2, prevrecty2 ) - y;
/*
// debug; draw zbuffer
for ( var i = 0, l = zbuffer.length; i < l; i++ ) {
var o = i * 4;
var v = (65535 - zbuffer[ i ]) >> 3;
data[ o + 0 ] = v;
data[ o + 1 ] = v;
data[ o + 2 ] = v;
data[ o + 3 ] = 255;
}
*/
if ( x !== Infinity ) {
context.putImageData( imagedata, 0, 0, x, y, width, height );
}
prevrectx1 = rectx1; prevrecty1 = recty1;
prevrectx2 = rectx2; prevrecty2 = recty2;
};
function setSize( width, height ) {
canvasWBlocks = Math.floor( width / blockSize );
canvasHBlocks = Math.floor( height / blockSize );
canvasWidth = canvasWBlocks * blockSize;
canvasHeight = canvasHBlocks * blockSize;
var fixScale = 1 << subpixelBits;
viewportXScale = fixScale * canvasWidth / 2;
viewportYScale = -fixScale * canvasHeight / 2;
viewportZScale = maxZVal / 2;
viewportXOffs = fixScale * canvasWidth / 2 + 0.5;
viewportYOffs = fixScale * canvasHeight / 2 + 0.5;
viewportZOffs = maxZVal / 2 + 0.5;
canvas.width = canvasWidth;
canvas.height = canvasHeight;
context.fillStyle = alpha ? "rgba(0, 0, 0, 0)" : clearColor.getStyle();
context.fillRect( 0, 0, canvasWidth, canvasHeight );
imagedata = context.getImageData( 0, 0, canvasWidth, canvasHeight );
data = imagedata.data;
zbuffer = new Int32Array( data.length / 4 );
numBlocks = canvasWBlocks * canvasHBlocks;
blockMaxZ = new Int32Array( numBlocks );
blockFlags = new Uint8Array( numBlocks );
for ( var i = 0, l = zbuffer.length; i < l; i ++ ) {
zbuffer[ i ] = maxZVal;
}
for ( var i = 0; i < numBlocks; i ++ ) {
blockFlags[ i ] = BLOCK_ISCLEAR;
}
clearColorBuffer( clearColor );
}
function clearColorBuffer( color ) {
var size = canvasWidth * canvasHeight * 4;
for ( var i = 0; i < size; i += 4 ) {
data[ i ] = color.r * 255 | 0;
data[ i + 1 ] = color.g * 255 | 0;
data[ i + 2 ] = color.b * 255 | 0;
data[ i + 3 ] = alpha ? 0 : 255;
}
context.fillStyle = alpha ? "rgba(0, 0, 0, 0)" : color.getStyle();
context.fillRect( 0, 0, canvasWidth, canvasHeight );
}
function getPalette( material, bSimulateSpecular ) {
var i = 0, j = 0;
var diffuseR = material.color.r * 255;
var diffuseG = material.color.g * 255;
var diffuseB = material.color.b * 255;
var palette = new Uint8Array( 256 * 3 );
if ( bSimulateSpecular ) {
while ( i < 204 ) {
palette[ j ++ ] = Math.min( i * diffuseR / 204, 255 );
palette[ j ++ ] = Math.min( i * diffuseG / 204, 255 );
palette[ j ++ ] = Math.min( i * diffuseB / 204, 255 );
++ i;
}
while ( i < 256 ) {
// plus specular highlight
palette[ j ++ ] = Math.min( diffuseR + ( i - 204 ) * ( 255 - diffuseR ) / 82, 255 );
palette[ j ++ ] = Math.min( diffuseG + ( i - 204 ) * ( 255 - diffuseG ) / 82, 255 );
palette[ j ++ ] = Math.min( diffuseB + ( i - 204 ) * ( 255 - diffuseB ) / 82, 255 );
++ i;
}
} else {
while ( i < 256 ) {
palette[ j ++ ] = Math.min( i * diffuseR / 255, 255 );
palette[ j ++ ] = Math.min( i * diffuseG / 255, 255 );
palette[ j ++ ] = Math.min( i * diffuseB / 255, 255 );
++ i;
}
}
return palette;
}
function basicMaterialShader( buffer, depthBuf, offset, depth, u, v, n, face, material ) {
var colorOffset = offset * 4;
var texture = textures[ material.map.id ];
if ( ! texture.data )
return;
var tdim = texture.width;
var isTransparent = material.transparent;
var tbound = tdim - 1;
var tdata = texture.data;
var tIndex = ( ( ( v * tdim ) & tbound ) * tdim + ( ( u * tdim ) & tbound ) ) * 4;
if ( ! isTransparent ) {
buffer[ colorOffset ] = tdata[ tIndex ];
buffer[ colorOffset + 1 ] = tdata[ tIndex + 1 ];
buffer[ colorOffset + 2 ] = tdata[ tIndex + 2 ];
buffer[ colorOffset + 3 ] = ( material.opacity << 8 ) - 1;
depthBuf[ offset ] = depth;<|fim▁hole|> var srcR = tdata[ tIndex ];
var srcG = tdata[ tIndex + 1 ];
var srcB = tdata[ tIndex + 2 ];
var opaci = tdata[ tIndex + 3 ] * material.opacity / 255;
var destR = buffer[ colorOffset ];
var destG = buffer[ colorOffset + 1 ];
var destB = buffer[ colorOffset + 2 ];
buffer[ colorOffset ] = ( srcR * opaci + destR * ( 1 - opaci ) );
buffer[ colorOffset + 1 ] = ( srcG * opaci + destG * ( 1 - opaci ) );
buffer[ colorOffset + 2 ] = ( srcB * opaci + destB * ( 1 - opaci ) );
buffer[ colorOffset + 3 ] = ( material.opacity << 8 ) - 1;
if ( buffer[ colorOffset + 3 ] == 255 ) // Only opaue pixls write to the depth buffer
depthBuf[ offset ] = depth;
}
}
function lightingMaterialShader( buffer, depthBuf, offset, depth, u, v, n, face, material ) {
var colorOffset = offset * 4;
var texture = textures[ material.map.id ];
if ( ! texture.data )
return;
var tdim = texture.width;
var isTransparent = material.transparent;
var cIndex = ( n > 0 ? ( ~~ n ) : 0 ) * 3;
var tbound = tdim - 1;
var tdata = texture.data;
var tIndex = ( ( ( v * tdim ) & tbound ) * tdim + ( ( u * tdim ) & tbound ) ) * 4;
if ( ! isTransparent ) {
buffer[ colorOffset ] = ( material.palette[ cIndex ] * tdata[ tIndex ] ) >> 8;
buffer[ colorOffset + 1 ] = ( material.palette[ cIndex + 1 ] * tdata[ tIndex + 1 ] ) >> 8;
buffer[ colorOffset + 2 ] = ( material.palette[ cIndex + 2 ] * tdata[ tIndex + 2 ] ) >> 8;
buffer[ colorOffset + 3 ] = ( material.opacity << 8 ) - 1;
depthBuf[ offset ] = depth;
} else {
var foreColorR = material.palette[ cIndex ] * tdata[ tIndex ];
var foreColorG = material.palette[ cIndex + 1 ] * tdata[ tIndex + 1 ];
var foreColorB = material.palette[ cIndex + 2 ] * tdata[ tIndex + 2 ];
var opaci = tdata[ tIndex + 3 ] * material.opacity / 256;
var destR = buffer[ colorOffset ];
var destG = buffer[ colorOffset + 1 ];
var destB = buffer[ colorOffset + 2 ];
buffer[ colorOffset ] = foreColorR * opaci + destR * ( 1 - opaci );
buffer[ colorOffset + 1 ] = foreColorG * opaci + destG * ( 1 - opaci );
buffer[ colorOffset + 2 ] = foreColorB * opaci + destB * ( 1 - opaci );
buffer[ colorOffset + 3 ] = ( material.opacity << 8 ) - 1;
if ( buffer[ colorOffset + 3 ] == 255 ) // Only opaue pixls write to the depth buffer
depthBuf[ offset ] = depth;
}
}
function getMaterialShader( material ) {
var id = material.id;
var shader = shaders[ id ];
if ( shader && material.map && !textures[ material.map.id ] ) delete shaders[ id ];
if ( shaders[ id ] === undefined || material.needsUpdate === true ) {
if ( material instanceof THREE.MeshBasicMaterial ||
material instanceof THREE.MeshLambertMaterial ||
material instanceof THREE.MeshPhongMaterial ||
material instanceof THREE.SpriteMaterial ) {
if ( material instanceof THREE.MeshLambertMaterial ) {
// Generate color palette
if ( ! material.palette ) {
material.palette = getPalette( material, false );
}
} else if ( material instanceof THREE.MeshPhongMaterial ) {
// Generate color palette
if ( ! material.palette ) {
material.palette = getPalette( material, true );
}
}
var string;
if ( material.map ) {
var texture = new THREE.SoftwareRenderer.Texture();
texture.fromImage( material.map.image );
if ( ! texture.data ) return;
textures[ material.map.id ] = texture;
if ( material instanceof THREE.MeshBasicMaterial
|| material instanceof THREE.SpriteMaterial ) {
shader = basicMaterialShader;
} else {
shader = lightingMaterialShader;
}
} else {
if ( material.vertexColors === THREE.FaceColors ) {
string = [
'var colorOffset = offset * 4;',
'buffer[ colorOffset ] = face.color.r * 255;',
'buffer[ colorOffset + 1 ] = face.color.g * 255;',
'buffer[ colorOffset + 2 ] = face.color.b * 255;',
'buffer[ colorOffset + 3 ] = material.opacity * 255;',
'depthBuf[ offset ] = depth;'
].join( '\n' );
} else {
string = [
'var colorOffset = offset * 4;',
'buffer[ colorOffset ] = material.color.r * 255;',
'buffer[ colorOffset + 1 ] = material.color.g * 255;',
'buffer[ colorOffset + 2 ] = material.color.b * 255;',
'buffer[ colorOffset + 3 ] = material.opacity * 255;',
'depthBuf[ offset ] = depth;'
].join( '\n' );
}
shader = new Function( 'buffer, depthBuf, offset, depth, u, v, n, face, material', string );
}
} else if ( material instanceof THREE.LineBasicMaterial ) {
var string = [
'var colorOffset = offset * 4;',
'buffer[ colorOffset ] = material.color.r * (color1.r+color2.r) * 0.5 * 255;',
'buffer[ colorOffset + 1 ] = material.color.g * (color1.g+color2.g) * 0.5 * 255;',
'buffer[ colorOffset + 2 ] = material.color.b * (color1.b+color2.b) * 0.5 * 255;',
'buffer[ colorOffset + 3 ] = 255;',
'depthBuf[ offset ] = depth;'
].join( '\n' );
shader = new Function( 'buffer, depthBuf, offset, depth, color1, color2, material', string );
} else {
var string = [
'var colorOffset = offset * 4;',
'buffer[ colorOffset ] = u * 255;',
'buffer[ colorOffset + 1 ] = v * 255;',
'buffer[ colorOffset + 2 ] = 0;',
'buffer[ colorOffset + 3 ] = 255;',
'depthBuf[ offset ] = depth;'
].join( '\n' );
shader = new Function( 'buffer, depthBuf, offset, depth, u, v, n, face, material', string );
}
shaders[ id ] = shader;
material.needsUpdate = false;
}
return shader;
}
/*
function clearRectangle( x1, y1, x2, y2 ) {
var xmin = Math.max( Math.min( x1, x2 ), 0 );
var xmax = Math.min( Math.max( x1, x2 ), canvasWidth );
var ymin = Math.max( Math.min( y1, y2 ), 0 );
var ymax = Math.min( Math.max( y1, y2 ), canvasHeight );
var offset = ( xmin + ymin * canvasWidth ) * 4 + 3;
var linestep = ( canvasWidth - ( xmax - xmin ) ) * 4;
for ( var y = ymin; y < ymax; y ++ ) {
for ( var x = xmin; x < xmax; x ++ ) {
data[ offset += 4 ] = 0;
}
offset += linestep;
}
}
*/
function drawTriangle( v1, v2, v3, uv1, uv2, uv3, shader, face, material ) {
// TODO: Implement per-pixel z-clipping
if ( v1.z < - 1 || v1.z > 1 || v2.z < - 1 || v2.z > 1 || v3.z < - 1 || v3.z > 1 ) return;
// https://gist.github.com/2486101
// explanation: http://pouet.net/topic.php?which=8760&page=1
var fixscale = ( 1 << subpixelBits );
// 28.4 fixed-point coordinates
var x1 = ( v1.x * viewportXScale + viewportXOffs ) | 0;
var x2 = ( v2.x * viewportXScale + viewportXOffs ) | 0;
var x3 = ( v3.x * viewportXScale + viewportXOffs ) | 0;
var y1 = ( v1.y * viewportYScale + viewportYOffs ) | 0;
var y2 = ( v2.y * viewportYScale + viewportYOffs ) | 0;
var y3 = ( v3.y * viewportYScale + viewportYOffs ) | 0;
var bHasNormal = face.vertexNormalsModel && face.vertexNormalsModel.length;
var bHasUV = uv1 && uv2 && uv3;
var longestSide = Math.max(
Math.sqrt( ( x1 - x2 ) * ( x1 - x2 ) + ( y1 - y2 ) * ( y1 - y2 ) ),
Math.sqrt( ( x2 - x3 ) * ( x2 - x3 ) + ( y2 - y3 ) * ( y2 - y3 ) ),
Math.sqrt( ( x3 - x1 ) * ( x3 - x1 ) + ( y3 - y1 ) * ( y3 - y1 ) )
);
if ( ! ( face instanceof THREE.RenderableSprite ) && ( longestSide > 100 * fixscale ) ) {
// 1
// |\
// |a\
// |__\
// |\c|\
// |b\|d\
// |__\__\
// 2 3
var tempFace = { vertexNormalsModel: [], color: face.color };
var mpUV12, mpUV23, mpUV31;
if ( bHasUV ) {
if ( mpUVPoolCount === mpUVPool.length ) {
mpUV12 = new THREE.Vector2();
mpUVPool.push( mpUV12 );
++mpUVPoolCount;
mpUV23 = new THREE.Vector2();
mpUVPool.push( mpUV23 );
++mpUVPoolCount;
mpUV31 = new THREE.Vector2();
mpUVPool.push( mpUV31 );
++mpUVPoolCount;
} else {
mpUV12 = mpUVPool[ mpUVPoolCount ];
++mpUVPoolCount;
mpUV23 = mpUVPool[ mpUVPoolCount ];
++mpUVPoolCount;
mpUV31 = mpUVPool[ mpUVPoolCount ];
++mpUVPoolCount;
}
var weight;
weight = ( 1 + v2.z ) * ( v2.w / v1.w ) / ( 1 + v1.z );
mpUV12.copy( uv1 ).multiplyScalar( weight ).add( uv2 ).multiplyScalar( 1 / ( weight + 1 ) );
weight = ( 1 + v3.z ) * ( v3.w / v2.w ) / ( 1 + v2.z );
mpUV23.copy( uv2 ).multiplyScalar( weight ).add( uv3 ).multiplyScalar( 1 / ( weight + 1 ) );
weight = ( 1 + v1.z ) * ( v1.w / v3.w ) / ( 1 + v3.z );
mpUV31.copy( uv3 ).multiplyScalar( weight ).add( uv1 ).multiplyScalar( 1 / ( weight + 1 ) );
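// the weights above blend the endpoint UVs using each vertex's depth and
// clip-space w, which keeps the midpoint texture coordinates (approximately)
// perspective correct across the four subdivided triangles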
}
var mpV12, mpV23, mpV31;
if ( mpVPoolCount === mpVPool.length ) {
mpV12 = new THREE.Vector4();
mpVPool.push( mpV12 );
++mpVPoolCount;
mpV23 = new THREE.Vector4();
mpVPool.push( mpV23 );
++mpVPoolCount;
mpV31 = new THREE.Vector4();
mpVPool.push( mpV31 );
++mpVPoolCount;
} else {
mpV12 = mpVPool[ mpVPoolCount ];
++mpVPoolCount;
mpV23 = mpVPool[ mpVPoolCount ];
++mpVPoolCount;
mpV31 = mpVPool[ mpVPoolCount ];
++mpVPoolCount;
}
mpV12.copy( v1 ).add( v2 ).multiplyScalar( 0.5 );
mpV23.copy( v2 ).add( v3 ).multiplyScalar( 0.5 );
mpV31.copy( v3 ).add( v1 ).multiplyScalar( 0.5 );
var mpN12, mpN23, mpN31;
if ( bHasNormal ) {
if ( mpNPoolCount === mpNPool.length ) {
mpN12 = new THREE.Vector3();
mpNPool.push( mpN12 );
++mpNPoolCount;
mpN23 = new THREE.Vector3();
mpNPool.push( mpN23 );
++mpNPoolCount;
mpN31 = new THREE.Vector3();
mpNPool.push( mpN31 );
++mpNPoolCount;
} else {
mpN12 = mpNPool[ mpNPoolCount ];
++mpNPoolCount;
mpN23 = mpNPool[ mpNPoolCount ];
++mpNPoolCount;
mpN31 = mpNPool[ mpNPoolCount ];
++mpNPoolCount;
}
mpN12.copy( face.vertexNormalsModel[ 0 ] ).add( face.vertexNormalsModel[ 1 ] ).normalize();
mpN23.copy( face.vertexNormalsModel[ 1 ] ).add( face.vertexNormalsModel[ 2 ] ).normalize();
mpN31.copy( face.vertexNormalsModel[ 2 ] ).add( face.vertexNormalsModel[ 0 ] ).normalize();
}
// a
if ( bHasNormal ) {
tempFace.vertexNormalsModel[ 0 ] = face.vertexNormalsModel[ 0 ];
tempFace.vertexNormalsModel[ 1 ] = mpN12;
tempFace.vertexNormalsModel[ 2 ] = mpN31;
}
drawTriangle( v1, mpV12, mpV31, uv1, mpUV12, mpUV31, shader, tempFace, material );
// b
if ( bHasNormal ) {
tempFace.vertexNormalsModel[ 0 ] = face.vertexNormalsModel[ 1 ];
tempFace.vertexNormalsModel[ 1 ] = mpN23;
tempFace.vertexNormalsModel[ 2 ] = mpN12;
}
drawTriangle( v2, mpV23, mpV12, uv2, mpUV23, mpUV12, shader, tempFace, material );
// c
if ( bHasNormal ) {
tempFace.vertexNormalsModel[ 0 ] = mpN12;
tempFace.vertexNormalsModel[ 1 ] = mpN23;
tempFace.vertexNormalsModel[ 2 ] = mpN31;
}
drawTriangle( mpV12, mpV23, mpV31, mpUV12, mpUV23, mpUV31, shader, tempFace, material );
// d
if ( bHasNormal ) {
tempFace.vertexNormalsModel[ 0 ] = face.vertexNormalsModel[ 2 ];
tempFace.vertexNormalsModel[ 1 ] = mpN31;
tempFace.vertexNormalsModel[ 2 ] = mpN23;
}
drawTriangle( v3, mpV31, mpV23, uv3, mpUV31, mpUV23, shader, tempFace, material );
return;
}
// Z values (.28 fixed-point)
var z1 = ( v1.z * viewportZScale + viewportZOffs ) | 0;
var z2 = ( v2.z * viewportZScale + viewportZOffs ) | 0;
var z3 = ( v3.z * viewportZScale + viewportZOffs ) | 0;
// UV values
var bHasUV = false;
var tu1, tv1, tu2, tv2, tu3, tv3;
if ( uv1 && uv2 && uv3 ) {
bHasUV = true;
tu1 = uv1.x;
tv1 = 1 - uv1.y;
tu2 = uv2.x;
tv2 = 1 - uv2.y;
tu3 = uv3.x;
tv3 = 1 - uv3.y;
}
// Normal values
var n1, n2, n3, nz1, nz2, nz3;
if ( bHasNormal ) {
n1 = face.vertexNormalsModel[ 0 ];
n2 = face.vertexNormalsModel[ 1 ];
n3 = face.vertexNormalsModel[ 2 ];
nz1 = n1.z * 255;
nz2 = n2.z * 255;
nz3 = n3.z * 255;
}
// Deltas
var dx12 = x1 - x2, dy12 = y2 - y1;
var dx23 = x2 - x3, dy23 = y3 - y2;
var dx31 = x3 - x1, dy31 = y1 - y3;
// Bounding rectangle
var minx = Math.max( ( Math.min( x1, x2, x3 ) + subpixelBias ) >> subpixelBits, 0 );
var maxx = Math.min( ( Math.max( x1, x2, x3 ) + subpixelBias ) >> subpixelBits, canvasWidth );
var miny = Math.max( ( Math.min( y1, y2, y3 ) + subpixelBias ) >> subpixelBits, 0 );
var maxy = Math.min( ( Math.max( y1, y2, y3 ) + subpixelBias ) >> subpixelBits, canvasHeight );
rectx1 = Math.min( minx, rectx1 );
rectx2 = Math.max( maxx, rectx2 );
recty1 = Math.min( miny, recty1 );
recty2 = Math.max( maxy, recty2 );
// Block size, standard 8x8 (must be power of two)
var q = blockSize;
// Start in corner of 8x8 block
minx &= ~ ( q - 1 );
miny &= ~ ( q - 1 );
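// working in whole blocks lets the rasterizer reject fully-outside 8x8 tiles
// with three corner tests, run a cheaper loop (no per-pixel edge tests) over
// fully covered tiles, and use blockMaxZ as a coarse hierarchical z reject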
// Constant part of half-edge functions
var minXfixscale = ( minx << subpixelBits );
var minYfixscale = ( miny << subpixelBits );
var c1 = dy12 * ( ( minXfixscale ) - x1 ) + dx12 * ( ( minYfixscale ) - y1 );
var c2 = dy23 * ( ( minXfixscale ) - x2 ) + dx23 * ( ( minYfixscale ) - y2 );
var c3 = dy31 * ( ( minXfixscale ) - x3 ) + dx31 * ( ( minYfixscale ) - y3 );
// Correct for fill convention
if ( dy12 > 0 || ( dy12 == 0 && dx12 > 0 ) ) c1 ++;
if ( dy23 > 0 || ( dy23 == 0 && dx23 > 0 ) ) c2 ++;
if ( dy31 > 0 || ( dy31 == 0 && dx31 > 0 ) ) c3 ++;
// Note this doesn't kill subpixel precision, but only because we test for >=0 (not >0).
// It's a bit subtle. :)
c1 = ( c1 - 1 ) >> subpixelBits;
c2 = ( c2 - 1 ) >> subpixelBits;
c3 = ( c3 - 1 ) >> subpixelBits;
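// worked example of the fill convention: the "++" above biases left/top
// edges so the ">= 0" coverage test counts pixels exactly on such an edge as
// inside, while pixels exactly on the opposite edges fail the test -- an
// edge shared by two adjacent triangles is therefore rasterized exactly once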
// Z interpolation setup
var dz12 = z1 - z2, dz31 = z3 - z1;
var invDet = 1.0 / ( dx12 * dy31 - dx31 * dy12 );
var dzdx = ( invDet * ( dz12 * dy31 - dz31 * dy12 ) ); // dz per one subpixel step in x
var dzdy = ( invDet * ( dz12 * dx31 - dx12 * dz31 ) ); // dz per one subpixel step in y
// Z at top/left corner of rast area
var cz = ( z1 + ( ( minXfixscale ) - x1 ) * dzdx + ( ( minYfixscale ) - y1 ) * dzdy ) | 0;
// Z pixel steps
dzdx = ( dzdx * fixscale ) | 0;
dzdy = ( dzdy * fixscale ) | 0;
var dtvdx, dtvdy, cbtu, cbtv;
if ( bHasUV ) {
// UV interpolation setup
var dtu12 = tu1 - tu2, dtu31 = tu3 - tu1;
var dtudx = ( invDet * ( dtu12 * dy31 - dtu31 * dy12 ) ); // dtu per one subpixel step in x
var dtudy = ( invDet * ( dtu12 * dx31 - dx12 * dtu31 ) ); // dtu per one subpixel step in y
var dtv12 = tv1 - tv2, dtv31 = tv3 - tv1;
dtvdx = ( invDet * ( dtv12 * dy31 - dtv31 * dy12 ) ); // dtv per one subpixel step in x
dtvdy = ( invDet * ( dtv12 * dx31 - dx12 * dtv31 ) ); // dtv per one subpixel step in y
// UV at top/left corner of rast area
cbtu = ( tu1 + ( minXfixscale - x1 ) * dtudx + ( minYfixscale - y1 ) * dtudy );
cbtv = ( tv1 + ( minXfixscale - x1 ) * dtvdx + ( minYfixscale - y1 ) * dtvdy );
// UV pixel steps
dtudx = dtudx * fixscale;
dtudy = dtudy * fixscale;
dtvdx = dtvdx * fixscale;
dtvdy = dtvdy * fixscale;
}
var dnzdy, cbnz;
if ( bHasNormal ) {
// Normal interpolation setup
var dnz12 = nz1 - nz2, dnz31 = nz3 - nz1;
var dnzdx = ( invDet * ( dnz12 * dy31 - dnz31 * dy12 ) ); // dnz per one subpixel step in x
var dnzdy = ( invDet * ( dnz12 * dx31 - dx12 * dnz31 ) ); // dnz per one subpixel step in y
// Normal at top/left corner of rast area
cbnz = ( nz1 + ( minXfixscale - x1 ) * dnzdx + ( minYfixscale - y1 ) * dnzdy );
// Normal pixel steps
dnzdx = ( dnzdx * fixscale );
dnzdy = ( dnzdy * fixscale );
}
// Set up min/max corners
var qm1 = q - 1; // for convenience
var nmin1 = 0, nmax1 = 0;
var nmin2 = 0, nmax2 = 0;
var nmin3 = 0, nmax3 = 0;
var nminz = 0, nmaxz = 0;
if ( dx12 >= 0 ) nmax1 -= qm1 * dx12; else nmin1 -= qm1 * dx12;
if ( dy12 >= 0 ) nmax1 -= qm1 * dy12; else nmin1 -= qm1 * dy12;
if ( dx23 >= 0 ) nmax2 -= qm1 * dx23; else nmin2 -= qm1 * dx23;
if ( dy23 >= 0 ) nmax2 -= qm1 * dy23; else nmin2 -= qm1 * dy23;
if ( dx31 >= 0 ) nmax3 -= qm1 * dx31; else nmin3 -= qm1 * dx31;
if ( dy31 >= 0 ) nmax3 -= qm1 * dy31; else nmin3 -= qm1 * dy31;
if ( dzdx >= 0 ) nmaxz += qm1 * dzdx; else nminz += qm1 * dzdx;
if ( dzdy >= 0 ) nmaxz += qm1 * dzdy; else nminz += qm1 * dzdy;
// Loop through blocks
var linestep = canvasWidth - q;
var cb1 = c1;
var cb2 = c2;
var cb3 = c3;
var cbz = cz;
var qstep = - q;
var e1x = qstep * dy12;
var e2x = qstep * dy23;
var e3x = qstep * dy31;
var ezx = qstep * dzdx;
var etux, etvx;
if ( bHasUV ) {
etux = qstep * dtudx;
etvx = qstep * dtvdx;
}
var enzx;
if ( bHasNormal ) {
enzx = qstep * dnzdx;
}
var x0 = minx;
for ( var y0 = miny; y0 < maxy; y0 += q ) {
// New block line - keep hunting for tri outer edge in old block line dir
while ( x0 >= minx && x0 < maxx && cb1 >= nmax1 && cb2 >= nmax2 && cb3 >= nmax3 ) {
x0 += qstep;
cb1 += e1x;
cb2 += e2x;
cb3 += e3x;
cbz += ezx;
if ( bHasUV ) {
cbtu += etux;
cbtv += etvx;
}
if ( bHasNormal ) {
cbnz += enzx;
}
}
// Okay, we're now in a block we know is outside. Reverse direction and go into main loop.
qstep = - qstep;
e1x = - e1x;
e2x = - e2x;
e3x = - e3x;
ezx = - ezx;
if ( bHasUV ) {
etux = - etux;
etvx = - etvx;
}
if ( bHasNormal ) {
enzx = - enzx;
}
while ( 1 ) {
// Step everything
x0 += qstep;
cb1 += e1x;
cb2 += e2x;
cb3 += e3x;
cbz += ezx;
if ( bHasUV ) {
cbtu += etux;
cbtv += etvx;
}
if ( bHasNormal ) {
cbnz += enzx;
}
// We're done with this block line when at least one edge completely out
// If an edge function is too small and decreasing in the current traversal
// dir, we're done with this line.
if ( x0 < minx || x0 >= maxx ) break;
if ( cb1 < nmax1 ) if ( e1x < 0 ) break; else continue;
if ( cb2 < nmax2 ) if ( e2x < 0 ) break; else continue;
if ( cb3 < nmax3 ) if ( e3x < 0 ) break; else continue;
// We can skip this block if it's already fully covered
var blockX = x0 >> blockShift;
var blockY = y0 >> blockShift;
var blockId = blockX + blockY * canvasWBlocks;
var minz = cbz + nminz;
// farthest point in block closer than closest point in our tri?
if ( blockMaxZ[ blockId ] < minz ) continue;
// Need to do a deferred clear?
var bflags = blockFlags[ blockId ];
if ( bflags & BLOCK_NEEDCLEAR ) clearBlock( blockX, blockY );
blockFlags[ blockId ] = bflags & ~ ( BLOCK_ISCLEAR | BLOCK_NEEDCLEAR );
// Offset at top-left corner
var offset = x0 + y0 * canvasWidth;
// Accept whole block when fully covered
if ( cb1 >= nmin1 && cb2 >= nmin2 && cb3 >= nmin3 ) {
var maxz = cbz + nmaxz;
blockMaxZ[ blockId ] = Math.min( blockMaxZ[ blockId ], maxz );
var cy1 = cb1;
var cy2 = cb2;
var cyz = cbz;
var cytu, cytv;
if ( bHasUV ) {
cytu = cbtu;
cytv = cbtv;
}
var cynz;
if ( bHasNormal ) {
cynz = cbnz;
}
for ( var iy = 0; iy < q; iy ++ ) {
var cx1 = cy1;
var cx2 = cy2;
var cxz = cyz;
var cxtu;
var cxtv;
if ( bHasUV ) {
cxtu = cytu;
cxtv = cytv;
}
var cxnz;
if ( bHasNormal ) {
cxnz = cynz;
}
for ( var ix = 0; ix < q; ix ++ ) {
var z = cxz;
if ( z < zbuffer[ offset ] ) {
shader( data, zbuffer, offset, z, cxtu, cxtv, cxnz, face, material );
}
cx1 += dy12;
cx2 += dy23;
cxz += dzdx;
if ( bHasUV ) {
cxtu += dtudx;
cxtv += dtvdx;
}
if ( bHasNormal ) {
cxnz += dnzdx;
}
offset ++;
}
cy1 += dx12;
cy2 += dx23;
cyz += dzdy;
if ( bHasUV ) {
cytu += dtudy;
cytv += dtvdy;
}
if ( bHasNormal ) {
cynz += dnzdy;
}
offset += linestep;
}
} else {
// Partially covered block
var cy1 = cb1;
var cy2 = cb2;
var cy3 = cb3;
var cyz = cbz;
var cytu, cytv;
if ( bHasUV ) {
cytu = cbtu;
cytv = cbtv;
}
var cynz;
if ( bHasNormal ) {
cynz = cbnz;
}
for ( var iy = 0; iy < q; iy ++ ) {
var cx1 = cy1;
var cx2 = cy2;
var cx3 = cy3;
var cxz = cyz;
var cxtu;
var cxtv;
if ( bHasUV ) {
cxtu = cytu;
cxtv = cytv;
}
var cxnz;
if ( bHasNormal ) {
cxnz = cynz;
}
for ( var ix = 0; ix < q; ix ++ ) {
if ( ( cx1 | cx2 | cx3 ) >= 0 ) {
var z = cxz;
if ( z < zbuffer[ offset ] ) {
shader( data, zbuffer, offset, z, cxtu, cxtv, cxnz, face, material );
}
}
cx1 += dy12;
cx2 += dy23;
cx3 += dy31;
cxz += dzdx;
if ( bHasUV ) {
cxtu += dtudx;
cxtv += dtvdx;
}
if ( bHasNormal ) {
cxnz += dnzdx;
}
offset ++;
}
cy1 += dx12;
cy2 += dx23;
cy3 += dx31;
cyz += dzdy;
if ( bHasUV ) {
cytu += dtudy;
cytv += dtvdy;
}
if ( bHasNormal ) {
cynz += dnzdy;
}
offset += linestep;
}
}
}
// Advance to next row of blocks
cb1 += q * dx12;
cb2 += q * dx23;
cb3 += q * dx31;
cbz += q * dzdy;
if ( bHasUV ) {
cbtu += q * dtudy;
cbtv += q * dtvdy;
}
if ( bHasNormal ) {
cbnz += q * dnzdy;
}
}
}
// When drawing a line, blockShift has to be zero so that each block maps to a single pixel.
// color1 and color2 are used to interpolate the pixel color along the line.
// The line width is taken from material.linewidth.
function drawLine( v1, v2, color1, color2, shader, material ) {
// When line mode is enabled, blockShift is forced to 0 (so blockSize becomes 1).
if ( ! lineMode ) {
lineMode = true;
blockShift = 0;
blockSize = 1 << blockShift;
setSize( canvas.width, canvas.height );
}
// TODO: Implement per-pixel z-clipping
if ( v1.z < - 1 || v1.z > 1 || v2.z < - 1 || v2.z > 1 ) return;
var halfLineWidth = Math.floor( ( material.linewidth - 1 ) * 0.5 );
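// e.g. material.linewidth of 3 gives halfLineWidth 1, so the loop below visits
// perpendicular offsets -1, 0 and +1 around the line's centerline.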
// https://gist.github.com/2486101
// explanation: http://pouet.net/topic.php?which=8760&page=1
// 28.4 fixed-point coordinates
var x1 = ( v1.x * viewportXScale + viewportXOffs ) | 0;
var x2 = ( v2.x * viewportXScale + viewportXOffs ) | 0;
var y1 = ( v1.y * viewportYScale + viewportYOffs ) | 0;
var y2 = ( v2.y * viewportYScale + viewportYOffs ) | 0;
var z1 = ( v1.z * viewportZScale + viewportZOffs ) | 0;
var z2 = ( v2.z * viewportZScale + viewportZOffs ) | 0;
// Deltas
var dx12 = x1 - x2, dy12 = y1 - y2, dz12 = z1 - z2;
// Bounding rectangle
var minx = Math.max( ( Math.min( x1, x2 ) + subpixelBias ) >> subpixelBits, 0 );
var maxx = Math.min( ( Math.max( x1, x2 ) + subpixelBias ) >> subpixelBits, canvasWidth );
var miny = Math.max( ( Math.min( y1, y2 ) + subpixelBias ) >> subpixelBits, 0 );
var maxy = Math.min( ( Math.max( y1, y2 ) + subpixelBias ) >> subpixelBits, canvasHeight );
var minz = Math.max( ( Math.min( z1, z2 ) + subpixelBias ) >> subpixelBits, 0 );
var maxz = ( Math.max( z1, z2 ) + subpixelBias ) >> subpixelBits;
rectx1 = Math.min( minx, rectx1 );
rectx2 = Math.max( maxx, rectx2 );
recty1 = Math.min( miny, recty1 );
recty2 = Math.max( maxy, recty2 );
// Get the line's unit vector and cross vector
var length = Math.sqrt( ( dy12 * dy12 ) + ( dx12 * dx12 ) );
var unitX = ( dx12 / length );
var unitY = ( dy12 / length );
var unitZ = ( dz12 / length );
var pixelX, pixelY, pixelZ;
var pX, pY, pZ;
crossVector.set( unitX, unitY, unitZ );
crossVector.cross( lookVector );
crossVector.normalize();
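// crossVector is now a unit vector perpendicular to the line as seen from the
// camera, used below to offset pixels sideways when the line is wider than 1px.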
while ( length > 0 ) {
// Get this pixel.
pixelX = x2 + length * unitX;
pixelY = y2 + length * unitY;
pixelZ = z2 + length * unitZ;
pixelX = ( pixelX + subpixelBias ) >> subpixelBits;
pixelY = ( pixelY + subpixelBias ) >> subpixelBits;
pZ = ( pixelZ + subpixelBias ) >> subpixelBits;
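// The bias-and-shift converts the 28.4 fixed-point coordinates to integer
// pixels (subpixelBias presumably rounds to the nearest pixel center).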
// Draw line with line width
for ( var i = - halfLineWidth; i <= halfLineWidth; ++ i ) {
// Compute the line pixels.
// Get the pixels on the vector that crosses to the line vector
pX = Math.floor( ( pixelX + crossVector.x * i ) );
pY = Math.floor( ( pixelY + crossVector.y * i ) );
// Skip pixels that fall outside the dirty rectangle.
if ( rectx1 >= pX || rectx2 <= pX || recty1 >= pY || recty2 <= pY )
continue;
// Find which block this pixel falls in.
var blockX = pX >> blockShift;
var blockY = pY >> blockShift;
var blockId = blockX + blockY * canvasWBlocks;
// Depth-cull against the block: skip it if even its farthest pixel is closer than this line's nearest z.
if ( blockMaxZ[ blockId ] < minz ) continue;
blockMaxZ[ blockId ] = Math.min( blockMaxZ[ blockId ], maxz );
var bflags = blockFlags[ blockId ];
if ( bflags & BLOCK_NEEDCLEAR ) clearBlock( blockX, blockY );
blockFlags[ blockId ] = bflags & ~( BLOCK_ISCLEAR | BLOCK_NEEDCLEAR );
// draw pixel
var offset = pX + pY * canvasWidth;
if ( pZ < zbuffer[ offset ] ) {
shader( data, zbuffer, offset, pZ, color1, color2, material );
}
}
--length;
}
}
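// Resets one block's z-buffer entries to maxZVal and fills its pixels with the
// clear color; the alpha channel is 0 when the canvas is transparent, 255 otherwise.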
function clearBlock( blockX, blockY ) {
var zoffset = blockX * blockSize + blockY * blockSize * canvasWidth;
var poffset = zoffset * 4;
var zlinestep = canvasWidth - blockSize;
var plinestep = zlinestep * 4;
for ( var y = 0; y < blockSize; y ++ ) {
for ( var x = 0; x < blockSize; x ++ ) {
zbuffer[ zoffset ++ ] = maxZVal;
data[ poffset ++ ] = clearColor.r * 255 | 0;
data[ poffset ++ ] = clearColor.g * 255 | 0;
data[ poffset ++ ] = clearColor.b * 255 | 0;
data[ poffset ++ ] = alpha ? 0 : 255;
}
zoffset += zlinestep;
poffset += plinestep;
}
}
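// Presumably called at the end of a frame: clears any block still flagged
// BLOCK_NEEDCLEAR (blocks no primitive touched) and marks it clean.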
function finishClear( ) {
var block = 0;
for ( var y = 0; y < canvasHBlocks; y ++ ) {
for ( var x = 0; x < canvasWBlocks; x ++ ) {
if ( blockFlags[ block ] & BLOCK_NEEDCLEAR ) {
clearBlock( x, y );
blockFlags[ block ] = BLOCK_ISCLEAR;
}
block ++;
}
}
}
};
THREE.SoftwareRenderer.Texture = function () {
var canvas;
this.fromImage = function ( image ) {
if ( ! image || image.width <= 0 || image.height <= 0 )
return;
if ( canvas === undefined ) {
canvas = document.createElement( 'canvas' );
}
var size = image.width > image.height ? image.width : image.height;
size = THREE.Math.nextPowerOfTwo( size );
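// Pad the texture to a square power-of-two; presumably this lets the sampling
// code elsewhere use cheap shift/mask addressing for UV lookups.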
if ( canvas.width != size || canvas.height != size ) {
canvas.width = size;
canvas.height = size;
}
var ctx = canvas.getContext( '2d' );
ctx.clearRect( 0, 0, size, size );
ctx.drawImage( image, 0, 0, size, size );
var imgData = ctx.getImageData( 0, 0, size, size );
this.data = imgData.data;
this.width = size;
this.height = size;
this.srcUrl = image.src;
};
};<|fim▁end|> |
} else {
|
<|file_name|>73_Set_Matrix_Zeroes.py<|end_file_name|><|fim▁begin|>class Solution(object):
def setZeroes(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: void Do not return anything, modify matrix in-place instead.
"""
width,height = len(matrix[0]),len(matrix)
for i in xrange(height):
foundzero = False
for j in xrange(width):
if matrix[i][j] == 0:
foundzero = True
matrix[i][j] = float("inf")
if not foundzero:
continue
for j in xrange(width):
if matrix[i][j] != float("inf"):
matrix[i][j] = 0
for i in xrange(width):
foundtarget = False
for j in xrange(height):<|fim▁hole|> foundtarget = True
break
if not foundtarget:
continue
for j in xrange(height):
matrix[j][i] = 0<|fim▁end|> | if matrix[j][i] == float("inf"): |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|># This file is part of the qpopplerview package.
#
# Copyright (c) 2010 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Caching of generated images.<|fim▁hole|>import time
import weakref
try:
import popplerqt4
except ImportError:
from . import popplerqt4_dummy as popplerqt4
from PyQt4.QtCore import Qt, QThread
from . import render
from . import rectangles
from .locking import lock
__all__ = ['maxsize', 'setmaxsize', 'image', 'generate', 'clear', 'links', 'options']
_cache = weakref.WeakKeyDictionary()
_schedulers = weakref.WeakKeyDictionary()
_options = weakref.WeakKeyDictionary()
_links = weakref.WeakKeyDictionary()
# cache size
_maxsize = 104857600 # 100M
_currentsize = 0
_globaloptions = None
def setmaxsize(maxsize):
"""Sets the maximum cache size in Megabytes."""
global _maxsize
_maxsize = maxsize * 1048576
purge()
def maxsize():
"""Returns the maximum cache size in Megabytes."""
return _maxsize / 1048576
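# Typical usage (a sketch; the exact import path is an assumption of this example):
# from qpopplerview import cache
# cache.setmaxsize(200) # allow up to 200 MB of rendered page images
# img = cache.image(page, exact=False) # may return a scaled stand-in
# if img is None: cache.generate(page) # background render; page.update() fires when ready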
def clear(document=None):
"""Clears the whole cache or the cache for the given Poppler.Document."""
if document:
try:
del _cache[document]
except KeyError:
pass
else:
_cache.clear()
global _currentsize
_currentsize = 0
def image(page, exact=True):
"""Returns a rendered image for given Page if in cache.
If exact is True (default), the function returns None if the exact size was
not in the cache. If exact is False, the function may return a temporary
rendering of the page scaled from a different size, if that was available.
"""
document = page.document()
pageKey = (page.pageNumber(), page.rotation())
sizeKey = (page.width(), page.height())
if exact:
try:
entry = _cache[document][pageKey][sizeKey]
except KeyError:
return
else:
entry[1] = time.time()
return entry[0]
try:
sizes = _cache[document][pageKey].keys()
except KeyError:
return
# find the closest size (assuming aspect ratio has not changed)
if sizes:
sizes = sorted(sizes, key=lambda s: abs(1 - s[0] / float(page.width())))
return _cache[document][pageKey][sizes[0]][0]
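# The closest-size heuristic above, worked through: with cached widths 300 and
# 640 for a page now 600 wide, the scores are abs(1 - 300/600) = 0.5 and
# abs(1 - 640/600) ~= 0.067, so the 640-wide rendering is returned as a stand-in.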
def generate(page):
"""Schedule an image to be generated for the cache."""
# Poppler-Qt4 crashes when different pages from a Document are rendered at the same time,
# so we schedule them to be run in sequence.
document = page.document()
try:
scheduler = _schedulers[document]
except KeyError:
scheduler = _schedulers[document] = Scheduler()
scheduler.schedulejob(page)
def add(image, document, pageNumber, rotation, width, height):
"""(Internal) Adds an image to the cache."""
pageKey = (pageNumber, rotation)
sizeKey = (width, height)
_cache.setdefault(document, {}).setdefault(pageKey, {})[sizeKey] = [image, time.time()]
# maintain cache size
global _maxsize, _currentsize
_currentsize += image.byteCount()
if _currentsize > _maxsize:
purge()
def purge():
"""Removes old images from the cache to limit the space used.
(Not necessary to call, as the cache will monitor its size automatically.)
"""
# make a list of the images, sorted on time, newest first
images = iter(sorted((
(time, document, pageKey, sizeKey, image.byteCount())
for document, pageKeys in _cache.items()
for pageKey, sizeKeys in pageKeys.items()
for sizeKey, (image, time) in sizeKeys.items()),
reverse=True))
# sum the size of the newest images
global _maxsize, _currentsize
byteCount = 0
for item in images:
byteCount += item[4]
if byteCount > _maxsize:
break
_currentsize = byteCount
# delete the other images
for time, document, pageKey, sizeKey, byteCount in images:
del _cache[document][pageKey][sizeKey]
def links(page):
"""Returns a position-searchable list of the links in the page."""
document, pageNumber = page.document(), page.pageNumber()
try:
return _links[document][pageNumber]
except KeyError:
with lock(document):
links = rectangles.Rectangles(document.page(pageNumber).links(),
lambda link: link.linkArea().normalized().getCoords())
_links.setdefault(document, {})[pageNumber] = links
return links
def options(document=None):
"""Returns a RenderOptions object for a document or the global one if no document is given."""
global _globaloptions, _options
if document:
try:
return _options[document]
except KeyError:
result = _options[document] = render.RenderOptions()
return result
if not _globaloptions:
_globaloptions = render.RenderOptions()
# enable antialiasing by default
_globaloptions.setRenderHint(popplerqt4.Poppler.Document.Antialiasing |
popplerqt4.Poppler.Document.TextAntialiasing)
return _globaloptions
def setoptions(options, document=None):
"""Sets a RenderOptions instance for the given document or as the global one if no document is given.
Use None for the options to unset (delete) the options.
"""
global _globaloptions, _options
if not document:
_globaloptions = options
elif options:
_options[document] = options
else:
try:
del _options[document]
except KeyError:
pass
class Scheduler(object):
"""Manages running rendering jobs in sequence for a Document."""
def __init__(self):
self._schedule = [] # order
self._jobs = {} # jobs on key
self._waiting = weakref.WeakKeyDictionary() # jobs on page
self._running = None
def schedulejob(self, page):
"""Creates or retriggers an existing Job.
If a Job was already scheduled for the page, it is canceled.
The page's update() method will be called when the Job has completed.
"""
# uniquely identify the image to be generated
key = (page.pageNumber(), page.rotation(), page.width(), page.height())
try:
job = self._jobs[key]
except KeyError:
job = self._jobs[key] = Job(page)
job.key = key
else:
self._schedule.remove(job)
self._schedule.append(job)
self._waiting[page] = job
self.checkStart()
def checkStart(self):
"""Starts a job if none is running and at least one is waiting."""
while self._schedule and not self._running:
job = self._schedule[-1]
document = job.document()
if document and job in self._waiting.values():
self._running = Runner(self, document, job)
break
else:
self.done(job)
def done(self, job):
"""Called when the job has completed."""
del self._jobs[job.key]
self._schedule.remove(job)
self._running = None
for page in list(self._waiting):
if self._waiting[page] is job:
page.update()
del self._waiting[page]
class Job(object):
"""Simply contains data needed to create an image later."""
def __init__(self, page):
self.document = weakref.ref(page.document())
self.pageNumber = page.pageNumber()
self.rotation = page.rotation()
self.width = page.width()
self.height = page.height()
class Runner(QThread):
"""Immediately runs a Job in a background thread."""
def __init__(self, scheduler, document, job):
super(Runner, self).__init__()
self.scheduler = scheduler
self.job = job
self.document = document # keep reference now so that it does not die during this thread
self.finished.connect(self.slotFinished)
self.start()
def run(self):
"""Main method of this thread, called by Qt on start()."""
page = self.document.page(self.job.pageNumber)
pageSize = page.pageSize()
if self.job.rotation & 1:
pageSize.transpose()
xres = 72.0 * self.job.width / pageSize.width()
yres = 72.0 * self.job.height / pageSize.height()
threshold = options().oversampleThreshold() or options(self.document).oversampleThreshold()
multiplier = 2 if xres < threshold else 1
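# (Oversampling: below the threshold the page is rendered at twice the target
# resolution and smoothly scaled down afterwards, trading CPU for less aliasing.)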
with lock(self.document):
options().write(self.document)
options(self.document).write(self.document)
self.image = page.renderToImage(xres * multiplier, yres * multiplier, 0, 0, self.job.width * multiplier, self.job.height * multiplier, self.job.rotation)
if multiplier == 2:
self.image = self.image.scaledToWidth(self.job.width, Qt.SmoothTransformation)
def slotFinished(self):
"""Called when the thread has completed."""
add(self.image, self.document, self.job.pageNumber, self.job.rotation, self.job.width, self.job.height)
self.scheduler.done(self.job)
self.scheduler.checkStart()<|fim▁end|> | """
|
<|file_name|>templatetags.py<|end_file_name|><|fim▁begin|>def format_date(dt):<|fim▁hole|><|fim▁end|> | return dt.strftime('%Y-%m-%d %H:%M:%S') |
<|file_name|>issue-78722.rs<|end_file_name|><|fim▁begin|>// edition:2018
#![feature(type_alias_impl_trait)]
type F = impl core::future::Future<Output = u8>;
struct Bug {
V1: [(); {<|fim▁hole|> }
let f: F = async { 1 };
//~^ ERROR mismatched types [E0308]
1
}],
}
fn main() {}<|fim▁end|> | fn concrete_use() -> F {
async {} |
<|file_name|>LambdaOutputUpdate.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/kinesisanalytics/model/LambdaOutputUpdate.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace KinesisAnalytics
{
namespace Model
{
LambdaOutputUpdate::LambdaOutputUpdate() :
m_resourceARNUpdateHasBeenSet(false),
m_roleARNUpdateHasBeenSet(false)
{
}
LambdaOutputUpdate::LambdaOutputUpdate(JsonView jsonValue) :
m_resourceARNUpdateHasBeenSet(false),
m_roleARNUpdateHasBeenSet(false)
{
*this = jsonValue;
}
LambdaOutputUpdate& LambdaOutputUpdate::operator =(JsonView jsonValue)
{
if(jsonValue.ValueExists("ResourceARNUpdate"))
{
m_resourceARNUpdate = jsonValue.GetString("ResourceARNUpdate");
m_resourceARNUpdateHasBeenSet = true;
}
if(jsonValue.ValueExists("RoleARNUpdate"))
{
m_roleARNUpdate = jsonValue.GetString("RoleARNUpdate");<|fim▁hole|>
m_roleARNUpdateHasBeenSet = true;
}
return *this;
}
JsonValue LambdaOutputUpdate::Jsonize() const
{
JsonValue payload;
if(m_resourceARNUpdateHasBeenSet)
{
payload.WithString("ResourceARNUpdate", m_resourceARNUpdate);
}
if(m_roleARNUpdateHasBeenSet)
{
payload.WithString("RoleARNUpdate", m_roleARNUpdate);
}
return payload;
}
} // namespace Model
} // namespace KinesisAnalytics
} // namespace Aws<|fim▁end|> | |
<|file_name|>content.d.ts<|end_file_name|><|fim▁begin|>export const enum ContentType {
Component,
Helper,
String,<|fim▁hole|> SafeString,
Fragment,
Node,
Other,
}<|fim▁end|> | Empty, |
<|file_name|>hole.cpp<|end_file_name|><|fim▁begin|>/*
SPDX-FileCopyrightText: 2009-2013 Graeme Gott <[email protected]>
SPDX-License-Identifier: GPL-3.0-or-later
*/
#include "hole.h"
<|fim▁hole|>#include <QRadialGradient>
//-----------------------------------------------------------------------------
Hole::Hole(const QPoint& position, QGraphicsItem* parent)
: QGraphicsEllipseItem(0, 0, 16, 16, parent)
, m_peg(nullptr)
{
QRadialGradient gradient(QPointF(8,8), 8);
gradient.setColorAt(0, QColor(0, 0, 0, 0));
gradient.setColorAt(1, QColor(0, 0, 0, 64));
setBrush(gradient);
setPen(Qt::NoPen);
setZValue(1);
setPos(position.x() * 20 + 2, position.y() * 20 + 2);
setFlag(QGraphicsItem::ItemIsMovable, false);
}
//-----------------------------------------------------------------------------
void Hole::setHighlight(bool highlight)
{
setPen(!highlight ? Qt::NoPen : QPen(Qt::yellow, 2));
}
//-----------------------------------------------------------------------------<|fim▁end|> | #include <QBrush>
#include <QPen> |
<|file_name|>ExampleUnitTest.java<|end_file_name|><|fim▁begin|>package com.xing.sample.actionbarcompat_basic;
import org.junit.Test;<|fim▁hole|>
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
}<|fim▁end|> | |
<|file_name|>FeedController.java<|end_file_name|><|fim▁begin|>package org.ominidi.api.controller;
import org.ominidi.api.exception.ConnectionException;
import org.ominidi.api.exception.NotFoundException;
import org.ominidi.api.model.Errors;
import org.ominidi.domain.model.Feed;
import org.ominidi.domain.model.Post;
import org.ominidi.facebook.service.PageFeedService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import java.util.Optional;
@RestController
@RequestMapping("/api/v1")
<|fim▁hole|> @Autowired
public FeedController(PageFeedService pageFeedService) {
this.pageFeedService = pageFeedService;
}
@GetMapping(value = "/feed", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public Feed<Post> getFeed(@RequestParam(value = "u", required = false) Optional<String> feedUrl) {
Optional<Feed<Post>> result = feedUrl.isPresent()
? pageFeedService.getFeed(feedUrl.get())
: pageFeedService.getFeed();
return result.orElseThrow(() -> new ConnectionException(Errors.CONNECTION_PROBLEM));
}
@GetMapping(value = "/post/{id}", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public Post getPost(@PathVariable(value = "id") String id) {
return pageFeedService.getPostById(id).orElseThrow(() -> new NotFoundException(Errors.postNotFound(id)));
}
}<|fim▁end|> | public class FeedController {
private PageFeedService pageFeedService;
|