prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---
<|file_name|>regions-assoc-type-region-bound.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the compiler considers the 'a bound declared in the
// trait. Issue #20890.
trait Foo<'a> {
type Value: 'a;
fn get(&self) -> &'a Self::Value;
}
fn takes_foo<'a,F: Foo<'a>>(f: &'a F) {
// This call would be illegal, because it results in &'a F::Value,
// and the only way we know that `F::Value : 'a` is because of the
// trait declaration.
<|fim▁hole|><|fim▁end|>
|
f.get();
}
fn main() { }
|
<|file_name|>sjisprober.py<|end_file_name|><|fim▁begin|>######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants
class SJISProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(SJISSMModel)
self._mDistributionAnalyzer = SJISDistributionAnalysis()
self._mContextAnalyzer = SJISContextAnalysis()
self.reset()
def reset(self):
MultiByteCharSetProber.reset(self)
self._mContextAnalyzer.reset()
<|fim▁hole|> return self._mContextAnalyzer.get_charset_name()
def feed(self, aBuf):
aLen = len(aBuf)
for i in range(0, aLen):
codingState = self._mCodingSM.next_state(aBuf[i])
if codingState == constants.eError:
if constants._debug:
sys.stderr.write(self.get_charset_name()
+ ' prober hit error at byte ' + str(i)
+ '\n')
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
charLen = self._mCodingSM.get_current_charlen()
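# (added note) charLen is the byte length of the character that just
# completed; at i == 0 that character may have started in the previous buffer,
# so the byte carried over in self._mLastChar is used to reassemble it.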
if i == 0:
self._mLastChar[1] = aBuf[0]
self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
charLen)
self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
else:
self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
- charLen], charLen)
self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
charLen)
self._mLastChar[0] = aBuf[aLen - 1]
if self.get_state() == constants.eDetecting:
if (self._mContextAnalyzer.got_enough_data() and
(self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
contxtCf = self._mContextAnalyzer.get_confidence()
distribCf = self._mDistributionAnalyzer.get_confidence()
return max(contxtCf, distribCf)<|fim▁end|>
|
def get_charset_name(self):
|
<|file_name|>lazy.js<|end_file_name|><|fim▁begin|>/**
* tiny-di
* @module binding/lazy
* @copyright Dennis Saenger <[email protected]>
*/
'use strict';
import { AbstractBinding } from './abstract';
export class LazyBinding extends AbstractBinding {
constructor(injector, key, path, opts) {
super(injector, key);
this.path = path;
this.opts = opts;
}
load() {
return this.injector.load(this.key, this.path, this.opts);
}
$get() {<|fim▁hole|> }
}<|fim▁end|>
|
return this.load();
|
<|file_name|>if-let.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn macros() {
macro_rules! foo{
($p:pat, $e:expr, $b:block) => {{
if let $p = $e $b
}}
}
macro_rules! bar{
($p:pat, $e:expr, $b:block) => {{
foo!($p, $e, $b)
}}
}
foo!(a, 1, { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
});
bar!(a, 1, { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
});
}
pub fn main() {
if let a = 1 { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
}
if let a = 1 { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
} else if true {
println!("else-if in irrefutable if-let");
} else {
println!("else in irrefutable if-let");
}
if let 1 = 2 {
println!("refutable pattern");<|fim▁hole|> }
if true {
println!("if");
} else if let a = 1 { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
}
}<|fim▁end|>
|
} else if let a = 1 { //~ ERROR irrefutable if-let
println!("irrefutable pattern");
|
<|file_name|>yesterday_dump.py<|end_file_name|><|fim▁begin|>import time
from pymongo import MongoClient
from datetime import datetime, timedelta
import json
from bson import Binary, Code
from bson.json_util import dumps
client = MongoClient('localhost', 27017)
db = client['election-2016']
def dumpData(yesterdayStr):
collectionName = 't' + yesterdayStr
cursor = db[collectionName].find()
count = cursor.count()
print(collectionName + ' found ' + str(count) + ' tweets')
# dump only if data count is greater than 0
if count > 0:
file = open('out/' + yesterdayStr + '.json', 'w')
file.write('[')
i = 0
for document in cursor:
doc = dumps(document)<|fim▁hole|> if (i != count - 1):
file.write(',\n')
else:
file.write('\n]')
i = i + 1
print('data for ' + yesterdayStr + ' successfully dumped at ' + str(now))
# Run following code when the program starts
if __name__ == '__main__':
currentDate = str(datetime.now().month) + '_' + str(datetime.now().day)
#get now and yesterday strings
now = datetime.now()
yesterday = now - timedelta(days=1)
yesterdayStr = str(yesterday.month) + '_' + str(yesterday.day)
# dump yesterday's collection
dumpData(yesterdayStr)<|fim▁end|>
|
file.write(doc)
|
<|file_name|>read_message.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::io;
use futures::{Poll, Future, Async};
use tokio_io::AsyncRead;
use ethkey::KeyPair;
use key_server_cluster::Error;
use key_server_cluster::message::Message;
use key_server_cluster::io::{read_header, ReadHeader, read_payload, read_encrypted_payload, ReadPayload};
/// Creates a future for reading a single message from the stream.
pub fn read_message<A>(a: A) -> ReadMessage<A> where A: AsyncRead {
ReadMessage {
key: None,
state: ReadMessageState::ReadHeader(read_header(a)),
}
}
/// Creates a future for reading a single encrypted message from the stream.
pub fn read_encrypted_message<A>(a: A, key: KeyPair) -> ReadMessage<A> where A: AsyncRead {
ReadMessage {
key: Some(key),
state: ReadMessageState::ReadHeader(read_header(a)),
}
}
enum ReadMessageState<A> {
ReadHeader(ReadHeader<A>),
ReadPayload(ReadPayload<A>),
Finished,
}
/// Future that reads a single message from the stream.
pub struct ReadMessage<A> {
key: Option<KeyPair>,
state: ReadMessageState<A>,
}
impl<A> Future for ReadMessage<A> where A: AsyncRead {
type Item = (A, Result<Message, Error>);
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
let (next, result) = match self.state {
ReadMessageState::ReadHeader(ref mut future) => {
let (read, header) = try_ready!(future.poll());
let header = match header {
Ok(header) => header,
Err(err) => return Ok((read, Err(err)).into()),
};
let future = match self.key.take() {
Some(key) => read_encrypted_payload(read, header, key),
None => read_payload(read, header),
};
let next = ReadMessageState::ReadPayload(future);
(next, Async::NotReady)
},
ReadMessageState::ReadPayload(ref mut future) => {
let (read, payload) = try_ready!(future.poll());
(ReadMessageState::Finished, Async::Ready((read, payload)))
},
ReadMessageState::Finished => panic!("poll ReadMessage after it's done"),
};
self.state = next;
match result {
// by polling again, we register the new future<|fim▁hole|> }
}
}<|fim▁end|>
|
Async::NotReady => self.poll(),
result => Ok(result)
|
<|file_name|>32.py<|end_file_name|><|fim▁begin|># Hangman for real tho(t)
import random
def pickWord(words):
toReturn = random.choice(words)
return toReturn
def drawHangman(parts):
if parts >= 1:
print(" O")
if parts >= 4:
print("/|\\")
elif parts >= 3:
print("/|")
elif parts >= 2:
print(" |")
if parts >= 6:
print("/ \\")
elif parts == 5:
print("/")
print("\n")
with open("scrabble.txt", "r") as paroleFile:
lol = paroleFile.read().split("\n")
word = pickWord(lol)
completo = False
okLetter = False
guessedLetters = set()
progress = ["_" for i in range(len(word))]
remainingLetters = len(word)
guesses = 0
while not completo:
okLetter = False
for i in progress:
print(i, end="")
while not okLetter:
print("\n\n\nGuess your letter: ")
letter = input().upper()
if letter in guessedLetters:
print("You already tried that ")
else:<|fim▁hole|> print("Wrong letter ")
guesses += 1
print("Guesses remaining: ", 7 - guesses, "\n")
else:
for i in range(len(word)):
if word[i] == letter:
progress[i] = letter
remainingLetters -= 1
drawHangman(guesses)
if remainingLetters <= 0:
for i in progress:
print(i, end="")
print("\n\nYou won ye")
completo = True
if guesses > 6:
print(" ^^ DED ^^ \n")
print("Hai perso lol\n")
print("\nLa parola era", str(word))
completo = True<|fim▁end|>
|
guessedLetters.add(letter)
okLetter = True
if letter not in word:
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
===========================================
Main Components (:mod:`artview.components`)
===========================================
.. currentmodule:: artview.components
ARTview offers some basic Components for visualization
of weather radar data using Py-ART and
ARTview functions.
.. autosummary::
:toctree: generated/
RadarDisplay
GridDisplay
Menu
LevelButtonWindow
FieldButtonWindow
LinkPlugins
SelectRegion
PlotDisplay
"""
import pyart
from pkg_resources import parse_version
from .plot_radar import RadarDisplay
if parse_version(pyart.__version__) >= parse_version('1.6.0'):
from .plot_grid import GridDisplay
else:
from .plot_grid_legacy import GridDisplay
from .plot_points import PointsDisplay
from .menu import Menu
from .level import LevelButtonWindow
from .field import FieldButtonWindow
from .component_control import LinkPlugins
from .select_region import SelectRegion as SelectRegion_dev
from .select_region_old import SelectRegion
from .plot_simple import PlotDisplay<|fim▁hole|>
del pyart
del parse_version<|fim▁end|>
| |
<|file_name|>key_ops.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Keybase Inc. All rights reserved.
// Use of this source code is governed by a BSD
// license that can be found in the LICENSE file.<|fim▁hole|>
import (
"context"
"github.com/keybase/client/go/kbfs/idutil"
"github.com/keybase/client/go/kbfs/kbfscrypto"
"github.com/keybase/client/go/kbfs/kbfsmd"
"github.com/keybase/client/go/protocol/keybase1"
)
// KeyOpsConfig is a config object containing the outside helper
// instances needed by KeyOps.
type KeyOpsConfig interface {
KeyServer() KeyServer
KBPKI() idutil.KBPKI
}
// KeyOpsStandard implements the KeyOps interface and relays get/put
// requests for server-side key halves from/to the key server.
type KeyOpsStandard struct {
config KeyOpsConfig
}
// NewKeyOpsStandard creates a new KeyOpsStandard instance.
func NewKeyOpsStandard(config KeyOpsConfig) *KeyOpsStandard {
return &KeyOpsStandard{config}
}
// Test that KeyOps standard fully implements the KeyOps interface.
var _ KeyOps = (*KeyOpsStandard)(nil)
// GetTLFCryptKeyServerHalf is an implementation of the KeyOps interface.
func (k *KeyOpsStandard) GetTLFCryptKeyServerHalf(
ctx context.Context, serverHalfID kbfscrypto.TLFCryptKeyServerHalfID,
key kbfscrypto.CryptPublicKey) (kbfscrypto.TLFCryptKeyServerHalf, error) {
// get the key half from the server
serverHalf, err := k.config.KeyServer().GetTLFCryptKeyServerHalf(
ctx, serverHalfID, key)
if err != nil {
return kbfscrypto.TLFCryptKeyServerHalf{}, err
}
// get current uid and deviceKID
session, err := k.config.KBPKI().GetCurrentSession(ctx)
if err != nil {
return kbfscrypto.TLFCryptKeyServerHalf{}, err
}
// verify we got the expected key
err = kbfscrypto.VerifyTLFCryptKeyServerHalfID(
serverHalfID, session.UID, key, serverHalf)
if err != nil {
return kbfscrypto.TLFCryptKeyServerHalf{}, err
}
return serverHalf, nil
}
// PutTLFCryptKeyServerHalves is an implementation of the KeyOps interface.
func (k *KeyOpsStandard) PutTLFCryptKeyServerHalves(
ctx context.Context,
keyServerHalves kbfsmd.UserDeviceKeyServerHalves) error {
// upload the keys
return k.config.KeyServer().PutTLFCryptKeyServerHalves(ctx, keyServerHalves)
}
// DeleteTLFCryptKeyServerHalf is an implementation of the KeyOps interface.
func (k *KeyOpsStandard) DeleteTLFCryptKeyServerHalf(
ctx context.Context, uid keybase1.UID, key kbfscrypto.CryptPublicKey,
serverHalfID kbfscrypto.TLFCryptKeyServerHalfID) error {
return k.config.KeyServer().DeleteTLFCryptKeyServerHalf(
ctx, uid, key, serverHalfID)
}<|fim▁end|>
|
package libkey
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::io::Read;
use std::collections::HashMap;
use std::str::FromStr;
type DynamicInfo<'a> = HashMap<&'a str, u8>;
#[derive(Debug)]
struct Aunt<'a> {
number: u16,
info: DynamicInfo<'a>
}
fn parse_aunt(line: &str) -> Aunt {
let tokens: Vec<_> = line
.split(|c: char| !c.is_alphanumeric())
.filter(|c| !c.is_empty())
.collect();
let mut aunt = Aunt { number: u16::from_str(tokens[1]).unwrap(), info: HashMap::new() };
for i in 0..((tokens.len() - 2) / 2) {
aunt.info.insert(tokens[2 * i + 2], u8::from_str(tokens[2 * i + 3]).unwrap());
}
<|fim▁hole|> aunt
}
fn read_input() -> io::Result<String> {
let mut buffer = String::new();
try!(io::stdin().read_to_string(&mut buffer));
Ok(buffer.trim().to_string())
}
fn matches(aunt: &Aunt, specification: &DynamicInfo) -> bool {
let ref info = aunt.info;
info
.into_iter()
.all(|(attribute, value)| {
match specification.get(attribute) {
Some(x) if x == value => true,
_ => false
}
})
}
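// (added note) part-two rule from the puzzle: the machine under-reads the
// "cats"/"trees" counts and over-reads "pomeranians"/"goldfish", hence the
// < and > comparisons in matches_adjusted below.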
fn matches_adjusted(aunt: &Aunt, specification: &DynamicInfo) -> bool {
let ref info = aunt.info;
info
.into_iter()
.all(|(attribute, value)| {
match (*attribute, specification.get(attribute)) {
("cats", Some(x)) | ("trees", Some(x)) => x < value,
("pomeranians", Some(x)) | ("goldfish", Some(x)) => x > value,
(_, Some(x)) => x == value,
_ => false
}
})
}
fn main() {
let input = read_input().unwrap();
let aunts: Vec<_> = input.lines().map(parse_aunt).collect();
let mut machine_output = HashMap::with_capacity(10);
machine_output.insert("children", 3);
machine_output.insert("cats", 7);
machine_output.insert("samoyeds", 2);
machine_output.insert("pomeranians", 3);
machine_output.insert("akitas", 0);
machine_output.insert("vizslas", 0);
machine_output.insert("goldfish", 5);
machine_output.insert("trees", 2);
machine_output.insert("cars", 3);
machine_output.insert("perfumes", 1);
let aunt_sue = aunts.iter().find(|aunt| matches(aunt, &machine_output));
println!("Aunt Sue: {:?}", aunt_sue);
let another_aunt_sue = aunts.iter().find(|aunt| matches_adjusted(aunt, &machine_output));
println!("Another Aunt Sue: {:?}", another_aunt_sue);
}<|fim▁end|>
| |
<|file_name|>vendor.js<|end_file_name|><|fim▁begin|>/* eslint-disable global-require */<|fim▁hole|>
// polyfills and vendors
if (!window._babelPolyfill) {
require('babel-polyfill')
}<|fim▁end|>
| |
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>import collections
def moduleman_plugin(*args):
method_args = []
def inner_decorator(cls):
for method in method_args:
if (not (method in dir(cls))):
raise Exception("Required method %s not implemented" % method)<|fim▁hole|> if not isinstance(args[0], collections.Callable):
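# (added note) dual-use decorator: invoked as @moduleman_plugin("m1", "m2"),
# the branch below records the required method names and returns the
# decorator; invoked bare as @moduleman_plugin, args[0] is the class itself.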
method_args += args
return inner_decorator
return inner_decorator(args[0])<|fim▁end|>
|
cls.__PLUGIN_MODULEMAN_MARK = "Plugin mark"
return cls
|
<|file_name|>swf.js.communication.js<|end_file_name|><|fim▁begin|>/**
* ...
* @author paul
*/
function initCBX(object, id, options) {
var design = "assets";
if(object == null){
jQuery.noConflict();
var cboxClass;
cboxClass = jQuery(id).attr("class");
if(jQuery.browser.msie && parseInt(jQuery.browser.version)<8 ){
jQuery(id).colorbox();
}
else{
if(cboxClass.indexOf("cboxElement") == -1){
if(options.classes.image.id){
jQuery('.'+options.classes.image.id).colorbox({transition:options.classes.image.transition, slideshow:options.classes.image.slideshow, slideshowSpeed:options.classes.image.slideshowSpeed});
}
if(options.classes.video){
if(options.classes.video.id){
jQuery('.'+options.classes.video.id).colorbox({iframe:true, innerWidth:options.classes.video.innerWidth, innerHeight:options.classes.video.innerHeight, transition:options.classes.image.transition, slideshow:options.classes.image.slideshow, slideshowSpeed:options.classes.image.slideshowSpeed});
}
}
if(options.classes.swf){
if(options.classes.swf.id){
var cbxSWFSrc = jQuery('.'+options.classes.swf.id).attr("href");
var objEmbd = '<OBJECT classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,0,0" WIDTH="'+options.classes.video.innerWidth+'" HEIGHT="'+options.classes.video.innerHeight+'" id="cbxSWF" ALIGN="">'+
'<PARAM NAME=movie VALUE="'+cbxSWFSrc+'">' +
'<PARAM NAME=quality VALUE=high>' +
'<PARAM NAME=wmode VALUE=transparent>'+
'<PARAM NAME=bgcolor VALUE=#333399>'+
'<EMBED src="'+cbxSWFSrc+'" quality=high wmode=transparent WIDTH="'+options.classes.video.innerWidth+'" HEIGHT="'+options.classes.video.innerHeight+'" NAME="Yourfilename" ALIGN="" TYPE="application/x-shockwave-flash" PLUGINSPAGE="http://www.macromedia.com/go/getflashplayer"></EMBED>'+
'</OBJECT>';
jQuery('.'+options.classes.swf.id).colorbox({html:objEmbd, transition:options.classes.image.transition, slideshow:options.classes.image.slideshow, slideshowSpeed:options.classes.image.slideshowSpeed});
}
}
}
}
jQuery(id).trigger('click');
return;
}
loadjQuery = function(filename) {
loadjQuery.getScript(object.path+"/"+filename);
loadjQuery.retry(0);
}
loadColorbox = function(filename) {
loadColorbox.getScript(object.path+"/"+filename);
loadColorbox.retry(0);
}
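// (added note) the getScript/retry helpers below implement a simple polling
// loader: inject a <script> tag, then re-check every 200ms (for up to ~5s)
// until the expected global (jQuery, then jQuery.colorbox) is defined.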
loadjQuery.getScript = function(filename) {
if(typeof jQuery == "undefined"){
var script = document.createElement('script');
script.setAttribute("type","text/javascript");
script.setAttribute("src", filename);
document.getElementsByTagName("head")[0].appendChild(script);
}
}
loadColorbox.getScript = function(filename) {
if(typeof jQuery.colorbox == "undefined"){
var link = document.createElement('link');
link.setAttribute('media', 'screen');
link.setAttribute('href', object.path+'/'+design+'/colorbox.css');
link.setAttribute('rel', 'stylesheet');
document.getElementsByTagName("head")[0].appendChild(link);
var script = document.createElement('script');
script.setAttribute("type","text/javascript");
script.setAttribute("src", filename);<|fim▁hole|> document.getElementsByTagName("head")[0].appendChild(script);
}
}
loadjQuery.retry = function(time_elapsed) {
if (typeof jQuery == "undefined") {
if (time_elapsed <= 5000) {
setTimeout("loadjQuery.retry(" + (time_elapsed + 200) + ")", 200);
}
}
else {
if(typeof jQuery.colorbox == "undefined"){
loadColorbox("jquery.colorbox-min.js");
}
}
}
loadColorbox.retry = function(time_elapsed) {
if (typeof jQuery.colorbox == "undefined") {
if (time_elapsed <= 5000) {
setTimeout("loadColorbox.retry(" + (time_elapsed + 200) + ")", 200);
}
}
}
if(typeof jQuery == "undefined"){
loadjQuery("jquery-1.7.2.min.js");
}
else if(typeof jQuery.colorbox == "undefined"){
loadColorbox("jquery.colorbox-min.js");
}
}<|fim▁end|>
| |
<|file_name|>coherence.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! See `README.md` for high-level documentation
use super::Normalized;
use super::SelectionContext;
use super::ObligationCause;
use super::PredicateObligation;
use super::project;
use super::util;
use middle::subst::{Subst, Substs, TypeSpace};
use middle::ty::{self, ToPolyTraitRef, Ty};
use middle::infer::{self, InferCtxt};
use syntax::ast;
use syntax::codemap::{DUMMY_SP, Span};
#[derive(Copy, Clone)]
struct InferIsLocal(bool);
/// True if there exist types that satisfy both of the two given impls.
pub fn overlapping_impls(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
debug!("impl_can_satisfy(\
impl1_def_id={:?}, \
impl2_def_id={:?})",
impl1_def_id,
impl2_def_id);
let selcx = &mut SelectionContext::intercrate(infcx);
infcx.probe(|_| {
overlap(selcx, impl1_def_id, impl2_def_id) || overlap(selcx, impl2_def_id, impl1_def_id)
})
}
/// Can the types from impl `a` be used to satisfy impl `b`?
/// (Including all conditions)
fn overlap(selcx: &mut SelectionContext,
a_def_id: ast::DefId,
b_def_id: ast::DefId)
-> bool
{
debug!("overlap(a_def_id={:?}, b_def_id={:?})",
a_def_id,
b_def_id);
let (a_trait_ref, a_obligations) = impl_trait_ref_and_oblig(selcx,
a_def_id,
util::fresh_type_vars_for_impl);
let (b_trait_ref, b_obligations) = impl_trait_ref_and_oblig(selcx,
b_def_id,
util::fresh_type_vars_for_impl);
debug!("overlap: a_trait_ref={:?}", a_trait_ref);
debug!("overlap: b_trait_ref={:?}", b_trait_ref);
// Does `a <: b` hold? If not, no overlap.
if let Err(_) = infer::mk_sub_poly_trait_refs(selcx.infcx(),
true,
infer::Misc(DUMMY_SP),
a_trait_ref.to_poly_trait_ref(),
b_trait_ref.to_poly_trait_ref()) {
return false;
}
debug!("overlap: subtraitref check succeeded");
// Are any of the obligations unsatisfiable? If so, no overlap.
let infcx = selcx.infcx();
let opt_failing_obligation =
a_obligations.iter()
.chain(&b_obligations)
.map(|o| infcx.resolve_type_vars_if_possible(o))
.find(|o| !selcx.evaluate_obligation(o));
if let Some(failing_obligation) = opt_failing_obligation {
debug!("overlap: obligation unsatisfiable {:?}", failing_obligation);
return false
}
true
}
pub fn trait_ref_is_knowable<'tcx>(tcx: &ty::ctxt<'tcx>, trait_ref: &ty::TraitRef<'tcx>) -> bool
{
debug!("trait_ref_is_knowable(trait_ref={:?})", trait_ref);
// if the orphan rules pass, that means that no ancestor crate can
// impl this, so it's up to us.
if orphan_check_trait_ref(tcx, trait_ref, InferIsLocal(false)).is_ok() {
debug!("trait_ref_is_knowable: orphan check passed");
return true;
}
// if the trait is not marked fundamental, then it's always possible that
// an ancestor crate will impl this in the future, if they haven't
// already
if
trait_ref.def_id.krate != ast::LOCAL_CRATE &&
!tcx.has_attr(trait_ref.def_id, "fundamental")
{
debug!("trait_ref_is_knowable: trait is neither local nor fundamental");
return false;
}
// find out when some downstream (or cousin) crate could impl this
// trait-ref, presuming that all the parameters were instantiated
// with downstream types. If not, then it could only be
// implemented by an upstream crate, which means that the impl
// must be visible to us, and -- since the trait is fundamental
// -- we can test.
orphan_check_trait_ref(tcx, trait_ref, InferIsLocal(true)).is_err()
}
type SubstsFn = for<'a,'tcx> fn(infcx: &InferCtxt<'a, 'tcx>,
span: Span,
impl_def_id: ast::DefId)
-> Substs<'tcx>;
/// Instantiate fresh variables for all bound parameters of the impl
/// and return the impl trait ref with those variables substituted.
fn impl_trait_ref_and_oblig<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
impl_def_id: ast::DefId,
substs_fn: SubstsFn)
-> (ty::TraitRef<'tcx>,
Vec<PredicateObligation<'tcx>>)
{
let impl_substs =
&substs_fn(selcx.infcx(), DUMMY_SP, impl_def_id);
let impl_trait_ref =
selcx.tcx().impl_trait_ref(impl_def_id).unwrap();
let impl_trait_ref =
impl_trait_ref.subst(selcx.tcx(), impl_substs);
let Normalized { value: impl_trait_ref, obligations: normalization_obligations1 } =
project::normalize(selcx, ObligationCause::dummy(), &impl_trait_ref);
let predicates = selcx.tcx().lookup_predicates(impl_def_id);
let predicates = predicates.instantiate(selcx.tcx(), impl_substs);
let Normalized { value: predicates, obligations: normalization_obligations2 } =
project::normalize(selcx, ObligationCause::dummy(), &predicates);
let impl_obligations =
util::predicates_for_generics(ObligationCause::dummy(), 0, &predicates);
let impl_obligations: Vec<_> =
impl_obligations.into_iter()
.chain(normalization_obligations1)
.chain(normalization_obligations2)
.collect();
(impl_trait_ref, impl_obligations)
}
pub enum OrphanCheckErr<'tcx> {
NoLocalInputType,
UncoveredTy(Ty<'tcx>),
}
/// Checks the coherence orphan rules. `impl_def_id` should be the
/// def-id of a trait impl. To pass, either the trait must be local, or else
/// two conditions must be satisfied:
///
/// 1. All type parameters in `Self` must be "covered" by some local type constructor.
/// 2. Some local type must appear in `Self`.
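/// For example (illustrative types only): given a local type `Local<T>`,
/// `impl RemoteTrait for Local<T>` satisfies both rules, whereas
/// `impl RemoteTrait for Vec<T>` fails them: no local type appears in
/// `Self`, and the parameter `T` is not covered by a local constructor.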
pub fn orphan_check<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_def_id: ast::DefId)
-> Result<(), OrphanCheckErr<'tcx>>
{
debug!("orphan_check({:?})", impl_def_id);
// We only expect this routine to be invoked on implementations
// of a trait, not inherent implementations.
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
debug!("orphan_check: trait_ref={:?}", trait_ref);
// If the *trait* is local to the crate, ok.
if trait_ref.def_id.krate == ast::LOCAL_CRATE {
debug!("trait {:?} is local to current crate",
trait_ref.def_id);
return Ok(());
}
orphan_check_trait_ref(tcx, &trait_ref, InferIsLocal(false))
}
fn orphan_check_trait_ref<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_ref: &ty::TraitRef<'tcx>,
infer_is_local: InferIsLocal)
-> Result<(), OrphanCheckErr<'tcx>>
{
debug!("orphan_check_trait_ref(trait_ref={:?}, infer_is_local={})",
trait_ref, infer_is_local.0);
// First, create an ordered iterator over all the type parameters to the trait, with the self
// type appearing first.
let input_tys = Some(trait_ref.self_ty());
let input_tys = input_tys.iter().chain(trait_ref.substs.types.get_slice(TypeSpace));<|fim▁hole|> // Find the first input type that either references a type parameter OR
// some local type.
for input_ty in input_tys {
if ty_is_local(tcx, input_ty, infer_is_local) {
debug!("orphan_check_trait_ref: ty_is_local `{:?}`", input_ty);
// First local input type. Check that there are no
// uncovered type parameters.
let uncovered_tys = uncovered_tys(tcx, input_ty, infer_is_local);
for uncovered_ty in uncovered_tys {
if let Some(param) = uncovered_ty.walk().find(|t| is_type_parameter(t)) {
debug!("orphan_check_trait_ref: uncovered type `{:?}`", param);
return Err(OrphanCheckErr::UncoveredTy(param));
}
}
// OK, found local type, all prior types upheld invariant.
return Ok(());
}
// Otherwise, enforce invariant that there are no type
// parameters reachable.
if !infer_is_local.0 {
if let Some(param) = input_ty.walk().find(|t| is_type_parameter(t)) {
debug!("orphan_check_trait_ref: uncovered type `{:?}`", param);
return Err(OrphanCheckErr::UncoveredTy(param));
}
}
}
// If we exit above loop, never found a local type.
debug!("orphan_check_trait_ref: no local type");
return Err(OrphanCheckErr::NoLocalInputType);
}
fn uncovered_tys<'tcx>(tcx: &ty::ctxt<'tcx>,
ty: Ty<'tcx>,
infer_is_local: InferIsLocal)
-> Vec<Ty<'tcx>>
{
if ty_is_local_constructor(tcx, ty, infer_is_local) {
vec![]
} else if fundamental_ty(tcx, ty) {
ty.walk_shallow()
.flat_map(|t| uncovered_tys(tcx, t, infer_is_local))
.collect()
} else {
vec![ty]
}
}
fn is_type_parameter<'tcx>(ty: Ty<'tcx>) -> bool {
match ty.sty {
// FIXME(#20590) straighten story about projection types
ty::TyProjection(..) | ty::TyParam(..) => true,
_ => false,
}
}
fn ty_is_local<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>, infer_is_local: InferIsLocal) -> bool
{
ty_is_local_constructor(tcx, ty, infer_is_local) ||
fundamental_ty(tcx, ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, infer_is_local))
}
fn fundamental_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool
{
match ty.sty {
ty::TyBox(..) | ty::TyRef(..) =>
true,
ty::TyEnum(def_id, _) | ty::TyStruct(def_id, _) =>
tcx.has_attr(def_id, "fundamental"),
ty::TyTrait(ref data) =>
tcx.has_attr(data.principal_def_id(), "fundamental"),
_ =>
false
}
}
fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>,
ty: Ty<'tcx>,
infer_is_local: InferIsLocal)
-> bool
{
debug!("ty_is_local_constructor({:?})", ty);
match ty.sty {
ty::TyBool |
ty::TyChar |
ty::TyInt(..) |
ty::TyUint(..) |
ty::TyFloat(..) |
ty::TyStr(..) |
ty::TyBareFn(..) |
ty::TyArray(..) |
ty::TySlice(..) |
ty::TyRawPtr(..) |
ty::TyRef(..) |
ty::TyTuple(..) |
ty::TyParam(..) |
ty::TyProjection(..) => {
false
}
ty::TyInfer(..) => {
infer_is_local.0
}
ty::TyEnum(def_id, _) |
ty::TyStruct(def_id, _) => {
def_id.krate == ast::LOCAL_CRATE
}
ty::TyBox(_) => { // Box<T>
let krate = tcx.lang_items.owned_box().map(|d| d.krate);
krate == Some(ast::LOCAL_CRATE)
}
ty::TyTrait(ref tt) => {
tt.principal_def_id().krate == ast::LOCAL_CRATE
}
ty::TyClosure(..) |
ty::TyError => {
tcx.sess.bug(
&format!("ty_is_local invoked on unexpected type: {:?}",
ty))
}
}
}<|fim▁end|>
| |
<|file_name|>gameimp.hpp<|end_file_name|><|fim▁begin|>#ifndef PWN_ENGINE_GAMEIMP_HPP
#define PWN_ENGINE_GAMEIMP_HPP
#include <vector>
#include <map>
#include <boost/shared_ptr.hpp>
#include <pwn/math/types.h>
//#include <events/event.h>
#include <pwn/engine/key.h>
namespace pwn
{
namespace render
{
class VirtualDisplay;
}
namespace engine
{
class System;
class Display;
/** Implementation of the Game class.
* This class sort of follows the pimpl idiom, except the "private" part is private at library scope, not at class scope
*/
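/* Sketch of the intended split (illustrative): the public Game class holds a
pointer to a GameImp and forwards to it; engine sources include this header,
while client code only ever sees the Game interface. */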
class GameImp
{<|fim▁hole|> void
install(System* system); // assumes ownership
void
updateSystems();
// only associate, ownership has to be handled somewhere else
void
display_add(int id, Display* disp);
void
display(int id, render::VirtualDisplay& world);
void
display_remove(int id, Display* disp);
// post events
void
handleKey(Key::Code key, bool isDown);
void
handleMouse(const math::vec2 movement);
typedef boost::shared_ptr<System> SystemPtr;
private:
std::vector<SystemPtr> systems;
typedef std::map<int, Display*> DisplayMap;
DisplayMap displays;
};
}
}
#endif<|fim▁end|>
|
public:
GameImp();
~GameImp();
|
<|file_name|>zvectorgenerator.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
#include "zvectorgenerator.h"
|
<|file_name|>events.js<|end_file_name|><|fim▁begin|>/**
* Why has this not been moved to e.g. @tryghost/events or shared yet?
*
* - We currently massively overuse this utility, coupling together bits of the codebase in unexpected ways
* - We want to prevent this, not reinforce it
* Having an @tryghost/events or shared/events module would reinforce this bad pattern of using the same event emitter everywhere
*
* - Ideally, we want to refactor to:
* - either remove dependence on events where we can
* - or have separate event emitters for e.g. model layer and routing layer
*
*/
const events = require('events');
const util = require('util');<|fim▁hole|>let EventRegistry;
let EventRegistryInstance;
EventRegistry = function () {
events.EventEmitter.call(this);
};
util.inherits(EventRegistry, events.EventEmitter);
EventRegistryInstance = new EventRegistry();
EventRegistryInstance.setMaxListeners(100);
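// usage sketch (event name illustrative): every require() of this module gets
// the same instance, so one module can do events.on('post.published', fn)
// and another events.emit('post.published', model) on the shared emitter.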
module.exports = EventRegistryInstance;<|fim▁end|>
| |
<|file_name|>contextManager.py<|end_file_name|><|fim▁begin|># This file is part of Indico.<|fim▁hole|># Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
"""
Just for backwards-compatibility
"""
from indico.util.contextManager import *<|fim▁end|>
| |
<|file_name|>_version.py<|end_file_name|><|fim▁begin|># This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by GitHub's download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.15 (https://github.com/warner/python-versioneer)
import errno
import os
import re
import subprocess
import sys
def get_keywords():
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
pass
def get_config():
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = ""
cfg.versionfile_source = "doctr/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
pass
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
def decorate(f):
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
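# e.g. (illustrative): with parentdir_prefix "myproject-", a tree unpacked
# as "myproject-1.2.3/" yields the version string "1.2.3".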
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%s', but '%s' doesn't start with "
"prefix '%s'" % (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs-tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag, this yields TAG-NUM-gHEX[-dirty]
# if there are no tags, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long"],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
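# e.g. (illustrative): "v1.2-3-g0a1b2c3-dirty" parses to closest-tag "v1.2",
# distance 3, short hash "0a1b2c3", dirty True.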
git_describe = describe_out<|fim▁hole|> dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
# now build up version string, with post-release "local version
# identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
# get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
# exceptions:
# 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
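# e.g. (illustrative): closest-tag "1.4", distance 3, short "abc1234" on a
# dirty tree renders as "1.4+3.gabc1234.dirty".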
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
# TAG[.post.devDISTANCE] . No -dirty
# exceptions:
# 1: no tags. 0.post.devDISTANCE
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
# TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
# .dev0 sorts backwards (a dirty tree will appear "older" than the
# corresponding clean one), but you shouldn't be releasing software with
# -dirty anyways.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
# TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
# TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
# --always'
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
# TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
# --always -long'. The distance/hash is unconditional.
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}<|fim▁end|>
|
# look for -dirty suffix
|
<|file_name|>bitcoin_ms_MY.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ms_MY" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>DarkSwift</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The DarkSwift developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klik dua kali untuk mengubah alamat atau label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Cipta alamat baru</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Salin alamat terpilih ke dalam sistem papan klip</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your DarkSwift addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Padam</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fail yang dipisahkan dengan koma</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>DarkSwift will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show information about DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Pilihan...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>DarkSwift client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to DarkSwift network</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About DarkSwift card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about DarkSwift card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid DarkSwift address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. DarkSwift can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Sunting Alamat</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Alamat</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid DarkSwift address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>DarkSwift-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start DarkSwift after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start DarkSwift on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the DarkSwift client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the DarkSwift network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting DarkSwift.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show DarkSwift addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting DarkSwift.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the DarkSwift network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the DarkSwift-Qt help message to get a list with possible DarkSwift command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>DarkSwift - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>DarkSwift Core</source>
<translation type="unfinished"/>
</message>
<message>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the DarkSwift debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the DarkSwift RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Baki:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified DarkSwift address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a DarkSwift address (e.g. Sjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter DarkSwift signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Fail yang dipisahkan dengan koma (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>DarkSwift version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or DarkSwiftd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: DarkSwift.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: DarkSwiftd.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
        <source>Warning: Please check that your computer's date and time are correct! If your clock is wrong, DarkSwift will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
        <source>WARNING: synchronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=DarkSwiftrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "DarkSwift Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
        <source>Require confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. DarkSwift is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of DarkSwift</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart DarkSwift to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. DarkSwift is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
<location line="+279"/>
|
<|file_name|>AndFilterBlock.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.ssg.dcst.panthera.parse.sql.transformer.fb;
import java.util.ArrayList;
import java.util.List;
import org.antlr.runtime.tree.CommonTree;
import com.intel.ssg.dcst.panthera.parse.sql.PantheraExpParser;
import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateException;
import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateUtil;
import com.intel.ssg.dcst.panthera.parse.sql.TranslateContext;
import br.com.porcelli.parser.plsql.PantheraParser_PLSQLParser;
/**
 * Transform AND to JOIN (by rebuilding the left select).<br>
* AndFilterBlock.
*
*/
public class AndFilterBlock extends LogicFilterBlock {
/**
   * This node must have exactly two children.
*
* @throws SqlXlateException
*/
@Override
public void process(FilterBlockContext fbContext, TranslateContext context)
throws SqlXlateException {
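    // Process the left branch first; its transformed query becomes the
    // base that the right-hand condition is merged into.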
FilterBlock leftFB = this.getChildren().get(0);
leftFB.process(fbContext, context);
fbContext.getQueryStack().peek().setQueryForTransfer(leftFB.getTransformedNode());
fbContext.getQueryStack().peek().setRebuildQueryForTransfer();
FilterBlock rightFB = this.getChildren().get(1);
CommonTree condition = rightFB.getASTNode();
TypeFilterBlock type = fbContext.getTypeStack().peek();
if (rightFB instanceof UnCorrelatedFilterBlock) {
// simple condition
if (type instanceof WhereFilterBlock) {
rebuildWhereCondition(leftFB, condition);
}
if (type instanceof HavingFilterBlock) {
rebuildHavingCondition(leftFB, condition);
}
this.setTransformedNode(leftFB.getTransformedNode());
} else {
rightFB.process(fbContext, context);
this.setTransformedNode(rightFB.getTransformedNode());
}
}
private void rebuildWhereCondition(FilterBlock leftFB, CommonTree condition) {
CommonTree transformedSelect = leftFB.getTransformedNode();
rebuildWhereCond(transformedSelect, condition);
}
private void rebuildWhereCond(CommonTree transformedSelect, CommonTree condition) {
if (transformedSelect.getType() == PantheraParser_PLSQLParser.SUBQUERY) {
for (int i = 0; i < transformedSelect.getChildCount(); i++) {
rebuildWhereCond((CommonTree) transformedSelect.getChild(i), condition);
}
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT) {
rebuildWhereCondition(transformedSelect, condition);
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_UNION) { // UNION node
rebuildWhereCond((CommonTree) transformedSelect.getChild(0), condition);
}
}
private void rebuildWhereCondition(CommonTree transformedSelect, CommonTree condition) {
CommonTree tableRefElement = (CommonTree) transformedSelect.getChild(0).getChild(0).getChild(0);
CommonTree subQuery = (CommonTree) tableRefElement.getChild(tableRefElement.getChildCount() - 1).getChild(0)
.getChild(0).getChild(0);
List<List<CommonTree>> selects = new ArrayList<List<CommonTree>>();
for (int i = 0; i < subQuery.getChildCount(); i++) {
List<CommonTree> selectLists = new ArrayList<CommonTree>();
FilterBlockUtil.findNode((CommonTree) subQuery.getChild(i),
PantheraExpParser.SELECT_LIST, selectLists);
      assert selectLists != null;
List<CommonTree> oneSelects = new ArrayList<CommonTree>();
      for (CommonTree sl : selectLists) {
oneSelects.add((CommonTree) sl.getParent());
}
selects.add(oneSelects);
}
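    // Pick the SELECT whose line position matches the condition's
    // enclosing SELECT; if none matches, the last candidate is used.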
    for (List<CommonTree> sels : selects) {
CommonTree sel = sels.get(0);
for (int j = 0; j < sels.size(); j++) {
sel = sels.get(j);
        if (sel.getCharPositionInLine() == condition.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT).getCharPositionInLine()) {
break;
}
}
CommonTree where = (CommonTree) sel
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_WHERE);
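      // Create a WHERE clause if this SELECT lacks one; otherwise merge
      // the condition into the existing LOGIC_EXPR.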
if (where == null) {
where = FilterBlockUtil.createSqlASTNode(condition, PantheraExpParser.SQL92_RESERVED_WHERE,
"where");
CommonTree group = (CommonTree) sel
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_GROUP);
if (group != null) {
int groupIndex = group.getChildIndex();
SqlXlateUtil.addCommonTreeChild(sel, groupIndex, where);
} else {
sel.addChild(where);
}
CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(condition,
PantheraExpParser.LOGIC_EXPR, "LOGIC_EXPR");
where.addChild(logicExpr);
logicExpr.addChild(condition);
} else {
CommonTree logicExpr = (CommonTree) where.getChild(0);
FilterBlockUtil.addConditionToLogicExpr(logicExpr, condition);<|fim▁hole|> }
}
private void rebuildHavingCondition(FilterBlock leftFB, CommonTree condition) {
CommonTree transformedSelect = leftFB.getTransformedNode();
rebuildHavingCond(transformedSelect, condition);
}
private void rebuildHavingCond(CommonTree transformedSelect, CommonTree condition) {
if (transformedSelect.getType() == PantheraParser_PLSQLParser.SUBQUERY) {
for (int i = 0; i < transformedSelect.getChildCount(); i++) {
rebuildHavingCond((CommonTree) transformedSelect.getChild(i), condition);
}
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT) {
rebuildHavingCondition(transformedSelect, condition);
} else if (transformedSelect.getType() == PantheraParser_PLSQLParser.SQL92_RESERVED_UNION) { // UNION node
rebuildHavingCond((CommonTree) transformedSelect.getChild(0), condition);
}
}
private void rebuildHavingCondition(CommonTree transformedSelect, CommonTree condition) {
CommonTree tableRefElement = (CommonTree) transformedSelect.getChild(0).getChild(0).getChild(0);
CommonTree subQuery = (CommonTree) tableRefElement.getChild(tableRefElement.getChildCount() - 1).getChild(0)
.getChild(0).getChild(0);
List<List<CommonTree>> groups = new ArrayList<List<CommonTree>>();
    for (int i = 0; i < subQuery.getChildCount(); i++) {
List<CommonTree> oneGroups = new ArrayList<CommonTree>();
FilterBlockUtil.findNode((CommonTree) subQuery.getChild(i),
PantheraExpParser.SQL92_RESERVED_GROUP, oneGroups);
      assert oneGroups != null;
groups.add(oneGroups);
}
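    // Pick the GROUP whose line position matches the condition's
    // enclosing GROUP; if none matches, the last candidate is used.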
    for (List<CommonTree> grps : groups) {
CommonTree group = grps.get(0);
for (int j = 0; j < grps.size(); j++) {
group = grps.get(j);
        if (group.getCharPositionInLine() == condition.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_GROUP).getCharPositionInLine()) {
break;
}
}
CommonTree having = (CommonTree) group
.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_HAVING);
if (having == null) {
having = FilterBlockUtil.createSqlASTNode(condition, PantheraExpParser.SQL92_RESERVED_HAVING,
"having");
group.addChild(having);
CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(condition,
PantheraExpParser.LOGIC_EXPR, "LOGIC_EXPR");
having.addChild(logicExpr);
logicExpr.addChild(condition);
} else {
CommonTree logicExpr = (CommonTree) having.getChild(0);
FilterBlockUtil.addConditionToLogicExpr(logicExpr, condition);
}
}
}
}<|fim▁end|>
|
}
|
<|file_name|>Unit Group Form - Information.js<|end_file_name|><|fim▁begin|>/// <reference path="Xrm.js" />
var EntityLogicalName = "uomschedule";<|fim▁hole|>};
var Form_a793fa7c_8b63_43f0_b4bc_73f75a68935a_Controls = {
description: "description",
name: "name"
};
var pageData = {
"Event": "none",
"SaveMode": 1,
"EventSource": null,
"AuthenticationHeader": "",
"CurrentTheme": "Default",
"OrgLcid": 1033,
"OrgUniqueName": "",
"QueryStringParameters": {
"_gridType": "1056",
"etc": "1056",
"id": "",
"pagemode": "iframe",
"preloadcache": "1344548892170",
"rskey": "141637534"
},
"ServerUrl": "",
"UserId": "",
"UserLcid": 1033,
"UserRoles": [""],
"isOutlookClient": false,
"isOutlookOnline": true,
"DataXml": "",
"EntityName": "uomschedule",
"Id": "",
"IsDirty": false,
"CurrentControl": "",
"CurrentForm": null,
"Forms": [],
"FormType": 2,
"ViewPortHeight": 558,
"ViewPortWidth": 1231,
"Attributes": [{
"Name": "description",
"Value": "",
"Type": "memo",
"Format": "text",
"IsDirty": false,
"RequiredLevel": "none",
"SubmitMode": "dirty",
"UserPrivilege": {
"canRead": true,
"canUpdate": true,
"canCreate": true
},
"MaxLength": 2000,
"Controls": [{
"Name": "description"
}]
},
{
"Name": "name",
"Value": "",
"Type": "string",
"Format": "text",
"IsDirty": false,
"RequiredLevel": "none",
"SubmitMode": "dirty",
"UserPrivilege": {
"canRead": true,
"canUpdate": true,
"canCreate": true
},
"MaxLength": 200,
"Controls": [{
"Name": "name"
}]
}],
"AttributesLength": 2,
"Controls": [{
"Name": "description",
"Type": "standard",
"Disabled": false,
"Visible": true,
"Label": "Description",
"Attribute": "description"
},
{
"Name": "name",
"Type": "standard",
"Disabled": false,
"Visible": true,
"Label": "Name",
"Attribute": "name"
}],
"ControlsLength": 2,
"Navigation": [],
"Tabs": [{
"Label": "General",
"Name": "general",
"DisplayState": "expanded",
"Visible": true,
"Sections": [{
"Label": "UOM Schedule Information",
"Name": "uom schedule information",
"Visible": true,
"Controls": [{
"Name": "name"
},
{
"Name": "description"
}]
}]
}]
};
var Xrm = new _xrm(pageData);<|fim▁end|>
|
var Form_a793fa7c_8b63_43f0_b4bc_73f75a68935a_Properties = {
description: "description",
name: "name"
|
<|file_name|>test_revoke.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
import mock
from keystone.common import dependency
from keystone import config
from keystone.contrib.revoke import model
from keystone import exception
from keystone.openstack.common import timeutils
from keystone import tests
from keystone.tests import test_backend_sql
CONF = config.CONF
def _new_id():
return uuid.uuid4().hex
def _future_time():
expire_delta = datetime.timedelta(seconds=1000)
future_time = timeutils.utcnow() + expire_delta
return future_time
def _past_time():
expire_delta = datetime.timedelta(days=-1000)
past_time = timeutils.utcnow() + expire_delta
return past_time
def _sample_blank_token():
issued_delta = datetime.timedelta(minutes=-2)
issued_at = timeutils.utcnow() + issued_delta
token_data = model.blank_token_data(issued_at)
return token_data
def _matches(event, token_values):
"""See if the token matches the revocation event.
Used as a secondary check on the logic to Check
By Tree Below: This is abrute force approach to checking.
Compare each attribute from the event with the corresponding
value from the token. If the event does not have a value for
the attribute, a match is still possible. If the event has a
value for the attribute, and it does not match the token, no match
is possible, so skip the remaining checks.
:param event one revocation event to match
:param token_values dictionary with set of values taken from the
token
:returns if the token matches the revocation event, indicating the
token has been revoked
"""
# The token has three attributes that can match the user_id
if event.user_id is not None:
for attribute_name in ['user_id', 'trustor_id', 'trustee_id']:
if event.user_id == token_values[attribute_name]:
break
else:
return False
# The token has two attributes that can match the domain_id
if event.domain_id is not None:
for attribute_name in ['user_domain_id', 'project_domain_id']:
if event.domain_id == token_values[attribute_name]:
break
else:
return False
    # If any one check does not match, the whole token does
    # not match the event. The numerous return False statements
    # indicate that the token is still valid and short-circuit
    # the rest of the logic.
attribute_names = ['project_id',
'expires_at', 'trust_id', 'consumer_id',
'access_token_id']
for attribute_name in attribute_names:
if getattr(event, attribute_name) is not None:
if (getattr(event, attribute_name) !=
token_values[attribute_name]):
return False
if event.role_id is not None:
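        # for/else: a break below means some role matched; reaching the
        # else means none did, so this event does not revoke the token.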
roles = token_values['roles']
for role in roles:
if event.role_id == role:
break
else:
return False
if token_values['issued_at'] > event.issued_before:
return False
return True
@dependency.requires('revoke_api')
class RevokeTests(object):
def test_list(self):
self.revoke_api.revoke_by_user(user_id=1)
self.assertEqual(1, len(self.revoke_api.get_events()))
self.revoke_api.revoke_by_user(user_id=2)
self.assertEqual(2, len(self.revoke_api.get_events()))
def test_list_since(self):
self.revoke_api.revoke_by_user(user_id=1)
self.revoke_api.revoke_by_user(user_id=2)
past = timeutils.utcnow() - datetime.timedelta(seconds=1000)
self.assertEqual(2, len(self.revoke_api.get_events(past)))
future = timeutils.utcnow() + datetime.timedelta(seconds=1000)
self.assertEqual(0, len(self.revoke_api.get_events(future)))
def test_past_expiry_are_removed(self):
user_id = 1
self.revoke_api.revoke_by_expiration(user_id, _future_time())
self.assertEqual(1, len(self.revoke_api.get_events()))
event = model.RevokeEvent()
event.revoked_at = _past_time()
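        # Events already revoked in the past are pruned, so the count
        # stays at one.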
self.revoke_api.revoke(event)
self.assertEqual(1, len(self.revoke_api.get_events()))
<|fim▁hole|> token = _sample_blank_token()
token['expires_at'] = timeutils.isotime(_future_time(),
subsecond=True)
return token
now = datetime.datetime.utcnow()
now_plus_2h = now + datetime.timedelta(hours=2)
mock_utcnow.return_value = now
# Build a token and validate it. This will seed the cache for the
# future 'synchronize' call.
token_values = _sample_token_values()
user_id = _new_id()
self.revoke_api.revoke_by_user(user_id)
token_values['user_id'] = user_id
self.assertRaises(exception.TokenNotFound,
self.revoke_api.check_token,
token_values)
# Move our clock forward by 2h, build a new token and validate it.
# 'synchronize' should now be exercised and remove old expired events
mock_utcnow.return_value = now_plus_2h
self.revoke_api.revoke_by_expiration(_new_id(), now_plus_2h)
# should no longer throw an exception
self.revoke_api.check_token(token_values)
class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
def config_overrides(self):
super(SqlRevokeTests, self).config_overrides()
self.config_fixture.config(
group='revoke',
driver='keystone.contrib.revoke.backends.sql.Revoke')
self.config_fixture.config(
group='token',
provider='keystone.token.providers.pki.Provider',
revoke_by_id=False)
class KvsRevokeTests(tests.TestCase, RevokeTests):
def config_overrides(self):
super(KvsRevokeTests, self).config_overrides()
self.config_fixture.config(
group='revoke',
driver='keystone.contrib.revoke.backends.kvs.Revoke')
self.config_fixture.config(
group='token',
provider='keystone.token.providers.pki.Provider',
revoke_by_id=False)
def setUp(self):
super(KvsRevokeTests, self).setUp()
self.load_backends()
class RevokeTreeTests(tests.TestCase):
def setUp(self):
super(RevokeTreeTests, self).setUp()
self.events = []
self.tree = model.RevokeTree()
self._sample_data()
def _sample_data(self):
user_ids = []
project_ids = []
role_ids = []
for i in range(0, 3):
user_ids.append(_new_id())
project_ids.append(_new_id())
role_ids.append(_new_id())
project_tokens = []
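        # Three near-miss tokens, each differing from token_to_revoke in
        # exactly one field (role, user, or project).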
i = len(project_tokens)
project_tokens.append(_sample_blank_token())
project_tokens[i]['user_id'] = user_ids[0]
project_tokens[i]['project_id'] = project_ids[0]
project_tokens[i]['roles'] = [role_ids[1]]
i = len(project_tokens)
project_tokens.append(_sample_blank_token())
project_tokens[i]['user_id'] = user_ids[1]
project_tokens[i]['project_id'] = project_ids[0]
project_tokens[i]['roles'] = [role_ids[0]]
i = len(project_tokens)
project_tokens.append(_sample_blank_token())
project_tokens[i]['user_id'] = user_ids[0]
project_tokens[i]['project_id'] = project_ids[1]
project_tokens[i]['roles'] = [role_ids[0]]
token_to_revoke = _sample_blank_token()
token_to_revoke['user_id'] = user_ids[0]
token_to_revoke['project_id'] = project_ids[0]
token_to_revoke['roles'] = [role_ids[0]]
self.project_tokens = project_tokens
self.user_ids = user_ids
self.project_ids = project_ids
self.role_ids = role_ids
self.token_to_revoke = token_to_revoke
def _assertTokenRevoked(self, token_data):
self.assertTrue(any([_matches(e, token_data) for e in self.events]))
return self.assertTrue(self.tree.is_revoked(token_data),
'Token should be revoked')
def _assertTokenNotRevoked(self, token_data):
self.assertFalse(any([_matches(e, token_data) for e in self.events]))
return self.assertFalse(self.tree.is_revoked(token_data),
'Token should not be revoked')
def _revoke_by_user(self, user_id):
return self.tree.add_event(
model.RevokeEvent(user_id=user_id))
def _revoke_by_expiration(self, user_id, expires_at):
event = self.tree.add_event(
model.RevokeEvent(user_id=user_id,
expires_at=expires_at))
self.events.append(event)
return event
def _revoke_by_grant(self, role_id, user_id=None,
domain_id=None, project_id=None):
event = self.tree.add_event(
model.RevokeEvent(user_id=user_id,
role_id=role_id,
domain_id=domain_id,
project_id=project_id))
self.events.append(event)
return event
def _revoke_by_user_and_project(self, user_id, project_id):
event = self.tree.add_event(
model.RevokeEvent(project_id=project_id,
user_id=user_id))
self.events.append(event)
return event
def _revoke_by_project_role_assignment(self, project_id, role_id):
event = self.tree.add_event(
model.RevokeEvent(project_id=project_id,
role_id=role_id))
self.events.append(event)
return event
def _revoke_by_domain_role_assignment(self, domain_id, role_id):
event = self.tree.add_event(
model.RevokeEvent(domain_id=domain_id,
role_id=role_id))
self.events.append(event)
return event
def _user_field_test(self, field_name):
user_id = _new_id()
event = self._revoke_by_user(user_id)
self.events.append(event)
token_data_u1 = _sample_blank_token()
token_data_u1[field_name] = user_id
self._assertTokenRevoked(token_data_u1)
token_data_u2 = _sample_blank_token()
token_data_u2[field_name] = _new_id()
self._assertTokenNotRevoked(token_data_u2)
self.tree.remove_event(event)
self.events.remove(event)
self._assertTokenNotRevoked(token_data_u1)
def test_revoke_by_user(self):
self._user_field_test('user_id')
def test_revoke_by_user_matches_trustee(self):
self._user_field_test('trustee_id')
def test_revoke_by_user_matches_trustor(self):
self._user_field_test('trustor_id')
def test_by_user_expiration(self):
future_time = _future_time()
user_id = 1
event = self._revoke_by_expiration(user_id, future_time)
token_data_1 = _sample_blank_token()
token_data_1['user_id'] = user_id
token_data_1['expires_at'] = future_time
self._assertTokenRevoked(token_data_1)
token_data_2 = _sample_blank_token()
token_data_2['user_id'] = user_id
expire_delta = datetime.timedelta(seconds=2000)
future_time = timeutils.utcnow() + expire_delta
token_data_2['expires_at'] = future_time
self._assertTokenNotRevoked(token_data_2)
self.removeEvent(event)
self._assertTokenNotRevoked(token_data_1)
def removeEvent(self, event):
self.events.remove(event)
self.tree.remove_event(event)
def test_by_project_grant(self):
token_to_revoke = self.token_to_revoke
tokens = self.project_tokens
self._assertTokenNotRevoked(token_to_revoke)
for token in tokens:
self._assertTokenNotRevoked(token)
event = self._revoke_by_grant(role_id=self.role_ids[0],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._assertTokenRevoked(token_to_revoke)
for token in tokens:
self._assertTokenNotRevoked(token)
self.removeEvent(event)
self._assertTokenNotRevoked(token_to_revoke)
for token in tokens:
self._assertTokenNotRevoked(token)
token_to_revoke['roles'] = [self.role_ids[0],
self.role_ids[1],
self.role_ids[2]]
event = self._revoke_by_grant(role_id=self.role_ids[0],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._assertTokenRevoked(token_to_revoke)
self.removeEvent(event)
self._assertTokenNotRevoked(token_to_revoke)
event = self._revoke_by_grant(role_id=self.role_ids[1],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._assertTokenRevoked(token_to_revoke)
self.removeEvent(event)
self._assertTokenNotRevoked(token_to_revoke)
self._revoke_by_grant(role_id=self.role_ids[0],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._revoke_by_grant(role_id=self.role_ids[1],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._revoke_by_grant(role_id=self.role_ids[2],
user_id=self.user_ids[0],
project_id=self.project_ids[0])
self._assertTokenRevoked(token_to_revoke)
def test_by_project_and_user_and_role(self):
user_id1 = _new_id()
user_id2 = _new_id()
project_id = _new_id()
self.events.append(self._revoke_by_user(user_id1))
self.events.append(
self._revoke_by_user_and_project(user_id2, project_id))
token_data = _sample_blank_token()
token_data['user_id'] = user_id2
token_data['project_id'] = project_id
self._assertTokenRevoked(token_data)
def _assertEmpty(self, collection):
return self.assertEqual(0, len(collection), "collection not empty")
def _assertEventsMatchIteration(self, turn):
self.assertEqual(1, len(self.tree.revoke_map))
self.assertEqual(turn + 1, len(self.tree.revoke_map
['trust_id=*']
['consumer_id=*']
['access_token_id=*']))
# two different functions add domain_ids, +1 for None
self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
['trust_id=*']
['consumer_id=*']
['access_token_id=*']
['expires_at=*']))
# two different functions add project_ids, +1 for None
self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
['trust_id=*']
['consumer_id=*']
['access_token_id=*']
['expires_at=*']
['domain_id=*']))
# 10 users added
self.assertEqual(turn, len(self.tree.revoke_map
['trust_id=*']
['consumer_id=*']
['access_token_id=*']
['expires_at=*']
['domain_id=*']
['project_id=*']))
def test_cleanup(self):
events = self.events
self._assertEmpty(self.tree.revoke_map)
expiry_base_time = _future_time()
for i in range(0, 10):
events.append(
self._revoke_by_user(_new_id()))
args = (_new_id(),
expiry_base_time + datetime.timedelta(seconds=i))
events.append(
self._revoke_by_expiration(*args))
self.assertEqual(i + 2, len(self.tree.revoke_map
['trust_id=*']
['consumer_id=*']
['access_token_id=*']),
'adding %s to %s' % (args,
self.tree.revoke_map))
events.append(
self._revoke_by_project_role_assignment(_new_id(), _new_id()))
events.append(
self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
events.append(
self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
events.append(
self._revoke_by_user_and_project(_new_id(), _new_id()))
self._assertEventsMatchIteration(i + 1)
for event in self.events:
self.tree.remove_event(event)
self._assertEmpty(self.tree.revoke_map)<|fim▁end|>
|
@mock.patch.object(timeutils, 'utcnow')
def test_expired_events_removed_validate_token_success(self, mock_utcnow):
def _sample_token_values():
|
<|file_name|>add.rs<|end_file_name|><|fim▁begin|>#![feature(core, core_simd)]
extern crate core;
#[cfg(test)]
mod tests {
use core::simd::u32x4;
// #[simd]
// #[derive(Copy, Clone, Debug)]
// #[repr(C)]
// pub struct u32x4(pub u32, pub u32, pub u32, pub u32);
<|fim▁hole|> 0, 1, 2, 3
);
let y: u32x4 = u32x4(
2, 2, 2, 2
);
let z: u32x4 = x + y;
let result: String = format!("{:?}", z);
assert_eq!(result, "u32x4(2, 3, 4, 5)".to_string());
}
}<|fim▁end|>
|
#[test]
fn add_test1() {
let x: u32x4 = u32x4(
|
<|file_name|>scmutil.py<|end_file_name|><|fim▁begin|># Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import time
import traceback
import weakref
from . import (
encoding,
error,
match as matchmod,
pathutil,
phases,
pycompat,
revsetlang,
similar,
smartset,
url,
util,
vfs,
visibility,
winutil,
)
from .i18n import _
from .node import hex, nullid, short, wdirid, wdirrev
from .pycompat import basestring, encodeutf8, isint
if pycompat.iswindows:
from . import scmwindows as scmplatform
else:
from . import scmposix as scmplatform
termsize = scmplatform.termsize
# pyre-fixme[39]: `Tuple[Any, ...]` is not a valid parent class.
class status(tuple):
"""Named tuple with a list of files per status. The 'deleted', 'unknown'
and 'ignored' properties are only relevant to the working copy.
"""
__slots__ = ()
def __new__(cls, modified, added, removed, deleted, unknown, ignored, clean):
assert all(isinstance(f, str) for f in modified)
assert all(isinstance(f, str) for f in added)
assert all(isinstance(f, str) for f in removed)
assert all(isinstance(f, str) for f in deleted)
assert all(isinstance(f, str) for f in unknown)
assert all(isinstance(f, str) for f in ignored)
assert all(isinstance(f, str) for f in clean)
return tuple.__new__(
cls, (modified, added, removed, deleted, unknown, ignored, clean)
)
@property
def modified(self):
"""files that have been modified"""
return self[0]
@property
def added(self):
"""files that have been added"""
return self[1]
@property
def removed(self):
"""files that have been removed"""
return self[2]
@property
def deleted(self):
"""files that are in the dirstate, but have been deleted from the
working copy (aka "missing")
"""
return self[3]
@property
def unknown(self):
"""files not in the dirstate that are not ignored"""
return self[4]
@property
def ignored(self):
"""files not in the dirstate that are ignored (by _dirignore())"""
return self[5]
@property
def clean(self):
"""files that have not been modified"""
return self[6]
def __repr__(self, *args, **kwargs):
return (
"<status modified=%r, added=%r, removed=%r, deleted=%r, "
"unknown=%r, ignored=%r, clean=%r>"
) % self
def nochangesfound(ui, repo, excluded=None):
"""Report no changes for push/pull, excluded is None or a list of
nodes excluded from the push/pull.
"""
secretlist = []
if excluded:
for n in excluded:
ctx = repo[n]
if ctx.phase() >= phases.secret:
secretlist.append(n)
if secretlist:
ui.status(
_("no changes found (ignored %d secret changesets)\n") % len(secretlist)
)
else:
ui.status(_("no changes found\n"))
def callcatch(ui, func):
"""call func() with global exception handling
return func() if no exception happens. otherwise do some error handling
and return an exit code accordingly. does not handle all exceptions.
"""
try:
try:
return func()
except Exception as ex: # re-raises
ui.traceback()
# Log error info for all non-zero exits.
_uploadtraceback(ui, str(ex), util.smartformatexc())
raise
finally:
# Print 'remote:' messages before 'abort:' messages.
# This also avoids sshpeer.__del__ during Py_Finalize -> GC
# on Python 3, which can cause deadlocks waiting for the
# stderr reading thread.
from . import sshpeer
sshpeer.cleanupall()
# Global exception handling, alphabetically
# Mercurial-specific first, followed by built-in and library exceptions
except error.LockHeld as inst:
if inst.errno == errno.ETIMEDOUT:
reason = _("timed out waiting for lock held by %s") % inst.lockinfo
else:
reason = _("lock held by %r") % inst.lockinfo
ui.warn(_("%s: %s\n") % (inst.desc or inst.filename, reason), error=_("abort"))
if not inst.lockinfo:
ui.warn(_("(lock might be very busy)\n"))
except error.LockUnavailable as inst:
ui.warn(
_("could not lock %s: %s\n")
% (inst.desc or inst.filename, encoding.strtolocal(inst.strerror)),
error=_("abort"),
)
except error.OutOfBandError as inst:
if inst.args:
msg = _("remote error:\n")
else:
msg = _("remote error\n")
ui.warn(msg, error=_("abort"))
if inst.args:
ui.warn("".join(inst.args))
if inst.hint:
ui.warn("(%s)\n" % inst.hint)
except error.RepoError as inst:
ui.warn(_("%s!\n") % inst, error=_("abort"))
inst.printcontext(ui)
if inst.hint:
ui.warn(_("(%s)\n") % inst.hint)
except error.ResponseError as inst:
ui.warn(inst.args[0], error=_("abort"))
if not isinstance(inst.args[1], basestring):
ui.warn(" %r\n" % (inst.args[1],))
elif not inst.args[1]:
ui.warn(_(" empty string\n"))
else:
ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
except error.CensoredNodeError as inst:
ui.warn(_("file censored %s!\n") % inst, error=_("abort"))
except error.CommitLookupError as inst:
ui.warn(_("%s!\n") % inst.args[0], error=_("abort"))
except error.CertificateError as inst:
# This error is definitively due to a problem with the user's client
# certificate, so print the configured remediation message.
helptext = ui.config("help", "tlsauthhelp")
if helptext is None:
helptext = _("(run 'hg config auth' to see configured certificates)")
ui.warn(
_("%s!\n\n%s\n") % (inst.args[0], helptext),
error=_("certificate error"),
)
except error.TlsError as inst:
# This is a generic TLS error that may or may not be due to the user's
# client certificate, so print a more generic message about TLS errors.
helptext = ui.config("help", "tlshelp")
if helptext is None:
helptext = _("(is your client certificate valid?)")
ui.warn(
_("%s!\n\n%s\n") % (inst.args[0], helptext),
error=_("tls error"),
)
except error.RevlogError as inst:
ui.warn(_("%s!\n") % inst, error=_("abort"))
inst.printcontext(ui)
except error.InterventionRequired as inst:
ui.warn("%s\n" % inst)
if inst.hint:
ui.warn(_("(%s)\n") % inst.hint)
return 1
except error.WdirUnsupported:
ui.warn(_("working directory revision cannot be specified\n"), error=_("abort"))
except error.Abort as inst:
ui.warn(_("%s\n") % inst, error=_("abort"), component=inst.component)
inst.printcontext(ui)
if inst.hint:
ui.warn(_("(%s)\n") % inst.hint)
return inst.exitcode<|fim▁hole|> ui.warn(_(" %s\n\n") % str(inst).replace("\n", "\n "))
ui.warn(_("(this usually happens after hard reboot or system crash)\n"))
ui.warn(_("(try '@prog@ doctor' to attempt to fix it)\n"))
except error.RustError as inst:
if ui.config("ui", "traceback") and inst.args[0].has_metadata():
fault = inst.args[0].fault()
transience = inst.args[0].transience()
category = inst.args[0].category()
typename = inst.args[0].typename()
ui.warn(
_("error has type name %s, category %s, transience %s, and fault %s\n")
% (typename, category, transience, fault)
)
raise
except error.RevisionstoreError as inst:
ui.warn(_("%s\n") % inst, error=_("abort"))
except error.NonUTF8PathError as inst:
ui.warn(_("%s\n") % str(inst), error=_("abort"))
except ImportError as inst:
ui.warn(_("%s!\n") % inst, error=_("abort"))
m = str(inst).split()[-1]
if m in "mpatch bdiff".split():
ui.warn(_("(did you forget to compile extensions?)\n"))
elif m in "zlib".split():
ui.warn(_("(is your Python install correct?)\n"))
except IOError as inst:
if util.safehasattr(inst, "code"):
ui.warn(_("%s\n") % inst, error=_("abort"))
elif util.safehasattr(inst, "reason"):
try: # usually it is in the form (errno, strerror)
reason = inst.reason.args[1]
except (AttributeError, IndexError):
# it might be anything, for example a string
reason = inst.reason
if isinstance(reason, pycompat.unicode):
# SSLError of Python 2.7.9 contains a unicode
reason = encoding.unitolocal(reason)
ui.warn(_("error: %s\n") % reason, error=_("abort"))
elif (
util.safehasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE
):
pass
elif getattr(inst, "strerror", None):
filename = getattr(inst, "filename", None)
if filename:
ui.warn(
_("%s: %s\n") % (encoding.strtolocal(inst.strerror), inst.filename),
error=_("abort"),
)
else:
ui.warn(
_("%s\n") % encoding.strtolocal(inst.strerror), error=_("abort")
)
if not pycompat.iswindows:
# For permission errors on POSIX. Show more information about the
# current user, group, and stat results.
num = getattr(inst, "errno", None)
if filename is not None and num in {errno.EACCES, errno.EPERM}:
if util.istest():
uid = 42
else:
uid = os.getuid()
ui.warn(_("(current process runs with uid %s)\n") % uid)
_printstat(ui, filename)
_printstat(ui, os.path.dirname(filename))
else:
ui.warn(_("%s\n") % inst, error=_("abort"))
except OSError as inst:
if getattr(inst, "filename", None) is not None:
ui.warn(
_("%s: %s\n") % (encoding.strtolocal(inst.strerror), inst.filename),
error=_("abort"),
)
else:
ui.warn(_("%s\n") % encoding.strtolocal(inst.strerror), error=_("abort"))
except MemoryError:
ui.warn(_("out of memory\n"), error=_("abort"))
except SystemExit as inst:
# Commands shouldn't sys.exit directly, but give a return code.
# Just in case catch this and pass exit code to caller.
return inst.code
except socket.error as inst:
ui.warn(_("%s\n") % inst.args[-1], error=_("abort"))
except Exception as e:
if type(e).__name__ == "TApplicationException":
ui.warn(_("ThriftError: %s\n") % e, error=_("abort"))
ui.warn(_("(try 'eden doctor' to diagnose this issue)\n"))
else:
raise
return -1
def _uploadtraceback(ui, message, trace):
key = "flat/errortrace-%(host)s-%(pid)s-%(time)s" % {
"host": socket.gethostname(),
"pid": os.getpid(),
"time": time.time(),
}
payload = message + "\n\n" + trace
# TODO: Move this into a background task that renders from
# blackbox instead.
ui.log("errortrace", "Trace:\n%s\n", trace, key=key, payload=payload)
ui.log("errortracekey", "Trace key:%s\n", key, errortracekey=key)
def _printstat(ui, path):
"""Attempt to print filesystem stat information on path"""
if util.istest():
mode = uid = gid = 42
else:
try:
st = os.stat(path)
mode = st.st_mode
uid = st.st_uid
gid = st.st_gid
except Exception:
return
ui.warn(_("(%s: mode 0o%o, uid %s, gid %s)\n") % (path, mode, uid, gid))
def checknewlabel(repo, lbl, kind):
# Do not use the "kind" parameter in ui output.
# It makes strings difficult to translate.
if lbl in ["tip", ".", "null"]:
raise error.Abort(_("the name '%s' is reserved") % lbl)
for c in (":", "\0", "\n", "\r"):
if c in lbl:
raise error.Abort(_("%r cannot be used in a name") % c)
try:
int(lbl)
raise error.Abort(_("cannot use an integer as a name"))
except ValueError:
pass
def checkfilename(f):
"""Check that the filename f is an acceptable filename for a tracked file"""
if "\r" in f or "\n" in f:
raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
def checkportable(ui, f):
"""Check if filename f is portable and warn or abort depending on config"""
checkfilename(f)
abort, warn = checkportabilityalert(ui)
if abort or warn:
msg = winutil.checkwinfilename(f)
if msg:
msg = "%s: %s" % (msg, util.shellquote(f))
if abort:
raise error.Abort(msg)
ui.warn(_("%s\n") % msg, notice=_("warning"))
def checkportabilityalert(ui):
"""check if the user's config requests nothing, a warning, or abort for
non-portable filenames"""
val = ui.config("ui", "portablefilenames")
lval = val.lower()
bval = util.parsebool(val)
abort = lval == "abort"
warn = bval or lval == "warn"
if bval is None and not (warn or abort or lval == "ignore"):
raise error.ConfigError(_("ui.portablefilenames value is invalid ('%s')") % val)
return abort, warn
class casecollisionauditor(object):
def __init__(self, ui, abort, dirstate):
self._ui = ui
self._abort = abort
if not dirstate._istreestate and not dirstate._istreedirstate:
allfiles = "\0".join(dirstate._map)
self._loweredfiles = set(encoding.lower(allfiles).split("\0"))
else:
# Still need an in-memory set to collect files being tested, but
# haven't been added to treestate yet.
self._loweredfiles = set()
self._dirstate = dirstate
# The purpose of _newfiles is so that we don't complain about
# case collisions if someone were to call this object with the
# same filename twice.
self._newfiles = set()
def __call__(self, f):
if f in self._newfiles:
return
fl = encoding.lower(f)
ds = self._dirstate
shouldwarn = False
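        # With treestate, existing files are matched case-insensitively via
        # getfiltered(); the in-memory set only covers files tested so far.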
if ds._istreestate or ds._istreedirstate:
dmap = ds._map
candidates = dmap.getfiltered(fl, encoding.lower)
# Note: fl might be outside dirstate, but got "tested" here. In
# that case, the next "if" would catch it.
shouldwarn = any(f not in ds and candidate != f for candidate in candidates)
if not shouldwarn:
shouldwarn = fl in self._loweredfiles and f not in ds
self._loweredfiles.add(fl)
if shouldwarn:
msg = _("possible case-folding collision for %s") % f
if self._abort:
raise error.Abort(msg)
self._ui.warn(_("%s\n") % msg, notice=_("warning"))
self._newfiles.add(f)
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
"""yield every hg repository under path, always recursively.
The recurse flag will only control recursion into repo working dirs"""
def errhandler(err):
if err.filename == path:
raise err
samestat = getattr(os.path, "samestat", None)
if followsym and samestat is not None:
def adddir(dirlst, dirname):
match = False
dirstat = util.stat(dirname)
for lstdirstat in dirlst:
if samestat(dirstat, lstdirstat):
match = True
break
if not match:
dirlst.append(dirstat)
return not match
else:
followsym = False
if (seen_dirs is None) and followsym:
seen_dirs = []
adddir(seen_dirs, path)
for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
dirs.sort()
if ".hg" in dirs:
yield root # found a repository
qroot = os.path.join(root, ".hg", "patches")
if os.path.isdir(os.path.join(qroot, ".hg")):
yield qroot # we have a patch queue repo here
if recurse:
# avoid recursing inside the .hg directory
dirs.remove(".hg")
else:
dirs[:] = [] # don't descend further
elif followsym:
newdirs = []
for d in dirs:
fname = os.path.join(root, d)
if adddir(seen_dirs, fname):
if os.path.islink(fname):
for hgname in walkrepos(fname, True, seen_dirs):
yield hgname
else:
newdirs.append(d)
dirs[:] = newdirs
def binnode(ctx):
"""Return binary node id for a given basectx"""
node = ctx.node()
if node is None:
return wdirid
return node
def intrev(ctx):
"""Return integer for a given basectx that can be used in comparison or
arithmetic operation"""
rev = ctx.rev()
if rev is None:
return wdirrev
return rev
def formatchangeid(ctx):
"""Format changectx as '{node|formatnode}', which is the default
template provided by cmdutil.changeset_templater
"""
repo = ctx.repo()
ui = repo.ui
if ui.debugflag:
hexfunc = hex
else:
hexfunc = short
return hexfunc(binnode(ctx))
def revsingle(repo, revspec, default=".", localalias=None):
"""Resolve a single revset with user-defined revset aliases.
This should only be used for resolving user-provided command-line flags or
arguments.
For internal code paths not interacting with user-provided arguments,
use repo.revs (ignores user-defined revset aliases) or repo.anyrevs
(respects user-defined revset aliases) instead.
"""
if not revspec and revspec != 0:
return repo[default]
# Used by amend/common calling rebase.rebase with non-string opts.
if isint(revspec):
return repo[revspec]
l = revrange(repo, [revspec], localalias=localalias)
if not l:
raise error.Abort(_("empty revision set"))
return repo[l.last()]
def _pairspec(revspec):
tree = revsetlang.parse(revspec)
return tree and tree[0] in ("range", "rangepre", "rangepost", "rangeall")
def revpair(repo, revs):
if not revs:
return repo.dirstate.p1(), None
l = revrange(repo, revs)
if not l:
first = second = None
elif l.isascending():
first = l.min()
second = l.max()
elif l.isdescending():
first = l.max()
second = l.min()
else:
first = l.first()
second = l.last()
if first is None:
raise error.Abort(_("empty revision range"))
if (
first == second
and len(revs) >= 2
and not all(revrange(repo, [r]) for r in revs)
):
raise error.Abort(_("empty revision on one side of range"))
# if top-level is range expression, the result must always be a pair
if first == second and len(revs) == 1 and not _pairspec(revs[0]):
return repo.lookup(first), None
return repo.lookup(first), repo.lookup(second)
def revrange(repo, specs, localalias=None):
"""Execute 1 to many revsets and return the union.
This is the preferred mechanism for executing revsets using user-specified
config options, such as revset aliases.
The revsets specified by ``specs`` will be executed via a chained ``OR``
expression. If ``specs`` is empty, an empty result is returned.
``specs`` can contain integers, in which case they are assumed to be
revision numbers.
It is assumed the revsets are already formatted. If you have arguments
that need to be expanded in the revset, call ``revsetlang.formatspec()``
and pass the result as an element of ``specs``.
Specifying a single revset is allowed.
Returns a ``revset.abstractsmartset`` which is a list-like interface over
integer revisions.
This should only be used for resolving user-provided command-line flags or
arguments.
For internal code paths not interacting with user-provided arguments,
use repo.revs (ignores user-defined revset aliases) or repo.anyrevs
(respects user-defined revset aliases) instead.
"""
# Used by amend/common calling rebase.rebase with non-string opts.
if isinstance(specs, smartset.abstractsmartset):
return specs
allspecs = []
for spec in specs:
if isint(spec):
# specs are usually strings. int means legacy code using rev
# numbers. revsetlang no longer accepts int revs. Wrap it before
# passing to revsetlang.
spec = revsetlang.formatspec("%d", spec)
allspecs.append(spec)
legacyrevnum = repo.ui.config("devel", "legacy.revnum")
with repo.ui.configoverride({("devel", "legacy.revnum:real"): legacyrevnum}):
return repo.anyrevs(allspecs, user=True, localalias=localalias)
def expandpats(pats):
"""Expand bare globs when running on windows.
    On posix we assume it has already been done by sh."""
if not util.expandglobs:
return list(pats)
ret = []
for kindpat in pats:
kind, pat = matchmod._patsplit(kindpat, None)
if kind is None:
try:
globbed = glob.glob(pat)
except re.error:
globbed = [pat]
if globbed:
ret.extend(globbed)
continue
ret.append(kindpat)
return ret
def matchandpats(
ctx,
pats=(),
opts=None,
globbed=False,
default="relpath",
badfn=None,
emptyalways=True,
):
"""Return a matcher and the patterns that were used.
The matcher will warn about bad matches, unless an alternate badfn callback
is provided."""
if pats == ("",):
pats = []
if opts is None:
opts = {}
if not globbed and default == "relpath":
pats = expandpats(pats or [])
def bad(f, msg):
ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
if badfn is None:
badfn = bad
m = ctx.match(
pats,
opts.get("include"),
opts.get("exclude"),
default,
badfn=badfn,
emptyalways=emptyalways,
)
if m.always():
pats = []
return m, pats
def match(
ctx,
pats=(),
opts=None,
globbed=False,
default="relpath",
badfn=None,
emptyalways=True,
):
"""Return a matcher that will warn about bad matches."""
return matchandpats(
ctx, pats, opts, globbed, default, badfn=badfn, emptyalways=emptyalways
)[0]
def matchall(repo):
"""Return a matcher that will efficiently match everything."""
return matchmod.always(repo.root, repo.getcwd())
def matchfiles(repo, files, badfn=None):
"""Return a matcher that will efficiently match exactly these files."""
return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
def parsefollowlinespattern(repo, rev, pat, msg):
"""Return a file name from `pat` pattern suitable for usage in followlines
logic.
"""
if not matchmod.patkind(pat):
return pathutil.canonpath(repo.root, repo.getcwd(), pat)
else:
ctx = repo[rev]
m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
files = [f for f in ctx if m(f)]
if len(files) != 1:
raise error.ParseError(msg)
return files[0]
def origpath(ui, repo, filepath):
"""customize where .orig files are created
Fetch user defined path from config file: [ui] origbackuppath = <path>
Fall back to default (filepath with .orig suffix) if not specified
"""
origbackuppath = ui.config("ui", "origbackuppath")
if not origbackuppath:
return filepath + ".orig"
# Convert filepath from an absolute path into a path inside the repo.
filepathfromroot = util.normpath(os.path.relpath(filepath, start=repo.root))
origvfs = vfs.vfs(repo.wjoin(origbackuppath))
origbackupdir = origvfs.dirname(filepathfromroot)
if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
ui.note(_("creating directory: %s\n") % origvfs.join(origbackupdir))
# Remove any files that conflict with the backup file's path
for f in reversed(list(util.finddirs(filepathfromroot))):
if origvfs.isfileorlink(f):
ui.note(_("removing conflicting file: %s\n") % origvfs.join(f))
origvfs.unlink(f)
break
origvfs.makedirs(origbackupdir)
if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
ui.note(
_("removing conflicting directory: %s\n") % origvfs.join(filepathfromroot)
)
origvfs.rmtree(filepathfromroot, forcibly=True)
return origvfs.join(filepathfromroot)
class _containsnode(object):
"""proxy __contains__(node) to container.__contains__ which accepts revs"""
def __init__(self, repo, revcontainer):
self._torev = repo.changelog.rev
self._revcontains = revcontainer.__contains__
def __contains__(self, node):
return self._revcontains(self._torev(node))
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
"""do common cleanups when old nodes are replaced by new nodes
That includes writing obsmarkers or stripping nodes, and moving bookmarks.
(we might also want to move working directory parent in the future)
By default, bookmark moves are calculated automatically from 'replacements',
but 'moves' can be used to override that. Also, 'moves' may include
additional bookmark moves that should not have associated obsmarkers.
    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
have replacements. operation is a string, like "rebase".
    metadata is a dictionary containing metadata to be stored in the obsmarker if
obsolescence is enabled.
Return the calculated 'moves' mapping that is from a single old node to a
single new node.
"""
if not replacements and not moves:
return {}
# translate mapping's other forms
if not util.safehasattr(replacements, "items"):
replacements = {n: () for n in replacements}
# Calculate bookmark movements
if moves is None:
moves = {}
# Unfiltered repo is needed since nodes in replacements might be hidden.
unfi = repo
for oldnode, newnodes in replacements.items():
if oldnode in moves:
continue
if len(newnodes) > 1:
# usually a split, take the one with biggest rev number
newnode = next(unfi.set("max(%ln)", newnodes)).node()
elif len(newnodes) == 0:
# Handle them in a second loop
continue
else:
newnode = newnodes[0]
moves[oldnode] = newnode
# Move bookmarks pointing to stripped commits backwards.
# If hit a replaced node, use the replacement.
def movebackwards(node):
p1 = unfi.changelog.parents(node)[0]
if p1 == nullid:
return p1
elif p1 in moves:
return moves[p1]
elif p1 in replacements:
return movebackwards(p1)
else:
return p1
for oldnode, newnodes in replacements.items():
if oldnode in moves:
continue
assert len(newnodes) == 0
moves[oldnode] = movebackwards(oldnode)
with repo.transaction("cleanup") as tr:
# Move bookmarks
bmarks = repo._bookmarks
bmarkchanges = []
allnewnodes = [n for ns in replacements.values() for n in ns]
for oldnode, newnode in moves.items():
oldbmarks = repo.nodebookmarks(oldnode)
if not oldbmarks:
continue
from . import bookmarks # avoid import cycle
repo.ui.debug(
"moving bookmarks %r from %s to %s\n"
% (oldbmarks, hex(oldnode), hex(newnode))
)
# Delete divergent bookmarks being parents of related newnodes
deleterevs = repo.revs(
"parents(roots(%ln & (::%n))) - parents(%n)",
allnewnodes,
newnode,
oldnode,
)
deletenodes = _containsnode(repo, deleterevs)
for name in oldbmarks:
bmarkchanges.append((name, newnode))
for b in bookmarks.divergent2delete(repo, deletenodes, name):
bmarkchanges.append((b, None))
if bmarkchanges:
bmarks.applychanges(repo, tr, bmarkchanges)
# adjust visibility, or strip nodes
strip = True
if visibility.tracking(repo):
visibility.remove(repo, replacements.keys())
strip = False
if strip:
from . import repair # avoid import cycle
tostrip = list(replacements)
if tostrip:
repair.delayedstrip(repo.ui, repo, tostrip, operation)
return moves
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
if opts is None:
opts = {}
m = matcher
if dry_run is None:
dry_run = opts.get("dry_run")
if similarity is None:
similarity = float(opts.get("similarity") or 0)
ret = 0
rejected = []
def badfn(f, msg):
if f in m.files():
m.bad(f, msg)
rejected.append(f)
badmatch = matchmod.badmatch(m, badfn)
added, unknown, deleted, removed, forgotten = _interestingfiles(repo, badmatch)
unknownset = set(unknown + forgotten)
toprint = unknownset.copy()
toprint.update(deleted)
for abs in sorted(toprint):
if repo.ui.verbose or not m.exact(abs):
if abs in unknownset:
status = _("adding %s\n") % m.uipath(abs)
else:
status = _("removing %s\n") % m.uipath(abs)
repo.ui.status(status)
renames = _findrenames(repo, m, added + unknown, removed + deleted, similarity)
if not dry_run:
_markchanges(repo, unknown + forgotten, deleted, renames)
for f in rejected:
if f in m.files():
return 1
return ret
def marktouched(repo, files, similarity=0.0):
"""Assert that files have somehow been operated upon. files are relative to
the repo root."""
m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
rejected = []
added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
if repo.ui.verbose:
unknownset = set(unknown + forgotten)
toprint = unknownset.copy()
toprint.update(deleted)
for abs in sorted(toprint):
if abs in unknownset:
status = _("adding %s\n") % abs
else:
status = _("removing %s\n") % abs
repo.ui.status(status)
renames = _findrenames(repo, m, added + unknown, removed + deleted, similarity)
_markchanges(repo, unknown + forgotten, deleted, renames)
for f in rejected:
if f in m.files():
return 1
return 0
def _interestingfiles(repo, matcher):
"""Walk dirstate with matcher, looking for files that addremove would care
about.
This is different from dirstate.status because it doesn't care about
whether files are modified or clean."""
removed, forgotten = [], []
audit_path = pathutil.pathauditor(repo.root, cached=True)
dirstate = repo.dirstate
exists = repo.wvfs.isfileorlink
status = dirstate.status(matcher, False, False, True)
unknown = [file for file in status.unknown if audit_path.check(file)]
for file in status.removed:
# audit here to make sure "file" hasn't reappeared behind a symlink
if exists(file) and audit_path.check(file):
if dirstate.normalize(file) == file:
forgotten.append(file)
else:
removed.append(file)
else:
removed.append(file)
# The user may have specified ignored files. It's expensive to compute them
# via status, so let's manually add them here.
ignored = repo.dirstate._ignore
unknown.extend(
file
for file in matcher.files()
if ignored(file) and repo.wvfs.isfileorlink(file) and audit_path.check(file)
)
return status.added, unknown, status.deleted, removed, forgotten
def _findrenames(repo, matcher, added, removed, similarity):
"""Find renames from removed files to added ones."""
renames = {}
if similarity > 0:
for old, new, score in similar.findrenames(repo, added, removed, similarity):
if repo.ui.verbose or not matcher.exact(old) or not matcher.exact(new):
repo.ui.status(
_("recording removal of %s as rename to %s " "(%d%% similar)\n")
% (matcher.rel(old), matcher.rel(new), score * 100)
)
renames[new] = old
return renames
def _markchanges(repo, unknown, deleted, renames):
"""Marks the files in unknown as added, the files in deleted as removed,
and the files in renames as copied."""
wctx = repo[None]
with repo.wlock():
wctx.forget(deleted)
wctx.add(unknown)
for new, old in pycompat.iteritems(renames):
wctx.copy(old, new)
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
"""Update the dirstate to reflect the intent of copying src to dst. For
different reasons it might not end with dst being marked as copied from src.
"""
origsrc = repo.dirstate.copied(src) or src
if dst == origsrc: # copying back a copy?
if repo.dirstate[dst] not in "mn" and not dryrun:
repo.dirstate.normallookup(dst)
else:
if repo.dirstate[origsrc] == "a" and origsrc == src:
if not ui.quiet:
ui.warn(
_(
"%s has not been committed yet, so no copy "
"data will be stored for %s.\n"
)
% (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
)
if repo.dirstate[dst] in "?r" and not dryrun:
wctx.add([dst])
elif not dryrun:
wctx.copy(origsrc, dst)
def readrequires(opener, supported=None):
"""Reads and parses .hg/requires or .hg/store/requires and checks if all
entries found are in the list of supported features.
If supported is None, read all features without checking.
"""
requirements = set(opener.readutf8("requires").splitlines())
missings = []
if supported:
for r in requirements:
if r not in supported:
if not r or not r[0].isalnum():
raise error.RequirementError(
_("%s file is corrupt") % opener.join("requires")
)
missings.append(r)
missings.sort()
if missings:
raise error.RequirementError(
_("repository requires features unknown to this Mercurial: %s")
% " ".join(missings),
hint=_(
"see https://mercurial-scm.org/wiki/MissingRequirement"
" for more information"
),
)
return requirements
def writerequires(opener, requirements):
content = "".join("%s\n" % r for r in sorted(requirements))
opener.writeutf8("requires", content)
class filecachesubentry(object):
def __init__(self, path, stat):
self.path = path
self.cachestat = None
if stat:
path = self.path
else:
path = None
self.cachestat = filecachesubentry.stat(path)
def refresh(self):
self.cachestat = filecachesubentry.stat(self.path)
def changed(self):
newstat = filecachesubentry.stat(self.path)
if self.cachestat != newstat:
self.cachestat = newstat
return True
else:
return False
@staticmethod
def stat(path):
return util.cachestat(path)
class filecacheentry(object):
def __init__(self, paths, stat=True):
self._entries = []
for path in paths:
self._entries.append(filecachesubentry(path, stat))
def changed(self):
"""true if any entry has changed"""
for entry in self._entries:
if entry.changed():
return True
return False
def refresh(self):
for entry in self._entries:
entry.refresh()
class filecache(object):
"""A property like decorator that tracks files under .hg/ for updates.
Records stat info when called in _filecache.
On subsequent calls, compares old stat info with new info, and recreates the
object when any of the files changes, updating the new stat info in
_filecache.
    Mercurial either atomically renames or appends to files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fall back to
recreating the object on every call (essentially the same behavior as
propertycache).
"""
def __init__(self, *paths):
self.paths = [
path if isinstance(path, tuple) else (path, self.join) for path in paths
]
def join(self, obj, fname):
"""Used to compute the runtime path of a cached file.
Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
"""
raise NotImplementedError
def __call__(self, func):
self.func = func
self.name = func.__name__
return self
def __get__(self, obj, type=None):
# if accessed on the class, return the descriptor itself.
if obj is None:
return self
# do we need to check if the file changed?
if self.name in obj.__dict__:
assert self.name in obj._filecache, self.name
return obj.__dict__[self.name]
entry = obj._filecache.get(self.name)
if entry:
if entry.changed():
entry.obj = self.func(obj)
else:
paths = [joiner(obj, path) for (path, joiner) in self.paths]
# We stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between the time we read and the time we stat
entry = filecacheentry(paths, True)
entry.obj = self.func(obj)
obj._filecache[self.name] = entry
obj.__dict__[self.name] = entry.obj
return entry.obj
def __set__(self, obj, value):
if self.name not in obj._filecache:
# we add an entry for the missing value because X in __dict__
# implies X in _filecache
paths = [joiner(obj, path) for (path, joiner) in self.paths]
ce = filecacheentry(paths, False)
obj._filecache[self.name] = ce
else:
ce = obj._filecache[self.name]
ce.obj = value # update cached copy
obj.__dict__[self.name] = value # update copy returned by obj.x
def __delete__(self, obj):
try:
del obj.__dict__[self.name]
except KeyError:
raise AttributeError(self.name)
def extdatasource(repo, source):
"""Gather a map of rev -> value dict from the specified source
A source spec is treated as a URL, with a special case shell: type
for parsing the output from a shell command.
The data is parsed as a series of newline-separated records where
each record is a revision specifier optionally followed by a space
and a freeform string value. If the revision is known locally, it
is converted to a rev, otherwise the record is skipped.
Note that both key and value are treated as UTF-8 and converted to
the local encoding. This allows uniformity between local and
remote data sources.
"""
spec = repo.ui.config("extdata", source)
if not spec:
raise error.Abort(_("unknown extdata source '%s'") % source)
data = {}
src = proc = None
try:
if spec.startswith("shell:"):
# external commands should be run relative to the repo root
cmd = spec[6:]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
close_fds=util.closefds,
stdout=subprocess.PIPE,
cwd=repo.root,
)
src = proc.stdout
else:
# treat as a URL or file
src = url.open(repo.ui, spec)
for l in src:
if b" " in l:
k, v = l.strip().split(b" ", 1)
else:
k, v = l.strip(), b""
k = k.decode("utf8")
try:
data[repo[k].rev()] = v.decode("utf8")
except (error.LookupError, error.RepoLookupError):
pass # we ignore data for nodes that don't exist locally
finally:
if proc:
proc.communicate()
if src:
src.close()
if proc and proc.returncode != 0:
raise error.Abort(
_("extdata command '%s' failed: %s")
% (cmd, util.explainexit(proc.returncode)[0])
)
return data
def gdinitconfig(ui):
"""helper function to know if a repo should be created as general delta"""
# experimental config: format.generaldelta
return ui.configbool("format", "generaldelta") or ui.configbool(
"format", "usegeneraldelta"
)
def gddeltaconfig(ui):
"""helper function to know if incoming delta should be optimised"""
# experimental config: format.generaldelta
return ui.configbool("format", "generaldelta")
class simplekeyvaluefile(object):
"""A simple file with key=value lines
    Keys must be alphanumeric and start with a letter, values must not
contain '\n' characters"""
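    # A file written by this class looks like (illustrative):
    #   key1=value1
    #   key2=value2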
firstlinekey = "__firstline"
def __init__(self, vfs, path, keys=None):
self.vfs = vfs
self.path = path
def read(self, firstlinenonkeyval=False):
"""Read the contents of a simple key-value file
'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
__firstline key."""
lines = self.vfs.readutf8(self.path).splitlines(True)
d = {}
if firstlinenonkeyval:
if not lines:
e = _("empty simplekeyvalue file")
raise error.CorruptedState(e)
# we don't want to include '\n' in the __firstline
d[self.firstlinekey] = lines[0][:-1]
del lines[0]
try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines, which contain only '\n' and are therefore not skipped
            # by 'if line'
updatedict = dict(line[:-1].split("=", 1) for line in lines if line.strip())
if self.firstlinekey in updatedict:
e = _("%r can't be used as a key")
raise error.CorruptedState(e % self.firstlinekey)
d.update(updatedict)
except ValueError as e:
raise error.CorruptedState(str(e))
return d
def write(self, data, firstline=None):
"""Write key=>value mapping to a file
data is a dict. Keys must be alphanumerical and start with a letter.
Values must not contain newline characters.
If 'firstline' is not None, it is written to file before
everything else, as it is, not in a key=value form"""
lines = []
if firstline is not None:
lines.append("%s\n" % firstline)
for k, v in data.items():
if k == self.firstlinekey:
e = "key name '%s' is reserved" % self.firstlinekey
raise error.ProgrammingError(e)
if not k[0].isalpha():
e = "keys must start with a letter in a key-value file"
raise error.ProgrammingError(e)
if not k.isalnum():
e = "invalid key name in a simple key-value file"
raise error.ProgrammingError(e)
if "\n" in v:
e = "invalid value in a simple key-value file"
raise error.ProgrammingError(e)
lines.append("%s=%s\n" % (k, v))
with self.vfs(self.path, mode="wb", atomictemp=True) as fp:
fp.write("".join(lines).encode("utf-8"))
def nodesummaries(repo, nodes, maxnumnodes=4):
if len(nodes) <= maxnumnodes or repo.ui.verbose:
return " ".join(short(h) for h in nodes)
first = " ".join(short(h) for h in nodes[:maxnumnodes])
return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
def wrapconvertsink(sink):
"""Allow extensions to wrap the sink returned by convcmd.convertsink()
before it is used, whether or not the convert extension was formally loaded.
"""
return sink
def contextnodesupportingwdir(ctx):
"""Returns `ctx`'s node, or `wdirid` if it is a `workingctx`.
    Alas, `workingctx.node()` normally returns None, necessitating this
    convenience function for when you need to serialize the workingctx.
    `repo[wdirid]` works fine so there's no need for a reverse function.
"""
from edenscm.mercurial import context
if isinstance(ctx, context.workingctx):
return wdirid
# Neither `None` nor `wdirid` feels right here:
if isinstance(ctx, context.overlayworkingctx):
raise error.ProgrammingError(
"contextnodesupportingwdir doesn't support " "overlayworkingctx"
)
return ctx.node()
def trackrevnumfortests(repo, specs):
"""Attempt to collect information to replace revision number with revset
expressions in tests.
This works with the TESTFILE and TESTLINE environment variable set by
run-tests.py.
Information will be written to $TESTDIR/.testrevnum.
"""
if not util.istest():
return
trackrevnum = encoding.environ.get("TRACKREVNUM")
testline = encoding.environ.get("TESTLINE")
testfile = encoding.environ.get("TESTFILE")
testdir = encoding.environ.get("TESTDIR")
if not trackrevnum or not testline or not testfile or not testdir:
return
for spec in specs:
# 'spec' should be in sys.argv
if not any(spec in a for a in pycompat.sysargv):
continue
# Consider 'spec' as a revision number.
rev = int(spec)
if rev < -1:
continue
ctx = repo[rev]
if not ctx:
return
# Check candidate revset expressions.
candidates = []
if rev == -1:
candidates.append("null")
desc = ctx.description()
if desc:
candidates.append("desc(%s)" % desc.split()[0])
candidates.append("max(desc(%s))" % desc.split()[0])
candidates.append("%s" % ctx.hex())
for candidate in candidates:
try:
nodes = list(repo.nodes(candidate))
except Exception:
continue
if nodes == [ctx.node()]:
with open(testdir + "/.testrevnum", "ab") as f:
f.write(
"fix(%r, %s, %r, %r)\n" % (testfile, testline, spec, candidate)
)
break
def revf64encode(rev):
"""Convert rev to within f64 "safe" range.
This avoids issues that JSON cannot represent the revs precisely.
"""
if rev is not None and rev >= 0x100000000000000:
rev -= 0xFF000000000000
return rev
def revf64decode(rev):
"""Convert rev encoded by revf64encode back to the original rev
>>> revs = [i + j for i in [0, 1 << 56] for j in range(2)] + [None]
>>> encoded = [revf64encode(i) for i in revs]
>>> decoded = [revf64decode(i) for i in encoded]
>>> revs == decoded
True
"""
if rev is not None and 0x1000000000000 <= rev < 0x100000000000000:
rev += 0xFF000000000000
return rev
def setup(ui):
if not ui.configbool("experimental", "revf64compat"):
# Disable f64 compatibility
global revf64encode
def revf64encode(rev):
return rev<|fim▁end|>
|
except (error.IndexedLogError, error.MetaLogError) as inst:
ui.warn(_("internal storage is corrupted\n"), error=_("abort"))
|
<|file_name|>soundcloud.py<|end_file_name|><|fim▁begin|>import asyncio
import demjson
from bot import user_steps, sender, get, downloader
from message import Message
client_id = ''  # YOUR CLIENT ID
async def search(query):
search_url = 'https://api.soundcloud.com/search?q=%s&facet=model&limit=30&offset=0&linked_partitioning=1&client_id='+client_id
url = search_url % query
response = await get(url)
r = demjson.decode(response)
res = []
for entity in r['collection']:
if entity['kind'] == 'track':
res.append([entity['title'], entity['permalink_url']])
return res<|fim▁hole|>
async def getfile(url):
    response = await get(
        "https://api.soundcloud.com/resolve?url={}&client_id={}".format(url, client_id))
r = demjson.decode(response)
return r['stream_url'] + "?client_id="+client_id
@asyncio.coroutine
async def run(message, matches, chat_id, step):
from_id = message['from']['id']
if step == 0:
await sender(
Message(chat_id).set_text("*Please Wait*\nI'm Searching all Music with this name", parse_mode="markdown"))
user_steps[from_id] = {"name": "Soundcloud", "step": 1, "data": {}}
i = 0
show_keyboard = {'keyboard': [], "selective": True}
matches = matches.replace(" ", "+")
for song in await search(matches):
title, link = song[0], song[1]
user_steps[from_id]['data'][title] = link
show_keyboard['keyboard'].append([title])
i += 1
if i == 20:
break
if len(show_keyboard['keyboard']) in [0, 1]:
hide_keyboard = {'hide_keyboard': True, 'selective': True}
del user_steps[from_id]
return [Message(chat_id).set_text("*Not Found*",
reply_to_message_id=message['message_id'], reply_markup=hide_keyboard,
parse_mode="markdown")]
return [Message(chat_id).set_text("Select One Of these :", reply_to_message_id=message['message_id'],
reply_markup=show_keyboard)]
elif step == 1:
try:
hide_keyboard = {'hide_keyboard': True, "selective": True}
await sender(Message(chat_id).set_text("*Please Wait*\nLet me Save this Music For You",
reply_to_message_id=message['message_id'],
reply_markup=hide_keyboard, parse_mode="markdown"))
await downloader(await getfile(user_steps[from_id]['data'][message['text']]),
"tmp/{}.mp3".format(message['text']))
del user_steps[from_id]
return [Message(chat_id).set_audio("tmp/{}.mp3".format(message['text']), title=message['text'],
performer="@Siarobot")]
except Exception as e:
del user_steps[from_id]
return [Message(chat_id).set_text("*Wrong Input*\n_Try Again_", parse_mode="markdown")]
plugin = {
"name": "Soundcloud",
"desc": "Download a Music From Sound Cloud\n\n"
"*For Start :*\n`/sc michael jackson billie jean`",
"usage": ["/sc \\[`Search`]"],
"run": run,
"sudo": False,
"patterns": ["^[/!#]sc (.*)$"]
}<|fim▁end|>
| |
<|file_name|>reporter.js<|end_file_name|><|fim▁begin|>var _ = require('lodash');
var path = require('path');
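// Illustrative wiring (the 'junit-report-builder' module name is an
// assumption; any builder exposing testSuite/testCase/newBuilder/writeTo
// should work):
//   var builder = require('junit-report-builder');
//   module.exports = cucumberJUnitReporter({ reportDir: 'reports' }, builder);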
function cucumberJUnitReporter(providedConfig, builder) {
var config = _.defaults(providedConfig || {}, {
reportDir: 'test_reports',
reportPrefix: 'TEST-',
reportSuffix: '.xml',
reportFile: 'test_results.xml',
oneReportPerFeature: true,
numberSteps: true
});
var suite = builder;
var featurePath;
var featureName;
var scenarioName;
var stepCounter = 0;
function getCurrentTestClassName() {
var testClassName = '';
if (featureName) {
testClassName += 'Feature: ' + featureName.replace(/\./g, ' ');
}
if (scenarioName) {
testClassName += '.Scenario: ' + scenarioName.replace(/\./g, ' ');
}
return testClassName;
}
function getFeatureReportPath() {
var reportName = config.reportPrefix +
featurePath.replace(/[\/]/g, '.') +
config.reportSuffix;
return path.join(config.reportDir, reportName);
}
function getGlobalReportPath() {
return path.join(config.reportDir, config.reportFile);
}
function getStepName(stepCount, step) {
var name = '';
if (config.numberSteps) {
if (stepCount < 10) {
name += '0';
}
name += stepCount + '. ';
}
name += step.getKeyword() + step.getName();
return name;
}
function formatTime(duration) {
if (typeof duration === 'number') {
return Math.round(duration / 1e6) / 1e3;
}
return null;
}
function registerHandlers() {
this.registerHandler('BeforeFeature', function (event, callback) {
var feature = event.getPayloadItem('feature');
featureName = feature.getName();
featurePath = path.relative(process.cwd(), feature.getUri());
suite = builder.testSuite().name(featureName);
callback();
});
this.registerHandler('BeforeScenario', function (event, callback) {
var scenario = event.getPayloadItem('scenario');
scenarioName = scenario.getName();
stepCounter = 0;
callback();
});
this.registerHandler('StepResult', function (event, callback) {
var stepResult = event.getPayloadItem('stepResult');
var step = stepResult.getStep();
var stepName = step.getName();
if (typeof stepName === "undefined" && stepResult.isSuccessful()) {
callback();
return;
}
stepCounter++;
var testCase = suite.testCase()
.className(getCurrentTestClassName())
.name(getStepName(stepCounter, step));
if (stepResult.isSuccessful()) {
testCase.time(formatTime(stepResult.getDuration()));
} else if (stepResult.isSkipped()) {
testCase.skipped();
} else if (!stepResult.isPending() && !stepResult.isUndefined()) {
var failureException = stepResult.getFailureException();
testCase.failure(failureException).time(formatTime(stepResult.getDuration()));
if (failureException.stack) {
testCase.stacktrace(failureException.stack);
}
}
callback();
});
this.registerHandler('AfterScenario', function (event, callback) {
scenarioName = undefined;
callback();
});
this.registerHandler('AfterFeature', function (event, callback) {
if (config.oneReportPerFeature) {
builder.writeTo(getFeatureReportPath());
builder = builder.newBuilder();
}
featureName = undefined;
featurePath = undefined;
suite = builder;
callback();
});
this.registerHandler('AfterFeatures', function (event, callback) {
if (!config.oneReportPerFeature) {
builder.writeTo(getGlobalReportPath());<|fim▁hole|> }
return registerHandlers;
}
module.exports = cucumberJUnitReporter;<|fim▁end|>
|
}
callback();
});
|
<|file_name|>mail_mail.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_columns = {
'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
'state': fields.selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True),
'auto_delete': fields.boolean('Auto Delete',
help="Permanently delete this email after sending it, to save space"),
'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
'email_to': fields.text('To', help='Message recipients'),
'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
'notification': fields.boolean('Is Notification')
}
<|fim▁hole|> this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if this.alias_domain:
return '%s@%s' % (this.alias_name, this.alias_domain)
elif this.email:
return this.email
raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
_defaults = {
'state': 'outgoing',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
}
def default_get(self, cr, uid, fields, context=None):
# protection for `default_type` values leaking from menu action context (e.g. for invoices)
# To remove when automatic context propagation is removed in web client
if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
context = dict(context, default_type=None)
return super(mail_mail, self).default_get(cr, uid, fields, context=context)
def create(self, cr, uid, values, context=None):
if 'notification' not in values and values.get('mail_message_id'):
values['notification'] = True
return super(mail_mail, self).create(cr, uid, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# cascade-delete the parent message for all mails that are not created for a notification
ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
return res
def mark_outgoing(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)
def cancel(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
def process_email_queue(self, cr, uid, ids=None, context=None):
"""Send immediately queued messages, committing after each
message is sent - this is not transactional and should
not be called during another transaction!
:param list ids: optional list of emails ids to send. If passed
no search is performed, and these ids are used
instead.
:param dict context: if a 'filters' key is present in context,
this value will be used as an additional
filter to further restrict the outgoing
messages to send (by default all 'outgoing'
messages are sent).
"""
if context is None:
context = {}
if not ids:
filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
if 'filters' in context:
filters.extend(context['filters'])
ids = self.search(cr, uid, filters, context=context)
res = None
try:
# Force auto-commit - this is meant to be called by
# the scheduler, and we can't allow rolling back the status
# of previously sent emails!
res = self.send(cr, uid, ids, auto_commit=True, context=context)
except Exception:
_logger.exception("Failed processing mail queue")
return res
def _postprocess_sent_message(self, cr, uid, mail, context=None):
"""Perform any post-processing necessary after sending ``mail``
successfully, including deleting it completely along with its
attachment if the ``auto_delete`` flag of the mail was set.
Overridden by subclasses for extra post-processing behaviors.
:param browse_record mail: the mail that was just sent
:return: True
"""
if mail.auto_delete:
# done with SUPERUSER_ID to avoid giving large unlink access rights
self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
return True
def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
""" If subject is void and record_name defined: '<Author> posted on <Resource>'
:param boolean force: force the subject replacement
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if force or (not mail.subject and mail.model and mail.res_id):
return 'Re: %s' % (mail.record_name)
return mail.subject
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email body. The main purpose of this method
        is to be inherited by the Portal module, which adds a sign-in link
        to each notification email a partner receives.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = mail.body_html
# partner is a user, link to a related document (incentive to install portal)
if partner and partner.user_ids and mail.model and mail.res_id \
and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
related_user = partner.user_ids[0]
try:
self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
# the parameters to encode for the query and fragment part of url
query = {'db': cr.dbname}
fragment = {
'login': related_user.login,
'model': mail.model,
'id': mail.res_id,
}
url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
except except_orm, e:
pass
return body
def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email reply_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if mail.reply_to:
return mail.reply_to
email_reply_to = False
# if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
# no alias reply_to -> reply_to will be the email_from, only the email part
if not email_reply_to and mail.email_from:
emails = tools.email_split(mail.email_from)
if emails:
email_reply_to = emails[0]
# format 'Document name <email_address>'
if email_reply_to and mail.model and mail.res_id:
document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
if document_name:
# sanitize document name
sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
# generate reply to
email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
return email_reply_to
def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
""" Return a dictionary for specific email values, depending on a
partner, or generic to the whole recipients given by mail.email_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
body_alternative = tools.html2plaintext(body)
# generate email_to, heuristic:
# 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
# 2. if 'partner' is specified, but no related document: Partner Name <email>
        # 3. fall back on mail.email_to that we split to get an email address list
if partner and mail.record_name:
sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
elif partner:
email_to = ['%s <%s>' % (partner.name, partner.email)]
else:
email_to = tools.email_split(mail.email_to)
return {
'body': body,
'body_alternative': body_alternative,
'subject': subject,
'email_to': email_to,
'reply_to': reply_to,
}
def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
""" Sends the selected emails immediately, ignoring their current
state (mails that have already been sent should not be passed
unless they should actually be re-sent).
Emails successfully delivered are marked as 'sent', and those
that fail to be deliver are marked as 'exception', and the
corresponding error mail is output in the server logs.
:param bool auto_commit: whether to force a commit of the mail status
after sending each mail (meant only for scheduler processing);
should never be True during normal transactions (default: False)
:param list recipient_ids: specific list of res.partner recipients.
If set, one email is sent to each partner. Its is possible to
tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
If not specified, one email is sent to mail_mail.email_to.
:return: True
"""
ir_mail_server = self.pool.get('ir.mail_server')
for mail in self.browse(cr, uid, ids, context=context):
try:
# handle attachments
attachments = []
for attach in mail.attachment_ids:
attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
# specific behavior to customize the send email for notified partners
email_list = []
if recipient_ids:
for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
else:
email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
# build an RFC2822 email.message.Message object and send it without queuing
for email in email_list:
msg = ir_mail_server.build_email(
email_from = mail.email_from,
email_to = email.get('email_to'),
subject = email.get('subject'),
body = email.get('body'),
body_alternative = email.get('body_alternative'),
email_cc = tools.email_split(mail.email_cc),
reply_to = email.get('reply_to'),
attachments = attachments,
message_id = mail.message_id,
references = mail.references,
object_id = mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
subtype = 'html',
subtype_alternative = 'plain')
res = ir_mail_server.send_email(cr, uid, msg,
mail_server_id=mail.mail_server_id.id, context=context)
if res:
mail.write({'state': 'sent', 'message_id': res})
mail_sent = True
else:
mail.write({'state': 'exception'})
mail_sent = False
# /!\ can't use mail.state here, as mail.refresh() will cause an error
# see revid:[email protected] in 6.1
if mail_sent:
self._postprocess_sent_message(cr, uid, mail, context=context)
except Exception:
_logger.exception('failed sending mail.mail %s', mail.id)
mail.write({'state': 'exception'})
            if auto_commit:
cr.commit()
return True<|fim▁end|>
|
def _get_default_from(self, cr, uid, context=None):
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import json
import logging
import pytz
import datetime
import dateutil.parser
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import redirect
from django.conf import settings
from mitxmako.shortcuts import render_to_response
from django_future.csrf import ensure_csrf_cookie
from track.models import TrackingLog
from pytz import UTC
log = logging.getLogger("tracking")
LOGFIELDS = ['username', 'ip', 'event_source', 'event_type', 'event', 'agent', 'page', 'time', 'host']
def log_event(event):
"""Write tracking event to log file, and optionally to TrackingLog model."""
event_str = json.dumps(event)
log.info(event_str[:settings.TRACK_MAX_EVENT])
if settings.MITX_FEATURES.get('ENABLE_SQL_TRACKING_LOGS'):
event['time'] = dateutil.parser.parse(event['time'])
tldat = TrackingLog(**dict((x, event[x]) for x in LOGFIELDS))
try:
tldat.save()
except Exception as err:
log.exception(err)
def user_track(request):
"""
    Log when a POST call to the "event" URL is made by a user. Uses request.REQUEST
to allow for GET calls.
GET or POST call should provide "event_type", "event", and "page" arguments.
"""
try: # TODO: Do the same for many of the optional META parameters
username = request.user.username
except:
username = "anonymous"
try:
scookie = request.META['HTTP_COOKIE'] # Get cookies
scookie = ";".join([c.split('=')[1] for c in scookie.split(";") if "sessionid" in c]).strip() # Extract session ID
except:
scookie = ""
try:
agent = request.META['HTTP_USER_AGENT']
except:
agent = ''
event = {
"username": username,
"session": scookie,
"ip": request.META['REMOTE_ADDR'],<|fim▁hole|> "page": request.REQUEST['page'],
"time": datetime.datetime.now(UTC).isoformat(),
"host": request.META['SERVER_NAME'],
}
log_event(event)
return HttpResponse('success')
def server_track(request, event_type, event, page=None):
"""Log events related to server requests."""
try:
username = request.user.username
except:
username = "anonymous"
try:
agent = request.META['HTTP_USER_AGENT']
except:
agent = ''
event = {
"username": username,
"ip": request.META['REMOTE_ADDR'],
"event_source": "server",
"event_type": event_type,
"event": event,
"agent": agent,
"page": page,
"time": datetime.datetime.now(UTC).isoformat(),
"host": request.META['SERVER_NAME'],
}
if event_type.startswith("/event_logs") and request.user.is_staff: # don't log
return
log_event(event)
def task_track(request_info, task_info, event_type, event, page=None):
"""
    Logs tracking information for events occurring within celery tasks.
The `event_type` is a string naming the particular event being logged,
while `event` is a dict containing whatever additional contextual information
is desired.
The `request_info` is a dict containing information about the original
task request. Relevant keys are `username`, `ip`, `agent`, and `host`.
While the dict is required, the values in it are not, so that {} can be
passed in.
In addition, a `task_info` dict provides more information about the current
task, to be stored with the `event` dict. This may also be an empty dict.
The `page` parameter is optional, and allows the name of the page to
be provided.
"""
# supplement event information with additional information
# about the task in which it is running.
full_event = dict(event, **task_info)
# All fields must be specified, in case the tracking information is
# also saved to the TrackingLog model. Get values from the task-level
# information, or just add placeholder values.
event = {
"username": request_info.get('username', 'unknown'),
"ip": request_info.get('ip', 'unknown'),
"event_source": "task",
"event_type": event_type,
"event": full_event,
"agent": request_info.get('agent', 'unknown'),
"page": page,
"time": datetime.datetime.utcnow().isoformat(),
"host": request_info.get('host', 'unknown')
}
log_event(event)
@login_required
@ensure_csrf_cookie
def view_tracking_log(request, args=''):
"""View to output contents of TrackingLog model. For staff use only."""
if not request.user.is_staff:
return redirect('/')
nlen = 100
username = ''
if args:
for arg in args.split('/'):
if arg.isdigit():
nlen = int(arg)
if arg.startswith('username='):
username = arg[9:]
record_instances = TrackingLog.objects.all().order_by('-time')
if username:
record_instances = record_instances.filter(username=username)
record_instances = record_instances[0:nlen]
# fix dtstamp
fmt = '%a %d-%b-%y %H:%M:%S' # "%Y-%m-%d %H:%M:%S %Z%z"
for rinst in record_instances:
rinst.dtstr = rinst.time.replace(tzinfo=pytz.utc).astimezone(pytz.timezone('US/Eastern')).strftime(fmt)
return render_to_response('tracking_log.html', {'records': record_instances})<|fim▁end|>
|
"event_source": "browser",
"event_type": request.REQUEST['event_type'],
"event": request.REQUEST['event'],
"agent": agent,
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* lodash 3.0.3 (Custom Build) <https://lodash.com/>
* Build: `lodash modularize exports="npm" -o ./`
* Copyright 2012-2016 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
* Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <https://lodash.com/license>
*/
<|fim▁hole|>/** Used for built-in method references. */
var objectProto = Object.prototype;
/**
* Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
* of values.
*/
var objectToString = objectProto.toString;
/**
* Checks if `value` is classified as a boolean primitive or object.
*
* @static
* @memberOf _
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
* @example
*
* _.isBoolean(false);
* // => true
*
* _.isBoolean(null);
* // => false
*/
function isBoolean(value) {
return value === true || value === false ||
(isObjectLike(value) && objectToString.call(value) == boolTag);
}
/**
* Checks if `value` is object-like. A value is object-like if it's not `null`
* and has a `typeof` result of "object".
*
* @static
* @memberOf _
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
* @example
*
* _.isObjectLike({});
* // => true
*
* _.isObjectLike([1, 2, 3]);
* // => true
*
* _.isObjectLike(_.noop);
* // => false
*
* _.isObjectLike(null);
* // => false
*/
function isObjectLike(value) {
return !!value && typeof value == 'object';
}
module.exports = isBoolean;<|fim▁end|>
|
/** `Object#toString` result references. */
var boolTag = '[object Boolean]';
|
<|file_name|>fs.rs<|end_file_name|><|fim▁begin|>#![stable(feature = "metadata_ext", since = "1.1.0")]
use crate::fs::Metadata;
use crate::sys_common::AsInner;
#[allow(deprecated)]
use crate::os::freebsd::raw;
/// OS-specific extensions to [`fs::Metadata`].
///
/// [`fs::Metadata`]: crate::fs::Metadata
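///
/// # Examples
///
/// A minimal illustrative example (the file name is a placeholder):
///
/// ```no_run
/// use std::fs;
/// use std::io;
/// use std::os::freebsd::fs::MetadataExt;
///
/// fn main() -> io::Result<()> {
///     let meta = fs::metadata("some_file")?;
///     // st_dev/st_flags are FreeBSD-specific fields of the raw stat.
///     println!("dev: {} flags: {}", meta.st_dev(), meta.st_flags());
///     Ok(())
/// }
/// ```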
#[stable(feature = "metadata_ext", since = "1.1.0")]
pub trait MetadataExt {
/// Gain a reference to the underlying `stat` structure which contains
/// the raw information returned by the OS.
///
/// The contents of the returned `stat` are **not** consistent across
/// Unix platforms. The `os::unix::fs::MetadataExt` trait contains the
/// cross-Unix abstractions contained within the raw stat.
#[stable(feature = "metadata_ext", since = "1.1.0")]
#[rustc_deprecated(
since = "1.8.0",
reason = "deprecated in favor of the accessor \
methods of this trait"
)]
#[allow(deprecated)]
fn as_raw_stat(&self) -> &raw::stat;
<|fim▁hole|> fn st_ino(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_mode(&self) -> u32;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_nlink(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_uid(&self) -> u32;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_gid(&self) -> u32;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_rdev(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_size(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_atime(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_atime_nsec(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_mtime(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_mtime_nsec(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_ctime(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_ctime_nsec(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_birthtime(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_birthtime_nsec(&self) -> i64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_blksize(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_blocks(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_flags(&self) -> u32;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_gen(&self) -> u32;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_lspare(&self) -> u32;
}
#[stable(feature = "metadata_ext", since = "1.1.0")]
impl MetadataExt for Metadata {
#[allow(deprecated)]
fn as_raw_stat(&self) -> &raw::stat {
// The methods below use libc::stat, so they work fine when libc is built with FreeBSD 12 ABI.
// This method would just return nonsense.
#[cfg(freebsd12)]
panic!("as_raw_stat not supported with FreeBSD 12 ABI");
#[cfg(not(freebsd12))]
unsafe {
&*(self.as_inner().as_inner() as *const libc::stat as *const raw::stat)
}
}
fn st_dev(&self) -> u64 {
self.as_inner().as_inner().st_dev as u64
}
fn st_ino(&self) -> u64 {
self.as_inner().as_inner().st_ino as u64
}
fn st_mode(&self) -> u32 {
self.as_inner().as_inner().st_mode as u32
}
fn st_nlink(&self) -> u64 {
self.as_inner().as_inner().st_nlink as u64
}
fn st_uid(&self) -> u32 {
self.as_inner().as_inner().st_uid as u32
}
fn st_gid(&self) -> u32 {
self.as_inner().as_inner().st_gid as u32
}
fn st_rdev(&self) -> u64 {
self.as_inner().as_inner().st_rdev as u64
}
fn st_size(&self) -> u64 {
self.as_inner().as_inner().st_size as u64
}
fn st_atime(&self) -> i64 {
self.as_inner().as_inner().st_atime as i64
}
fn st_atime_nsec(&self) -> i64 {
self.as_inner().as_inner().st_atime_nsec as i64
}
fn st_mtime(&self) -> i64 {
self.as_inner().as_inner().st_mtime as i64
}
fn st_mtime_nsec(&self) -> i64 {
self.as_inner().as_inner().st_mtime_nsec as i64
}
fn st_ctime(&self) -> i64 {
self.as_inner().as_inner().st_ctime as i64
}
fn st_ctime_nsec(&self) -> i64 {
self.as_inner().as_inner().st_ctime_nsec as i64
}
fn st_birthtime(&self) -> i64 {
self.as_inner().as_inner().st_birthtime as i64
}
fn st_birthtime_nsec(&self) -> i64 {
self.as_inner().as_inner().st_birthtime_nsec as i64
}
fn st_blksize(&self) -> u64 {
self.as_inner().as_inner().st_blksize as u64
}
fn st_blocks(&self) -> u64 {
self.as_inner().as_inner().st_blocks as u64
}
fn st_gen(&self) -> u32 {
self.as_inner().as_inner().st_gen as u32
}
fn st_flags(&self) -> u32 {
self.as_inner().as_inner().st_flags as u32
}
#[cfg(freebsd12)]
fn st_lspare(&self) -> u32 {
panic!("st_lspare not supported with FreeBSD 12 ABI");
}
#[cfg(not(freebsd12))]
fn st_lspare(&self) -> u32 {
self.as_inner().as_inner().st_lspare as u32
}
}<|fim▁end|>
|
#[stable(feature = "metadata_ext2", since = "1.8.0")]
fn st_dev(&self) -> u64;
#[stable(feature = "metadata_ext2", since = "1.8.0")]
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>""" A universal module with functions / classes without dependencies. """
import sys
import contextlib
import functools
import re
import os
from jedi._compatibility import reraise
_sep = os.path.sep
if os.path.altsep is not None:
_sep += os.path.altsep
_path_re = re.compile(r'(?:\.[^{0}]+|[{0}]__init__\.py)$'.format(re.escape(_sep)))
del _sep
def to_list(func):
def wrapper(*args, **kwargs):<|fim▁hole|>
def unite(iterable):
"""Turns a two dimensional array into a one dimensional."""
return set(typ for types in iterable for typ in types)
class UncaughtAttributeError(Exception):
"""
Important, because `__getattr__` and `hasattr` catch AttributeErrors
implicitly. This is really evil (mainly because of `__getattr__`).
`hasattr` in Python 2 is even more evil, because it catches ALL exceptions.
Therefore this class originally had to be derived from `BaseException`
instead of `Exception`. But because I removed relevant `hasattr` from
the code base, we can now switch back to `Exception`.
:param base: return values of sys.exc_info().
"""
def safe_property(func):
return property(reraise_uncaught(func))
def reraise_uncaught(func):
"""
Re-throw uncaught `AttributeError`.
    Usage: Put ``@reraise_uncaught`` in front of a function
    which is **not** supposed to raise `AttributeError`.
    AttributeError is easily caught by `hasattr` and by another
    ``except AttributeError`` clause. This becomes a problem when you use
    a lot of "dynamic" attributes (e.g., using ``@property``) because you
    can't distinguish whether the property does not exist for real or some
    code inside the "dynamic" attribute threw that error. In well-written
    code, such errors should not exist, but getting there is very
    difficult. This decorator helps us get there by changing
    `AttributeError` to `UncaughtAttributeError` to avoid unexpected catches.
This helps us noticing bugs earlier and facilitates debugging.
.. note:: Treating StopIteration here is easy.
Add that feature when needed.
"""
@functools.wraps(func)
def wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except AttributeError:
exc_info = sys.exc_info()
reraise(UncaughtAttributeError(exc_info[1]), exc_info[2])
return wrapper
class PushBackIterator(object):
def __init__(self, iterator):
self.pushes = []
self.iterator = iterator
self.current = None
def push_back(self, value):
self.pushes.append(value)
def __iter__(self):
return self
def next(self):
""" Python 2 Compatibility """
return self.__next__()
def __next__(self):
if self.pushes:
self.current = self.pushes.pop()
else:
self.current = next(self.iterator)
return self.current
@contextlib.contextmanager
def ignored(*exceptions):
"""
Context manager that ignores all of the specified exceptions. This will
be in the standard library starting with Python 3.4.
"""
try:
yield
except exceptions:
pass
def indent_block(text, indention=' '):
"""This function indents a text block with a default of four spaces."""
temp = ''
while text and text[-1] == '\n':
temp += text[-1]
text = text[:-1]
lines = text.split('\n')
return '\n'.join(map(lambda s: indention + s, lines)) + temp
def dotted_from_fs_path(fs_path, sys_path):
"""
Changes `/usr/lib/python3.4/email/utils.py` to `email.utils`. I.e.
compares the path with sys.path and then returns the dotted_path. If the
path is not in the sys.path, just returns None.
"""
if os.path.basename(fs_path).startswith('__init__.'):
# We are calculating the path. __init__ files are not interesting.
fs_path = os.path.dirname(fs_path)
# prefer
# - UNIX
# /path/to/pythonX.Y/lib-dynload
# /path/to/pythonX.Y/site-packages
# - Windows
# C:\path\to\DLLs
# C:\path\to\Lib\site-packages
# over
# - UNIX
# /path/to/pythonX.Y
# - Windows
# C:\path\to\Lib
path = ''
for s in sys_path:
if (fs_path.startswith(s) and len(path) < len(s)):
path = s
    # - Windows
# X:\path\to\lib-dynload/datetime.pyd => datetime
module_path = fs_path[len(path):].lstrip(os.path.sep).lstrip('/')
    # - Windows
# Replace like X:\path\to\something/foo/bar.py
return _path_re.sub('', module_path).replace(os.path.sep, '.').replace('/', '.')<|fim▁end|>
|
return list(func(*args, **kwargs))
return wrapper
|
<|file_name|>webParser.py<|end_file_name|><|fim▁begin|>"""
Copyright 2013 Shine Wang
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib
import re
from HTMLParser import HTMLParser
from courseClasses import Course, Lecture, Tutorial, Reserve
class CustomHTMLParser(HTMLParser):
"""this class reads a HTML stream, then parses out the "data" fields"""
def __init__(self, webData):
HTMLParser.__init__(self)
self.webData = webData
def handle_data(self, data):
"""takes out the data"""
self.webData.append(data.strip())
class WebParser:
""""A WebParser is created for each and every course,
to parse the corresponding web page"""
requestURL = "http://www.adm.uwaterloo.ca/cgi-bin/" \
"cgiwrap/infocour/salook.pl"
def __init__(self):
self.webData = []
self.index = -1
self.session = None
self.thisCourse = None
def run(self, courseString, sessionString):
"""this is the method that the main class can call
if successful, returns the Course class
if not, returns an error message"""
self.session = self.parseSession(sessionString)
if self.session is None:
return "SessionNameWrongError"
courseString = map(lambda x: x.upper(), courseString.split())
try:
self.thisCourse = Course(self.session, courseString[0],
courseString[1])
except:
return "CourseNameWrongError"
if self.getWebData(self.thisCourse):
return "WebPageError"
elif self.parseWebData():
return "CourseNotFoundError"
else:
self.processCourseInfo()
self.postProcess(self.thisCourse)
return self.thisCourse
def parseSession(self, sessionString):
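        # Builds the registrar's session code, e.g. "Fall 2015" -> "1159":
        # "1" + the last two digits of the year + a season digit
        # (fall=9, winter=1, spring=5).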
try:
ret = "1"
ret += sessionString.split()[1][-2:] # last 2 digits of year
tempMap = (("fall", "9"), ("winter", "1"), ("spring", "5"))
for season in tempMap:
if season[0] in sessionString.lower():
ret += season[1]
return ret
except:
return None
def getWebData(self, course):
"""submits a POST query, initializes HTMLParser"""
try:
params = urllib.urlencode({"sess": course.session,
"subject": course.subject,
"cournum": course.catalogNumber})
page = urllib.urlopen(WebParser.requestURL, params)
parser = CustomHTMLParser(self.webData)
            # we use .replace() because HTMLParser ignores "&nbsp;",
            # which would screw up our table
            parser.feed(page.read().replace("&nbsp;", " "))
except:
return "WebPageError"
def parseWebData(self):
"""We try to find the beginning of the desired table"""
# now, we find the start index and pass that on along
# with the webData
for i in xrange(len(self.webData)-3):
if self.webData[i] == self.thisCourse.subject \
and self.webData[i+2] == self.thisCourse.catalogNumber:
self.index = i
break
if self.index == -1: # website not found
return "CourseNotFound"
def processCourseInfo(self):
"""now, we do the heavy-duty processing of the data table"""
# sets basic attrs of thisCourse
self.thisCourse.units = self.webData[self.index+4]
self.thisCourse.title = self.webData[self.index+6]
while self.webData[self.index] != "Instructor":
self.index += 1
# processing row-by-row
while not self.endOfRow(self.webData[self.index]):
if self.webData[self.index] != "":
self.processSlot()
self.index += 1
if self.index == len(self.webData):
return
def processSlot(self):
"""we check to see if this is the BEGINNING of a valid row"""
if (self.webData[self.index+1][:3].upper() == "LEC"
or self.webData[self.index+1][:3].upper() == "LAB") \
and "ONLINE" not in self.webData[self.index+2]:
# we don't want online classes!
# processing a lecture row
lec = Lecture()
if self.processClass(lec, self.index, self.webData):
return
self.thisCourse.lectures.append(lec)
elif self.webData[self.index+1][:3].upper() == "TUT":
# processing a tutorial row
tut = Tutorial()
if self.processClass(tut, self.index, self.webData):
return
self.thisCourse.tutorials.append(tut)
elif self.webData[self.index][:7].upper() == "RESERVE":
# processing a reserve row
res = Reserve()
self.processReserve(res, self.index, self.webData)
if self.thisCourse.lectures:
self.thisCourse.lectures[-1].reserves.append(res)
# note: we leave out the TST (exam?) times for now
def processReserve(self, res, index, webData):
"""processing reservations for certain types of students"""
res.name = webData[index][9:]
# we remove the "only" suffix (which is annoyingly pointless)
if "only" in res.name:
res.name = res.name[:-5]
# also, the "students" suffx<|fim▁hole|> # now, we merge the match list
while not webData[index].isdigit():
index += 1
        # retrieving enrollment numbers
res.enrlCap = int(webData[index])
res.enrlTotal = int(webData[index+1])
def processClass(self, lec, index, webData):
"""we process a typical lecture or tutorial row"""
attr1 = ["classNumber", "compSec", "campusLocation"]
for i in xrange(len(attr1)):
setattr(lec, attr1[i], webData[index+i].strip())
index += 6
attr2 = ["enrlCap", "enrlTotal", "waitCap", "waitTotal"]
for i in xrange(len(attr2)):
setattr(lec, attr2[i], int(webData[index+i]))
index += 4
# parsing the "Times Days/Date" field
match = re.search(r"([:\d]+)-([:\d]+)(\w+)", webData[index])
if not match:
# we return an error message in the "TBA" case
return "NoTimeError"
attr3 = ["startTime", "endTime", "days"]
for i in xrange(len(attr3)):
setattr(lec, attr3[i], match.group(i+1).strip())
index += 1
if len(webData[index].split()) == 2:
# sometimes, no building, room, and instructor will be given
# this is mostly for Laurier courses
lec.building, lec.room = webData[index].split()
lec.instructor = webData[index+1].strip()
def endOfRow(self, data):
"""returns true if the current data-cell is the last cell
of this course; else - false"""
# the last cell is of the form: ##/##-##/## or
# "Information last updated
if re.search(r"\d+/\d+-\d+/\d+", data) or \
"Information last updated" in data:
return True
else:
return False
def postProcess(self, course):
"""this function will convert the class times to minutes-past-
the-previous-midnight, and converts the days to numbers.
Also, some reservation-postprocessing"""
map(lambda x: x.calcMiscSeats(), course.lectures)
for lec in course.lectures:
lec.courseID = course.subject + " " + course.catalogNumber
for tut in course.tutorials:
tut.courseID = course.subject + " " + course.catalogNumber
for slot in course.lectures + course.tutorials:
# first, we convert time to 24hr time
# earliest start time for a class is 8:30am
# night classes start at/before 7:00pm
if 1 <= int(slot.startTime.split(":")[0]) <= 7:
slot.startTime, slot.endTime = \
map(lambda x: "{}:{}".format(str(int(x.split(":")[0])
+ 12), x[-2:]), [slot.startTime,
slot.endTime])
elif int(slot.startTime.split(":")[0]) > int(
slot.endTime.split(":")[0]):
# e.g. 12:00 to 1:00
slot.endTime = "{}:{}".format(str(int(
slot.endTime.split(":")[0])+12), slot.endTime[-2:])
# now, we write to slot.sTime, slot.eTime
# (minutes-past-midnight...)
slot.sTime, slot.eTime = map(lambda x: int(x[:2]) * 60 +
int(x[-2:]),
[slot.startTime, slot.endTime])
# we write to slot.ndays, where ndays is a string of numbers,
# 0->4
if "M" in slot.days:
slot.ndays += "0"
i = slot.days.find("T")
if i != -1 and (i == len(slot.days) - 1 or
slot.days[i+1] != 'h'):
# basically, if not Th (for Thursday)
slot.ndays += "1"
# now, for the rest of the days...
for i in [("W", "2"), ("Th", "3"), ("F", "4")]:
if i[0] in slot.days:
slot.ndays += i[1]
# we make a small adjustment to campusLocation,
# removing whitespace
slot.campusLocation = slot.campusLocation.split()[0]
# we make the prof name "first last" instead of
# "last,first middle"
if slot.instructor != "":
s = slot.instructor.split(" ")
for i in s:
if "," in i:
# we want the 2 words connected by the ","
slot.instructor = " ".join(reversed(list(
i.split(","))))<|fim▁end|>
|
if "students" in res.name or "Students" in res.name:
res.name = res.name[:-9]
|
<|file_name|>chrome_user_manager_impl.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/login/users/chrome_user_manager_impl.h"
#include <cstddef>
#include <set>
#include "ash/multi_profile_uma.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/format_macros.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/prefs/pref_registry_simple.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/scoped_user_pref_update.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/thread_task_runner_handle.h"
#include "base/values.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/chromeos/login/demo_mode/demo_app_launcher.h"
#include "chrome/browser/chromeos/login/session/user_session_manager.h"
#include "chrome/browser/chromeos/login/signin/auth_sync_observer.h"
#include "chrome/browser/chromeos/login/signin/auth_sync_observer_factory.h"
#include "chrome/browser/chromeos/login/users/avatar/user_image_manager_impl.h"
#include "chrome/browser/chromeos/login/users/multi_profile_user_controller.h"
#include "chrome/browser/chromeos/login/users/supervised_user_manager_impl.h"
#include "chrome/browser/chromeos/policy/browser_policy_connector_chromeos.h"
#include "chrome/browser/chromeos/policy/device_local_account.h"
#include "chrome/browser/chromeos/profiles/multiprofiles_session_aborted_dialog.h"
#include "chrome/browser/chromeos/profiles/profile_helper.h"
#include "chrome/browser/chromeos/session_length_limiter.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/supervised_user/chromeos/manager_password_service_factory.h"
#include "chrome/browser/supervised_user/chromeos/supervised_user_password_service_factory.h"
#include "chrome/common/chrome_constants.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/crash_keys.h"
#include "chrome/common/pref_names.h"
#include "chrome/grit/theme_resources.h"
#include "chromeos/chromeos_switches.h"
#include "chromeos/login/user_names.h"
#include "chromeos/settings/cros_settings_names.h"
#include "components/session_manager/core/session_manager.h"
#include "components/user_manager/remove_user_delegate.h"
#include "components/user_manager/user_image/user_image.h"
#include "components/user_manager/user_type.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_service.h"
#include "policy/policy_constants.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/wm/core/wm_core_switches.h"
using content::BrowserThread;
namespace chromeos {
namespace {
// A vector pref of the regular users known on this device, arranged in LRU
// order.
const char kRegularUsers[] = "LoggedInUsers";
// A vector pref of the public accounts defined on this device.
const char kPublicAccounts[] = "PublicAccounts";
// A string pref that gets set when a public account is removed but a user is
// currently logged into that account, requiring the account's data to be
// removed after logout.
const char kPublicAccountPendingDataRemoval[] =
"PublicAccountPendingDataRemoval";
} // namespace
// static
void ChromeUserManagerImpl::RegisterPrefs(PrefRegistrySimple* registry) {
ChromeUserManager::RegisterPrefs(registry);
registry->RegisterListPref(kPublicAccounts);
registry->RegisterStringPref(kPublicAccountPendingDataRemoval, std::string());
SupervisedUserManager::RegisterPrefs(registry);
SessionLengthLimiter::RegisterPrefs(registry);
}
// static
scoped_ptr<ChromeUserManager> ChromeUserManagerImpl::CreateChromeUserManager() {
return scoped_ptr<ChromeUserManager>(new ChromeUserManagerImpl());
}
ChromeUserManagerImpl::ChromeUserManagerImpl()
: ChromeUserManager(base::ThreadTaskRunnerHandle::Get(),
BrowserThread::GetBlockingPool()),
cros_settings_(CrosSettings::Get()),
device_local_account_policy_service_(NULL),
supervised_user_manager_(new SupervisedUserManagerImpl(this)),
weak_factory_(this) {
UpdateNumberOfUsers();
// UserManager instance should be used only on UI thread.
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
registrar_.Add(this,
chrome::NOTIFICATION_OWNERSHIP_STATUS_CHANGED,
content::NotificationService::AllSources());
registrar_.Add(this,
chrome::NOTIFICATION_LOGIN_USER_PROFILE_PREPARED,
content::NotificationService::AllSources());
registrar_.Add(this,
chrome::NOTIFICATION_PROFILE_CREATED,
content::NotificationService::AllSources());
  // Since we're in the ctor, postpone any actions until this object is
  // fully created.
if (base::MessageLoop::current()) {
base::MessageLoop::current()->PostTask(
FROM_HERE,
base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies,
weak_factory_.GetWeakPtr()));
}
local_accounts_subscription_ = cros_settings_->AddSettingsObserver(
kAccountsPrefDeviceLocalAccounts,
base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies,
weak_factory_.GetWeakPtr()));
multi_profile_user_controller_.reset(
new MultiProfileUserController(this, GetLocalState()));
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
avatar_policy_observer_.reset(new policy::CloudExternalDataPolicyObserver(
cros_settings_,
connector->GetDeviceLocalAccountPolicyService(),
policy::key::kUserAvatarImage,
this));
avatar_policy_observer_->Init();
wallpaper_policy_observer_.reset(new policy::CloudExternalDataPolicyObserver(
cros_settings_,
connector->GetDeviceLocalAccountPolicyService(),
policy::key::kWallpaperImage,
this));
wallpaper_policy_observer_->Init();
}
ChromeUserManagerImpl::~ChromeUserManagerImpl() {
}
void ChromeUserManagerImpl::Shutdown() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::Shutdown();
local_accounts_subscription_.reset();
// Stop the session length limiter.
session_length_limiter_.reset();
if (device_local_account_policy_service_)
device_local_account_policy_service_->RemoveObserver(this);
for (UserImageManagerMap::iterator it = user_image_managers_.begin(),
ie = user_image_managers_.end();
it != ie;
++it) {
it->second->Shutdown();
}
multi_profile_user_controller_.reset();
avatar_policy_observer_.reset();
wallpaper_policy_observer_.reset();
registrar_.RemoveAll();
}
MultiProfileUserController*
ChromeUserManagerImpl::GetMultiProfileUserController() {
return multi_profile_user_controller_.get();
}
UserImageManager* ChromeUserManagerImpl::GetUserImageManager(
const std::string& user_id) {
UserImageManagerMap::iterator ui = user_image_managers_.find(user_id);
if (ui != user_image_managers_.end())
return ui->second.get();
linked_ptr<UserImageManagerImpl> mgr(new UserImageManagerImpl(user_id, this));
user_image_managers_[user_id] = mgr;
return mgr.get();
}
SupervisedUserManager* ChromeUserManagerImpl::GetSupervisedUserManager() {
return supervised_user_manager_.get();
}
user_manager::UserList ChromeUserManagerImpl::GetUsersAdmittedForMultiProfile()
const {
// Supervised users are not allowed to use multi-profiles.
if (GetLoggedInUsers().size() == 1 &&
GetPrimaryUser()->GetType() != user_manager::USER_TYPE_REGULAR) {
return user_manager::UserList();
}
user_manager::UserList result;
const user_manager::UserList& users = GetUsers();
for (user_manager::UserList::const_iterator it = users.begin();
it != users.end();
++it) {
if ((*it)->GetType() == user_manager::USER_TYPE_REGULAR &&
!(*it)->is_logged_in()) {
MultiProfileUserController::UserAllowedInSessionReason check;
multi_profile_user_controller_->IsUserAllowedInSession((*it)->email(),
&check);
if (check ==
MultiProfileUserController::NOT_ALLOWED_PRIMARY_USER_POLICY_FORBIDS) {
return user_manager::UserList();
}
// Users with a policy that prevents them being added to a session will be
// shown in login UI but will be grayed out.
// Same applies to owner account (see http://crbug.com/385034).
if (check == MultiProfileUserController::ALLOWED ||
check == MultiProfileUserController::NOT_ALLOWED_POLICY_FORBIDS ||
check == MultiProfileUserController::NOT_ALLOWED_OWNER_AS_SECONDARY ||
check ==
MultiProfileUserController::NOT_ALLOWED_POLICY_CERT_TAINTED) {
result.push_back(*it);
}
}
}
return result;
}
user_manager::UserList ChromeUserManagerImpl::GetUnlockUsers() const {
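  // Builds the set of users that may be offered on the lock screen,
  // honoring each logged-in profile's multi-profile behavior pref.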
const user_manager::UserList& logged_in_users = GetLoggedInUsers();
if (logged_in_users.empty())
return user_manager::UserList();
user_manager::UserList unlock_users;
Profile* profile =
ProfileHelper::Get()->GetProfileByUserUnsafe(GetPrimaryUser());
std::string primary_behavior =
profile->GetPrefs()->GetString(prefs::kMultiProfileUserBehavior);
// Specific case: only one logged in user or
// primary user has primary-only multi-profile policy.
if (logged_in_users.size() == 1 ||
primary_behavior == MultiProfileUserController::kBehaviorPrimaryOnly) {
if (GetPrimaryUser()->can_lock())
unlock_users.push_back(primary_user_);
} else {
// Fill list of potential unlock users based on multi-profile policy state.
for (user_manager::UserList::const_iterator it = logged_in_users.begin();
it != logged_in_users.end();
++it) {
user_manager::User* user = (*it);
Profile* profile = ProfileHelper::Get()->GetProfileByUserUnsafe(user);
const std::string behavior =
profile->GetPrefs()->GetString(prefs::kMultiProfileUserBehavior);
if (behavior == MultiProfileUserController::kBehaviorUnrestricted &&
user->can_lock()) {
unlock_users.push_back(user);
} else if (behavior == MultiProfileUserController::kBehaviorPrimaryOnly) {
NOTREACHED()
<< "Spotted primary-only multi-profile policy for non-primary user";
}
}
}
return unlock_users;
}
void ChromeUserManagerImpl::SessionStarted() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::SessionStarted();
content::NotificationService::current()->Notify(
chrome::NOTIFICATION_SESSION_STARTED,
content::Source<UserManager>(this),
content::Details<const user_manager::User>(GetActiveUser()));
}
void ChromeUserManagerImpl::RemoveUserInternal(
const std::string& user_email,
user_manager::RemoveUserDelegate* delegate) {
CrosSettings* cros_settings = CrosSettings::Get();
const base::Closure& callback =
base::Bind(&ChromeUserManagerImpl::RemoveUserInternal,
weak_factory_.GetWeakPtr(),
user_email,
delegate);
// Ensure the value of owner email has been fetched.
if (CrosSettingsProvider::TRUSTED !=
cros_settings->PrepareTrustedValues(callback)) {
// Value of owner email is not fetched yet. RemoveUserInternal will be
// called again after fetch completion.
return;
}
std::string owner;
cros_settings->GetString(kDeviceOwner, &owner);
if (user_email == owner) {
// Owner is not allowed to be removed from the device.
return;
}
RemoveNonOwnerUserInternal(user_email, delegate);
}
void ChromeUserManagerImpl::SaveUserOAuthStatus(
const std::string& user_id,
user_manager::User::OAuthTokenStatus oauth_token_status) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::SaveUserOAuthStatus(user_id, oauth_token_status);
GetUserFlow(user_id)->HandleOAuthTokenStatusChange(oauth_token_status);
}
void ChromeUserManagerImpl::SaveUserDisplayName(
const std::string& user_id,
const base::string16& display_name) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::SaveUserDisplayName(user_id, display_name);
// Do not update local state if data stored or cached outside the user's
// cryptohome is to be treated as ephemeral.
if (!IsUserNonCryptohomeDataEphemeral(user_id))
supervised_user_manager_->UpdateManagerName(user_id, display_name);
}
void ChromeUserManagerImpl::StopPolicyObserverForTesting() {
avatar_policy_observer_.reset();
wallpaper_policy_observer_.reset();
}
void ChromeUserManagerImpl::Observe(
int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) {
switch (type) {
case chrome::NOTIFICATION_OWNERSHIP_STATUS_CHANGED:
if (!device_local_account_policy_service_) {
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()
->browser_policy_connector_chromeos();
device_local_account_policy_service_ =
connector->GetDeviceLocalAccountPolicyService();
if (device_local_account_policy_service_)
device_local_account_policy_service_->AddObserver(this);
}
RetrieveTrustedDevicePolicies();
UpdateOwnership();
break;
case chrome::NOTIFICATION_LOGIN_USER_PROFILE_PREPARED: {
Profile* profile = content::Details<Profile>(details).ptr();
if (IsUserLoggedIn() && !IsLoggedInAsGuest() && !IsLoggedInAsKioskApp()) {
if (IsLoggedInAsSupervisedUser())
SupervisedUserPasswordServiceFactory::GetForProfile(profile);
if (IsLoggedInAsRegularUser())
ManagerPasswordServiceFactory::GetForProfile(profile);
if (!profile->IsOffTheRecord()) {
AuthSyncObserver* sync_observer =
AuthSyncObserverFactory::GetInstance()->GetForProfile(profile);
sync_observer->StartObserving();
multi_profile_user_controller_->StartObserving(profile);
}
}
break;
}
case chrome::NOTIFICATION_PROFILE_CREATED: {
Profile* profile = content::Source<Profile>(source).ptr();
user_manager::User* user =
ProfileHelper::Get()->GetUserByProfile(profile);
if (user != NULL)
user->set_profile_is_created();
// If there is pending user switch, do it now.
if (!GetPendingUserSwitchID().empty()) {
// Call SwitchActiveUser async because otherwise it may cause
// ProfileManager::GetProfile before the profile gets registered
// in ProfileManager. It happens in case of sync profile load when
// NOTIFICATION_PROFILE_CREATED is called synchronously.
base::MessageLoop::current()->PostTask(
FROM_HERE,
base::Bind(&ChromeUserManagerImpl::SwitchActiveUser,
weak_factory_.GetWeakPtr(),
GetPendingUserSwitchID()));
SetPendingUserSwitchID(std::string());
}
break;
}
default:
NOTREACHED();
}
}
void ChromeUserManagerImpl::OnExternalDataSet(const std::string& policy,
const std::string& user_id) {
if (policy == policy::key::kUserAvatarImage)
GetUserImageManager(user_id)->OnExternalDataSet(policy);
else if (policy == policy::key::kWallpaperImage)
WallpaperManager::Get()->OnPolicySet(policy, user_id);
else
NOTREACHED();
}
void ChromeUserManagerImpl::OnExternalDataCleared(const std::string& policy,
const std::string& user_id) {
if (policy == policy::key::kUserAvatarImage)
GetUserImageManager(user_id)->OnExternalDataCleared(policy);
else if (policy == policy::key::kWallpaperImage)
WallpaperManager::Get()->OnPolicyCleared(policy, user_id);
else
NOTREACHED();
}
void ChromeUserManagerImpl::OnExternalDataFetched(
const std::string& policy,
const std::string& user_id,
scoped_ptr<std::string> data) {
if (policy == policy::key::kUserAvatarImage)
GetUserImageManager(user_id)->OnExternalDataFetched(policy, data.Pass());
else if (policy == policy::key::kWallpaperImage)
WallpaperManager::Get()->OnPolicyFetched(policy, user_id, data.Pass());
else
NOTREACHED();
}
void ChromeUserManagerImpl::OnPolicyUpdated(const std::string& user_id) {
const user_manager::User* user = FindUser(user_id);
if (!user || user->GetType() != user_manager::USER_TYPE_PUBLIC_ACCOUNT)
return;
UpdatePublicAccountDisplayName(user_id);
}
void ChromeUserManagerImpl::OnDeviceLocalAccountsChanged() {
// No action needed here, changes to the list of device-local accounts get
// handled via the kAccountsPrefDeviceLocalAccounts device setting observer.
}
bool ChromeUserManagerImpl::CanCurrentUserLock() const {
return ChromeUserManager::CanCurrentUserLock() &&
GetCurrentUserFlow()->CanLockScreen();
}
bool ChromeUserManagerImpl::IsUserNonCryptohomeDataEphemeral(
const std::string& user_id) const {
// Data belonging to the obsolete public accounts whose data has not been
// removed yet is not ephemeral.
bool is_obsolete_public_account = IsPublicAccountMarkedForRemoval(user_id);
return !is_obsolete_public_account &&
ChromeUserManager::IsUserNonCryptohomeDataEphemeral(user_id);
}
bool ChromeUserManagerImpl::AreEphemeralUsersEnabled() const {
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
return GetEphemeralUsersEnabled() &&
(connector->IsEnterpriseManaged() || !GetOwnerEmail().empty());
}
const std::string& ChromeUserManagerImpl::GetApplicationLocale() const {
return g_browser_process->GetApplicationLocale();
}
PrefService* ChromeUserManagerImpl::GetLocalState() const {
return g_browser_process ? g_browser_process->local_state() : NULL;
}
void ChromeUserManagerImpl::HandleUserOAuthTokenStatusChange(
const std::string& user_id,
user_manager::User::OAuthTokenStatus status) const {
GetUserFlow(user_id)->HandleOAuthTokenStatusChange(status);
}
bool ChromeUserManagerImpl::IsEnterpriseManaged() const {
policy::BrowserPolicyConnectorChromeOS* connector =
g_browser_process->platform_part()->browser_policy_connector_chromeos();
return connector->IsEnterpriseManaged();
}
void ChromeUserManagerImpl::LoadPublicAccounts(
std::set<std::string>* public_sessions_set) {
const base::ListValue* prefs_public_sessions =
GetLocalState()->GetList(kPublicAccounts);
std::vector<std::string> public_sessions;
ParseUserList(*prefs_public_sessions,
std::set<std::string>(),
&public_sessions,
public_sessions_set);
for (std::vector<std::string>::const_iterator it = public_sessions.begin();
it != public_sessions.end();
++it) {
users_.push_back(user_manager::User::CreatePublicAccountUser(*it));
UpdatePublicAccountDisplayName(*it);
}
}
void ChromeUserManagerImpl::PerformPreUserListLoadingActions() {
// Clean up user list first. All code down the path should be synchronous,
  // so that local state after transaction rollback is in a consistent state.
// This process also should not trigger EnsureUsersLoaded again.
if (supervised_user_manager_->HasFailedUserCreationTransaction())
supervised_user_manager_->RollbackUserCreationTransaction();
}
void ChromeUserManagerImpl::PerformPostUserListLoadingActions() {
for (user_manager::UserList::iterator ui = users_.begin(), ue = users_.end();
ui != ue;
++ui) {
GetUserImageManager((*ui)->email())->LoadUserImage();
}
}
void ChromeUserManagerImpl::PerformPostUserLoggedInActions(
bool browser_restart) {
// Initialize the session length limiter and start it only if
// session limit is defined by the policy.
session_length_limiter_.reset(
new SessionLengthLimiter(NULL, browser_restart));
}
bool ChromeUserManagerImpl::IsDemoApp(const std::string& user_id) const {
return DemoAppLauncher::IsDemoAppSession(user_id);
}
bool ChromeUserManagerImpl::IsKioskApp(const std::string& user_id) const {
policy::DeviceLocalAccount::Type device_local_account_type;
return policy::IsDeviceLocalAccountUser(user_id,
&device_local_account_type) &&
device_local_account_type ==
policy::DeviceLocalAccount::TYPE_KIOSK_APP;
}
bool ChromeUserManagerImpl::IsPublicAccountMarkedForRemoval(
const std::string& user_id) const {
return user_id ==
GetLocalState()->GetString(kPublicAccountPendingDataRemoval);
}
void ChromeUserManagerImpl::RetrieveTrustedDevicePolicies() {
// Local state may not be initialized in unit_tests.
if (!GetLocalState())
return;
SetEphemeralUsersEnabled(false);
SetOwnerEmail(std::string());
// Schedule a callback if device policy has not yet been verified.
if (CrosSettingsProvider::TRUSTED !=
cros_settings_->PrepareTrustedValues(
base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies,
weak_factory_.GetWeakPtr()))) {
return;
}
bool ephemeral_users_enabled = false;
cros_settings_->GetBoolean(kAccountsPrefEphemeralUsersEnabled,
&ephemeral_users_enabled);
SetEphemeralUsersEnabled(ephemeral_users_enabled);
std::string owner_email;
cros_settings_->GetString(kDeviceOwner, &owner_email);
SetOwnerEmail(owner_email);
EnsureUsersLoaded();
bool changed = UpdateAndCleanUpPublicAccounts(
policy::GetDeviceLocalAccounts(cros_settings_));
// If ephemeral users are enabled and we are on the login screen, take this
// opportunity to clean up by removing all regular users except the owner.
if (GetEphemeralUsersEnabled() && !IsUserLoggedIn()) {
ListPrefUpdate prefs_users_update(GetLocalState(), kRegularUsers);
prefs_users_update->Clear();
for (user_manager::UserList::iterator it = users_.begin();
it != users_.end();) {
const std::string user_email = (*it)->email();
if ((*it)->GetType() == user_manager::USER_TYPE_REGULAR &&
user_email != GetOwnerEmail()) {
RemoveNonCryptohomeData(user_email);
DeleteUser(*it);
it = users_.erase(it);
changed = true;
} else {
if ((*it)->GetType() != user_manager::USER_TYPE_PUBLIC_ACCOUNT)
prefs_users_update->Append(new base::StringValue(user_email));
++it;
}
}
}
if (changed)
NotifyUserListChanged();
}
void ChromeUserManagerImpl::GuestUserLoggedIn() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::GuestUserLoggedIn();
// TODO(nkostylev): Add support for passing guest session cryptohome
// mount point. Legacy (--login-profile) value will be used for now.
// http://crosbug.com/230859
active_user_->SetStubImage(
user_manager::UserImage(
*ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
IDR_PROFILE_PICTURE_LOADING)),
user_manager::User::USER_IMAGE_INVALID,
false);
// Initializes wallpaper after active_user_ is set.
WallpaperManager::Get()->SetUserWallpaperNow(chromeos::login::kGuestUserName);
}
void ChromeUserManagerImpl::RegularUserLoggedIn(const std::string& user_id) {
ChromeUserManager::RegularUserLoggedIn(user_id);
if (IsCurrentUserNew())
WallpaperManager::Get()->SetUserWallpaperNow(user_id);
GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), false);
WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded();
// Make sure that new data is persisted to Local State.
GetLocalState()->CommitPendingWrite();
}
void ChromeUserManagerImpl::RegularUserLoggedInAsEphemeral(
const std::string& user_id) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ChromeUserManager::RegularUserLoggedInAsEphemeral(user_id);
GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), false);
WallpaperManager::Get()->SetUserWallpaperNow(user_id);
}
void ChromeUserManagerImpl::SupervisedUserLoggedIn(const std::string& user_id) {
// TODO(nkostylev): Refactor, share code with RegularUserLoggedIn().
// Remove the user from the user list.
active_user_ = RemoveRegularOrSupervisedUserFromList(user_id);
// If the user was not found on the user list, create a new user.
if (!GetActiveUser()) {
SetIsCurrentUserNew(true);
active_user_ = user_manager::User::CreateSupervisedUser(user_id);
// Leaving OAuth token status at the default state = unknown.
WallpaperManager::Get()->SetUserWallpaperNow(user_id);
} else {
if (supervised_user_manager_->CheckForFirstRun(user_id)) {
SetIsCurrentUserNew(true);
WallpaperManager::Get()->SetUserWallpaperNow(user_id);
} else {
SetIsCurrentUserNew(false);
}
}
// Add the user to the front of the user list.
ListPrefUpdate prefs_users_update(GetLocalState(), kRegularUsers);
prefs_users_update->Insert(0, new base::StringValue(user_id));
users_.insert(users_.begin(), active_user_);
// Now that user is in the list, save display name.
if (IsCurrentUserNew()) {
SaveUserDisplayName(GetActiveUser()->email(),
GetActiveUser()->GetDisplayName());
}
GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), true);
WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded();
// Make sure that new data is persisted to Local State.
GetLocalState()->CommitPendingWrite();
}
void ChromeUserManagerImpl::PublicAccountUserLoggedIn(
user_manager::User* user) {
SetIsCurrentUserNew(true);
active_user_ = user;
// The UserImageManager chooses a random avatar picture when a user logs in
// for the first time. Tell the UserImageManager that this user is not new to
// prevent the avatar from getting changed.
GetUserImageManager(user->email())->UserLoggedIn(false, true);
WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded();
}
void ChromeUserManagerImpl::KioskAppLoggedIn(const std::string& app_id) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
policy::DeviceLocalAccount::Type device_local_account_type;
DCHECK(policy::IsDeviceLocalAccountUser(app_id, &device_local_account_type));
DCHECK_EQ(policy::DeviceLocalAccount::TYPE_KIOSK_APP,
device_local_account_type);
active_user_ = user_manager::User::CreateKioskAppUser(app_id);
active_user_->SetStubImage(
user_manager::UserImage(
*ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
IDR_PROFILE_PICTURE_LOADING)),
user_manager::User::USER_IMAGE_INVALID,
false);
WallpaperManager::Get()->SetUserWallpaperNow(app_id);
// TODO(bartfab): Add KioskAppUsers to the users_ list and keep metadata like
// the kiosk_app_id in these objects, removing the need to re-parse the
// device-local account list here to extract the kiosk_app_id.
const std::vector<policy::DeviceLocalAccount> device_local_accounts =
policy::GetDeviceLocalAccounts(cros_settings_);
const policy::DeviceLocalAccount* account = NULL;
for (std::vector<policy::DeviceLocalAccount>::const_iterator it =
device_local_accounts.begin();
it != device_local_accounts.end();
++it) {
if (it->user_id == app_id) {
account = &*it;
break;
}
}
std::string kiosk_app_id;
if (account) {
kiosk_app_id = account->kiosk_app_id;
} else {
LOG(ERROR) << "Logged into nonexistent kiosk-app account: " << app_id;
NOTREACHED();
}
CommandLine* command_line = CommandLine::ForCurrentProcess();
command_line->AppendSwitch(::switches::kForceAppMode);
command_line->AppendSwitchASCII(::switches::kAppId, kiosk_app_id);
// Disable window animation since kiosk app runs in a single full screen
// window and window animation causes start-up janks.
command_line->AppendSwitch(wm::switches::kWindowAnimationsDisabled);
}
void ChromeUserManagerImpl::DemoAccountLoggedIn() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
active_user_ =
user_manager::User::CreateKioskAppUser(DemoAppLauncher::kDemoUserName);
active_user_->SetStubImage(
user_manager::UserImage(
*ResourceBundle::GetSharedInstance().GetImageSkiaNamed(
IDR_PROFILE_PICTURE_LOADING)),
user_manager::User::USER_IMAGE_INVALID,
false);
WallpaperManager::Get()->SetUserWallpaperNow(DemoAppLauncher::kDemoUserName);
CommandLine* command_line = CommandLine::ForCurrentProcess();
command_line->AppendSwitch(::switches::kForceAppMode);
command_line->AppendSwitchASCII(::switches::kAppId,
DemoAppLauncher::kDemoAppId);
// Disable window animation since the demo app runs in a single full screen
// window and window animation causes start-up janks.
CommandLine::ForCurrentProcess()->AppendSwitch(
wm::switches::kWindowAnimationsDisabled);
}
void ChromeUserManagerImpl::RetailModeUserLoggedIn() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
SetIsCurrentUserNew(true);
active_user_ = user_manager::User::CreateRetailModeUser();
GetUserImageManager(chromeos::login::kRetailModeUserName)
->UserLoggedIn(IsCurrentUserNew(), true);
WallpaperManager::Get()->SetUserWallpaperNow(
chromeos::login::kRetailModeUserName);
}
void ChromeUserManagerImpl::NotifyOnLogin() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
UserSessionManager::OverrideHomedir();
UpdateNumberOfUsers();
ChromeUserManager::NotifyOnLogin();
// TODO(nkostylev): Deprecate this notification in favor of
// ActiveUserChanged() observer call.
content::NotificationService::current()->Notify(
chrome::NOTIFICATION_LOGIN_USER_CHANGED,
content::Source<UserManager>(this),
content::Details<const user_manager::User>(GetActiveUser()));
UserSessionManager::GetInstance()->PerformPostUserLoggedInActions();
}
void ChromeUserManagerImpl::UpdateOwnership() {
bool is_owner = DeviceSettingsService::Get()->HasPrivateOwnerKey();
VLOG(1) << "Current user " << (is_owner ? "is owner" : "is not owner");
SetCurrentUserIsOwner(is_owner);
}
void ChromeUserManagerImpl::RemoveNonCryptohomeData(
const std::string& user_id) {
ChromeUserManager::RemoveNonCryptohomeData(user_id);
WallpaperManager::Get()->RemoveUserWallpaperInfo(user_id);
GetUserImageManager(user_id)->DeleteUserImage();
supervised_user_manager_->RemoveNonCryptohomeData(user_id);
multi_profile_user_controller_->RemoveCachedValues(user_id);
}
void
ChromeUserManagerImpl::CleanUpPublicAccountNonCryptohomeDataPendingRemoval() {
PrefService* local_state = GetLocalState();
const std::string public_account_pending_data_removal =
local_state->GetString(kPublicAccountPendingDataRemoval);
if (public_account_pending_data_removal.empty() ||
(IsUserLoggedIn() &&
public_account_pending_data_removal == GetActiveUser()->email())) {
return;
}
RemoveNonCryptohomeData(public_account_pending_data_removal);
local_state->ClearPref(kPublicAccountPendingDataRemoval);
}
void ChromeUserManagerImpl::CleanUpPublicAccountNonCryptohomeData(
const std::vector<std::string>& old_public_accounts) {
std::set<std::string> users;
for (user_manager::UserList::const_iterator it = users_.begin();
it != users_.end();
++it)
users.insert((*it)->email());
// If the user is logged into a public account that has been removed from the
// user list, mark the account's data as pending removal after logout.
if (IsLoggedInAsPublicAccount()) {
const std::string active_user_id = GetActiveUser()->email();
if (users.find(active_user_id) == users.end()) {
GetLocalState()->SetString(kPublicAccountPendingDataRemoval,
active_user_id);
users.insert(active_user_id);
}
}
// Remove the data belonging to any other public accounts that are no longer
// found on the user list.
for (std::vector<std::string>::const_iterator it =
old_public_accounts.begin();
it != old_public_accounts.end();
++it) {
if (users.find(*it) == users.end())
RemoveNonCryptohomeData(*it);
}
}
bool ChromeUserManagerImpl::UpdateAndCleanUpPublicAccounts(
const std::vector<policy::DeviceLocalAccount>& device_local_accounts) {
// Try to remove any public account data marked as pending removal.<|fim▁hole|> CleanUpPublicAccountNonCryptohomeDataPendingRemoval();
// Get the current list of public accounts.
std::vector<std::string> old_public_accounts;
for (user_manager::UserList::const_iterator it = users_.begin();
it != users_.end();
++it) {
if ((*it)->GetType() == user_manager::USER_TYPE_PUBLIC_ACCOUNT)
old_public_accounts.push_back((*it)->email());
}
// Get the new list of public accounts from policy.
std::vector<std::string> new_public_accounts;
for (std::vector<policy::DeviceLocalAccount>::const_iterator it =
device_local_accounts.begin();
it != device_local_accounts.end();
++it) {
// TODO(mnissler, nkostylev, bartfab): Process Kiosk Apps within the
// standard login framework: http://crbug.com/234694
if (it->type == policy::DeviceLocalAccount::TYPE_PUBLIC_SESSION)
new_public_accounts.push_back(it->user_id);
}
// If the list of public accounts has not changed, return.
if (new_public_accounts.size() == old_public_accounts.size()) {
bool changed = false;
for (size_t i = 0; i < new_public_accounts.size(); ++i) {
if (new_public_accounts[i] != old_public_accounts[i]) {
changed = true;
break;
}
}
if (!changed)
return false;
}
// Persist the new list of public accounts in a pref.
ListPrefUpdate prefs_public_accounts_update(GetLocalState(), kPublicAccounts);
prefs_public_accounts_update->Clear();
for (std::vector<std::string>::const_iterator it =
new_public_accounts.begin();
it != new_public_accounts.end();
++it) {
prefs_public_accounts_update->AppendString(*it);
}
// Remove the old public accounts from the user list.
for (user_manager::UserList::iterator it = users_.begin();
it != users_.end();) {
if ((*it)->GetType() == user_manager::USER_TYPE_PUBLIC_ACCOUNT) {
if (*it != GetLoggedInUser())
DeleteUser(*it);
it = users_.erase(it);
} else {
++it;
}
}
// Add the new public accounts to the front of the user list.
for (std::vector<std::string>::const_reverse_iterator it =
new_public_accounts.rbegin();
it != new_public_accounts.rend();
++it) {
if (IsLoggedInAsPublicAccount() && *it == GetActiveUser()->email())
users_.insert(users_.begin(), GetLoggedInUser());
else
users_.insert(users_.begin(),
user_manager::User::CreatePublicAccountUser(*it));
UpdatePublicAccountDisplayName(*it);
}
for (user_manager::UserList::iterator
ui = users_.begin(),
ue = users_.begin() + new_public_accounts.size();
ui != ue;
++ui) {
GetUserImageManager((*ui)->email())->LoadUserImage();
}
// Remove data belonging to public accounts that are no longer found on the
// user list.
CleanUpPublicAccountNonCryptohomeData(old_public_accounts);
return true;
}
void ChromeUserManagerImpl::UpdatePublicAccountDisplayName(
const std::string& user_id) {
std::string display_name;
if (device_local_account_policy_service_) {
policy::DeviceLocalAccountPolicyBroker* broker =
device_local_account_policy_service_->GetBrokerForUser(user_id);
if (broker)
display_name = broker->GetDisplayName();
}
// Set or clear the display name.
SaveUserDisplayName(user_id, base::UTF8ToUTF16(display_name));
}
UserFlow* ChromeUserManagerImpl::GetCurrentUserFlow() const {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
if (!IsUserLoggedIn())
return GetDefaultUserFlow();
return GetUserFlow(GetLoggedInUser()->email());
}
UserFlow* ChromeUserManagerImpl::GetUserFlow(const std::string& user_id) const {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
FlowMap::const_iterator it = specific_flows_.find(user_id);
if (it != specific_flows_.end())
return it->second;
return GetDefaultUserFlow();
}
void ChromeUserManagerImpl::SetUserFlow(const std::string& user_id,
UserFlow* flow) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
ResetUserFlow(user_id);
specific_flows_[user_id] = flow;
}
void ChromeUserManagerImpl::ResetUserFlow(const std::string& user_id) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
FlowMap::iterator it = specific_flows_.find(user_id);
if (it != specific_flows_.end()) {
delete it->second;
specific_flows_.erase(it);
}
}
bool ChromeUserManagerImpl::AreSupervisedUsersAllowed() const {
bool supervised_users_allowed = false;
cros_settings_->GetBoolean(kAccountsPrefSupervisedUsersEnabled,
&supervised_users_allowed);
return supervised_users_allowed;
}
UserFlow* ChromeUserManagerImpl::GetDefaultUserFlow() const {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
if (!default_flow_.get())
default_flow_.reset(new DefaultUserFlow());
return default_flow_.get();
}
void ChromeUserManagerImpl::NotifyUserListChanged() {
content::NotificationService::current()->Notify(
chrome::NOTIFICATION_USER_LIST_CHANGED,
content::Source<UserManager>(this),
content::NotificationService::NoDetails());
}
void ChromeUserManagerImpl::NotifyUserAddedToSession(
const user_manager::User* added_user,
bool user_switch_pending) {
if (user_switch_pending)
SetPendingUserSwitchID(added_user->email());
UpdateNumberOfUsers();
ChromeUserManager::NotifyUserAddedToSession(added_user, user_switch_pending);
}
void ChromeUserManagerImpl::OnUserNotAllowed(const std::string& user_email) {
LOG(ERROR) << "Shutdown session because a user is not allowed to be in the "
"current session";
chromeos::ShowMultiprofilesSessionAbortedDialog(user_email);
}
void ChromeUserManagerImpl::UpdateNumberOfUsers() {
size_t users = GetLoggedInUsers().size();
if (users) {
// Write the user number as UMA stat when a multi user session is possible.
if ((users + GetUsersAdmittedForMultiProfile().size()) > 1)
ash::MultiProfileUMA::RecordUserCount(users);
}
base::debug::SetCrashKeyValue(
crash_keys::kNumberOfUsers,
base::StringPrintf("%" PRIuS, GetLoggedInUsers().size()));
}
} // namespace chromeos<|fim▁end|>
| |
<|file_name|>CategoryTree.py<|end_file_name|><|fim▁begin|>import json
from collections import (
Counter,
defaultdict as deft
)
from copy import deepcopy as cp
# from cPickle import (
# dump as to_pickle,
# load as from_pickle
# )
from StringIO import StringIO
from TfIdfMatrix import TfIdfMatrix
from Tools import from_csv
class CategoryTree:
def __init__(self, categories_by_concept, terms,
categories, tfidf, max_depth=5, min_df=20
):
self.min_df = min_df
self.path_categories_by_concept = categories_by_concept
self.path_categories = categories
self.path_terms = terms
self.max_depth = max_depth
self.observed_category = deft(bool)
self.id_by_concept = dict([])
self.concept_by_id = dict([])
self.term_is_category = deft(bool)
self.parents_by_category = dict([])
self.parents_by_concept = deft(list)
self.id_by_term = dict([])
self.term_by_id = dict([])
self.has_parents = deft(bool)
self.tfidf = tfidf
self.pulling = set([])
self.vector_by_category = deft(Counter)
self.contributors_by_category = deft(set)
self.projected = Counter()
def build(self):
for i, c in enumerate(self.concept_by_id.values()):
self(c)
if not i % 100:
t = float(len(self.concept_by_id.keys()))
print i, int(t), round(i / t, 2)
# if i >= 5000:
# break
def dump(self):
        # Write each projected category's top-weighted features to a text dump.
out = open('vector.dump.txt', 'wb')
for i, (_id, projections) in enumerate(self.projected.items()):
if not i % 100:
print i, len(self.projected.keys())
if not projections:
continue
features = [<|fim▁hole|> (self.tfidf.word_by_id[wid], round(weight, 4))
for wid, weight in self.vector_by_category[_id].most_common()
if round(weight, 4)
]
record = (
_id,
self.concept_by_id[_id],
features
)
out.write('%s\n' % str(record))
out.close()
def __call__(self, category):
self.pulling = set([])
return self.__pull(None, 0, category, dict([]))
def __get_parents(self, _id):
parents = []
name = self.concept_by_id[_id]
if (
not self.observed_category[name] or
not self.observed_category[_id] or
not self.has_parents[_id]
):
return []
else:
for i in self.parents_by_category[_id]:
if not self.observed_category[i]:
continue
_name = self.concept_by_id[i]
parents.append(_name)
return set(parents) - self.pulling
def __pull(self, vector, depth, category, tree):
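        # Climbs from `category` to its ancestors (bounded by max_depth),
        # building a nested dict of parent categories; each level also
        # projects the source vector onto its subtree via __project.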
_id = self.id_by_concept[category]
if not self.pulling:
# print
# print
# print category, _id
# print [self.term_by_id[x] for x in self.contributors_by_category[_id]]
# print self.vector_by_category[_id].most_common(20)
vector = self.vector_by_category[_id]
if not self.observed_category[category]:
return dict([])
parents = self.__get_parents(_id)
if not parents or depth >= self.max_depth:
tree[category] = dict([])
else:
subtree = dict([])
self.pulling.update(parents)
for parent in parents:
subtree = self.__pull(vector, depth + 1, parent, subtree)
tree[category] = subtree
self.__project(vector, tree)
return tree
def __project(self, vector, tree):
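        # Depth-first walk: count a projection for every category in the
        # tree and accumulate `vector` into it via __add2vec.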
if not tree.keys():
return
else:
for key, subtree in tree.items():
_id = self.id_by_concept[key]
self.projected[_id] += 1
self.__add2vec(vector, _id)
self.__project(vector, subtree)
def __add2vec(self, vector, _id):
# for w, weight in vector.items():
# __id = self.tfidf.id_by_word[w]
for __id, weight in vector.items():
self.vector_by_category[_id][__id] += weight
def load(self):
self.__load_terms()
self.__load_categories()
self.__load_assignments()
def __load_categories(self):
for concept, _id in from_csv(self.path_categories):
_id = int(_id)
self.id_by_concept[concept] = _id
self.concept_by_id[_id] = concept
self.observed_category[_id] = True
self.observed_category[concept] = True
# print concept, _id, len(self.id_by_concept.keys())
# exit()
def __load_terms(self):
for term, _id in from_csv(self.path_terms):
_id = int(_id)
self.term_by_id[_id] = term
self.id_by_term[term] = _id
if not term.startswith('Category:'):
continue
self.term_is_category[term] = True
self.term_is_category[_id] = True
def __load_assignments(self):
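        # Each CSV row holds a term id followed by its parent-category
        # ids. Category terms extend the tree structure; ordinary terms
        # add their tf-idf features to every parent category's vector.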
for row in from_csv(self.path_categories_by_concept):
ints = [int(field) for field in row]
term_id = ints[0]
term = self.term_by_id[term_id]
if self.term_is_category[term_id] and \
self.observed_category[term]:
term = self.term_by_id[term_id]
cat_id = self.id_by_concept[term]
assignments = [i for i in ints[1:] if self.observed_category[i]]
self.parents_by_category[cat_id] = assignments
self.has_parents[cat_id] = True
else:
vector = self.tfidf.content(term_id)
assignments = [i for i in ints[1:] if self.observed_category[i]]
self.parents_by_concept[term_id] = assignments
for a_id in assignments:
for w, weight in vector:
if self.tfidf.df[w] < self.min_df:
continue
#print term, term_id, self.concept_by_id[a_id], w, self.vector_by_category[a_id][w], '\t+%f' % weight
self.vector_by_category[a_id][w] += weight
self.contributors_by_category[a_id].update([term_id])
if __name__ == '__main__':
import random
from random import shuffle as randomize
tfidf = TfIdfMatrix()
tfidf.load_features('bkp.big.out/vector.term.csv')
tfidf.load_distribution('bkp.big.out/vector.index.csv')
# tfidf.load_features('vector.term.csv')
# tfidf.load_distribution('vector.index.csv')
ctree = CategoryTree(
'bkp.big.out/category.index.csv',
'bkp.big.out/term.csv',
'bkp.big.out/category.csv',
# 'category.index.csv',
# 'term.csv',
# 'category.csv',
tfidf,
max_depth=1
)
ctree.load()
ctree.build()
ctree.dump()<|fim▁end|>
| |
<|file_name|>platform.cpp<|end_file_name|><|fim▁begin|>#include "../platform.h"
<|fim▁hole|>namespace Platform {
void sleep(long seconds, long milliseconds) {} //stub
int createWindow(char *title, int width, int height, int bpp, bool fullscreen) { return 0; }
int closeWindow() { return 0; }
void swapBuffers() {}
void pump() { //do our work here...
}
void grabMouse() {}
void ungrabMouse() {}
void toggleGrabMouse() {}
bool getActive() { return true; }
void setActive( bool active ) {}
void* getWindowHandle() { return (void*)0; }
}<|fim▁end|>
| |
<|file_name|>bn.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'indent', 'bn', {
indent: 'ইনডেন্ট বাড়াও',
<|fim▁hole|><|fim▁end|>
|
outdent: 'ইনডেন্ট কমাও'
} );
|
<|file_name|>YourlsServiceImpl.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Erwin Müller <[email protected]>
*
* This file is part of sscontrol-httpd-yourls.
*
* sscontrol-httpd-yourls is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* sscontrol-httpd-yourls is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
* for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with sscontrol-httpd-yourls. If not, see <http://www.gnu.org/licenses/>.
*/
package com.anrisoftware.sscontrol.httpd.yourls;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.ACCESS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.API_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.BACKUP_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.CONVERT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DATABASE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DEBUG_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DRIVER_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.GMT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.HOST_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.LANGUAGE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.MODE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.OFFSET_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.OVERRIDE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PASSWORD_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PORT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PREFIX_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.RESERVED_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.SITE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.STATS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.TARGET_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.UNIQUE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.URLS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.USER_KEY;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import com.anrisoftware.sscontrol.core.api.ServiceException;
import com.anrisoftware.sscontrol.core.groovy.statementsmap.StatementsException;
import com.anrisoftware.sscontrol.core.groovy.statementsmap.StatementsMap;
import com.anrisoftware.sscontrol.core.groovy.statementstable.StatementsTable;
import com.anrisoftware.sscontrol.core.groovy.statementstable.StatementsTableFactory;
import com.anrisoftware.sscontrol.core.overridemode.OverrideMode;
import com.anrisoftware.sscontrol.core.yesno.YesNoFlag;
import com.anrisoftware.sscontrol.httpd.domain.Domain;
import com.anrisoftware.sscontrol.httpd.webserviceargs.DefaultWebService;
import com.anrisoftware.sscontrol.httpd.webserviceargs.DefaultWebServiceFactory;
import com.google.inject.assistedinject.Assisted;
/**
* <i>Yourls</i> service.
*
 * @see <a href="http://yourls.org/">http://yourls.org/</a>
*
* @author Erwin Mueller, [email protected]
* @since 1.0
*/
class YourlsServiceImpl implements YourlsService {
/**
* The <i>Yourls</i> service name.
*/
public static final String SERVICE_NAME = "yourls";
private final DefaultWebService service;
private final StatementsMap statementsMap;
private StatementsTable statementsTable;
/**
* @see YourlsServiceFactory#create(Map, Domain)
*/
@Inject
YourlsServiceImpl(DefaultWebServiceFactory webServiceFactory,
@Assisted Map<String, Object> args, @Assisted Domain domain) {
this.service = webServiceFactory.create(SERVICE_NAME, args, domain);
this.statementsMap = service.getStatementsMap();
setupStatements(statementsMap, args);
}
private void setupStatements(StatementsMap map, Map<String, Object> args) {
map.addAllowed(DATABASE_KEY, OVERRIDE_KEY, BACKUP_KEY, ACCESS_KEY,
USER_KEY, GMT_KEY, UNIQUE_KEY, CONVERT_KEY, RESERVED_KEY,
SITE_KEY, LANGUAGE_KEY);
map.setAllowValue(true, DATABASE_KEY, ACCESS_KEY, RESERVED_KEY,
SITE_KEY, LANGUAGE_KEY);
map.addAllowedKeys(DATABASE_KEY, USER_KEY, PASSWORD_KEY, HOST_KEY,
PORT_KEY, PREFIX_KEY, DRIVER_KEY);
map.addAllowedKeys(OVERRIDE_KEY, MODE_KEY);
map.addAllowedKeys(BACKUP_KEY, TARGET_KEY);
map.addAllowedKeys(ACCESS_KEY, STATS_KEY, API_KEY);
map.addAllowedKeys(GMT_KEY, OFFSET_KEY);
map.addAllowedKeys(UNIQUE_KEY, URLS_KEY);
map.addAllowedKeys(CONVERT_KEY, MODE_KEY);
}
@Inject
public final void setStatementsTable(StatementsTableFactory factory) {
StatementsTable table = factory.create(this, SERVICE_NAME);
table.addAllowed(DEBUG_KEY, USER_KEY);
table.setAllowArbitraryKeys(true, DEBUG_KEY);
table.addAllowedKeys(USER_KEY, PASSWORD_KEY);
this.statementsTable = table;
}
@Override
public Domain getDomain() {
return service.getDomain();
}
@Override
public String getName() {
return SERVICE_NAME;
}
public void setAlias(String alias) throws ServiceException {
service.setAlias(alias);
}
@Override
public String getAlias() {
return service.getAlias();
}
public void setId(String id) throws ServiceException {
service.setId(id);
}
@Override
public String getId() {
return service.getId();
}
public void setRef(String ref) throws ServiceException {
service.setRef(ref);
}
@Override
public String getRef() {
return service.getRef();
}
public void setRefDomain(String ref) throws ServiceException {
service.setRefDomain(ref);
}
@Override
public String getRefDomain() {
return service.getRefDomain();
}
public void setPrefix(String prefix) throws ServiceException {
service.setPrefix(prefix);<|fim▁hole|> return service.getPrefix();
}
@Override
public Map<String, Object> debugLogging(String key) {
return statementsTable.tableKeys(DEBUG_KEY, key);
}
@Override
public Map<String, Object> getDatabase() {
@SuppressWarnings("serial")
Map<String, Object> map = new HashMap<String, Object>() {
@Override
public Object put(String key, Object value) {
if (value != null) {
return super.put(key, value);
} else {
return null;
}
}
};
StatementsMap m = statementsMap;
map.put(DATABASE_KEY.toString(), m.value(DATABASE_KEY));
map.put(USER_KEY.toString(), m.mapValue(DATABASE_KEY, USER_KEY));
map.put(PASSWORD_KEY.toString(), m.mapValue(DATABASE_KEY, PASSWORD_KEY));
map.put(HOST_KEY.toString(), m.mapValue(DATABASE_KEY, HOST_KEY));
map.put(PORT_KEY.toString(), m.mapValue(DATABASE_KEY, PORT_KEY));
map.put(PREFIX_KEY.toString(), m.mapValue(DATABASE_KEY, PREFIX_KEY));
map.put(DRIVER_KEY.toString(), m.mapValue(DATABASE_KEY, DRIVER_KEY));
return map.size() == 0 ? null : map;
}
@Override
public OverrideMode getOverrideMode() {
return statementsMap.mapValue(OVERRIDE_KEY, MODE_KEY);
}
@Override
public URI getBackupTarget() {
return statementsMap.mapValueAsURI(BACKUP_KEY, TARGET_KEY);
}
@Override
public Access getSiteAccess() {
return statementsMap.value(ACCESS_KEY);
}
@Override
public Access getStatsAccess() {
return statementsMap.mapValue(ACCESS_KEY, STATS_KEY);
}
@Override
public Access getApiAccess() {
return statementsMap.mapValue(ACCESS_KEY, API_KEY);
}
@Override
public Integer getGmtOffset() {
return statementsMap.mapValue(GMT_KEY, OFFSET_KEY);
}
@Override
public Boolean getUniqueUrls() {
Object value = statementsMap.mapValue(UNIQUE_KEY, URLS_KEY);
if (value instanceof YesNoFlag) {
return ((YesNoFlag) value).asBoolean();
} else {
return (Boolean) value;
}
}
@Override
public Convert getUrlConvertMode() {
return statementsMap.mapValue(CONVERT_KEY, MODE_KEY);
}
@Override
public List<String> getReserved() {
return statementsMap.valueAsStringList(RESERVED_KEY);
}
@Override
public String getLanguage() {
return statementsMap.value(LANGUAGE_KEY);
}
@Override
public Map<String, String> getUsers() {
return statementsTable.tableKeys(USER_KEY, PASSWORD_KEY);
}
@Override
public String getSite() {
return statementsMap.value(SITE_KEY);
}
public Object methodMissing(String name, Object args) {
try {
return service.methodMissing(name, args);
} catch (StatementsException e) {
return statementsTable.methodMissing(name, args);
}
}
@Override
public String toString() {
return service.toString();
}
}<|fim▁end|>
|
}
@Override
public String getPrefix() {
|
<|file_name|>viewport_rect_item.py<|end_file_name|><|fim▁begin|># This code is licensed under the MIT License (see LICENSE file for details)
from PyQt5 import Qt
class ViewportRectItem(Qt.QGraphicsObject):
size_changed = Qt.pyqtSignal(Qt.QSizeF)
def __init__(self):
super().__init__()
self.setFlags(
Qt.QGraphicsItem.ItemIgnoresTransformations |
Qt.QGraphicsItem.ItemSendsGeometryChanges |
Qt.QGraphicsItem.ItemSendsScenePositionChanges |
Qt.QGraphicsItem.ItemHasNoContents
)
self._size = Qt.QSizeF()
        # Children are generally overlay items that should appear over anything else rather than z-fight with other items
self.setZValue(10)
@property
def size(self):
return self._size<|fim▁hole|>
@size.setter
def size(self, v):
if not isinstance(v, Qt.QSizeF):
v = Qt.QSizeF(v)
if self._size != v:
self.prepareGeometryChange()
self._size = v
self.size_changed.emit(v)
def boundingRect(self):
return Qt.QRectF(Qt.QPointF(), self._size)<|fim▁end|>
| |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.<|fim▁hole|>// Code generated by cloud.google.com/go/internal/gapicgen/gensnippets. DO NOT EDIT.
// [START osconfig_v1_generated_OsConfigZonalService_UpdateOSPolicyAssignment_sync]
package main
import (
"context"
osconfig "cloud.google.com/go/osconfig/apiv1"
osconfigpb "google.golang.org/genproto/googleapis/cloud/osconfig/v1"
)
func main() {
ctx := context.Background()
c, err := osconfig.NewOsConfigZonalClient(ctx)
if err != nil {
// TODO: Handle error.
}
defer c.Close()
req := &osconfigpb.UpdateOSPolicyAssignmentRequest{
// TODO: Fill request struct fields.
// See https://pkg.go.dev/google.golang.org/genproto/googleapis/cloud/osconfig/v1#UpdateOSPolicyAssignmentRequest.
}
op, err := c.UpdateOSPolicyAssignment(ctx, req)
if err != nil {
// TODO: Handle error.
}
resp, err := op.Wait(ctx)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
// [END osconfig_v1_generated_OsConfigZonalService_UpdateOSPolicyAssignment_sync]<|fim▁end|>
| |
<|file_name|>mac_notes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Tests for mac notes plugin."""
from __future__ import unicode_literals
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import mac_notes
from tests.parsers.sqlite_plugins import test_lib
class MacNotesTest(test_lib.SQLitePluginTestCase):
"""Tests for mac notes database plugin."""
def testProcess(self):
"""Test the Process function on a Mac Notes file."""
plugin_object = mac_notes.MacNotesPlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(
['NotesV7.storedata'], plugin_object)
self.assertEqual(storage_writer.number_of_events, 6)
self.assertEqual(storage_writer.number_of_warnings, 0)
events = list(storage_writer.GetEvents())
# Check the first note.
event = events[0]
self.CheckTimestamp(event.timestamp, '2014-02-11 02:38:27.097813')
self.assertEqual(
event.timestamp_desc, definitions.TIME_DESCRIPTION_CREATION)<|fim▁hole|> self.assertEqual(event_data.title, expected_title)
expected_text = (
'building 4th brandy gibs microsoft office body soul and peace '
'example.com 3015555555: plumbing and heating claim#123456 Small '
'business ')
self.assertEqual(event_data.text, expected_text)
expected_short_message = 'title:{0:s}'.format(expected_title)
expected_message = 'title:{0:s} note_text:{1:s}'.format(
expected_title, expected_text)
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
event_data = self._GetEventDataOfEvent(storage_writer, event)
expected_title = 'building 4th brandy gibs'
|
<|file_name|>md5.rs<|end_file_name|><|fim▁begin|>// http://rosettacode.org/wiki/MD5
extern crate crypto;
use crypto::digest::Digest;
use crypto::md5::Md5;
fn main() {
let mut sh = Md5::new();
sh.input_str("The quick brown fox jumped over the lazy dog's back");
println!("{}", sh.result_str());<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>test_logarithm_1p.py<|end_file_name|><|fim▁begin|>import unittest
import numpy
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
@testing.parameterize(*testing.product({
'shape': [(), (3, 2)],
}))
class Log1pFunctionTest(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(.5, 1, self.shape).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
self.ggx = \
numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
y = F.log1p(x)
testing.assert_allclose(
numpy.log1p(self.x), y.data, atol=1e-7, rtol=1e-7)
def test_log1p_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_log1p_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
gradient_check.check_backward(F.log1p, x_data, y_grad, dtype='d')
def test_log1p_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_log1p_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
def check_double_backward(self, x_data, y_grad, x_grad_grad):
gradient_check.check_double_backward(
F.log1p, x_data, y_grad, x_grad_grad, dtype=numpy.float64)
def test_log1p_double_backward_cpu(self):
self.check_double_backward(self.x, self.gy, self.ggx)
@attr.gpu
def test_log1p_double_backward_gpu(self):<|fim▁hole|> self.check_double_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.gy),
cuda.to_gpu(self.ggx))
def test_log1p(self):
self.assertEqual(
chainer.functions.math.logarithm_1p.Log1p().label, 'log1p')
testing.run_module(__name__, __file__)<|fim▁end|>
| |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>(() => {
'use strict';
angular.module('RestTestApp')
.config(($urlRouterProvider, $locationProvider) => {
$locationProvider.html5Mode(true);
})
.config(function(RestTestStateConfigProvider) {
RestTestStateConfigProvider.initialize();<|fim▁hole|>})();<|fim▁end|>
|
});
|
<|file_name|>material-modal.js<|end_file_name|><|fim▁begin|>define(function(require, exports, module) {
var Notify = require('common/bootstrap-notify');
var FileChooser = require('../widget/file/file-chooser3');
exports.run = function() {
var $form = $("#course-material-form");
var materialChooser = new FileChooser({
element: '#material-file-chooser'
});
materialChooser.on('change', function(item) {
$form.find('[name="fileId"]').val(item.id);
});
$form.on('click', '.delete-btn', function(){
var $btn = $(this);
if (!confirm(Translator.trans('真的要删除该资料吗?'))) {
return ;
}
$.post($btn.data('url'), function(){
$btn.parents('.list-group-item').remove();
Notify.success(Translator.trans('资料已删除'));
});
});
$form.on('submit', function(){
if ($form.find('[name="fileId"]').val().length == 0) {
Notify.danger(Translator.trans('请先上传文件或添加资料网络链接!'));
return false;
}
$.post($form.attr('action'), $form.serialize(), function(html){
Notify.success(Translator.trans('资料添加成功!'));
$("#material-list").append(html).show();
$form.find('.text-warning').hide();
$form.find('[name="fileId"]').val('');
$form.find('[name="link"]').val('');
$form.find('[name="description"]').val('');
materialChooser.open();
}).fail(function(){
            Notify.danger(Translator.trans('资料添加失败,请重试!'));
});
return false;
});
<|fim▁hole|> };
});<|fim▁end|>
|
$('.modal').on('hidden.bs.modal', function(){
window.location.reload();
});
|
<|file_name|>gen_dxgi_format_table.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2016 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# gen_dxgi_format_table.py:
# Code generation for DXGI format map.
from datetime import date
import sys
sys.path.append('../..')
import angle_format
template_cpp = """// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using data from {data_source_name}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// DXGI format info:
// Determining metadata about a DXGI format.
#include "libANGLE/renderer/Format.h"
using namespace angle;
namespace rx
{{
namespace d3d11
{{
GLenum GetComponentType(DXGI_FORMAT dxgiFormat)
{{
switch (dxgiFormat)
{{
{component_type_cases} default:
break;
}}
UNREACHABLE();
return GL_NONE;
}}
}} // namespace d3d11
namespace d3d11_angle
{{
const Format &GetFormat(DXGI_FORMAT dxgiFormat)
{{
switch (dxgiFormat)
{{<|fim▁hole|>{format_cases} default:
break;
}}
UNREACHABLE();
return Format::Get(Format::ID::NONE);
}}
}} // namespace d3d11_angle
}} // namespace rx
"""
template_format_case = """ case DXGI_FORMAT_{dxgi_format}:
return {result};
"""
template_undefined_case = """ case DXGI_FORMAT_{dxgi_format}:
break;
"""
def format_case(dxgi_format, result):
return template_format_case.format(
dxgi_format = dxgi_format,
result = result)
def undefined_case(dxgi_format):
return template_undefined_case.format(dxgi_format = dxgi_format)
component_cases = ""
format_cases = ""
input_data = 'dxgi_format_data.json'
dxgi_map = angle_format.load_json(input_data)
types = {
'SNORM': 'GL_SIGNED_NORMALIZED',
'UNORM': 'GL_UNSIGNED_NORMALIZED',
'SINT': 'GL_INT',
'UINT': 'GL_UNSIGNED_INT',
'FLOAT': 'GL_FLOAT',
'SHAREDEXP': 'GL_FLOAT'
}
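# Worked example of the classification below (using only names from the maps
# above): DXGI_FORMAT_R8G8B8A8_UNORM contains exactly one type suffix
# ('UNORM'), so GetComponentType maps it to GL_UNSIGNED_NORMALIZED; a format
# name with zero or several matching suffixes falls through to the undefined
# case and yields GL_NONE.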
angle_to_gl = angle_format.load_inverse_table('../../angle_format_map.json')
all_angle = angle_to_gl.keys()
for dxgi_format, angle_format in sorted(dxgi_map.iteritems()):
found = [ctype in dxgi_format for ctype in types.keys()]
count = reduce((lambda a, b: int(a) + int(b)), found)
component_type = 'GL_NONE'
if count == 1:
gltype = next(gltype for ctype, gltype in types.iteritems() if ctype in dxgi_format)
component_cases += format_case(dxgi_format, gltype)
else:
component_cases += undefined_case(dxgi_format)
if angle_format == "":
angle_format = dxgi_format
if angle_format in all_angle:
angle_format = "Format::Get(Format::ID::" + angle_format + ")"
format_cases += format_case(dxgi_format, angle_format)
else:
format_cases += undefined_case(dxgi_format)
with open('dxgi_format_map_autogen.cpp', 'wt') as out_file:
output_cpp = template_cpp.format(
script_name = sys.argv[0],
data_source_name = input_data,
copyright_year = date.today().year,
component_type_cases = component_cases,
format_cases = format_cases)
out_file.write(output_cpp)
out_file.close()<|fim▁end|>
| |
<|file_name|>_virtual_hub_route_table_v2_s_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubRouteTableV2SOperations(object):
"""VirtualHubRouteTableV2SOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
route_table_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualHubRouteTableV2"
"""Retrieves the details of a VirtualHubRouteTableV2.
:param resource_group_name: The resource group name of the VirtualHubRouteTableV2.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param route_table_name: The name of the VirtualHubRouteTableV2.
:type route_table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualHubRouteTableV2, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_11_01.models.VirtualHubRouteTableV2
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHubRouteTableV2"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
route_table_name, # type: str
virtual_hub_route_table_v2_parameters, # type: "_models.VirtualHubRouteTableV2"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualHubRouteTableV2"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHubRouteTableV2"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(virtual_hub_route_table_v2_parameters, 'VirtualHubRouteTableV2')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
route_table_name, # type: str
virtual_hub_route_table_v2_parameters, # type: "_models.VirtualHubRouteTableV2"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualHubRouteTableV2"]
"""Creates a VirtualHubRouteTableV2 resource if it doesn't exist else updates the existing
VirtualHubRouteTableV2.
:param resource_group_name: The resource group name of the VirtualHub.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param route_table_name: The name of the VirtualHubRouteTableV2.
:type route_table_name: str
:param virtual_hub_route_table_v2_parameters: Parameters supplied to create or update
VirtualHubRouteTableV2.
:type virtual_hub_route_table_v2_parameters: ~azure.mgmt.network.v2020_11_01.models.VirtualHubRouteTableV2
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualHubRouteTableV2 or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_11_01.models.VirtualHubRouteTableV2]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHubRouteTableV2"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_hub_name=virtual_hub_name,
route_table_name=route_table_name,
virtual_hub_route_table_v2_parameters=virtual_hub_route_table_v2_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
route_table_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
route_table_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes a VirtualHubRouteTableV2.
:param resource_group_name: The resource group name of the VirtualHubRouteTableV2.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param route_table_name: The name of the VirtualHubRouteTableV2.
:type route_table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_hub_name=virtual_hub_name,
route_table_name=route_table_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ListVirtualHubRouteTableV2SResult"]
"""Retrieves the details of all VirtualHubRouteTableV2s.
:param resource_group_name: The resource group name of the VirtualHub.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVirtualHubRouteTableV2SResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.ListVirtualHubRouteTableV2SResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVirtualHubRouteTableV2SResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ListVirtualHubRouteTableV2SResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
<|fim▁hole|> return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables'} # type: ignore<|fim▁end|>
|
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
|
<|file_name|>testcase_empty.rs<|end_file_name|><|fim▁begin|>// https://rustbyexample.com/generics/bounds/testcase_empty.html
// http://rust-lang-ja.org/rust-by-example/generics/bounds/testcase_empty.html
struct Cardinal;
struct BlueJay;
struct Turkey;
trait Red {}
trait Blue {}
impl Red for Cardinal {}
impl Blue for BlueJay {}
// These functions are only valid for types which implement these
// traits. The fact that the traits are empty is irrelevant.
fn red<T: Red>(_: &T) -> &'static str { "red" }
fn blue<T: Blue>(_: &T) -> &'static str { "blue" }
fn main() {<|fim▁hole|> // `red()` won't work on a blue jay nor vice versa
// because of the bounds.
println!("A cardinal is {}", red(&cardinal));
println!("A blue jay is {}", blue(&blue_jay));
//println!("A turkey is {}", red(&_turkey));
// ^ TODO: Try uncommenting this line.
// error[E0277]: the trait bound `Turkey: Red` is not satisfied
}<|fim▁end|>
|
let cardinal = Cardinal;
let blue_jay = BlueJay;
let _turkey = Turkey;
|
<|file_name|>test_event.py<|end_file_name|><|fim▁begin|>"""Test event helpers."""
# pylint: disable=protected-access,too-many-public-methods
# pylint: disable=too-few-public-methods
import unittest
from datetime import datetime, timedelta
from astral import Astral
from homeassistant.bootstrap import setup_component
import homeassistant.core as ha
from homeassistant.const import MATCH_ALL
from homeassistant.helpers.event import (
track_point_in_utc_time,
track_point_in_time,
track_utc_time_change,
track_time_change,
track_state_change,
track_sunrise,
track_sunset,
)
from homeassistant.components import sun
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
class TestEventHelpers(unittest.TestCase):
"""Test the Home Assistant event helpers."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_track_point_in_time(self):
"""Test track point in time."""
before_birthday = datetime(1985, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
birthday_paulus = datetime(1986, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
after_birthday = datetime(1987, 7, 9, 12, 0, 0, tzinfo=dt_util.UTC)
runs = []
track_point_in_utc_time(
self.hass, lambda x: runs.append(1), birthday_paulus)
self._send_time_changed(before_birthday)
self.hass.block_till_done()
self.assertEqual(0, len(runs))
self._send_time_changed(birthday_paulus)
self.hass.block_till_done()
self.assertEqual(1, len(runs))
# A point in time tracker will only fire once, this should do nothing
self._send_time_changed(birthday_paulus)
self.hass.block_till_done()
self.assertEqual(1, len(runs))
track_point_in_time(
self.hass, lambda x: runs.append(1), birthday_paulus)
self._send_time_changed(after_birthday)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
unsub = track_point_in_time(
self.hass, lambda x: runs.append(1), birthday_paulus)
unsub()
self._send_time_changed(after_birthday)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
def test_track_time_change(self):
"""Test tracking time change."""
wildcard_runs = []
specific_runs = []
unsub = track_time_change(self.hass, lambda x: wildcard_runs.append(1))
unsub_utc = track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), second=[0, 30])
self._send_time_changed(datetime(2014, 5, 24, 12, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(1, len(wildcard_runs))
self._send_time_changed(datetime(2014, 5, 24, 12, 0, 15))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(2, len(wildcard_runs))
self._send_time_changed(datetime(2014, 5, 24, 12, 0, 30))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
self.assertEqual(3, len(wildcard_runs))
unsub()
unsub_utc()
self._send_time_changed(datetime(2014, 5, 24, 12, 0, 30))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
self.assertEqual(3, len(wildcard_runs))
def test_track_state_change(self):
"""Test track_state_change."""
# 2 lists to track how often our callbacks get called
specific_runs = []
wildcard_runs = []
wildercard_runs = []
track_state_change(
self.hass, 'light.Bowl', lambda a, b, c: specific_runs.append(1),
'on', 'off')
track_state_change(
self.hass, 'light.Bowl',
lambda _, old_s, new_s: wildcard_runs.append((old_s, new_s)))
track_state_change(
self.hass, MATCH_ALL,
lambda _, old_s, new_s: wildercard_runs.append((old_s, new_s)))
# Adding state to state machine
self.hass.states.set("light.Bowl", "on")
self.hass.block_till_done()
self.assertEqual(0, len(specific_runs))
self.assertEqual(1, len(wildcard_runs))
self.assertEqual(1, len(wildercard_runs))
self.assertIsNone(wildcard_runs[-1][0])
self.assertIsNotNone(wildcard_runs[-1][1])
# Set same state should not trigger a state change/listener
self.hass.states.set('light.Bowl', 'on')
self.hass.block_till_done()
self.assertEqual(0, len(specific_runs))
self.assertEqual(1, len(wildcard_runs))
self.assertEqual(1, len(wildercard_runs))
        # State change on -> off
self.hass.states.set('light.Bowl', 'off')
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(2, len(wildcard_runs))
self.assertEqual(2, len(wildercard_runs))
# State change off -> off
self.hass.states.set('light.Bowl', 'off', {"some_attr": 1})
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(3, len(wildcard_runs))
self.assertEqual(3, len(wildercard_runs))
# State change off -> on
self.hass.states.set('light.Bowl', 'on')
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(4, len(wildcard_runs))
self.assertEqual(4, len(wildercard_runs))
self.hass.states.remove('light.bowl')
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(5, len(wildcard_runs))
self.assertEqual(5, len(wildercard_runs))
self.assertIsNotNone(wildcard_runs[-1][0])
self.assertIsNone(wildcard_runs[-1][1])
self.assertIsNotNone(wildercard_runs[-1][0])
self.assertIsNone(wildercard_runs[-1][1])
# Set state for different entity id
self.hass.states.set('switch.kitchen', 'on')
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self.assertEqual(5, len(wildcard_runs))
self.assertEqual(6, len(wildercard_runs))
def test_track_sunrise(self):
"""Test track the sunrise."""
latitude = 32.87336
longitude = 117.22743
# Setup sun component
self.hass.config.latitude = latitude
self.hass.config.longitude = longitude
setup_component(self.hass, sun.DOMAIN, {
sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
# Get next sunrise/sunset
astral = Astral()
utc_now = dt_util.utcnow()
mod = -1
while True:
next_rising = (astral.sunrise_utc(utc_now +
timedelta(days=mod), latitude, longitude))
if next_rising > utc_now:
break
mod += 1
# Track sunrise
runs = []
unsub = track_sunrise(self.hass, lambda: runs.append(1))
offset_runs = []
offset = timedelta(minutes=30)
unsub2 = track_sunrise(self.hass, lambda: offset_runs.append(1),
offset)
# run tests
self._send_time_changed(next_rising - offset)
self.hass.block_till_done()
self.assertEqual(0, len(runs))
self.assertEqual(0, len(offset_runs))
self._send_time_changed(next_rising)
self.hass.block_till_done()
self.assertEqual(1, len(runs))
self.assertEqual(0, len(offset_runs))
self._send_time_changed(next_rising + offset)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
self.assertEqual(1, len(offset_runs))
unsub()
unsub2()
self._send_time_changed(next_rising + offset)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
self.assertEqual(1, len(offset_runs))
def test_track_sunset(self):
"""Test track the sunset."""
latitude = 32.87336
longitude = 117.22743
# Setup sun component
self.hass.config.latitude = latitude
self.hass.config.longitude = longitude
setup_component(self.hass, sun.DOMAIN, {
sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
# Get next sunrise/sunset
astral = Astral()
utc_now = dt_util.utcnow()
mod = -1
while True:
next_setting = (astral.sunset_utc(utc_now +
timedelta(days=mod), latitude, longitude))
if next_setting > utc_now:
break
mod += 1
# Track sunset
runs = []
unsub = track_sunset(self.hass, lambda: runs.append(1))
offset_runs = []
offset = timedelta(minutes=30)
unsub2 = track_sunset(self.hass, lambda: offset_runs.append(1), offset)
# Run tests
self._send_time_changed(next_setting - offset)
self.hass.block_till_done()
self.assertEqual(0, len(runs))
self.assertEqual(0, len(offset_runs))
self._send_time_changed(next_setting)
self.hass.block_till_done()
self.assertEqual(1, len(runs))
self.assertEqual(0, len(offset_runs))
self._send_time_changed(next_setting + offset)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
self.assertEqual(1, len(offset_runs))
unsub()
unsub2()
self._send_time_changed(next_setting + offset)
self.hass.block_till_done()
self.assertEqual(2, len(runs))
self.assertEqual(1, len(offset_runs))
def _send_time_changed(self, now):
"""Send a time changed event."""
self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: now})
def test_periodic_task_minute(self):
"""Test periodic tasks per minute."""
specific_runs = []
unsub = track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), minute='/5')
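        # '/5' is the periodic-task syntax: the listener fires whenever
        # minute % 5 == 0, which the assertions below demonstrate.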
self._send_time_changed(datetime(2014, 5, 24, 12, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 24, 12, 3, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 24, 12, 5, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
unsub()
self._send_time_changed(datetime(2014, 5, 24, 12, 5, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
def test_periodic_task_hour(self):
"""Test periodic tasks per hour."""
specific_runs = []
unsub = track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), hour='/2')
self._send_time_changed(datetime(2014, 5, 24, 22, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 24, 23, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 24, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 25, 1, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 25, 2, 0, 0))
self.hass.block_till_done()
self.assertEqual(3, len(specific_runs))
unsub()
self._send_time_changed(datetime(2014, 5, 25, 2, 0, 0))
self.hass.block_till_done()
self.assertEqual(3, len(specific_runs))
def test_periodic_task_day(self):
"""Test periodic tasks per day."""
specific_runs = []
unsub = track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), day='/2')<|fim▁hole|>
self._send_time_changed(datetime(2014, 5, 3, 12, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2014, 5, 4, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
unsub()
self._send_time_changed(datetime(2014, 5, 4, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
def test_periodic_task_year(self):
"""Test periodic tasks per year."""
specific_runs = []
unsub = track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), year='/2')
self._send_time_changed(datetime(2014, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2015, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
self._send_time_changed(datetime(2016, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
unsub()
self._send_time_changed(datetime(2016, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(2, len(specific_runs))
def test_periodic_task_wrong_input(self):
"""Test periodic tasks with wrong input."""
specific_runs = []
track_utc_time_change(
self.hass, lambda x: specific_runs.append(1), year='/two')
self._send_time_changed(datetime(2014, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(0, len(specific_runs))<|fim▁end|>
|
self._send_time_changed(datetime(2014, 5, 2, 0, 0, 0))
self.hass.block_till_done()
self.assertEqual(1, len(specific_runs))
|
<|file_name|>my.ts<|end_file_name|><|fim▁begin|>/* Burmese locals for flatpickr */
import { CustomLocale } from "../types/locale";
import { FlatpickrFn } from "../types/instance";
const fp =
typeof window !== "undefined" && window.flatpickr !== undefined
? window.flatpickr
: ({
l10ns: {},
} as FlatpickrFn);
export const Burmese: CustomLocale = {
weekdays: {
shorthand: ["နွေ", "လာ", "ဂါ", "ဟူး", "ကြာ", "သော", "နေ"],
longhand: [
"တနင်္ဂနွေ",
"တနင်္လာ",
"အင်္ဂါ",
"ဗုဒ္ဓဟူး",
"ကြာသပတေး",
"သောကြာ",
"စနေ",<|fim▁hole|> shorthand: [
"ဇန်",
"ဖေ",
"မတ်",
"ပြီ",
"မေ",
"ဇွန်",
"လိုင်",
"သြ",
"စက်",
"အောက်",
"နို",
"ဒီ",
],
longhand: [
"ဇန်နဝါရီ",
"ဖေဖော်ဝါရီ",
"မတ်",
"ဧပြီ",
"မေ",
"ဇွန်",
"ဇူလိုင်",
"သြဂုတ်",
"စက်တင်ဘာ",
"အောက်တိုဘာ",
"နိုဝင်ဘာ",
"ဒီဇင်ဘာ",
],
},
firstDayOfWeek: 1,
ordinal: () => {
return "";
},
time_24hr: true,
};
fp.l10ns.my = Burmese;
export default fp.l10ns;<|fim▁end|>
|
],
},
months: {
|
<|file_name|>segmented_end_impl.hpp<|end_file_name|><|fim▁begin|>/*=============================================================================
Copyright (c) 2011 Eric Niebler
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(BOOST_FUSION_SEGMENTED_END_IMPL_HPP_INCLUDED)
#define BOOST_FUSION_SEGMENTED_END_IMPL_HPP_INCLUDED
#include <boost/fusion/support/config.hpp>
#include <boost/mpl/assert.hpp>
#include <boost/type_traits/add_const.hpp>
#include <boost/type_traits/remove_reference.hpp>
#include <boost/fusion/sequence/intrinsic_fwd.hpp>
#include <boost/fusion/container/list/cons_fwd.hpp>
#include <boost/fusion/support/is_segmented.hpp>
namespace boost { namespace fusion
{
template <typename First, typename Last>
struct iterator_range;
}}
namespace boost { namespace fusion { namespace detail
{
//auto segmented_end_impl( seq, stack )
//{
// assert(is_segmented(seq));
// auto it = end(segments(seq));
// return cons(iterator_range(it, it), stack);
//}
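    // The metafunction below is the compile-time analogue of this sketch:
    // end_type is the end iterator of the sequence's segment range, pair_type
    // is the empty iterator_range(it, it), and call() conses that pair onto
    // the given stack.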
template <typename Sequence, typename Stack>
struct segmented_end_impl
{
BOOST_MPL_ASSERT((traits::is_segmented<Sequence>));
typedef
typename result_of::end<
typename remove_reference<
typename add_const<
typename result_of::segments<Sequence>::type
>::type
>::type
>::type
end_type;
<|fim▁hole|> typedef cons<pair_type, Stack> type;
BOOST_CONSTEXPR BOOST_FUSION_GPU_ENABLED
static pair_type make_pair(end_type end)
{
return pair_type(end, end);
}
BOOST_CONSTEXPR BOOST_FUSION_GPU_ENABLED
static type call(Sequence & seq, Stack stack)
{
return type(
make_pair(fusion::end(fusion::segments(seq))),
stack);
}
};
}}}
#endif<|fim▁end|>
|
typedef iterator_range<end_type, end_type> pair_type;
|
<|file_name|>border_none.rs<|end_file_name|><|fim▁begin|>//! Bayer reader without any additional border logic.
use std::io::Read;
use ::BayerResult;
use bayer::*;
pub struct BorderNone8;
pub struct BorderNone16BE;
pub struct BorderNone16LE;
impl BorderNone8 {
pub fn new() -> Self {
BorderNone8
}
}
impl BayerRead8 for BorderNone8 {
fn read_line(&self, r: &mut Read, dst: &mut [u8])
-> BayerResult<()> {
read_exact_u8(r, dst)
}
}
impl BorderNone16BE {
pub fn new() -> Self {
BorderNone16BE
}
}
impl BayerRead16 for BorderNone16BE {
fn read_line(&self, r: &mut Read, dst: &mut [u16])
-> BayerResult<()> {
read_exact_u16be(r, dst)
}
}
impl BorderNone16LE {
pub fn new() -> Self {
BorderNone16LE
}
}
impl BayerRead16 for BorderNone16LE {
fn read_line(&self, r: &mut Read, dst: &mut [u16])
-> BayerResult<()> {
read_exact_u16le(r, dst)<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>OutputValues.java<|end_file_name|><|fim▁begin|>package eu.clarin.weblicht.bindings.cmd.ws;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
/**
*
* @author akislev
*/
@XmlAccessorType(value = XmlAccessType.FIELD)
public class OutputValues extends AbstractValues<OutputValue> {
@XmlElement(name = "ParameterValue", required = true)
private List<OutputValue> values;
OutputValues() {
}
@Override
protected List<OutputValue> getValues() {
return values;
}
public boolean add(OutputValue value) {
if (values == null) {
values = new ArrayList<OutputValue>();
}
return values.add(value);
}
@Override
public OutputValues copy() {
OutputValues v = (OutputValues) super.copy();<|fim▁hole|> }
}<|fim▁end|>
|
v.values = copy(values);
return v;
|
<|file_name|>AlgorithmSortBy.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/sagemaker/model/AlgorithmSortBy.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace SageMaker
{
namespace Model
{
namespace AlgorithmSortByMapper
{
static const int Name_HASH = HashingUtils::HashString("Name");
static const int CreationTime_HASH = HashingUtils::HashString("CreationTime");
AlgorithmSortBy GetAlgorithmSortByForName(const Aws::String& name)
{
int hashCode = HashingUtils::HashString(name.c_str());<|fim▁hole|> if (hashCode == Name_HASH)
{
return AlgorithmSortBy::Name;
}
else if (hashCode == CreationTime_HASH)
{
return AlgorithmSortBy::CreationTime;
}
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
overflowContainer->StoreOverflow(hashCode, name);
return static_cast<AlgorithmSortBy>(hashCode);
}
return AlgorithmSortBy::NOT_SET;
}
Aws::String GetNameForAlgorithmSortBy(AlgorithmSortBy enumValue)
{
switch(enumValue)
{
case AlgorithmSortBy::Name:
return "Name";
case AlgorithmSortBy::CreationTime:
return "CreationTime";
default:
EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
if(overflowContainer)
{
return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
}
return {};
}
}
} // namespace AlgorithmSortByMapper
} // namespace Model
} // namespace SageMaker
} // namespace Aws<|fim▁end|>
| |
<|file_name|>Ping.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Alcatel-Lucent Enterprise
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nw.providers.Provider import Provider
import subprocess
import re
from logging import getLogger
# /!\ Warning: this Provider uses the ping system command and has been designed for Linux (Debian Wheezy).
# List of data the Ping Provider can return (set in Provider's config field 'requested_data').
# If the Provider is configured with another requested_data, an exception is raised.
# If no requested_data is configured for Ping Provider, status is used by default.
_data_available = [
'status', # returns the status code (integer) of ping command execution: 0 = success, other = error occurred
'ping_response', # returns the whole std output of ping command (string)
'pkt_transmitted', # returns the number of packets transmitted (integer) (extracted from stdout of ping command using a regex)
'pkt_received', # returns the number of packets received (integer) (extracted from stdout of ping command using a regex)
'pkt_loss', # returns the number of packets loss (integer) (extracted from stdout of ping command using a regex)
'ping_avg', # returns the average ping time (in ms) (float) (extracted from stdout of ping command using a regex)
'ping_min', # returns the min ping time (in ms) (float) (extracted from stdout of ping command using a regex)
'ping_max' # returns the max ping time (in ms) (float) (extracted from stdout of ping command using a regex)
]
class Ping(Provider):
# Overload _mandatory_parameters and _optional_parameters to list the parameters required by HttpRequest provider
_mandatory_parameters = [
        'ping_addr' # IP address or hostname of the machine to ping
    ]
_optional_parameters = [
'requested_data', # (string) Requested data (default is 'status' which returns the status code of ping command execution). See _data_available for available options.
'count', # (integer) -c option of ping: Stop after sending (and receiving) count ECHO_RESPONSE packets. If not defined, default value is 1.
        'timeout' # (integer) -W option of ping: Time to wait for a response, in seconds. The option only affects the timeout in the absence of any responses; otherwise ping waits for two RTTs.
]
def __init__(self, options):
Provider.__init__(self, options)
# Build ping command
self.ping_cmd = "ping"
# Add -c option
if not self._config.get('count'):
getLogger(__name__).info('Option "count" is not provided to provider Ping, use default value (1)')
self.count = 1
else:
self.count = self._config.get('count')
self.ping_cmd += " -c " + str(self.count)
# Add -W option if requested
if self._config.get('timeout'):
self.ping_cmd += " -W " + str(self._config.get('timeout'))
# Add ping address
self.ping_cmd += " " + self._config.get('ping_addr')
# Load requested data (default is 'status')
self.requested_data = self._config.get('requested_data') or "status"
def process(self):
if (self.requested_data == "status"):
return self._getPingStatus()
else:
# TODO: better management of ping errors
try:
ping_data = self._performPing()
except:
return None # Ping error
# Return the requested data
if (self.requested_data == "ping_response"):
return ping_data.ping_response
if (self.requested_data == "pkt_transmitted"):
return ping_data.pkt_transmitted
if (self.requested_data == "pkt_received"):
return ping_data.pkt_received
elif (self.requested_data == "pkt_loss"):
return ping_data.pkt_loss
if (self.requested_data == "ping_avg"):
return ping_data.ping_avg
if (self.requested_data == "ping_min"):
return ping_data.ping_min
if (self.requested_data == "ping_max"):
return ping_data.ping_max
# Simply execute ping command to retrieve the command's returned code
def _getPingStatus(self):
getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd)
returncode = subprocess.call(self.ping_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
getLogger(__name__).debug('Ping command returned status code: ' + str(returncode))
return returncode
# Execute ping command and returned a PingData object in case of success
def _performPing(self):
getLogger(__name__).debug('Call ping command with the following options: ' + self.ping_cmd)
(output, error) = subprocess.Popen(self.ping_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True).communicate()
if output:
getLogger(__name__).debug('Ping command returned: ' + output)
return PingData(output)
else:
getLogger(__name__).debug('Ping error: ' + error)
raise Exception(error)
    # This function is called by __init__ of the abstract Provider class; it verifies during object initialization that the Provider's configuration is valid.
def _isConfigValid(self):
Provider._isConfigValid(self)
# If requested_data is provided, check if it is managed by Ping provider
if self._config.get('requested_data') and not (self._config.get('requested_data') in _data_available):
            getLogger(__name__).error('Parameter requested_data "' + self._config.get('requested_data') + '" provided to provider Ping is not allowed. Allowed values are: ' + str(_data_available))
return False
return True
class PingData:
"""
Class extracting ping statistics data using regexps on ping command response.
    /!\ Warning: the regexps used to extract information apply to the string returned by the ping command on Linux (tested on Debian Wheezy).
Extracted data are:
- ping_response = the whole output of ping command
- pkt_transmitted = number of packets transmitted (integer)
- pkt_received = number of packets received (integer)
- pkt_loss = packet loss rate in percentage (float)
- ping_min = ping minimum response time in milliseconds (float)
- ping_avg = ping average response time in milliseconds (float)
- ping_max = ping maximum response time in milliseconds (float)
    - ping_stddev = standard deviation of ping response time in milliseconds (float)
"""
def __init__(self, ping_response):
if not ping_response:
raise Exception("Can't create PingData object without ping response data")
self.ping_response = ping_response
# Extract packets data from statistics section of Ping response
result = re.search('(?P<pkt_transmitted>\d)\spackets\stransmitted,\s(?P<pkt_received>\d)?\s?\w*\sreceived,\s(?P<pkt_loss>[\d]*?\.?[\d]*)\%\spacket\sloss', self.ping_response)
self.pkt_transmitted = int(result.group('pkt_transmitted'))
self.pkt_received = int(result.group('pkt_received'))
self.pkt_loss = float(result.group('pkt_loss'))
# Extract time stats from statistics section of Ping response
result = re.search('min\/avg\/max\/\w*\s=\s(?P<ping_min>[\d]*\.[\d]*)\/(?P<ping_avg>[\d]*\.[\d]*)\/(?P<ping_max>[\d]*\.[\d]*)\/(?P<ping_stddev>[\d]*\.[\d]*)', self.ping_response)
self.ping_min = float(result.group('ping_min'))
self.ping_avg = float(result.group('ping_avg'))
self.ping_max = float(result.group('ping_max'))
        self.ping_stddev = float(result.group('ping_stddev'))
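# --- Usage sketch (illustrative, not part of the original module) ---
# Assumes the abstract Provider base class stores the options dict passed to
# __init__ as self._config, which is how it is read throughout this file.
# The address and option values below are hypothetical.
if __name__ == '__main__':
    ping = Ping({'ping_addr': '127.0.0.1', 'count': 3, 'timeout': 2,
                 'requested_data': 'ping_avg'})
    print(ping.process())  # e.g. 0.042 (average RTT in ms), or None on error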
<|file_name|>jquery.usquare.js<|end_file_name|><|fim▁begin|>/*
uSquare 1.0 - Universal Responsive Grid
Copyright (c) 2012 Br0 (shindiristudio.com)
Project site: http://codecanyon.net/
Project demo: http://shindiristudio.com/usquare/
*/
(function($) {
function uSquareItem(element, options) {
this.$item = $(element);
this.$parent = options.$parent;
this.options = options;
this.$trigger = this.$(options.trigger);
this.$close = this.$('.close');
this.$info = this.$(options.moreInfo);
this.$trigger_text = this.$trigger.find('.usquare_square_text_wrapper');
this.$usquare_about = this.$info.find('.usquare_about');
this.$trigger.on('click', $.proxy(this.show, this));
this.$close.on('click', $.proxy(this.close, this));
options.$overlay.on('click', $.proxy(this.close, this));
};
uSquareItem.prototype = {
show: function(e) {
e.preventDefault();
if (!this.$parent.data('in_trans'))
{
            if (!this.$item.data('showed'))
            {
                this.$parent.data('in_trans', 1);
                this.$item.data('showed', 1);
                if (this.options.before_item_opening_callback) this.options.before_item_opening_callback(this.$item);
var item_position = this.$item.position();
var trigger_text_position;
var this_backup=this;
var moving=0;
if (item_position.top>0) // && this.$parent.width()>=640)
{
var parent_position=this.$parent.offset();
var parent_top = parent_position.top;
var non_visible_area=$(window).scrollTop()-parent_top;
var going_to=item_position.top;
if (non_visible_area>0)
{
var non_visible_row=Math.floor(non_visible_area/this.$item.height())+1;
going_to=this.$item.height()*non_visible_row;
going_to=item_position.top-going_to;
}
if (going_to>0) moving=1;
if (moving)
{
this.$item.data('moved', going_to);
var top_string='-'+going_to+'px';
var speed=this.options.opening_speed+(going_to/160)*100;
this.$item.animate({top: top_string}, speed, this.options.easing, function(){
trigger_text_position = this_backup.$item.height() - this_backup.$trigger_text.height();
this_backup.$trigger_text.data('top', trigger_text_position);
this_backup.$trigger_text.css('top', trigger_text_position);
this_backup.$trigger_text.css('bottom', 'auto');
this_backup.$trigger_text.animate({'top': 0}, 'slow');
});
}
}
if (!moving)
{
trigger_text_position = this_backup.$item.height() - this_backup.$trigger_text.height();
this_backup.$trigger_text.data('top', trigger_text_position);
this_backup.$trigger_text.css('top', trigger_text_position);
this_backup.$trigger_text.css('bottom', 'auto');
this_backup.$trigger_text.animate({'top': 0}, 'slow');
}
this.$item.addClass('usquare_block_selected');
var height_backup=this.$info.css('height');
this.$info.css('height', 0);
this.$info.show();
this.$usquare_about.mCustomScrollbar("update");
if (this.options.before_info_rolling_callback) this.options.before_info_rolling_callback(this.$item);
this.$info.animate({height:height_backup}, 'slow', this.options.easing, function()
{
this_backup.$parent.data('in_trans', 0);
if (this_backup.options.after_info_rolling_callback) this_backup.options.after_info_rolling_callback(this_backup.$item);
});
}
}
},
close: function(e) {
e.preventDefault();
if (!this.$parent.data('in_trans'))
{
if (this.$item.data('showed'))
{
var this_backup=this;
this.$info.hide();
var trigger_text_position_top = this_backup.$item.height() - this_backup.$trigger_text.height();
this_backup.$item.removeClass('usquare_block_selected');
if (this.$item.data('moved'))
{
var top_backup=this.$item.data('moved');
var speed=this.options.closing_speed+(top_backup/160)*100;
this.$item.data('moved', 0);
this.$item.animate({'top': 0}, speed, this.options.easing, function()
{
this_backup.$trigger_text.animate({'top': trigger_text_position_top}, 'slow');
});
}
else
{
this_backup.$trigger_text.animate({'top': trigger_text_position_top}, 'slow');
}
this.$item.data('showed', 0);
}
}
},
$: function (selector) {
return this.$item.find(selector);
}
};
function uSquare(element, options) {
var self = this;
this.options = $.extend({}, $.fn.uSquare.defaults, options);
this.$element = $(element);
this.$overlay = this.$('.usquare_module_shade');
this.$items = this.$(this.options.block);
this.$triggers = this.$(this.options.trigger);
this.$closes = this.$('.close');
this.$triggers.on('click', $.proxy(this.overlayShow, this));
this.$closes.on('click', $.proxy(this.overlayHide, this));
this.$overlay.on('click', $.proxy(this.overlayHide, this));
$.each( this.$items, function(i, element) {
new uSquareItem(element, $.extend(self.options, {$overlay: self.$overlay, $parent: self.$element }) );
});
};
uSquare.prototype = {
$: function (selector) {
return this.$element.find(selector);
},
overlayShow: function() {
this.$overlay.fadeIn('slow', function(){
$(this).css({opacity : 0.5});
})
},
overlayHide: function() {
if (!this.$element.data('in_trans'))
{
this.$overlay.fadeOut('slow');
}
}
};
$.fn.uSquare = function ( option ) {
return this.each(function () {
var $this = $(this),
data = $this.data('tooltip'),
options = typeof option == 'object' && option;
data || $this.data('tooltip', (data = new uSquare(this, options)));
(typeof option == 'string') && data[option]();
});
};
$.fn.uSquare.Constructor = uSquare;
$.fn.uSquare.defaults = {
block: '.usquare_block',
trigger: '.usquare_square',
moreInfo: '.usquare_block_extended',
opening_speed: 300,
closing_speed: 500,
easing: 'swing',
before_item_opening_callback: null,
before_info_rolling_callback: null,
after_info_rolling_callback: null
};
})(jQuery);
$(window).load(function() {
$(".usquare_about").mCustomScrollbar();
});
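/* Usage sketch (illustrative): initialise the plugin once the DOM is ready,
   assuming a container marked up with the selectors from $.fn.uSquare.defaults
   (.usquare_block squares and a .usquare_module_shade overlay inside it).
   The '#team' selector and the callback body are hypothetical. */
jQuery(function ($) {
    $('#team').uSquare({
        opening_speed: 200,
        closing_speed: 400,
        before_item_opening_callback: function ($item) {
            // e.g. pause other page animations while an item opens
        }
    });
});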
<|file_name|>webpack.base.config.js<|end_file_name|><|fim▁begin|>const path = require('path');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const resolve = (dir) => {
  return path.join(__dirname, '..', dir)
};

const isProduction = process.env.NODE_ENV === 'production';
const config = {
output: {
path: path.join(__dirname, '..', 'dist', 'assets'),
filename: '[name].[hash].js',
publicPath: '/assets/',
},
resolve: {
extensions: ['.js', '.vue', '.json'],
alias: {
'~': resolve('src'),
'vue$': 'vue/dist/vue.runtime.esm.js',
'vuex$': 'vuex/dist/vuex.esm.js',
},
},
module: {
rules: [
{
test: /\.js$/,
include: [
resolve('src'),
resolve('node_modules/@material'),
],
use: ['babel-loader'],
},
{
test: /\.vue$/,
loader: 'vue-loader',
options: {
extractCSS: isProduction,
postcss: [
require('autoprefixer')({
browsers: ['IE 9', 'IE 10', 'IE 11', 'last 2 versions'],
}),
],
},
},
],
},
plugins: [
new CopyWebpackPlugin([
{
from: resolve('src/static'),
to: resolve('dist'),
},
]),
],
};
module.exports = config;
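// Usage sketch (illustrative): an environment-specific config would typically
// extend this base, e.g. with webpack-merge (an assumed dev dependency, not
// something this file declares). The entry path below is hypothetical.
//
//   const merge = require('webpack-merge');
//   const base = require('./webpack.base.config');
//   module.exports = merge(base, {
//     entry: { app: './src/main.js' },
//     devtool: 'cheap-module-source-map',
//   });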
<|file_name|>_optical_flow.py<|end_file_name|><|fim▁begin|>import itertools
import os
import re
from abc import ABC, abstractmethod
from glob import glob
from pathlib import Path
import numpy as np
import torch
from PIL import Image
from ..io.image import _read_png_16
from .utils import verify_str_arg
from .vision import VisionDataset
__all__ = (
"KittiFlow",
"Sintel",
"FlyingThings3D",
"FlyingChairs",
"HD1K",
)
class FlowDataset(ABC, VisionDataset):
# Some datasets like Kitti have a built-in valid_flow_mask, indicating which flow values are valid
# For those we return (img1, img2, flow, valid_flow_mask), and for the rest we return (img1, img2, flow),
# and it's up to whatever consumes the dataset to decide what valid_flow_mask should be.
_has_builtin_flow_mask = False
def __init__(self, root, transforms=None):
super().__init__(root=root)
self.transforms = transforms
self._flow_list = []
self._image_list = []
def _read_img(self, file_name):
img = Image.open(file_name)
if img.mode != "RGB":
img = img.convert("RGB")
return img
@abstractmethod
def _read_flow(self, file_name):
# Return the flow or a tuple with the flow and the valid_flow_mask if _has_builtin_flow_mask is True
pass
def __getitem__(self, index):
img1 = self._read_img(self._image_list[index][0])
img2 = self._read_img(self._image_list[index][1])
if self._flow_list: # it will be empty for some dataset when split="test"
flow = self._read_flow(self._flow_list[index])
if self._has_builtin_flow_mask:
flow, valid_flow_mask = flow
else:
valid_flow_mask = None
else:
flow = valid_flow_mask = None
if self.transforms is not None:
img1, img2, flow, valid_flow_mask = self.transforms(img1, img2, flow, valid_flow_mask)
if self._has_builtin_flow_mask or valid_flow_mask is not None:
# The `or valid_flow_mask is not None` part is here because the mask can be generated within a transform
return img1, img2, flow, valid_flow_mask
else:
return img1, img2, flow
def __len__(self):
return len(self._image_list)
def __rmul__(self, v):
return torch.utils.data.ConcatDataset([self] * v)
class Sintel(FlowDataset):
"""`Sintel <http://sintel.is.tue.mpg.de/>`_ Dataset for optical flow.
The dataset is expected to have the following structure: ::
root
Sintel
testing
clean
scene_1
scene_2
...
final
scene_1
scene_2
...
training
clean
scene_1
scene_2
...
final
scene_1
scene_2
...
flow
scene_1
scene_2
...
Args:
root (string): Root directory of the Sintel Dataset.
split (string, optional): The dataset split, either "train" (default) or "test"
pass_name (string, optional): The pass to use, either "clean" (default), "final", or "both". See link above for
details on the different passes.
transforms (callable, optional): A function/transform that takes in
``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
``valid_flow_mask`` is expected for consistency with other datasets which
return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
"""
def __init__(self, root, split="train", pass_name="clean", transforms=None):
super().__init__(root=root, transforms=transforms)
verify_str_arg(split, "split", valid_values=("train", "test"))
verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both"))
passes = ["clean", "final"] if pass_name == "both" else [pass_name]
root = Path(root) / "Sintel"
flow_root = root / "training" / "flow"
for pass_name in passes:
split_dir = "training" if split == "train" else split
image_root = root / split_dir / pass_name
for scene in os.listdir(image_root):
image_list = sorted(glob(str(image_root / scene / "*.png")))
for i in range(len(image_list) - 1):
self._image_list += [[image_list[i], image_list[i + 1]]]
if split == "train":
self._flow_list += sorted(glob(str(flow_root / scene / "*.flo")))
def __getitem__(self, index):
"""Return example at given index.
Args:
index(int): The index of the example to retrieve
Returns:
tuple: A 3-tuple with ``(img1, img2, flow)``.
The flow is a numpy array of shape (2, H, W) and the images are PIL images.
``flow`` is None if ``split="test"``.
If a valid flow mask is generated within the ``transforms`` parameter,
a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
"""
return super().__getitem__(index)
def _read_flow(self, file_name):
return _read_flo(file_name)
class KittiFlow(FlowDataset):
"""`KITTI <http://www.cvlibs.net/datasets/kitti/eval_scene_flow.php?benchmark=flow>`__ dataset for optical flow (2015).
The dataset is expected to have the following structure: ::
root
KittiFlow
testing
image_2
training
image_2
flow_occ
Args:
root (string): Root directory of the KittiFlow Dataset.
split (string, optional): The dataset split, either "train" (default) or "test"
transforms (callable, optional): A function/transform that takes in
``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
"""
_has_builtin_flow_mask = True
def __init__(self, root, split="train", transforms=None):
super().__init__(root=root, transforms=transforms)
verify_str_arg(split, "split", valid_values=("train", "test"))
root = Path(root) / "KittiFlow" / (split + "ing")
images1 = sorted(glob(str(root / "image_2" / "*_10.png")))
images2 = sorted(glob(str(root / "image_2" / "*_11.png")))
if not images1 or not images2:
raise FileNotFoundError(
"Could not find the Kitti flow images. Please make sure the directory structure is correct."
)
for img1, img2 in zip(images1, images2):
self._image_list += [[img1, img2]]
if split == "train":
self._flow_list = sorted(glob(str(root / "flow_occ" / "*_10.png")))
def __getitem__(self, index):
"""Return example at given index.
Args:
index(int): The index of the example to retrieve
Returns:
tuple: A 4-tuple with ``(img1, img2, flow, valid_flow_mask)``
where ``valid_flow_mask`` is a numpy boolean mask of shape (H, W)
indicating which flow values are valid. The flow is a numpy array of
shape (2, H, W) and the images are PIL images. ``flow`` and ``valid_flow_mask`` are None if
``split="test"``.
"""
return super().__getitem__(index)
def _read_flow(self, file_name):
return _read_16bits_png_with_flow_and_valid_mask(file_name)
class FlyingChairs(FlowDataset):
"""`FlyingChairs <https://lmb.informatik.uni-freiburg.de/resources/datasets/FlyingChairs.en.html#flyingchairs>`_ Dataset for optical flow.
You will also need to download the FlyingChairs_train_val.txt file from the dataset page.
The dataset is expected to have the following structure: ::
root
FlyingChairs
data
00001_flow.flo
00001_img1.ppm
00001_img2.ppm
...
FlyingChairs_train_val.txt
Args:
root (string): Root directory of the FlyingChairs Dataset.
split (string, optional): The dataset split, either "train" (default) or "val"
transforms (callable, optional): A function/transform that takes in
``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
``valid_flow_mask`` is expected for consistency with other datasets which
return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
"""
def __init__(self, root, split="train", transforms=None):
super().__init__(root=root, transforms=transforms)
verify_str_arg(split, "split", valid_values=("train", "val"))
root = Path(root) / "FlyingChairs"
images = sorted(glob(str(root / "data" / "*.ppm")))
flows = sorted(glob(str(root / "data" / "*.flo")))
split_file_name = "FlyingChairs_train_val.txt"
if not os.path.exists(root / split_file_name):
raise FileNotFoundError(
"The FlyingChairs_train_val.txt file was not found - please download it from the dataset page (see docstring)."
)
split_list = np.loadtxt(str(root / split_file_name), dtype=np.int32)
for i in range(len(flows)):
split_id = split_list[i]
if (split == "train" and split_id == 1) or (split == "val" and split_id == 2):
self._flow_list += [flows[i]]
self._image_list += [[images[2 * i], images[2 * i + 1]]]
def __getitem__(self, index):
"""Return example at given index.
Args:
index(int): The index of the example to retrieve
Returns:
tuple: A 3-tuple with ``(img1, img2, flow)``.
                The flow is a numpy array of shape (2, H, W) and the images are PIL images.
                ``flow`` is None if ``split="val"``.
                If a valid flow mask is generated within the ``transforms`` parameter,
                a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
"""
return super().__getitem__(index)
def _read_flow(self, file_name):
return _read_flo(file_name)
class FlyingThings3D(FlowDataset):
"""`FlyingThings3D <https://lmb.informatik.uni-freiburg.de/resources/datasets/SceneFlowDatasets.en.html>`_ dataset for optical flow.
The dataset is expected to have the following structure: ::
root
FlyingThings3D
frames_cleanpass
TEST
TRAIN
frames_finalpass
TEST
TRAIN
optical_flow
TEST
TRAIN
Args:
root (string): Root directory of the intel FlyingThings3D Dataset.
split (string, optional): The dataset split, either "train" (default) or "test"
pass_name (string, optional): The pass to use, either "clean" (default) or "final" or "both". See link above for
details on the different passes.
camera (string, optional): Which camera to return images from. Can be either "left" (default) or "right" or "both".
transforms (callable, optional): A function/transform that takes in
``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
``valid_flow_mask`` is expected for consistency with other datasets which
return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
"""
def __init__(self, root, split="train", pass_name="clean", camera="left", transforms=None):
super().__init__(root=root, transforms=transforms)
verify_str_arg(split, "split", valid_values=("train", "test"))
split = split.upper()
verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both"))
passes = {
"clean": ["frames_cleanpass"],
"final": ["frames_finalpass"],
"both": ["frames_cleanpass", "frames_finalpass"],
}[pass_name]
verify_str_arg(camera, "camera", valid_values=("left", "right", "both"))
cameras = ["left", "right"] if camera == "both" else [camera]
root = Path(root) / "FlyingThings3D"
directions = ("into_future", "into_past")
for pass_name, camera, direction in itertools.product(passes, cameras, directions):
image_dirs = sorted(glob(str(root / pass_name / split / "*/*")))
image_dirs = sorted(Path(image_dir) / camera for image_dir in image_dirs)
flow_dirs = sorted(glob(str(root / "optical_flow" / split / "*/*")))
flow_dirs = sorted(Path(flow_dir) / direction / camera for flow_dir in flow_dirs)
if not image_dirs or not flow_dirs:
raise FileNotFoundError(
"Could not find the FlyingThings3D flow images. "
"Please make sure the directory structure is correct."
)
for image_dir, flow_dir in zip(image_dirs, flow_dirs):
images = sorted(glob(str(image_dir / "*.png")))
flows = sorted(glob(str(flow_dir / "*.pfm")))
for i in range(len(flows) - 1):
if direction == "into_future":
self._image_list += [[images[i], images[i + 1]]]
self._flow_list += [flows[i]]
elif direction == "into_past":
self._image_list += [[images[i + 1], images[i]]]
self._flow_list += [flows[i + 1]]
def __getitem__(self, index):
"""Return example at given index.
Args:
index(int): The index of the example to retrieve
Returns:
tuple: A 3-tuple with ``(img1, img2, flow)``.
The flow is a numpy array of shape (2, H, W) and the images are PIL images.
``flow`` is None if ``split="test"``.
If a valid flow mask is generated within the ``transforms`` parameter,
a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
"""
return super().__getitem__(index)
def _read_flow(self, file_name):
return _read_pfm(file_name)
class HD1K(FlowDataset):
"""`HD1K <http://hci-benchmark.iwr.uni-heidelberg.de/>`__ dataset for optical flow.
The dataset is expected to have the following structure: ::
root
hd1k
hd1k_challenge
image_2
hd1k_flow_gt
flow_occ
hd1k_input
image_2
Args:
root (string): Root directory of the HD1K Dataset.
split (string, optional): The dataset split, either "train" (default) or "test"
transforms (callable, optional): A function/transform that takes in
``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
"""
_has_builtin_flow_mask = True
def __init__(self, root, split="train", transforms=None):
super().__init__(root=root, transforms=transforms)
verify_str_arg(split, "split", valid_values=("train", "test"))
root = Path(root) / "hd1k"
if split == "train":
# There are 36 "sequences" and we don't want seq i to overlap with seq i + 1, so we need this for loop
for seq_idx in range(36):
flows = sorted(glob(str(root / "hd1k_flow_gt" / "flow_occ" / f"{seq_idx:06d}_*.png")))
images = sorted(glob(str(root / "hd1k_input" / "image_2" / f"{seq_idx:06d}_*.png")))
for i in range(len(flows) - 1):
self._flow_list += [flows[i]]
self._image_list += [[images[i], images[i + 1]]]
else:
images1 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*10.png")))
images2 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*11.png")))
for image1, image2 in zip(images1, images2):
self._image_list += [[image1, image2]]
if not self._image_list:
raise FileNotFoundError(
"Could not find the HD1K images. Please make sure the directory structure is correct."
)
def _read_flow(self, file_name):
return _read_16bits_png_with_flow_and_valid_mask(file_name)
def __getitem__(self, index):
"""Return example at given index.
Args:
index(int): The index of the example to retrieve
Returns:
tuple: A 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` where ``valid_flow_mask``
is a numpy boolean mask of shape (H, W)
indicating which flow values are valid. The flow is a numpy array of
shape (2, H, W) and the images are PIL images. ``flow`` and ``valid_flow_mask`` are None if
``split="test"``.
"""
return super().__getitem__(index)
def _read_flo(file_name):
"""Read .flo file in Middlebury format"""
# Code adapted from:
# http://stackoverflow.com/questions/28013200/reading-middlebury-flow-files-with-python-bytes-array-numpy
# Everything needs to be in little Endian according to
# https://vision.middlebury.edu/flow/code/flow-code/README.txt
with open(file_name, "rb") as f:
magic = np.fromfile(f, "c", count=4).tobytes()
if magic != b"PIEH":
raise ValueError("Magic number incorrect. Invalid .flo file")
w = int(np.fromfile(f, "<i4", count=1))
h = int(np.fromfile(f, "<i4", count=1))
data = np.fromfile(f, "<f4", count=2 * w * h)
return data.reshape(h, w, 2).transpose(2, 0, 1)
def _read_16bits_png_with_flow_and_valid_mask(file_name):
flow_and_valid = _read_png_16(file_name).to(torch.float32)
flow, valid_flow_mask = flow_and_valid[:2, :, :], flow_and_valid[2, :, :]
    flow = (flow - 2 ** 15) / 64  # KITTI stores each flow component as uint16 with value = flow * 64 + 2**15; this inverts that encoding (see the KITTI flow devkit readme)
valid_flow_mask = valid_flow_mask.bool()
# For consistency with other datasets, we convert to numpy
return flow.numpy(), valid_flow_mask.numpy()
def _read_pfm(file_name):
"""Read flow in .pfm format"""
with open(file_name, "rb") as f:
header = f.readline().rstrip()
if header != b"PF":
raise ValueError("Invalid PFM file")
dim_match = re.match(rb"^(\d+)\s(\d+)\s$", f.readline())
if not dim_match:
raise Exception("Malformed PFM header.")
w, h = (int(dim) for dim in dim_match.groups())
scale = float(f.readline().rstrip())
if scale < 0: # little-endian
endian = "<"
scale = -scale
else:
endian = ">" # big-endian
data = np.fromfile(f, dtype=endian + "f")
data = data.reshape(h, w, 3).transpose(2, 0, 1)
data = np.flip(data, axis=1) # flip on h dimension
data = data[:2, :, :]
        return data.astype(np.float32)
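# Usage sketch (illustrative, not part of the module): constructing one of
# these datasets and reading a sample, assuming a Sintel archive extracted
# under ./data with the layout documented in the Sintel docstring above.
#
#   from torchvision.datasets import Sintel
#   dataset = Sintel(root="./data", split="train", pass_name="clean")
#   img1, img2, flow = dataset[0]   # flow: (2, H, W) float32 numpy array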
<|file_name|>ssr.spec.js<|end_file_name|><|fim▁begin|>import test from 'ava'
import Vue from 'vue'
import {createRenderer} from 'vue-server-renderer'
import ContentPlaceholder from '../src'
test.cb('ssr', t => {
const rows = [{height: '5px', boxes: [[0, '40px']]}]
const renderer = createRenderer()
const app = new Vue({
template: '<content-placeholder :rows="rows"></content-placeholder>',
components: {ContentPlaceholder},
data: {rows}
})
renderer.renderToString(app, (err, html) => {
t.falsy(err)
t.true(html.includes('data-server-rendered'))
t.end()
  })
})
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# Account Cut-off Base module for OpenERP
# Copyright (C) 2013 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Cut-off Base',
'version': '0.1',
'category': 'Accounting & Finance',
'license': 'AGPL-3',
'summary': 'Base module for Account Cut-offs',
'description': """
This module contains objects, fields and menu entries that are used by other
cut-off modules, so you need to install other cut-off modules to get the
additional functionalities:
* the module *account_cutoff_prepaid* will manage prepaid cut-offs based on
start date and end date,
* the module *account_cutoff_accrual_picking* will manage the accruals based
on the status of the pickings.
Please contact Alexis de Lattre from Akretion <[email protected]>
for any help or question about this module.
""",
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'depends': ['account_accountant'],
'data': [
'company_view.xml',
'account_cutoff_view.xml',
'security/ir.model.access.csv',
'security/account_cutoff_base_security.xml',
],
'installable': True,
'active': False,
}
<|file_name|>radarChart.ts<|end_file_name|><|fim▁begin|>/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../../../_references.ts"/>
module powerbi.visuals.samples {
import ClassAndSelector = jsCommon.CssConstants.ClassAndSelector;
import CreateClassAndSelector = jsCommon.CssConstants.createClassAndSelector;
import PixelConverter = jsCommon.PixelConverter;
export interface RadarChartConstructorOptions {
animator?: IGenericAnimator;
svg?: D3.Selection;
margin?: IMargin;
}
export interface RadarChartDatapoint extends SelectableDataPoint {
x: number;
y: number;
y0?: number;
color?: string;
value?: number;
tooltipInfo?: TooltipDataItem[];
labelFormatString?: string;
labelFontSize?: string;
}
export interface RadarChartData {
legendData: LegendData;
series: RadarChartSeries[];
settings: RadarChartSettings;
dataLabelsSettings: PointDataLabelsSettings;
}
export interface RadarChartSeries {
fill: string;
name: string;
data: RadarChartDatapoint[];
identity: SelectionId;
}
export interface RadarChartSettings {
showLegend?: boolean;
}
export interface RadarChartBehaviorOptions {
selection: D3.Selection;
clearCatcher: D3.Selection;
}
/**
* RadarChartBehavior
*/
export class RadarChartWebBehavior implements IInteractiveBehavior {
private selection: D3.Selection;
public bindEvents(options: RadarChartBehaviorOptions, selectionHandler: ISelectionHandler): void {
var selection = this.selection = options.selection;
var clearCatcher = options.clearCatcher;
selection.on('click', function (d: SelectableDataPoint) {
selectionHandler.handleSelection(d, d3.event.ctrlKey);
d3.event.stopPropagation();
});
clearCatcher.on('click', function () {
selectionHandler.handleClearSelection();
});
}
public renderSelection(hasSelection: boolean): void {
this.selection.style("opacity", (d: SelectableDataPoint) => (hasSelection && !d.selected) ? RadarChart.DimmedAreaFillOpacity : RadarChart.NodeFillOpacity);
}
}
export class RadarChart implements IVisual {
public static capabilities: VisualCapabilities = {
dataRoles: [
{
displayName: 'Category',
name: 'Category',
kind: powerbi.VisualDataRoleKind.Grouping,
},
{
displayName: 'Y Axis',
name: 'Y',
kind: powerbi.VisualDataRoleKind.Measure,
},
],
dataViewMappings: [{
conditions: [{ 'Category': { min: 1, max: 1 } }],
categorical: {
categories: {
for: { in: 'Category' },
dataReductionAlgorithm: { top: {} }
},
values: {
select: [{ bind: { to: 'Y' } }]
}
}
}],
objects: {
general: {
displayName: data.createDisplayNameGetter('Visual_General'),
properties: {
formatString: {
type: { formatting: { formatString: true } },
},
},
},
legend: {
displayName: data.createDisplayNameGetter('Visual_Legend'),
description: data.createDisplayNameGetter('Visual_LegendDescription'),
properties: {
show: {
displayName: data.createDisplayNameGetter('Visual_Show'),
type: { bool: true }
},
position: {
displayName: data.createDisplayNameGetter('Visual_LegendPosition'),
description: data.createDisplayNameGetter('Visual_LegendPositionDescription'),
type: { enumeration: legendPosition.type }
},
showTitle: {
displayName: data.createDisplayNameGetter('Visual_LegendShowTitle'),
description: data.createDisplayNameGetter('Visual_LegendShowTitleDescription'),
type: { bool: true }
},
titleText: {
displayName: data.createDisplayNameGetter('Visual_LegendName'),
description: data.createDisplayNameGetter('Visual_LegendNameDescription'),
type: { text: true },
suppressFormatPainterCopy: true
},
labelColor: {
displayName: data.createDisplayNameGetter('Visual_LegendTitleColor'),
type: { fill: { solid: { color: true } } }
},
fontSize: {
displayName: data.createDisplayNameGetter('Visual_TextSize'),
type: { formatting: { fontSize: true } }
}
}
},
dataPoint: {
displayName: data.createDisplayNameGetter('Visual_DataPoint'),
description: data.createDisplayNameGetter('Visual_DataPointDescription'),
properties: {
fill: {
displayName: data.createDisplayNameGetter('Visual_Fill'),
type: { fill: { solid: { color: true } } }
}
}
},
labels: {
displayName: data.createDisplayNameGetter('Visual_DataPointsLabels'),
description: data.createDisplayNameGetter('Visual_DataPointsLabelsDescription'),
properties: {
show: {
displayName: data.createDisplayNameGetter('Visual_Show'),
type: { bool: true }
},
color: {
displayName: data.createDisplayNameGetter('Visual_LabelsFill'),
description: data.createDisplayNameGetter('Visual_LabelsFillDescription'),
type: { fill: { solid: { color: true } } }
},
labelDisplayUnits: {
displayName: data.createDisplayNameGetter('Visual_DisplayUnits'),
description: data.createDisplayNameGetter('Visual_DisplayUnitsDescription'),
type: { formatting: { labelDisplayUnits: true } },
suppressFormatPainterCopy: true,
},
labelPrecision: {
displayName: data.createDisplayNameGetter('Visual_Precision'),
description: data.createDisplayNameGetter('Visual_PrecisionDescription'),
placeHolderText: data.createDisplayNameGetter('Visual_Precision_Auto'),
type: { numeric: true },
suppressFormatPainterCopy: true,
},
fontSize: {
displayName: data.createDisplayNameGetter('Visual_TextSize'),
type: { formatting: { fontSize: true } }
},
}
}
}
};
/** Note: Public for testability */
public static formatStringProp: DataViewObjectPropertyIdentifier = {
objectName: 'general',
propertyName: 'formatString',
};
private static Properties: any = {
legend: {
show: <DataViewObjectPropertyIdentifier>{ objectName: 'legend', propertyName: 'show' }
},
dataPoint: {
fill: <DataViewObjectPropertyIdentifier>{ objectName: 'dataPoint', propertyName: 'fill' }
},
labels: {
show: <DataViewObjectPropertyIdentifier>{ objectName: 'labels', propertyName: 'show' },
color: <DataViewObjectPropertyIdentifier>{ objectName: 'labels', propertyName: 'color' },
displayUnits: <DataViewObjectPropertyIdentifier>{ objectName: 'labels', propertyName: 'labelDisplayUnits' },
precision: <DataViewObjectPropertyIdentifier>{ objectName: 'labels', propertyName: 'labelPrecision' },
fontSize: <DataViewObjectPropertyIdentifier>{ objectName: 'labels', propertyName: 'fontSize' },
}
};
private static VisualClassName = 'radarChart';
private static Segments: ClassAndSelector = CreateClassAndSelector('segments');
private static SegmentNode: ClassAndSelector = CreateClassAndSelector('segmentNode');
private static ZeroSegment: ClassAndSelector = CreateClassAndSelector('zeroSegment');
private static ZeroSegmentNode: ClassAndSelector = CreateClassAndSelector('zeroSegmentNode');
private static ZeroLabel: ClassAndSelector = CreateClassAndSelector('zeroLabel');
private static Axis: ClassAndSelector = CreateClassAndSelector('axis');
private static AxisNode: ClassAndSelector = CreateClassAndSelector('axisNode');
private static AxisLabel: ClassAndSelector = CreateClassAndSelector('axisLabel');
private static Chart: ClassAndSelector = CreateClassAndSelector('chart');
private static ChartNode: ClassAndSelector = CreateClassAndSelector('chartNode');
private static ChartArea: ClassAndSelector = CreateClassAndSelector('chartArea');
private static ChartPolygon: ClassAndSelector = CreateClassAndSelector('chartPolygon');
private static ChartDot: ClassAndSelector = CreateClassAndSelector('chartDot');
private static MaxPrecision: number = 17;
private static MinPrecision: number = 0;
private svg: D3.Selection;
private segments: D3.Selection;
private zeroSegment: D3.Selection;
private axis: D3.Selection;
private chart: D3.Selection;
private mainGroupElement: D3.Selection;
private colors: IDataColorPalette;
private viewport: IViewport;
private interactivityService: IInteractivityService;
private animator: IGenericAnimator;
private margin: IMargin;
private legend: ILegend;
private legendObjectProperties: DataViewObject;
private radarChartData: RadarChartData;
private isInteractiveChart: boolean;
private zeroPointRadius: number;
private static DefaultMargin: IMargin = {
top: 50,
bottom: 50,
right: 100,
left: 100
};
private static SegmentLevels: number = 6;
private static SegmentFactor: number = 1;
private static Radians: number = 2 * Math.PI;
private static Scale: number = 1;
public static NodeFillOpacity = 1;
public static AreaFillOpacity = 0.6;
public static DimmedAreaFillOpacity = 0.4;
private angle: number;
private radius: number;
public static AxesLabelsFontFamily: string = "sans-serif";
public static AxesLabelsfontSize: string = "11px";
public static AxesLabelsMaxWidth: number = 200;
public static converter(dataView: DataView, colors: IDataColorPalette): RadarChartData {
if (!dataView ||
!dataView.categorical ||
!dataView.categorical.categories ||
!(dataView.categorical.categories.length > 0) ||
!dataView.categorical.categories[0] ||
!dataView.categorical.values ||
!(dataView.categorical.values.length > 0) ||
!colors) {
return {
legendData: {
dataPoints: []
},
settings: {
showLegend: true
},
series: [],
dataLabelsSettings: dataLabelUtils.getDefaultPointLabelSettings(),
};
}
var catDv: DataViewCategorical = dataView.categorical,
values: DataViewValueColumns = catDv.values,
grouped: DataViewValueColumnGroup[] = catDv && catDv.values ? catDv.values.grouped() : null,
series: RadarChartSeries[] = [],
colorHelper = new ColorHelper(colors, RadarChart.Properties.dataPoint.fill);
var legendData: LegendData = {
fontSize: 8.25,
dataPoints: [],
title: ""
};
//Parse legend settings
var legendSettings: RadarChartSettings = RadarChart.parseSettings(dataView);
var dataLabelsSettings: PointDataLabelsSettings = RadarChart.parseLabelSettings(dataView);
for (var i = 0, iLen = values.length; i < iLen; i++) {
var color = colors.getColorByIndex(i).value,
serieIdentity: SelectionId,
queryName: string,
displayName: string,
dataPoints: RadarChartDatapoint[] = [];
var columnGroup: DataViewValueColumnGroup = grouped
                && grouped.length > i && grouped[i].values ? grouped[i] : null;
if (values[i].source) {
var source = values[i].source;
if (source.queryName) {
queryName = source.queryName;
serieIdentity = SelectionId.createWithMeasure(queryName);
}
if (source.displayName)
displayName = source.displayName;
if (source.objects) {
var objects: any = source.objects;
color = colorHelper.getColorForMeasure(objects, queryName);
}
}
legendData.dataPoints.push({
label: displayName,
color: color,
icon: LegendIcon.Box,
selected: false,
identity: serieIdentity
});
for (var k = 0, kLen = values[i].values.length; k < kLen; k++) {
var dataPointIdentity: SelectionId = SelectionIdBuilder
.builder()
.withMeasure(queryName)
.withCategory(catDv.categories[0], k)
.withSeries(dataView.categorical.values, columnGroup)
.createSelectionId();
var tooltipInfo: TooltipDataItem[] = TooltipBuilder.createTooltipInfo(RadarChart.formatStringProp,
catDv,
catDv.categories[0].values[k],
values[i].values[k],
null,
null,
i);
var labelFormatString = valueFormatter.getFormatString(catDv.values[i].source, RadarChart.formatStringProp);
var fontSizeInPx = jsCommon.PixelConverter.fromPoint(dataLabelsSettings.fontSize);
dataPoints.push({
x: k,
y: <number>values[i].values[k],
color: color,
identity: dataPointIdentity,
selected: false,
tooltipInfo: tooltipInfo,
value: <number>values[i].values[k],
labelFormatString: labelFormatString,
labelFontSize: fontSizeInPx,
});
}
if (dataPoints.length > 0)
series.push({
fill: color,
name: displayName,
data: dataPoints,
identity: serieIdentity,
});
}
return {
legendData: legendData,
settings: legendSettings,
series: series,
dataLabelsSettings: dataLabelsSettings,
};
}
public constructor(options?: RadarChartConstructorOptions) {
if (options) {
if (options.svg)
this.svg = options.svg;
if (options.animator)
this.animator = options.animator;
if (options.margin)
this.margin = options.margin;
}
}
public init(options: VisualInitOptions): void {
var element = options.element;
if (!this.svg) {
this.svg = d3.select(element.get(0)).append('svg');
this.svg.style('position', 'absolute');
}
if (!this.margin)
this.margin = RadarChart.DefaultMargin;
this.svg.classed(RadarChart.VisualClassName, true);
this.interactivityService = visuals.createInteractivityService(options.host);
this.isInteractiveChart = options.interactivity && options.interactivity.isInteractiveLegend;
this.legend = createLegend(element,
this.isInteractiveChart,
this.interactivityService,
true,
LegendPosition.Top);
this.colors = options.style.colorPalette.dataColors;
this.mainGroupElement = this.svg.append('g');
this.segments = this.mainGroupElement
.append('g')
.classed(RadarChart.Segments.class, true);
this.zeroSegment = this.mainGroupElement
.append('g')
.classed(RadarChart.ZeroSegment.class, true);
this.axis = this.mainGroupElement
.append('g')
.classed(RadarChart.Axis.class, true);
this.chart = this.mainGroupElement
.append('g')
.classed(RadarChart.Chart.class, true);
}
public update(options: VisualUpdateOptions): void {
if (!options.dataViews || !options.dataViews[0])
return;
var dataView = options.dataViews[0];
this.radarChartData = RadarChart.converter(dataView, this.colors);
var categories: any[] = [],
series = this.radarChartData.series,
dataViewMetadataColumn: DataViewMetadataColumn,
duration = AnimatorCommon.GetAnimationDuration(this.animator, options.suppressAnimations);
if (dataView.categorical &&
dataView.categorical.categories &&
dataView.categorical.categories[0] &&
dataView.categorical.categories[0].values)
categories = dataView.categorical.categories[0].values;
if (dataView.metadata && dataView.metadata.columns && dataView.metadata.columns.length > 0)
dataViewMetadataColumn = dataView.metadata.columns[0];
this.viewport = {
height: options.viewport.height > 0 ? options.viewport.height : 0,
width: options.viewport.width > 0 ? options.viewport.width : 0
};
this.parseLegendProperties(dataView);
this.renderLegend(this.radarChartData);
this.updateViewport();
this.svg
.attr({
'height': this.viewport.height,
'width': this.viewport.width
});
var mainGroup = this.mainGroupElement;
mainGroup.attr('transform', SVGUtil.translate(this.viewport.width / 2, this.viewport.height / 2));
var width: number = this.viewport.width - this.margin.left - this.margin.right;
var height: number = this.viewport.height - this.margin.top - this.margin.bottom;
this.angle = RadarChart.Radians / categories.length;
this.radius = RadarChart.SegmentFactor * RadarChart.Scale * Math.min(width, height) / 2;
this.drawCircularSegments(categories);
this.drawAxes(categories);
this.drawAxesLabels(categories, dataViewMetadataColumn);
this.drawChart(series, duration);
this.drawDataLabels(series);
this.drawZeroCircularSegment(categories);
if (this.zeroPointRadius !== 0)
this.drawZeroLabel();
else
this.mainGroupElement.selectAll(RadarChart.ZeroLabel.selector).remove();
}
private getRadarChartLabelLayout(labelSettings: PointDataLabelsSettings, allDataPoints: RadarChartDatapoint[]): ILabelLayout {
var formattersCache = dataLabelUtils.createColumnFormatterCacheManager();
var angle: number = this.angle;
var viewport = this.viewport;
var halfHeight = this.viewport.height / 2;
var halfWidth = this.viewport.width / 2;
var y: any = this.calculateChartDomain(this.radarChartData.series);
return {
labelText: (d: RadarChartDatapoint) => {
                    var formatter = formattersCache.getOrCreate(d.labelFormatString, labelSettings);
                    if (labelSettings.displayUnits === 0) {
                        var maxDataPoint: RadarChartDatapoint = _.max(allDataPoints, d => d.value);
                        var maxValue = maxDataPoint.value > 0 ? maxDataPoint.value : 0;
                        formatter = formattersCache.getOrCreate(d.labelFormatString, labelSettings, maxValue);
                    }
                    return dataLabelUtils.getLabelFormattedText({ label: formatter.format(d.value), maxWidth: viewport.width, fontSize: labelSettings.fontSize });
},
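                // Polar-to-cartesian mapping: category d.x sits at angle d.x * angle
                // from the vertical axis; sin/cos are negated so the first category
                // points up in SVG coordinates (where y grows downward), and
                // halfWidth / halfHeight re-centre the labels on the viewport.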
labelLayout: {
x: (d: RadarChartDatapoint) => -1 * y(d.y) * Math.sin(d.x * angle) + halfWidth,
y: (d: RadarChartDatapoint) => -1 * y(d.y) * Math.cos(d.x * angle) + halfHeight - 7,
},
filter: (d: RadarChartDatapoint) => {
return (d != null && d.value != null);
},
style: {
'fill': labelSettings.labelColor,
'font-size': (d: RadarChartDatapoint) => PixelConverter.fromPoint(labelSettings.fontSize),
},
};
}
private drawCircularSegments(values: string[]): void {
var data = [];
var angle: number = this.angle,
factor: number = RadarChart.SegmentFactor,
levels: number = RadarChart.SegmentLevels,
radius: number = this.radius;
for (var level = 0; level < levels - 1; level++) {
var levelFactor: number = radius * ((level + 1) / levels);
var transform: number = -1 * levelFactor;
for (var i = 0; i < values.length; i++)
data.push({
x1: levelFactor * (1 - factor * Math.sin(i * angle)),
y1: levelFactor * (1 - factor * Math.cos(i * angle)),
x2: levelFactor * (1 - factor * Math.sin((i + 1) * angle)),
y2: levelFactor * (1 - factor * Math.cos((i + 1) * angle)),
translate: SVGUtil.translate(transform, transform)
});
}
var selection = this.mainGroupElement
.select(RadarChart.Segments.selector)
.selectAll(RadarChart.SegmentNode.selector)
.data(data);
selection
.enter()
.append('svg:line')
.classed(RadarChart.SegmentNode.class, true);
selection
.attr({
'x1': item => item.x1,
'y1': item => item.y1,
'x2': item => item.x2,
'y2': item => item.y2,
'transform': item => item.translate
});
selection.exit().remove();
}
private drawDataLabels(series: RadarChartSeries[]): void {
var allDataPoints: RadarChartDatapoint[] = this.getAllDataPointsList(series);
if (this.radarChartData.dataLabelsSettings.show) {
var layout = this.getRadarChartLabelLayout(this.radarChartData.dataLabelsSettings, allDataPoints);
var viewport = this.viewport;
var labels = dataLabelUtils.drawDefaultLabelsForDataPointChart(allDataPoints, this.mainGroupElement, layout, viewport);
labels.attr('transform', SVGUtil.translate(-(viewport.width / 2), -(viewport.height / 2)));
}
else
dataLabelUtils.cleanDataLabels(this.mainGroupElement);
}
private drawAxes(values: string[]): void {
var angle: number = this.angle,
radius: number = -1 * this.radius;
var selection: D3.Selection = this.mainGroupElement
.select(RadarChart.Axis.selector)
.selectAll(RadarChart.AxisNode.selector);
var axis = selection.data(values);
axis
.enter()
.append('svg:line');
axis
.attr({
'x1': 0,
'y1': 0,
'x2': (name, i) => radius * Math.sin(i * angle),
'y2': (name, i) => radius * Math.cos(i * angle)
})
.classed(RadarChart.AxisNode.class, true);
axis.exit().remove();
}
private drawAxesLabels(values: string[], dataViewMetadataColumn?: DataViewMetadataColumn): void {
var angle: number = this.angle,
radius: number = -1 * this.radius,
length: number = values.length;
var formatter = valueFormatter.create({
format: valueFormatter.getFormatString(dataViewMetadataColumn, RadarChart.formatStringProp, true),
value: values[0],
value2: values[length - 1],
});
var selection: D3.Selection = this.mainGroupElement
.select(RadarChart.Axis.selector)
.selectAll(RadarChart.AxisLabel.selector);
var labels = selection.data(values);
labels
.enter()
.append('svg:text');
labels
.attr({
'text-anchor': 'middle',
'dy': '1.5em',
'transform': SVGUtil.translate(0, -10),
'x': (name, i) => { return (radius - 30) * Math.sin(i * angle); },
'y': (name, i) => { return (radius - 20) * Math.cos(i * angle); }
})
.text(item => {
var properties: TextProperties = {
fontFamily: RadarChart.AxesLabelsFontFamily,
fontSize: RadarChart.AxesLabelsfontSize,
text: formatter.format(item)
};
return TextMeasurementService.getTailoredTextOrDefault(properties, Math.min(RadarChart.AxesLabelsMaxWidth, this.viewport.width));
})
.classed(RadarChart.AxisLabel.class, true);
labels.exit().remove();
}
private drawChart(series: RadarChartSeries[], duration: number): void {
var angle: number = this.angle,
dotRadius: number = 5,
dataPoints: RadarChartDatapoint[][] = this.getDataPoints(series);
var stack = d3.layout.stack();
var layers = stack(dataPoints);
var y: any = this.calculateChartDomain(series);
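            // Build the SVG polygon "points" attribute: one "x,y" pair per category,
            // using the same polar mapping as the dots rendered below.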
var calculatePoints = (points) => {
return points.map((value) => {
var x1 = -1 * y(value.y) * Math.sin(value.x * angle);
var y1 = -1 * y(value.y) * Math.cos(value.x * angle);
return `${x1},${y1}`;
}).join(' ');
};
var areas = this.chart.selectAll(RadarChart.ChartArea.selector).data(layers);
areas
.enter()
.append('g')
.classed(RadarChart.ChartArea.class, true);
var polygon = areas.selectAll(RadarChart.ChartPolygon.selector).data(d => {
if (d && d.length > 0) {
return [d];
}
return [];
});
polygon
.enter()
.append('polygon')
.classed(RadarChart.ChartPolygon.class, true);
polygon
.style('fill', d => d[0].color)
.style('opacity', RadarChart.DimmedAreaFillOpacity)
.on('mouseover', function (d) {
d3.select(this).transition()
.duration(duration)
.style('opacity', RadarChart.AreaFillOpacity);
})
.on('mouseout', function (d) {
d3.select(this).transition()
.duration(duration)
.style('opacity', RadarChart.DimmedAreaFillOpacity);
})
.attr('points', calculatePoints);
polygon.exit().remove();
areas.exit().remove();
var selection = this.chart.selectAll(RadarChart.ChartNode.selector).data(layers);
selection
.enter()
.append('g')
.classed(RadarChart.ChartNode.class, true);
var dots = selection.selectAll(RadarChart.ChartDot.selector)
.data((d: RadarChartDatapoint[]) => { return d.filter(d => d.y != null); });
dots.enter()
.append('svg:circle')
.classed(RadarChart.ChartDot.class, true);
dots.attr('r', dotRadius)
.attr({
'cx': (value) => -1 * y(value.y) * Math.sin(value.x * angle),
'cy': (value) => -1 * y(value.y) * Math.cos(value.x * angle)
})
.style('fill', d => d.color);
dots.exit().remove();
TooltipManager.addTooltip(dots, (tooltipEvent: TooltipEvent) => tooltipEvent.data.tooltipInfo, true);
selection.exit().remove();
var behaviorOptions: RadarChartBehaviorOptions = undefined;
if (this.interactivityService) {
// Register interactivity
var dataPointsToBind = this.getAllDataPointsList(series);
behaviorOptions = { selection: dots, clearCatcher: this.svg };
this.interactivityService.bind(dataPointsToBind, new RadarChartWebBehavior(), behaviorOptions);
}
}
private calculateChartDomain(series: RadarChartSeries[]): any {
var radius: number = this.radius,
dataPointsList: RadarChartDatapoint[] = this.getAllDataPointsList(series);
var minValue: number = d3.min(dataPointsList, (d) => { return d.y; });
var maxValue: number = d3.max(dataPointsList, (d) => { return d.y; });
if (this.isPercentChart(dataPointsList)) {
minValue = minValue >= 0 ? 0 : -1;
maxValue = maxValue <= 0 ? 0 : 1;
}
var y = d3.scale.linear()
.domain([minValue, maxValue]).range([0, radius]);
// Calculate zero ring radius
this.zeroPointRadius = ((minValue < 0) && (maxValue > 0)) ? y(0) : 0;
return y;
}
private renderLegend(radarChartData: RadarChartData): void {
if (!radarChartData.legendData)
return;
var legendData: LegendData = radarChartData.legendData;
if (this.legendObjectProperties) {
LegendData.update(legendData, this.legendObjectProperties);
var position = <string>this.legendObjectProperties[legendProps.position];
if (position)
this.legend.changeOrientation(LegendPosition[position]);
}
else
this.legend.changeOrientation(LegendPosition.Top);
var viewport = this.viewport;
this.legend.drawLegend(legendData, { height: viewport.height, width: viewport.width });
Legend.positionChartArea(this.svg, this.legend);
}
private drawZeroCircularSegment(values: string[]): void {
var data = [];
var angle: number = this.angle,
factor: number = RadarChart.SegmentFactor,
radius: number = this.zeroPointRadius,
transform: number = -1 * radius;
for (var i = 0; i < values.length; i++)
data.push({
x1: radius * (1 - factor * Math.sin(i * angle)),
y1: radius * (1 - factor * Math.cos(i * angle)),
x2: radius * (1 - factor * Math.sin((i + 1) * angle)),
y2: radius * (1 - factor * Math.cos((i + 1) * angle)),
translate: SVGUtil.translate(transform, transform)
});
var selection = this.mainGroupElement
.select(RadarChart.ZeroSegment.selector)
.selectAll(RadarChart.ZeroSegmentNode.selector)
.data(data);
selection
.enter()
.append('svg:line')
.classed(RadarChart.ZeroSegmentNode.class, true);
selection
.attr({
'x1': item => item.x1,
'y1': item => item.y1,
'x2': item => item.x2,
'y2': item => item.y2,
'transform': item => item.translate
});
selection.exit().remove();
}
private drawZeroLabel() {
var data = [];
data.push({
'x': this.zeroPointRadius * (1 - RadarChart.SegmentFactor) + 5,
'y': -1 * this.zeroPointRadius
});
var zeroLabel = this.mainGroupElement
.select(RadarChart.ZeroSegment.selector)
.selectAll(RadarChart.ZeroLabel.selector).data(data);
zeroLabel
.enter()
.append('text')
.classed(RadarChart.ZeroLabel.class, true).text("0");
zeroLabel
.attr({
'x': item => item.x,
'y': item => item.y
});
}
private getDataPoints(series: RadarChartSeries[]): RadarChartDatapoint[][] {
var dataPoints: RadarChartDatapoint[][] = [];
for (var i: number = 0; i < series.length; i++) {
dataPoints.push(series[i].data);
}
return dataPoints;
}
private getAllDataPointsList(series: RadarChartSeries[]): RadarChartDatapoint[] {
var dataPoints: RadarChartDatapoint[] = [];
for (var i: number = 0; i < series.length; i++) {
dataPoints = dataPoints.concat(series[i].data);
}
return dataPoints;
}
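        // A chart is treated as a "percent chart" when every data point's format
        // string contains '%'; calculateChartDomain then pins the domain ends to
        // -1/0 and 0/1 instead of using the raw data extremes.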
private isPercentChart(dataPointsList: RadarChartDatapoint[]): boolean {
for (var i: number = 0; i < dataPointsList.length; i++) {
if (dataPointsList[i].labelFormatString.indexOf("%") === -1) {
return false;
}
}
return true;
}
private parseLegendProperties(dataView: DataView): void {
if (!dataView || !dataView.metadata) {
this.legendObjectProperties = {};
return;
}
this.legendObjectProperties = DataViewObjects.getObject(dataView.metadata.objects, "legend", {});
}
private static parseSettings(dataView: DataView): RadarChartSettings {
var objects: DataViewObjects;
if (!dataView || !dataView.metadata || !dataView.metadata.columns || !dataView.metadata.objects)
objects = null;
else
objects = dataView.metadata.objects;
return {
showLegend: DataViewObjects.getValue(objects, RadarChart.Properties.legend.show, true)
};
}
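// getPrecision clamps the label precision into [MinPrecision, MaxPrecision];
// for example (assumed bounds of 0 and 17), a requested precision of 25 would come back as 17.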
private static getPrecision(value: number): number {
return Math.max(RadarChart.MinPrecision, Math.min(RadarChart.MaxPrecision, value));
}
private static parseLabelSettings(dataView: DataView): PointDataLabelsSettings {
var objects: DataViewObjects;
if (!dataView || !dataView.metadata || !dataView.metadata.objects)
objects = null;
else
objects = dataView.metadata.objects;
var dataLabelsSettings: PointDataLabelsSettings = dataLabelUtils.getDefaultPointLabelSettings();
var labelsObj: PointDataLabelsSettings = {
show: DataViewObjects.getValue(objects, RadarChart.Properties.labels.show, dataLabelsSettings.show),
labelColor: DataViewObjects.getFillColor(objects, RadarChart.Properties.labels.color, dataLabelsSettings.labelColor),
displayUnits: DataViewObjects.getValue(objects, RadarChart.Properties.labels.displayUnits, dataLabelsSettings.displayUnits),
precision: RadarChart.getPrecision(DataViewObjects.getValue(objects, RadarChart.Properties.labels.precision, dataLabelsSettings.precision)),
fontSize: DataViewObjects.getValue(objects, RadarChart.Properties.labels.fontSize, dataLabelsSettings.fontSize),
position: dataLabelsSettings.position
};
return labelsObj;
}
// This function returns the values to be displayed in the property pane for each object.
// Usually it is a bind pass of what the property pane gave you, but sometimes you may want to do
// validation and return other values/defaults
public enumerateObjectInstances(options: EnumerateVisualObjectInstancesOptions): VisualObjectInstanceEnumeration {
var enumeration = new ObjectEnumerationBuilder();
var settings: RadarChartSettings;
if (!this.radarChartData || !this.radarChartData.settings)
return [];
settings = this.radarChartData.settings;
switch (options.objectName) {
case "legend":
enumeration.pushInstance(this.enumerateLegend(settings));
break;
case "dataPoint":
this.enumerateDataPoint(enumeration);
break;
case 'labels':
this.enumerateDataLabels(enumeration);
break;
}
return enumeration.complete();
}
private getLabelSettingsOptions(enumeration: ObjectEnumerationBuilder, labelSettings: PointDataLabelsSettings): VisualDataLabelsSettingsOptions {
return {
enumeration: enumeration,
dataLabelsSettings: labelSettings,
show: true,
displayUnits: true,
precision: true,
fontSize: true,
};
}
private enumerateDataLabels(enumeration: ObjectEnumerationBuilder): void {
var labelSettings = this.radarChartData.dataLabelsSettings;
//Draw default settings
dataLabelUtils.enumerateDataLabels(this.getLabelSettingsOptions(enumeration, labelSettings));
}
private enumerateLegend(settings: RadarChartSettings): VisualObjectInstance {
var showTitle: boolean = true,
titleText: string = "",
legend: VisualObjectInstance,
labelColor: DataColorPalette,
fontSize: number = 8;
showTitle = DataViewObject.getValue(this.legendObjectProperties, legendProps.showTitle, showTitle);
titleText = DataViewObject.getValue(this.legendObjectProperties, legendProps.titleText, titleText);
labelColor = DataViewObject.getValue(this.legendObjectProperties, legendProps.labelColor, labelColor);
fontSize = DataViewObject.getValue(this.legendObjectProperties, legendProps.fontSize, fontSize);
legend = {
objectName: "legend",
displayName: "legend",
selector: null,
properties: {
show: settings.showLegend,
position: LegendPosition[this.legend.getOrientation()],
showTitle: showTitle,
titleText: titleText,
labelColor: labelColor,
fontSize: fontSize,
}
};
return legend;
}
private enumerateDataPoint(enumeration: ObjectEnumerationBuilder): void {
if (!this.radarChartData || !this.radarChartData.series)
return;
var series: RadarChartSeries[] = this.radarChartData.series;
for (var i: number = 0; i < series.length; i++) {
var serie = series[i];
enumeration.pushInstance({
objectName: "dataPoint",
displayName: serie.name,
selector: ColorHelper.normalizeSelector(serie.identity.getSelector(), false),
properties: {
fill: { solid: { color: serie.fill } }
}
});
}
}
private updateViewport(): void {
var legendMargins: IViewport = this.legend.getMargins(),
legendPosition: LegendPosition;
<|fim▁hole|> case LegendPosition.Top:
case LegendPosition.TopCenter:
case LegendPosition.Bottom:
case LegendPosition.BottomCenter:
this.viewport.height -= legendMargins.height;
break;
case LegendPosition.Left:
case LegendPosition.LeftCenter:
case LegendPosition.Right:
case LegendPosition.RightCenter:
this.viewport.width -= legendMargins.width;
break;
}
}
}
}<|fim▁end|>
|
legendPosition = LegendPosition[<string>this.legendObjectProperties[legendProps.position]];
switch (legendPosition) {
|
<|file_name|>avatar.ts<|end_file_name|><|fim▁begin|>import { gql } from '@apollo/client'
import avatarFragment from 'v2/components/AvatarUploader/fragments/avatar'
export default gql`
query AvatarCheck {
me {<|fim▁hole|> }
}
${avatarFragment}
`<|fim▁end|>
|
...Avatar
|
<|file_name|>line_iterator.rs<|end_file_name|><|fim▁begin|>pub struct LineIterator<'a> {
data: &'a str,
line_number: usize,
line_start: usize,
line_end: usize,
done: bool
}
impl<'a> LineIterator<'a> {
pub fn new(data: &str) -> LineIterator {
LineIterator{
data,
line_number: 0,
line_start: 0,
line_end: 0,
done: false
}
}
fn out_of_data(&self) -> bool {
self.line_end == self.data.len()
}
}
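// Illustrative behaviour (mirrors the tests below): "a\nb" yields
// (0, "a\n") then (1, "b"), while "a\n" yields (0, "a\n") then (1, "")
// because of the trailing newline.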
impl<'a> Iterator for LineIterator<'a> {
type Item = (usize, &'a str);
fn next(&mut self) -> Option<Self::Item> {
if self.done {
return None
}
// Move the range beyond its previous position.
self.line_start = self.line_end;
// We track trailing newlines because, if the buffer ends immediately
// after one, we want to return one last line on the next iteration.
let mut trailing_newline = false;
// Find the next line range.
for c in self.data[self.line_start..].chars() {
// Extend the current line range to include this char.
self.line_end += c.len_utf8();
if c == '\n' {
trailing_newline = true;
break;
}
}
let line = Some((
self.line_number,
&self.data[
self.line_start..self.line_end
]
));
// Flag the iterator as done as soon as we've exhausted its data,
// and have given one last line for data with a trailing newline.
if self.out_of_data() && !trailing_newline {
self.done = true;
} else {
self.line_number += 1;
}
line
}
}
#[cfg(test)]
mod tests {
use super::LineIterator;
#[test]
fn next_produces_a_value_for_empty_data() {
let mut lines = LineIterator::new("");
assert_eq!(Some((0, "")), lines.next());
}
#[test]
fn next_includes_trailing_newlines() {
let mut lines = LineIterator::new("line\nanother line\n");
assert_eq!(Some((0, "line\n")), lines.next());
assert_eq!(Some((1, "another line\n")), lines.next());
assert_eq!(Some((2, "")), lines.next());
}<|fim▁hole|> #[test]
fn next_stops_at_end_of_data() {
let mut lines = LineIterator::new("line\nanother line");
lines.next();
lines.next();
assert_eq!(None, lines.next());
}
}<|fim▁end|>
| |
<|file_name|>freeze_bills.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from django.core.management.base import BaseCommand<|fim▁hole|>from mks.models import Knesset
class Command(BaseCommand):
help = "Freeze bills staged in previous knessets"
option_list = BaseCommand.option_list + (
make_option(
'-n', action='store_true', dest="dryrun", default=False,
help='Dry run, changes nothing in the db, just displays results'
),
)
def handle(self, *args, **options):
start_date = Knesset.objects.current_knesset().start_date
valid_stages = [key for (key, val) in BILL_STAGE_CHOICES
if key.isnumeric() and 1 < int(key) < 6]
bills = Bill.objects.filter(stage_date__lte=start_date,
stage__in=valid_stages)
total = Bill.objects.count()
found = bills.count()
msg = "Found {0} bills of {1} in stages {2} and dated before {3}"
print(msg.format(found, total, u','.join(valid_stages), start_date))
if options['dryrun']:
print("Not updating the db, dry run was specified")
else:
print('Setting {0} bills stage to u"0"'.format(found))
bills.update(stage=u'0')<|fim▁end|>
|
from optparse import make_option
from laws.models import Bill
from laws.vote_choices import BILL_STAGE_CHOICES
|
<|file_name|>init.js<|end_file_name|><|fim▁begin|>require.config( {
//By default load any module IDs from js/lib
baseUrl: "app/",
//except, if the module ID starts with "app",
//load it from the js/app directory. paths
//config is relative to the baseUrl, and<|fim▁hole|> lib: "../lib",
}
} );
requirejs( ["lib/angular/angular"], function() {
require( ["app"], function() {
angular.element( document ).ready( function() {
angular.bootstrap( document, ['dotaApp'] );
} );
} );
} );<|fim▁end|>
|
//never includes a ".js" extension since
//the paths config could be for a directory.
paths: {
|
<|file_name|>keystore.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The cert-manager Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// This file defines methods used for PKCS#12 support.
// This is an experimental feature and the contents of this file are intended
// to be absorbed into a more fully fledged implementation ahead of the v0.15
// release.
// This should hopefully not exist by the next time you come to read this :)
package secretsmanager
import (
"bytes"
"crypto/rand"
"crypto/x509"
"time"
jks "github.com/pavel-v-chernykh/keystore-go"
"software.sslmate.com/src/go-pkcs12"
"github.com/jetstack/cert-manager/pkg/util/pki"
)
const (
// pkcs12SecretKey is the name of the data entry in the Secret resource
// used to store the p12 file.
pkcs12SecretKey = "keystore.p12"
// pkcs12TruststoreKey is the name of the data entry in the Secret resource used to store the PKCS12 truststore containing the Certificate Authority
pkcs12TruststoreKey = "truststore.p12"
// jksSecretKey is the name of the data entry in the Secret resource
// used to store the jks file.
jksSecretKey = "keystore.jks"
// jksTruststoreKey is the name of the data entry in the Secret resource used to store the JKS truststore containing the Certificate Authority
jksTruststoreKey = "truststore.jks"
)
// encodePKCS12Keystore will encode a PKCS12 keystore using the password provided.
// The key, certificate and CA data must be provided in PKCS1 or PKCS8 PEM format.
// If the certificate data contains multiple certificates, the first will be used
// as the keystore's 'certificate' and the remaining certificates will be prepended
// to the list of CAs in the resulting keystore.
func encodePKCS12Keystore(password string, rawKey []byte, certPem []byte, caPem []byte) ([]byte, error) {
key, err := pki.DecodePrivateKeyBytes(rawKey)
if err != nil {
return nil, err
}
certs, err := pki.DecodeX509CertificateChainBytes(certPem)
if err != nil {
return nil, err
}
var cas []*x509.Certificate
if len(caPem) > 0 {
cas, err = pki.DecodeX509CertificateChainBytes(caPem)
if err != nil {
return nil, err
}
}
// prepend the certificate chain to the list of certificates as the PKCS12
// library only allows setting a single certificate.
if len(certs) > 1 {
cas = append(certs[1:], cas...)
}
return pkcs12.Encode(rand.Reader, key, certs[0], cas, password)
}
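// Illustrative usage (a sketch; the PEM inputs, the password and the `secret`
// object are assumed placeholders, not values defined by this package):
//
//   p12, err := encodePKCS12Keystore("changeit", keyPEM, certPEM, caPEM)
//   if err != nil {
//       return err
//   }
//   secret.Data[pkcs12SecretKey] = p12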
func encodePKCS12Truststore(password string, caPem []byte) ([]byte, error) {
ca, err := pki.DecodeX509CertificateBytes(caPem)
if err != nil {
return nil, err
}
var cas = []*x509.Certificate{ca}
return pkcs12.EncodeTrustStore(rand.Reader, cas, password)
}
func encodeJKSKeystore(password []byte, rawKey []byte, certPem []byte, caPem []byte) ([]byte, error) {
// encode the private key to PKCS8
key, err := pki.DecodePrivateKeyBytes(rawKey)
if err != nil {
return nil, err
}
keyDER, err := x509.MarshalPKCS8PrivateKey(key)
if err != nil {
return nil, err
}
// encode the certificate chain
chain, err := pki.DecodeX509CertificateChainBytes(certPem)
if err != nil {
return nil, err
}
certs := make([]jks.Certificate, len(chain))
for i, cert := range chain {
certs[i] = jks.Certificate{
Type: "X509",
Content: cert.Raw,
}
}
ks := jks.KeyStore{
"certificate": &jks.PrivateKeyEntry{
Entry: jks.Entry{
CreationDate: time.Now(),
},
PrivKey: keyDER,
CertChain: certs,
},
}
// add the CA certificate, if set
if len(caPem) > 0 {
ca, err := pki.DecodeX509CertificateBytes(caPem)
if err != nil {
return nil, err
}
ks["ca"] = &jks.TrustedCertificateEntry{
Entry: jks.Entry{
CreationDate: time.Now(),
},
Certificate: jks.Certificate{
Type: "X509",
Content: ca.Raw,
},
}
}
buf := &bytes.Buffer{}
if err := jks.Encode(buf, ks, password); err != nil {
return nil, err
}
return buf.Bytes(), nil
}
func encodeJKSTruststore(password []byte, caPem []byte) ([]byte, error) {
ca, err := pki.DecodeX509CertificateBytes(caPem)
if err != nil {
return nil, err
}
ks := jks.KeyStore{
"ca": &jks.TrustedCertificateEntry{
Entry: jks.Entry{
CreationDate: time.Now(),
},
Certificate: jks.Certificate{
Type: "X509",
Content: ca.Raw,<|fim▁hole|> },
}
buf := &bytes.Buffer{}
if err := jks.Encode(buf, ks, password); err != nil {
return nil, err
}
return buf.Bytes(), nil
}<|fim▁end|>
|
},
|
<|file_name|>ConfigCheck.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#############################################################################
# File : ConfigCheck.py
# Package : rpmlint
# Author : Frederic Lepied
# Created on : Sun Oct 3 21:48:20 1999
# Purpose :
#############################################################################
import AbstractCheck
from Filter import addDetails, printError, printWarning
class ConfigCheck(AbstractCheck.AbstractCheck):
def __init__(self):<|fim▁hole|>
def check_binary(self, pkg):
config_files = pkg.configFiles()
noreplace_files = pkg.noreplaceFiles()
for c in config_files:
if c.startswith("/var/lib/games/"):
printError(pkg, "score-file-must-not-be-conffile", c)
elif not c.startswith("/etc/") and not c.startswith("/var/"):
printWarning(pkg, "non-etc-or-var-file-marked-as-conffile", c)
if c not in noreplace_files:
printWarning(pkg, "conffile-without-noreplace-flag", c)
# Create an object to enable the auto registration of the test
check = ConfigCheck()
# Add information about checks
addDetails(
'score-file-must-not-be-conffile',
"""A file in /var/lib/games/ is a configuration file. Store your conf
files in /etc instead.""",
'non-etc-or-var-file-marked-as-conffile',
"""A file not in /etc or /var is marked as being a configuration file.
Please put your conf files in /etc or /var.""",
'conffile-without-noreplace-flag',
"""A configuration file is stored in your package without the noreplace flag.
A way to resolve this is to put the following in your SPEC file:
%config(noreplace) /etc/your_config_file_here
""",
)
# ConfigCheck.py ends here
# ex: ts=4 sw=4 et<|fim▁end|>
|
AbstractCheck.AbstractCheck.__init__(self, "ConfigCheck")
|
<|file_name|>jsonInterfaces.ts<|end_file_name|><|fim▁begin|>// The current version of this file can be retrieved by:
// GET /api/language/webast
module TDev.AST.Json
{
// This module describes an AST for TouchDevelop scripts. The documentation
// is severely lacking, so the best way to figure things out is to write a
// TouchDevelop script, and type (in the console):
//
// "TDev.AST.Json.dump(TDev.Script)"
//
// which will dump the currently-edited script in this representation. The
// converse operation is:
//
// "TDev.AST.Json.serialize(yourJsonAst)"
//
// Beware: the composition of these two operations is *not* the
// identity. In particular, [dump] will resolve implicit optional arguments,
// while [serialize] expects them to be left out.
// These two interfaces are never used. Actually, whenever a field has type
// [JNodeRef], this is a lie, and its type is [string].
export interface JNodeRef { dummyNodeRef: number; }
export interface JTypeRef { dummyTypeRef: number; }
// the JTypeRef may be either a simple string, or if it starts with '{',
// it is JSON-encoded and conforms to one of these interfaces or is just a string (API type)
export interface JGenericTypeInstance extends JTypeRef {
g: string;
a?: JTypeRef[];
}
export interface JUserType extends JTypeRef {
o: string;
}
export interface JLibraryType extends JUserType {
l: JNodeRef;
}
/*abstract*/ export interface JNode
{
nodeType:string; // name of interface without leading J and with lower-case first letter
id:string; // do not depend on the particular format of these strings
}
/*abstract*/ export interface JDecl extends JNode
{
name: string;
unused?: boolean;
}
/*abstract*/ export interface JToken extends JNode { }
/*abstract*/ export interface JExpr extends JToken { }
// This corresponds to the [operator] syntactic class defined in the
// OOPSLA'15 submission. When adopting the "hybrid AST" point of view,
// an expression is decomposed as a series of tokens. The [JOperator]
// interface covers operators (assignment, comparison, boolean and
// arithmetic operators), but also *digits*.
//
// For instance, "1 + 10 = 11" will generate:
// [JOperator 1; JOperator +; JOperator 1; JOperator 0; JOperator =; JOperator 1; JOperator 1]
export interface JOperator extends JToken { op:string; }
// A reference to a "property", i.e. something defined for an object of that
// type. There is no good way of figuring out what should the [parent] be
// when generating such properties; probably the best way is to dump a
// TouchDevelop AST.
export interface JPropertyRef extends JToken
{
name:string;
parent: JTypeRef; // if used as token this is ignored when building
// if used as JCall it's needed for operators
declId?: JNodeRef; // filled when the property is user-defined
}
export interface JStringLiteral extends JExpr {
value:string;
enumValue?:string;
}
export interface JBooleanLiteral extends JExpr { value:boolean; }
// A number literal is only used when adopting the "tree" view for
// expressions (see comment on [JExprHolder]).
export interface JNumberLiteral extends JExpr {
value:number;
// If parsing 'stringForm' yields 'value', 'stringForm' is used
// Otherwise stringified form of 'value' is used
stringForm?:string;
}
// when building expressions of these three types you can provide localId/type or name;
// if you provide both, name is ignored
export interface JLocalRef extends JExpr
{
name:string;
localId:JNodeRef;
}
export interface JPlaceholder extends JExpr
{
name:string;
type:JTypeRef;
}
// A singleton (probably) references one of the top-level categories such as
// libraries or data. When trying to call "♻ l → foo(x1, x2)", one may
// understand that the following call takes place:
// ♻ -> l -> foo(x1, x2)
// and the following AST is generated:
// JCall { name: foo, parent: l, args: [
// JCall { name: l, parent: ♻, args: [ JSingletonRef ♻ ] },
// x1,
// x2
// ]}
// this is surprising, because when calling "1 + 2", we generate a call that
// has two arguments only.
export interface JSingletonRef extends JExpr
{
name:string;
// type is ignored when building
type:JTypeRef;
libraryName?:string; // if this is a reference to a namespace in a library, this gives the name of library
}
// It seems like TouchDevelop has an extra invariant that a [JCall] must
// *always* be wrapped in a [JExprHolder].
export interface JCall extends JPropertyRef, JExpr
{
args:JExpr[];
// If we are calling a *type* T on an expression (e.g. create ->
// Collection of -> T), then T will be in there.
typeArgs?: JTypeRef[];
// The field below, if present, determines without ambiguity the nature
// of the call.
// - extension (the new special syntax)
// - field (reading a record field)
// Other types of calls can be determined by careful inspection of the
// receiver. See the C++ code emitter.
callType?: string;
}
// Expressions can be represented in two different manners.
// - The first one is as a series of tokens. This would correspond to the
// "hybrid AST" described in the OOPSLA'15 submission. In that
// representation, the [tree] field is null and the [tokens] field
// contains the list of tokens.
// - The second one is as an actual AST, with a proper tree structure. In
// that case, the [tokens] field is null and [tree] must contain a proper
// tree.
//
// TouchDevelop conflates variable binding and expressions. This means that
// every expression is flagged with the variables that are introduced at
// this stage. For instance, "var x = 1" will be translated as a
// [JExprHolder] where [locals] contains a [JLocalDef x], and either:
// - [tokens] is [JLocalRef x; JOperator :=; JOperator 1], or
// - [tree] is [JCall { name: ":=", parent: "Unknown", args: [JLocalRef x, JNumberLiteral 1] }]
//
// This is not the traditional notion of binding! The variable's scope is
// not limited to the tokens, but rather extends until the end of the parent
// block.
export interface JExprHolder extends JNode
{
// if tokens is unset, will try to use tree
tokens:JToken[];
tree:JExpr;
locals:JLocalDef[]; // local variables defined in this expression
}
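// A minimal sketch (ids are invented, and the "Number" type name is assumed)
// of the token form for "var x = 1":
// { nodeType: "exprHolder", id: "e0",
//   locals: [ { nodeType: "localDef", id: "l0", name: "x", type: "Number" } ],
//   tokens: [ { nodeType: "localRef", name: "x", localId: "l0" },
//             { nodeType: "operator", op: ":=" },
//             { nodeType: "operator", op: "1" } ],
//   tree: null }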
/*abstract*/ export interface JStmt extends JNode
{
// this is available when using the short form
locals?: JLocalDef[];
}
export interface JComment extends JStmt { text: string; }
export interface JFor extends JStmt
{
index:JLocalDef;
bound:JExprHolder;
body:JStmt[];
}
export interface JForeach extends JStmt
{
iterator:JLocalDef;
collection:JExprHolder;
conditions:JCondition[];
body:JStmt[];
}
/*abstract*/ export interface JCondition extends JNode {
// this is available when using the short form
locals?: JLocalDef[];
}
export interface JWhere extends JCondition { condition: JExprHolder; }
export interface JWhile extends JStmt
{
condition:JExprHolder;
body:JStmt[];
}
export interface JContinue extends JStmt {}
export interface JBreak extends JStmt {}
export interface JReturn extends JExprStmt {}
export interface JShow extends JExprStmt {}
// Sequences of if / else if / else statements are not represented the usual
// way. That is, instead of having a structured AST:
//
// if
// |- condition1
// |- then-branch1 = ...
// |- else-branch = if
// |- condition2
// |- then-branch2
// |- else-branch2
//
// the TouchDevelop AST adopts the following (unusual) representation.
//
// if
// |- condition1
// |- then-branch1 = ...
// |- else-branch = null
// if
// |- condition2<|fim▁hole|> // |- isElseIf = true
//
// This is NOT equivalent to the representation above (condition2 may
// subsume condition1), so the extra flag "isElseIf" is set and (I suppose)
// gets some special treatment when it comes to running / compiling the
// program.
export interface JIf extends JStmt
{
condition:JExprHolder;
thenBody:JStmt[];
elseBody:JStmt[];
isElseIf:boolean;
}
export interface JBoxed extends JStmt { body:JStmt[]; }
export interface JExprStmt extends JStmt { expr:JExprHolder; }
export interface JInlineActions extends JExprStmt { actions:JInlineAction[]; }
export interface JInlineAction extends JNode
{
reference:JLocalDef;
inParameters:JLocalDef[];
outParameters:JLocalDef[];
body:JStmt[];
locals?:JLocalDef[]; // this contains the reference in short mode; it never contains anything else
isImplicit:boolean;
isOptional:boolean;
}
export interface JOptionalParameter extends JNode
{
name:string;
declId:JNodeRef;
expr:JExprHolder;
}
/*abstract*/ export interface JActionBase extends JDecl
{
inParameters:JLocalDef[];
outParameters:JLocalDef[];
// note that events should always be treated as private, but for historical reasons this field can be true or false
isPrivate:boolean;
isOffline: boolean;
isQuery: boolean;
isTest: boolean;
isAsync:boolean;
description: string;
}
export interface JActionType extends JActionBase
{
}
export interface JAction extends JActionBase { body: JStmt[]; }
export interface JPage extends JActionBase
{
initBody:JStmt[];
displayBody:JStmt[];
initBodyId?:string;
displayBodyId?:string;
hasModelParameter?:boolean;
}
export interface JEvent extends JActionBase
{
// when building provide name or both eventName and eventVariableId (which take precedence over name)
eventName:string;
eventVariableId:JNodeRef;
body:JStmt[];
}
export interface JLibAction extends JActionBase
{
parentLibId:JNodeRef; // this can be empty - it means "current script"
}
export interface JLibAbstractType extends JDecl
{
}
export interface JLibActionType extends JActionBase
{
}
/*abstract*/ export interface JGlobalDef extends JDecl
{
comment:string;
type:JTypeRef;
isReadonly:boolean;
isTransient:boolean;
isCloudEnabled:boolean;
}
export interface JArt extends JGlobalDef {
url: string;
// If it's a string art, contains its value.
value: string;
}
export interface JData extends JGlobalDef { }
export interface JLibrary extends JDecl
{
libIdentifier: string;
libIsPublished: boolean;
scriptName: string; // name of the script to which the library resolves
exportedTypes: string; // space separated; obsolete, use exportedTypeDefs
exportedTypeDefs: JDecl[]; // JLibAbstractType or JLibActionType
exportedActions: JLibAction[];
resolveClauses: JResolveClause[];
}
/*abstract*/ export interface JBinding extends JNode
{
name:string; // name of the formal argument
isExplicit:boolean; // was it explicitly specified by the user
// implicit bindings are ignored when building expressions
}
export interface JTypeBinding extends JBinding { type:JTypeRef; }
export interface JActionBinding extends JBinding { actionId:JNodeRef; }
export interface JResolveClause extends JNode
{
name:string;
// points to a JLibrary (not publish-id);
// it may be null for binding to the current script
defaultLibId:JNodeRef;
withTypes: JTypeBinding[];
withActions: JActionBinding[];
}
export interface JRecord extends JDecl
{
comment: string;
category: string; // "object", "table", "index", or "decorator"
isCloudEnabled: boolean;
isCloudPartiallyEnabled: boolean;
isPersistent: boolean;
isExported: boolean;
keys: JRecordKey[];
fields: JRecordField[];
}
export interface JRecordField extends JNode
{
name:string;
type:JTypeRef;
}
export interface JRecordKey extends JRecordField {}
// local variable or a parameter
export interface JLocalDef extends JNode
{
name:string;
type:JTypeRef;
}
// Response to:
// GET /api/<script-id>/webast
export interface JApp extends JNode
{
// both versions are comma-separated list of tokens/features
textVersion: string;
jsonVersion: string;
name: string;
comment: string;
// The name and icon are only given here if they are explicitly specified by the user.
icon?: string; // name of the icon, e.g., "Bolt"
color?: string; // e.g., #ff00ff
// These two are always present. They are ignored when building new scripts.
autoIcon:string;
autoColor:string;
platform: string; // comma-separated
isLibrary: boolean;
allowExport: boolean;
showAd: boolean;
hasIds: boolean; // does it have stable, persistent ids for every stmt
rootId: string;
decls: JDecl[];
deletedDecls: JDecl[]; // these are present when a node was deleted but is still referenced from somewhere
libraryName?: string; // when used in reflection info
libraryId?: string; // when used in reflection info
}
//
// API description
//
export interface JPropertyParameter
{
name: string;
type: JTypeRef;
writesMutable?: boolean; // are fields of the object referenced by this parameter being written to
readsMutable?: boolean; // .... read from
defaultValue?: JToken[];
stringValues?: string[]; // these show up in intelli buttons; they are usually all allowed values for a parameter
}
export interface JProperty
{
name: string;
help: string;
usage_count: number; // this is used for syntax autocompletion priority
runOnInvalid?: boolean; // should the property be run even if one of the arguments is 'invalid'
isHidden?: boolean; // doesn't show in autocompletion
isAsync?: boolean;
isObsolete?: boolean; // generates a warning
isDbgOnly?: boolean; // an experimental feature; not visible in regular builds
isBetaOnly?: boolean; // a feature in testing; visible in /app/beta
jsName: string; // how is the property referred to from JavaScript
infixPriority?: number; // when present, this is an infix operator with given priority
// higher number is higher priority; even associates left, odd - right
pausesInterpreter?: boolean; // is this a potentially-async operation
usesStackFrame?: boolean; // is the implementation passed IStackFrame object
missingWeb?: boolean; // is the implementation missing from the general web version
missingWab?: boolean; // .... from web version running with WebAppBooster
capabilities?: string; // comma-separated list of required platform capabilities (if any)
result: JPropertyParameter;
parameters: JPropertyParameter[];
}
export interface JTypeDef
{
name: string;
help: string;
icon: string; // a name of the icon representing this type
isAction?: boolean; // is it a function type; look for 'run' property for the signature
isData: boolean; // false for singleton types
stemName: string; // used when auto-naming variables of this type
jsName: string; // how is the type referred to from JavaScript
isDbgOnly?: boolean; // an experimental feature; not visible in regular builds
isBetaOnly?: boolean; // a feature in testing; visible in /app/beta
isSerializable: boolean; // do we support automatic serialization of this type
isBuiltin?: boolean; // true for number, boolean, string; the JS calling convention is different for these
ctxLocal?: boolean; // can it be used as local variable
ctxGlobal?: boolean; // .... as global variable
ctxField?: boolean; // .... as field of a record
ctxLocalKey?: boolean; // .... as key in a local index
ctxGcKey?: boolean; // can it have decorators
ctxCloudKey?: boolean;
ctxRowKey?: boolean;
ctxCloudField?: boolean;
ctxWallTap?: boolean; // do global variables of this type get 'wall tap' events
ctxEnumerable?: boolean; // can it be used with foreach construct
ctxJson?: boolean; // can it be json exported/imported
properties: JProperty[];
}
// GET /api/language/apis
export interface JApis
{
textVersion:string;
jsonVersion: string;
types:JTypeDef[];
}
/*
The short format
~~~~~~~~~~~~~~~~
The main difference between the full JSON format and the short JSON format is
representation of `JExprHolder` nodes. Whenever the full JSON format says node
`JBar` has a field `foo` of type `JExprHolder`, then in the short format `JBar`
has a field `foo` of type `string` and a field `locals` of type `JLocalDef[]`.
Additionally, the fields `index` in `JFor` and `iterator` in `JForeach` are
absent, and the loop-bound variable is instead stored as the first element of
`locals`.
The string placed instead of the `JExprHolder` node can be turned into sequence
of tokens using the following function:
export function shortToTokens(shortForm:string)
{
function uq(s:string) {
var r = ""
for (var i = 0; i < s.length; ++i) {
var c = s.charAt(i);
if (c == "_") {
r += " ";
} else if (c == "/") {
r += String.fromCharCode(parseInt(s.slice(i + 1, i + 5), 16))
i += 4
} else {
r += c;
}
}
return r;
}
function oneToken(s:string) {
var v = s.slice(1)
switch (s[0]) {
case ",": return { nodeType: "operator", op: v }
case "#": return { nodeType: "propertyRef", declId: v }
case ".": return { nodeType: "propertyRef", name: uq(v) }
case "'": return { nodeType: "stringLiteral", value: uq(v) }
case "F":
case "T": return { nodeType: "booleanLiteral", value: (s[0] == "T") }
case "$": return { nodeType: "localRef", localId: v }
case ":": return { nodeType: "singletonRef", name: uq(v) }
case "?":
var cln = v.indexOf(':')
if (cln > 0)
return { nodeType: "placeholder", type: uq(v.slice(0, cln)), name: uq(v.slice(cln + 1)) }
else
return { nodeType: "placeholder", type: uq(v) }
default:
throw new Error("wrong short form: " + s)
}
}
if (!shortForm) return []; // handles "" and null; the code below is incorrect for ""
return shortForm.split(" ").map(oneToken)
}
In other words, it's a space-separated sequence of strings, where the first
character denotes the kind of token and remaining characters are the payload.
The string is quoted by replacing spaces with underscores and all other
non-alphanumeric characters with unicode sequences preceded by a slash (not
backslash to avoid double quoting in JSON).
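For example (an assumed sample), the string literal "hello world," would be
encoded as the single token 'hello_world/002c: a leading ' marks a string
literal, spaces become underscores, and the comma is escaped as /002c.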
Short diff format
~~~~~~~~~~~~~~~~~
Every object node in the short JSON format has a field named `id`. This is used
when formulating diffs. The diff is set of updates to nodes of given ids. For
every id there is a set of `fieldName`, `value` pairs.
For example consider:
A = {
"id": "01",
"one": "one",
"two": 2,
"baz": [
{ "id": "02", "enabled": true },
{ "id": "03", "x": 0 }
]
}
B = {
"id": "01",
"one": "seven",
"two": 2,
"baz": [
{ "id": "02", "enabled": true },
{ "id": "05", "y": 7, "z": 13 }
]
}
diff(A, B) = {
// new node, assignment given for all fields
"05": { "y": 7, "z": 13 },
// updated node
"01": {
"one": "seven", // the field "one" is now "seven"
// the field "two" is not mentioned and thus unchanged
// the field "baz" now contains two nodes, ids of which are given
"baz": [ "02", "05" ]
},
// the node "03" is deleted
"03": null
}
The JSON diff relies on the following properties of the short JSON format:
Fields of JNodes always contain either:
1. a JSON primitive value (string, boolean, integer, null), or
2. a sequence of JNodes
Every JNode has a unique 'id' field.
This is why JFor.bound and JForeach.collection fields are missing. In the diff
format, a sequence of strings is always treated as a sequence of node ids.
The following function can be used to apply JSON diff:
function indexIds(obj:any)
{
var oldById:any = {}
function findIds(o:any) {
if (!o) return;
if (Array.isArray(o)) {
for (var i = 0; i < o.length; ++i)
findIds(o[i])
} else if (typeof o === "object") {
if (!o.id) Util.oops("no id for " + JSON.stringify(o))
if (oldById.hasOwnProperty(o.id)) Util.oops("duplicate id " + o.id)
oldById[o.id] = o
var k = Object.keys(o)
for (var i = 0; i < k.length; ++i)
findIds(o[k[i]])
}
}
findIds(obj)
return oldById
}
export function applyJsonDiff(base:any, diff:any)
{
var byId = indexIds(base)
var k = Object.keys(diff)
for (var i = 0; i < k.length; ++i) {
var id = k[i]
var upd = diff[id]
if (upd === undefined) continue;
var trg = byId[id]
if (upd === null) {
if (!trg) Util.oops("apply diff: no target id " + id)
trg.__deleted = true;
continue;
}
if (!trg) {
byId[id] = trg = { id: id }
}
var kk = Object.keys(upd)
for (var j = 0; j < kk.length; ++j) {
var f = kk[j]
var v = upd[f]
if (Array.isArray(v) && typeof v[0] === "string")
v = v.map(id => {
var r = byId[id]
if (!r) { r = byId[id] = { id: id } }
return r
})
Util.assert(f != "nodeType" || !trg[f])
trg[f] = v
}
}
var newIds = indexIds(base)
k = Object.keys(newIds)
for (var i = 0; i < k.length; ++i) {
var id = k[i]
if (newIds[k[i]].__deleted)
Util.oops("dangling id after diff " + id)
}
}
*/
}<|fim▁end|>
|
// |- then-branch2
// |- else-branch2
|
<|file_name|>2101001.js<|end_file_name|><|fim▁begin|>/*
This file is part of the OdinMS Maple Story Server
Copyright (C) 2008 Patrick Huy <[email protected]>
Matthias Butz <[email protected]>
Jan Christian Meyer <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License version 3
as published by the Free Software Foundation. You may not use, modify
or distribute this program under any other version of the
GNU Affero General Public License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>
/*
Jiyur
Little Kid
*/
function start() {
cm.sendNext("I miss my sister... she's always working at the palace as their servant and I only get to see her on Sundays. The King and Queen are so selfish.");
cm.dispose();
}<|fim▁end|>
|
*/
|
<|file_name|>exponential.rs<|end_file_name|><|fim▁begin|>/**
* Exponential Functions
*
* This module corresponds to Section 8.2 of the [GLSL 4.30.6 specification]
* (http://www.opengl.org/registry/doc/GLSLangSpec.4.30.6.pdf).
*/
use numeric::funs::*;
use numeric::traits::*;
use vec::{Vector, Vec2, Vec3, Vec4};
// Exp
pub trait ExpVector<T>: Vector<T> {
pure fn pow_t(&self, n: T) -> self;
pure fn pow_v(&self, n: &self) -> self;
}
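// Illustrative distinction (assumed values): `Vec2::new(2.0, 3.0).pow_t(2.0)`
// raises every component to the same power, giving (4, 9), while
// `.pow_v(&Vec2::new(3.0, 2.0))` raises component-wise, giving (8, 9).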
pub impl<T:Copy Exp> Vec2<T>: ExpVector<T> {
#[inline(always)]
pure fn pow_t(&self, n: T) -> Vec2<T> {
Vec2::new(pow(&self[0], &n),
pow(&self[1], &n))
}
#[inline(always)]
pure fn pow_v(&self, n: &Vec2<T>) -> Vec2<T> {
Vec2::new(pow(&self[0], &n[0]),
pow(&self[1], &n[1]))
}
}
pub impl<T:Copy Exp> Vec2<T>: Exp {
#[inline(always)]
pure fn pow(&self, n: &Vec2<T>) -> Vec2<T> {
self.pow_v(n)
}
#[inline(always)]
pure fn exp(&self) -> Vec2<T> {
Vec2::new(exp(&self[0]),
exp(&self[1]))
}
#[inline(always)]
pure fn log_(&self) -> Vec2<T> {
Vec2::new(log_(&self[0]),
log_(&self[1]))
}
#[inline(always)]
pure fn exp2(&self) -> Vec2<T> {
Vec2::new(exp2(&self[0]),
exp2(&self[1]))
}
#[inline(always)]
pure fn log2(&self) -> Vec2<T> {
Vec2::new(log2(&self[0]),
log2(&self[1]))
}
#[inline(always)]
pure fn sqrt(&self) -> Vec2<T> {
Vec2::new(sqrt(&self[0]),
sqrt(&self[1]))
}
#[inline(always)]
pure fn inv_sqrt(&self) -> Vec2<T> {
Vec2::new(inv_sqrt(&self[0]),
inv_sqrt(&self[1]))
}
}
pub impl<T:Copy Exp> Vec3<T>: ExpVector<T> {
#[inline(always)]
pure fn pow_t(&self, n: T) -> Vec3<T> {
Vec3::new(pow(&self[0], &n),
pow(&self[1], &n),
pow(&self[2], &n))
}
#[inline(always)]
pure fn pow_v(&self, n: &Vec3<T>) -> Vec3<T> {
Vec3::new(pow(&self[0], &n[0]),
pow(&self[1], &n[1]),
pow(&self[2], &n[2]))
}
}
pub impl<T:Copy Exp> Vec3<T>: Exp {
#[inline(always)]
pure fn pow(&self, n: &Vec3<T>) -> Vec3<T> {
self.pow_v(n)
}
#[inline(always)]
pure fn exp(&self) -> Vec3<T> {
Vec3::new(exp(&self[0]),
exp(&self[1]),
exp(&self[2]))
}<|fim▁hole|> #[inline(always)]
pure fn log_(&self) -> Vec3<T> {
Vec3::new(log_(&self[0]),
log_(&self[1]),
log_(&self[2]))
}
#[inline(always)]
pure fn exp2(&self) -> Vec3<T> {
Vec3::new(exp2(&self[0]),
exp2(&self[1]),
exp2(&self[2]))
}
#[inline(always)]
pure fn log2(&self) -> Vec3<T> {
Vec3::new(log2(&self[0]),
log2(&self[1]),
log2(&self[2]))
}
#[inline(always)]
pure fn sqrt(&self) -> Vec3<T> {
Vec3::new(sqrt(&self[0]),
sqrt(&self[1]),
sqrt(&self[2]))
}
#[inline(always)]
pure fn inv_sqrt(&self) -> Vec3<T> {
Vec3::new(inv_sqrt(&self[0]),
inv_sqrt(&self[1]),
inv_sqrt(&self[2]))
}
}
pub impl<T:Copy Exp> Vec4<T>: ExpVector<T> {
#[inline(always)]
pure fn pow_t(&self, n: T) -> Vec4<T> {
Vec4::new(pow(&self[0], &n),
pow(&self[1], &n),
pow(&self[2], &n),
pow(&self[3], &n))
}
#[inline(always)]
pure fn pow_v(&self, n: &Vec4<T>) -> Vec4<T> {
Vec4::new(pow(&self[0], &n[0]),
pow(&self[1], &n[1]),
pow(&self[2], &n[2]),
pow(&self[3], &n[3]))
}
}
pub impl<T:Copy Exp> Vec4<T>: Exp {
#[inline(always)]
pure fn pow(&self, n: &Vec4<T>) -> Vec4<T> {
self.pow_v(n)
}
#[inline(always)]
pure fn exp(&self) -> Vec4<T> {
Vec4::new(exp(&self[0]),
exp(&self[1]),
exp(&self[2]),
exp(&self[3]))
}
#[inline(always)]
pure fn log_(&self) -> Vec4<T> {
Vec4::new(log_(&self[0]),
log_(&self[1]),
log_(&self[2]),
log_(&self[3]))
}
#[inline(always)]
pure fn exp2(&self) -> Vec4<T> {
Vec4::new(exp2(&self[0]),
exp2(&self[1]),
exp2(&self[2]),
exp2(&self[3]))
}
#[inline(always)]
pure fn log2(&self) -> Vec4<T> {
Vec4::new(log2(&self[0]),
log2(&self[1]),
log2(&self[2]),
log2(&self[3]))
}
#[inline(always)]
pure fn sqrt(&self) -> Vec4<T> {
Vec4::new(sqrt(&self[0]),
sqrt(&self[1]),
sqrt(&self[2]),
sqrt(&self[3]))
}
#[inline(always)]
pure fn inv_sqrt(&self) -> Vec4<T> {
Vec4::new(inv_sqrt(&self[0]),
inv_sqrt(&self[1]),
inv_sqrt(&self[2]),
inv_sqrt(&self[3]))
}
}<|fim▁end|>
| |
<|file_name|>test_django_cassandra_models.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from unittest import skipIf
import copy
import uuid
from cassandra.cqlengine import ValidationError as CQLValidationError
from django.core import validators
from django.forms import fields
from common.models import CassandraFamilyMember
from django_cassandra_engine.test import TestCase as CassandraTestCase
class TestDjangoCassandraModel(CassandraTestCase):
def setUp(self):
self.some_uuid = uuid.uuid4()
self.family_member = CassandraFamilyMember.objects.create(
id=self.some_uuid,
first_name="Homer",
last_name="Simpson",
is_real=False,
favourite_number=666,
favourite_float_number=43.4,
created_on=datetime.now(),
)
def test_model_is_hashable(self):
models = set()
models.add(self.family_member)
self.assertEqual(1, len(models))
def test_serializable_value(self):
self.assertEqual(self.some_uuid, self.family_member.serializable_value("id"))
self.assertEqual(
self.family_member.first_name,
self.family_member.serializable_value("first_name"),
)
def test_clone_queryset(self):
qset = CassandraFamilyMember.objects.filter(id=self.some_uuid)
self.assertNotEqual(id(qset._clone()), id(qset))
def test_create(self):
family_member = self.family_member
self.assertEqual(family_member.first_name, "Homer")
self.assertEqual(family_member.last_name, "Simpson")
self.assertEqual(family_member.is_real, False)
self.assertEqual(family_member.favourite_number, 666)
self.assertEqual(family_member.favourite_float_number, 43.4)
def test_get_by_pk(self):
got_family_member = CassandraFamilyMember.objects.allow_filtering().get(
pk=self.family_member.id
)
self.assertIsNotNone(got_family_member)
def test_exclude(self):
results = CassandraFamilyMember.objects.exclude(id=self.some_uuid)
for model in results:
self.assertNotEqual(model.id, self.some_uuid)
def test_exclude_after_filter(self):
results = CassandraFamilyMember.objects.filter(id=self.some_uuid).exclude(
last_name="Simpson"
)
self.assertEqual(len(results), 0)
def test_exclude_after_all(self):
keeper = CassandraFamilyMember.objects.create(
id=uuid.uuid4(),
first_name="Ned",
last_name="Flanders",
is_real=False,
favourite_number=666,
favourite_float_number=43.4,
created_on=datetime.now(),
)
results = CassandraFamilyMember.objects.all().exclude(last_name="Simpson")
self.assertEqual(len(results), 1)
self.assertEqual(results[0].id, keeper.id)
def test_get_by_pk_returns_primary_key_instead_of_partition_key(self):
got_family_member = CassandraFamilyMember.objects.allow_filtering().get(
pk=self.family_member.id
)
self.assertEqual(got_family_member.pk, self.family_member.id)
def test_default_manager_is_set(self):
self.assertTrue(
isinstance(
CassandraFamilyMember._default_manager,
type(CassandraFamilyMember.objects),
)<|fim▁hole|> self.assertTrue(
isinstance(
CassandraFamilyMember._base_manager,
type(CassandraFamilyMember.objects),
)
)
self.assertTrue(hasattr(CassandraFamilyMember._default_manager, "all"))
self.assertTrue(hasattr(CassandraFamilyMember._default_manager, "filter"))
def test_get_queryset(self):
results = CassandraFamilyMember.objects.get_queryset()
self.assertEqual(results[0].id, self.some_uuid)
def test_calling_queryset_methods_not_through_manager_raises(self):
with self.assertRaises(AttributeError):
CassandraFamilyMember.all()
with self.assertRaises(AttributeError):
CassandraFamilyMember.get()
with self.assertRaises(AttributeError):
CassandraFamilyMember.filter()
def test_manager_has_a_name(self):
self.assertEqual(CassandraFamilyMember._default_manager.name, "objects")
def test_can_migrate(self):
self.assertFalse(CassandraFamilyMember._meta.can_migrate(connection=None))
def test_get_all_related_objects_with_model(self):
self.assertEqual(
CassandraFamilyMember._meta.get_all_related_objects_with_model(),
[],
)
def test_related_objects_property(self):
self.assertEqual(CassandraFamilyMember._meta.related_objects, [])
def test_db_table(self):
self.assertEqual(
CassandraFamilyMember._meta.db_table,
"common_cassandrafamilymember",
)
def test_pk_attribute(self):
self.assertEqual(
CassandraFamilyMember._meta.pk,
CassandraFamilyMember._meta.get_field("id"),
)
def test_get_fields(self):
expected_field_names = [
"id",
"first_name",
"last_name",
"is_real",
"favourite_number",
"favourite_float_number",
"created_on",
]
fields = CassandraFamilyMember._meta._get_fields()
self.assertEqual(len(fields), len(expected_field_names))
self.assertEqual([f.name for f in fields], expected_field_names)
def test_meta_attrs(self):
self.assertEqual(
CassandraFamilyMember._meta.model_name, "cassandrafamilymember"
)
self.assertEqual(CassandraFamilyMember._meta.swappable, False)
self.assertEqual(CassandraFamilyMember._meta.managed, False)
def test_values_list_with_id_pk_field_returns_it(self):
all_things = CassandraFamilyMember.objects.allow_filtering().filter(
id=self.some_uuid
)
self.assertEqual(
list(all_things.values_list("id", flat=True)), [self.some_uuid]
)
def test_values_list_with_pk_returns_the_primary_key_field_uuid(self):
all_things = CassandraFamilyMember.objects.allow_filtering().filter(
id=self.some_uuid
)
model = all_things[0]
self.assertEqual(
list(all_things.values_list("pk")),
[
[
model.id,
model.first_name,
model.last_name,
model.favourite_float_number,
]
],
)
def test_values_list_with_pk_can_return_multiple_pks(self):
some_uuid = uuid.uuid4()
family_member = CassandraFamilyMember.objects.create(
id=some_uuid,
first_name="Homer",
last_name="Simpson",
is_real=False,
favourite_number=666,
favourite_float_number=43.4,
created_on=datetime.now(),
)
all_things = CassandraFamilyMember.objects.allow_filtering().filter(
id=some_uuid
)
expected = [
[
family_member.id,
family_member.first_name,
family_member.last_name,
family_member.favourite_float_number,
]
]
self.assertEqual(len(all_things.values_list("pk")), len(expected))
def test_private_fields_are_set(self):
private_fields = [f.name for f in CassandraFamilyMember._meta.private_fields]
expected_private_fields = [
"id",
"first_name",
"last_name",
"is_real",
"favourite_number",
"favourite_float_number",
"created_on",
]
self.assertEqual(private_fields, expected_private_fields)
def test_model_doesnotexist_is_raised_when_record_not_found(self):
with self.assertRaises(CassandraFamilyMember.DoesNotExist):
not_found_uuid = uuid.uuid4()
CassandraFamilyMember.objects.allow_filtering().get(id=not_found_uuid)
class TestDjangoCassandraField(CassandraTestCase):
def setUp(self):
self.some_uuid = uuid.uuid4()
self.family_member = CassandraFamilyMember.objects.create(
id=self.some_uuid,
first_name="Homer",
last_name="Simpson",
is_real=False,
favourite_number=666,
favourite_float_number=43.4,
created_on=datetime.now(),
)
def test_attributes(self):
model_fields = self.family_member._meta._get_fields()
for field in model_fields:
allow_null = (
not field.required
and not field.is_primary_key
and not field.partition_key
) or field.has_default
self.assertEqual(field.unique_for_date, None)
self.assertEqual(field.unique_for_month, None)
self.assertEqual(field.unique_for_year, None)
self.assertEqual(field.db_column, None)
self.assertEqual(field.db_index, field.index)
self.assertEqual(field.null, allow_null)
self.assertEqual(field.blank, allow_null)
self.assertEqual(field.choices, [])
self.assertEqual(field.flatchoices, [])
self.assertEqual(field.help_text, "")
self.assertEqual(field.concrete, True)
self.assertEqual(field.editable, True)
self.assertEqual(field.many_to_many, False)
self.assertEqual(field.many_to_one, False)
self.assertEqual(field.one_to_many, False)
self.assertEqual(field.one_to_one, False)
self.assertEqual(field.hidden, False)
self.assertEqual(field.serialize, True)
self.assertEqual(field.name, field.db_field_name)
self.assertEqual(field.verbose_name, field.db_field_name)
self.assertEqual(field._verbose_name, field.db_field_name)
self.assertEqual(field.field, field)
self.assertEqual(field.model, type(self.family_member))
self.assertEqual(field.related_query_name(), None)
self.assertEqual(field.auto_created, False)
self.assertEqual(field.is_relation, False)
self.assertEqual(field.remote_field, None)
self.assertEqual(field.rel, None)
self.assertEqual(field.rel, None)
self.assertEqual(field.unique, field.is_primary_key)
self.assertEqual(field.attname, field.column_name)
self.assertEqual(field.validators, [])
self.assertEqual(field.empty_values, list(validators.EMPTY_VALUES))
def test_methods(self):
model_fields = self.family_member._meta._get_fields()
for field in model_fields:
self.assertEqual(field.get_attname(), field.attname)
self.assertEqual(field.get_cache_name(), "_{}_cache".format(field.name))
self.assertEqual(
field.value_to_string(self.family_member),
str(getattr(self.family_member, field.name)),
)
self.assertEqual(
field.pre_save(self.family_member, True),
getattr(self.family_member, field.name),
)
self.assertEqual(
field.get_prep_value(self.family_member.id), self.some_uuid
)
self.assertEqual(
field.get_db_prep_save(self.family_member.id, connection=None),
self.some_uuid,
)
self.assertTrue(isinstance(field.formfield(), fields.CharField))
self.assertEqual(field.get_internal_type(), field.__class__.__name__)
self.assertEqual(
field.get_attname_column(),
(field.db_field_name, field.db_field_name),
)
self.assertEqual(field.get_db_converters(), [])
field_with_default = self.family_member._meta.get_field("id")
self.assertTrue(
isinstance(field_with_default.get_default(), type(self.family_member.id))
)
# in Django, 'has_default' is a function, while in python-driver
# it is a property unfortunately.
self.assertEqual(field_with_default.has_default, True)
text_field = self.family_member._meta.get_field("last_name")
text_field.save_form_data(instance=self.family_member, data="new data")
self.assertEqual(self.family_member.last_name, "new data")
self.assertIsNone(field.run_validators(text_field.value))
def test_methods_which_are_not_implemented_raise(self):
model_fields = self.family_member._meta._get_fields()
methods_that_should_raise = (
"get_choices",
"get_choices_default",
"select_format",
"deconstruct",
"db_type_suffix",
"get_prep_lookup",
"get_db_prep_lookup",
"set_attributes_from_name",
"db_parameters",
"get_col",
)
for field in model_fields:
for method_name in methods_that_should_raise:
with self.assertRaises(NotImplementedError):
getattr(field, method_name)()
def test_get_pk_value_on_save_returns_true_if_field_has_default(self):
field_with_default = self.family_member._meta.get_field("id")
self.assertTrue(
field_with_default.get_pk_value_on_save(instance=self.family_member),
self.family_member.id,
)
def test_get_pk_value_on_save_returns_none_if_field_no_default(self):
field_without_default = self.family_member._meta.get_field("last_name")
self.assertIsNone(
field_without_default.get_pk_value_on_save(instance=self.family_member),
)
def test_formfield_uses_specified_form_class(self):
text_field = self.family_member._meta.get_field("last_name")
form_field = text_field.formfield(form_class=fields.BooleanField)
self.assertTrue(isinstance(form_field, fields.BooleanField))
def test_field_check_returns_error_when_name_is_pk(self):
text_field = copy.deepcopy(self.family_member._meta.get_field("last_name"))
text_field.name = "pk"
check_errors = text_field.check()
self.assertEqual(len(check_errors), 1)
def test_field_check_returns_error_when_name_ends_underscore(self):
text_field = copy.deepcopy(self.family_member._meta.get_field("last_name"))
text_field.name = "name_"
check_errors = text_field.check()
self.assertEqual(len(check_errors), 1)
def test_field_check_returns_error_when_name_contains_double_under(self):
text_field = copy.deepcopy(self.family_member._meta.get_field("last_name"))
text_field.name = "some__name"
check_errors = text_field.check()
self.assertEqual(len(check_errors), 1)
def test_field_clean(self):
text_field = copy.deepcopy(self.family_member._meta.get_field("last_name"))
self.assertEqual(text_field.clean("some val", self.family_member), "some val")
def test_field_client_raises_when_value_is_not_valid(self):
text_field = copy.deepcopy(self.family_member._meta.get_field("last_name"))
with self.assertRaises(CQLValidationError):
text_field.clean(123, self.family_member)
def test_get_filter_kwargs_for_object(self):
text_field = self.family_member._meta.get_field("last_name")
self.assertEqual(
text_field.get_filter_kwargs_for_object(obj=self.family_member),
{"last_name": self.family_member.last_name},
)
id_field = self.family_member._meta.get_field("id")
self.assertEqual(
id_field.get_filter_kwargs_for_object(obj=self.family_member),
{"id": self.family_member.id},
)<|fim▁end|>
|
)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from ajenti.api import *
from ajenti.plugins import *
info = PluginInfo(
title='BIND9',
description='BIND9 DNS server',
icon='globe',
dependencies=[
PluginDependency('main'),
PluginDependency('services'),<|fim▁hole|> BinaryDependency('named'),
],
)
def init():
import main<|fim▁end|>
| |
<|file_name|>Joystick.java<|end_file_name|><|fim▁begin|>/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008-2016. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj;
import edu.wpi.first.wpilibj.hal.FRCNetComm.tResourceType;
import edu.wpi.first.wpilibj.hal.HAL;
/**
* Handle input from standard Joysticks connected to the Driver Station. This class handles standard
* input that comes from the Driver Station. Each time a value is requested the most recent value is
* returned. There is a single class instance for each joystick and the mapping of ports to hardware
* buttons depends on the code in the driver station.
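*
* <p>Illustrative usage (the port number is an assumed example):
*
* <pre>
* Joystick stick = new Joystick(0);
* double forward = stick.getY();
* boolean trigger = stick.getRawButton(1);
* </pre>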
*/
public class Joystick extends GenericHID {
static final byte kDefaultXAxis = 0;
static final byte kDefaultYAxis = 1;
static final byte kDefaultZAxis = 2;
static final byte kDefaultTwistAxis = 2;
static final byte kDefaultThrottleAxis = 3;
static final int kDefaultTriggerButton = 1;
static final int kDefaultTopButton = 2;
/**
* Represents an analog axis on a joystick.
*/
public enum AxisType {
kX(0), kY(1), kZ(2), kTwist(3), kThrottle(4), kNumAxis(5);
@SuppressWarnings("MemberName")
public final int value;
private AxisType(int value) {
this.value = value;
}
}
/**
* Represents a digital button on the JoyStick.
*/
public enum ButtonType {
kTrigger(0), kTop(1), kNumButton(2);
@SuppressWarnings("MemberName")
public final int value;
private ButtonType(int value) {
this.value = value;
}
}
/**
* Represents a rumble output on the JoyStick.
*/
public enum RumbleType {
kLeftRumble, kRightRumble
}
private final DriverStation m_ds;
private final int m_port;
private final byte[] m_axes;
private final byte[] m_buttons;
private int m_outputs;
private short m_leftRumble;
private short m_rightRumble;
/**
* Construct an instance of a joystick. The joystick index is the usb port on the drivers
* station.
*
* @param port The port on the driver station that the joystick is plugged into.
*/
public Joystick(final int port) {
this(port, AxisType.kNumAxis.value, ButtonType.kNumButton.value);
m_axes[AxisType.kX.value] = kDefaultXAxis;
m_axes[AxisType.kY.value] = kDefaultYAxis;
m_axes[AxisType.kZ.value] = kDefaultZAxis;
m_axes[AxisType.kTwist.value] = kDefaultTwistAxis;
m_axes[AxisType.kThrottle.value] = kDefaultThrottleAxis;
m_buttons[ButtonType.kTrigger.value] = kDefaultTriggerButton;
m_buttons[ButtonType.kTop.value] = kDefaultTopButton;
HAL.report(tResourceType.kResourceType_Joystick, port);
}
/**
* Protected version of the constructor to be called by sub-classes.
*
* <p>This constructor allows the subclass to configure the number of constants for axes and
* buttons.
*
* @param port The port on the driver station that the joystick is plugged into.
* @param numAxisTypes The number of axis types in the enum.
* @param numButtonTypes The number of button types in the enum.
*/
protected Joystick(int port, int numAxisTypes, int numButtonTypes) {
m_ds = DriverStation.getInstance();
m_axes = new byte[numAxisTypes];
m_buttons = new byte[numButtonTypes];
m_port = port;
}
/**
* Get the X value of the joystick. This depends on the mapping of the joystick connected to the
* current port.
*
* @param hand Unused
* @return The X value of the joystick.
*/
public double getX(Hand hand) {
return getRawAxis(m_axes[AxisType.kX.value]);
}
/**
* Get the Y value of the joystick. This depends on the mapping of the joystick connected to the
* current port.
*
* @param hand Unused
* @return The Y value of the joystick.
*/
public double getY(Hand hand) {
return getRawAxis(m_axes[AxisType.kY.value]);
}
/**
* Get the Z value of the joystick. This depends on the mapping of the joystick connected to the
* current port.
*
* @param hand Unused
* @return The Z value of the joystick.
*/
public double getZ(Hand hand) {
return getRawAxis(m_axes[AxisType.kZ.value]);
}
/**
* Get the twist value of the current joystick. This depends on the mapping of the joystick
* connected to the current port.
*
* @return The Twist value of the joystick.
*/
public double getTwist() {
return getRawAxis(m_axes[AxisType.kTwist.value]);
}
/**
* Get the throttle value of the current joystick. This depends on the mapping of the joystick
* connected to the current port.
*
* @return The Throttle value of the joystick.
*/
public double getThrottle() {
return getRawAxis(m_axes[AxisType.kThrottle.value]);
}
/**
* Get the value of the axis.
*
* @param axis The axis to read, starting at 0.
* @return The value of the axis.
*/
public double getRawAxis(final int axis) {
return m_ds.getStickAxis(m_port, axis);
}
/**
* For the current joystick, return the axis determined by the argument.
*
 * <p>This is for cases where the joystick axis is returned programmatically; otherwise one of the
* previous functions would be preferable (for example getX()).
*
* @param axis The axis to read.
* @return The value of the axis.
*/
public double getAxis(final AxisType axis) {
switch (axis) {
case kX:
return getX();
case kY:
return getY();
case kZ:
return getZ();
case kTwist:
return getTwist();
case kThrottle:
return getThrottle();
default:
return 0.0;
}
}
/**
 * For the current joystick, return the number of axes.
*/
public int getAxisCount() {
return m_ds.getStickAxisCount(m_port);
}
/**
* Read the state of the trigger on the joystick.
*
* <p>Look up which button has been assigned to the trigger and read its state.
*
* @param hand This parameter is ignored for the Joystick class and is only here to complete the
* GenericHID interface.
* @return The state of the trigger.
*/
public boolean getTrigger(Hand hand) {
return getRawButton(m_buttons[ButtonType.kTrigger.value]);
}
/**
* Read the state of the top button on the joystick.
*
* <p>Look up which button has been assigned to the top and read its state.
*
* @param hand This parameter is ignored for the Joystick class and is only here to complete the
* GenericHID interface.
* @return The state of the top button.
*/
public boolean getTop(Hand hand) {
return getRawButton(m_buttons[ButtonType.kTop.value]);
}
/**
* This is not supported for the Joystick. This method is only here to complete the GenericHID
* interface.
*
* @param hand This parameter is ignored for the Joystick class and is only here to complete the
* GenericHID interface.
* @return The state of the bumper (always false)
*/
public boolean getBumper(Hand hand) {
return false;
}
/**
* Get the button value (starting at button 1).
*
* <p>The appropriate button is returned as a boolean value.
*
* @param button The button number to be read (starting at 1).
* @return The state of the button.
*/
public boolean getRawButton(final int button) {
return m_ds.getStickButton(m_port, (byte) button);
}
/**
* For the current joystick, return the number of buttons.
*/
public int getButtonCount() {
return m_ds.getStickButtonCount(m_port);
}
/**
* Get the angle in degrees of a POV on the joystick.
*
 * <p>The POV angles start at 0 in the up direction, and increase clockwise (e.g. right is 90,
* upper-left is 315).
*
* @param pov The index of the POV to read (starting at 0)
* @return the angle of the POV in degrees, or -1 if the POV is not pressed.
*/
public int getPOV(int pov) {
return m_ds.getStickPOV(m_port, pov);
}
/**
* For the current joystick, return the number of POVs.
*/
public int getPOVCount() {
return m_ds.getStickPOVCount(m_port);
}
/**
* Get buttons based on an enumerated type.
*
* <p>The button type will be looked up in the list of buttons and then read.
*
* @param button The type of button to read.
* @return The state of the button.
*/
public boolean getButton(ButtonType button) {
switch (button) {
case kTrigger:
return getTrigger();
case kTop:
return getTop();
default:
return false;
}
}
/**
* Get the magnitude of the direction vector formed by the joystick's current position relative to
* its origin.
*
* @return The magnitude of the direction vector
*/
public double getMagnitude() {
return Math.sqrt(Math.pow(getX(), 2) + Math.pow(getY(), 2));
}
/**
* Get the direction of the vector formed by the joystick and its origin in radians.
*
* @return The direction of the vector in radians
*/
public double getDirectionRadians() {
return Math.atan2(getX(), -getY());
}
/**
* Get the direction of the vector formed by the joystick and its origin in degrees.
*
 * <p>Uses acos(-1) to represent Pi, due to the absence of a readily accessible Pi constant in C++.
*
* @return The direction of the vector in degrees
*/
public double getDirectionDegrees() {
return Math.toDegrees(getDirectionRadians());
}
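  // Illustrative values (hypothetical, assuming the stick is pushed half-way
  // toward the upper-right; the Y axis reads negative when pushed forward):
  //   stick.getMagnitude();        // ~0.71, i.e. sqrt(0.5^2 + 0.5^2)
  //   stick.getDirectionDegrees(); // ~45.0, from atan2(x, -y)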
/**
* Get the channel currently associated with the specified axis.
*
* @param axis The axis to look up the channel for.
 * @return The channel for the axis.
*/
public int getAxisChannel(AxisType axis) {
return m_axes[axis.value];
}
/**
* Set the channel associated with a specified axis.
*
* @param axis The axis to set the channel for.
* @param channel The channel to set the axis to.
*/
public void setAxisChannel(AxisType axis, int channel) {
m_axes[axis.value] = (byte) channel;
}
/**<|fim▁hole|> */
public boolean getIsXbox() {
return m_ds.getJoystickIsXbox(m_port);
}
/**
* Get the HID type of the current joystick.
*
* @return The HID type value of the current joystick.
*/
public int getType() {
return m_ds.getJoystickType(m_port);
}
/**
* Get the name of the current joystick.
*
* @return The name of the current joystick.
*/
public String getName() {
return m_ds.getJoystickName(m_port);
}
/**
* Get the port number of the joystick.
*
* @return The port number of the joystick.
*/
public int getPort() {
return m_port;
}
/**
* Get the axis type of a joystick axis.
*
* @return the axis type of a joystick axis.
*/
public int getAxisType(int axis) {
return m_ds.getJoystickAxisType(m_port, axis);
}
/**
* Set the rumble output for the joystick. The DS currently supports 2 rumble values, left rumble
* and right rumble.
*
* @param type Which rumble value to set
* @param value The normalized value (0 to 1) to set the rumble to
*/
public void setRumble(RumbleType type, float value) {
if (value < 0) {
value = 0;
} else if (value > 1) {
value = 1;
}
if (type == RumbleType.kLeftRumble) {
m_leftRumble = (short) (value * 65535);
} else {
m_rightRumble = (short) (value * 65535);
}
HAL.setJoystickOutputs((byte) m_port, m_outputs, m_leftRumble, m_rightRumble);
}
/**
* Set a single HID output value for the joystick.
*
* @param outputNumber The index of the output to set (1-32)
* @param value The value to set the output to
*/
public void setOutput(int outputNumber, boolean value) {
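    // Clear bit (outputNumber - 1) in the outputs word, then set it to the requested value.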
m_outputs = (m_outputs & ~(1 << (outputNumber - 1))) | ((value ? 1 : 0) << (outputNumber - 1));
HAL.setJoystickOutputs((byte) m_port, m_outputs, m_leftRumble, m_rightRumble);
}
/**
* Set all HID output values for the joystick.
*
* @param value The 32 bit output value (1 bit for each output)
*/
public void setOutputs(int value) {
m_outputs = value;
HAL.setJoystickOutputs((byte) m_port, m_outputs, m_leftRumble, m_rightRumble);
}
}<|fim▁end|>
|
* Get the value of isXbox for the current joystick.
*
* @return A boolean that is true if the controller is an xbox controller.
|
<|file_name|>AbstractExtendedActivityDataModifier.js<|end_file_name|><|fim▁begin|>var AbstractExtendedActivityDataModifier = Fiber.extend(function(base) {
return {
content: '',
isAuthorOfViewedActivity: null,
dataViews: [],
summaryGrid: null,
init: function(analysisData, appResources, userSettings, athleteId, athleteIdAuthorOfActivity, basicInfos) {
this.analysisData_ = analysisData;
this.appResources_ = appResources;
this.userSettings_ = userSettings;
this.athleteId_ = athleteId;
this.athleteIdAuthorOfActivity_ = athleteIdAuthorOfActivity;
this.basicInfos = basicInfos;
this.isAuthorOfViewedActivity = (this.athleteIdAuthorOfActivity_ == this.athleteId_);
this.speedUnitsData = this.getSpeedUnitData();
this.setDataViewsNeeded();
},
modify: function() {
_.each(this.dataViews, function(view) {
// Append result of view.render() to this.content
view.render();
this.content += view.getContent();
}.bind(this));
},
placeSummaryPanel: function(panelAdded) {
this.makeSummaryGrid(2, 4);
this.insertContentSummaryGridContent();
$('.inline-stats.section').first().after(this.summaryGrid.html()).each(function() {
// Grid placed
if (panelAdded) panelAdded();
});
},
placeExtendedStatsButton: function(buttonAdded) {
var htmlButton = '<section>';
htmlButton += '<a class="button btn-block btn-primary" id="extendedStatsButton" href="#">';
htmlButton += 'Show extended statistics';
htmlButton += '</a>';
htmlButton += '</section>';
$('.inline-stats.section').first().after(htmlButton).each(function() {
$('#extendedStatsButton').click(function() {
$.fancybox({
'width': '100%',
'height': '100%',
'autoScale': true,
'transitionIn': 'fade',
'transitionOut': 'fade',
'type': 'iframe',
'content': '<div class="stravistiXExtendedData">' + this.content + '</div>'
});
                    // For each view start making the associated graphs
_.each(this.dataViews, function(view) {
view.displayGraph();
}.bind(this));
}.bind(this));
if (buttonAdded) buttonAdded();
}.bind(this));
},
makeSummaryGrid: function(columns, rows) {
var summaryGrid = '';
summaryGrid += '<div>';
summaryGrid += '<div class="summaryGrid">';
summaryGrid += '<table>';
for (var i = 0; i < rows; i++) {
summaryGrid += '<tr>';
for (var j = 0; j < columns; j++) {
summaryGrid += '<td data-column="' + j + '" data-row="' + i + '">';
summaryGrid += '</td>';
}
summaryGrid += '</tr>';<|fim▁hole|> this.summaryGrid = $(summaryGrid);
},
insertContentAtGridPosition: function(columnId, rowId, data, title, units, userSettingKey) {
var onClickHtmlBehaviour = "onclick='javascript:window.open(\"" + this.appResources_.settingsLink + "#/commonSettings?viewOptionHelperId=" + userSettingKey + "\",\"_blank\");'";
if (this.summaryGrid) {
var content = '<span class="summaryGridDataContainer" ' + onClickHtmlBehaviour + '>' + data + ' <span class="summaryGridUnits">' + units + '</span><br /><span class="summaryGridTitle">' + title + '</span></span>';
this.summaryGrid.find('[data-column=' + columnId + '][data-row=' + rowId + ']').html(content);
} else {
console.error('Grid is not initialized');
}
},
insertContentSummaryGridContent: function() {
// Insert summary data
var moveRatio = '-';
if (this.analysisData_.moveRatio && this.userSettings_.displayActivityRatio) {
moveRatio = this.analysisData_.moveRatio.toFixed(2);
}
this.insertContentAtGridPosition(0, 0, moveRatio, 'Move Ratio', '', 'displayActivityRatio');
// ...
        var TRIMP = '-';
        var activityHeartRateReserve = '-';
var activityHeartRateReserveUnit = '%';
if (this.analysisData_.heartRateData && this.userSettings_.displayAdvancedHrData) {
TRIMP = this.analysisData_.heartRateData.TRIMP.toFixed(0) + ' <span class="summarySubGridTitle">(' + this.analysisData_.heartRateData.TRIMPPerHour.toFixed(0) + ' / hour)</span>';
activityHeartRateReserve = this.analysisData_.heartRateData.activityHeartRateReserve.toFixed(0);
activityHeartRateReserveUnit = '% <span class="summarySubGridTitle">(Max: ' + this.analysisData_.heartRateData.activityHeartRateReserveMax.toFixed(0) + '% @ ' + this.analysisData_.heartRateData.maxHeartRate + 'bpm)</span>';
}
this.insertContentAtGridPosition(0, 1, TRIMP, 'TRaining IMPulse', '', 'displayAdvancedHrData');
this.insertContentAtGridPosition(1, 1, activityHeartRateReserve, 'Heart Rate Reserve Avg', activityHeartRateReserveUnit, 'displayAdvancedHrData');
// ...
var climbTime = '-';
var climbTimeExtra = '';
if (this.analysisData_.gradeData && this.userSettings_.displayAdvancedGradeData) {
climbTime = Helper.secondsToHHMMSS(this.analysisData_.gradeData.upFlatDownInSeconds.up);
climbTimeExtra = '<span class="summarySubGridTitle">(' + (this.analysisData_.gradeData.upFlatDownInSeconds.up / this.analysisData_.gradeData.upFlatDownInSeconds.total * 100).toFixed(0) + '% of time)</span>';
}
this.insertContentAtGridPosition(0, 2, climbTime, 'Time climbing', climbTimeExtra, 'displayAdvancedGradeData');
},
/**
     * Assign the default data views needed
*/
setDataViewsNeeded: function() {
// By default we have... If data exist of course...
// Featured view
if (this.analysisData_) {
var featuredDataView = new FeaturedDataView(this.analysisData_, this.userSettings_, this.basicInfos);
featuredDataView.setAppResources(this.appResources_);
featuredDataView.setIsAuthorOfViewedActivity(this.isAuthorOfViewedActivity);
this.dataViews.push(featuredDataView);
}
// Heart view
if (this.analysisData_.heartRateData && this.userSettings_.displayAdvancedHrData) {
var heartRateDataView = new HeartRateDataView(this.analysisData_.heartRateData, 'hrr', this.userSettings_);
heartRateDataView.setAppResources(this.appResources_);
heartRateDataView.setIsAuthorOfViewedActivity(this.isAuthorOfViewedActivity);
this.dataViews.push(heartRateDataView);
}
},
getSpeedUnitData: function() {
var measurementPreference = currentAthlete.get('measurement_preference');
var units = (measurementPreference == 'meters') ? 'km' : 'mi';
var speedUnitPerhour = (measurementPreference == 'meters') ? 'km/h' : 'mi/h';
var speedUnitFactor = (speedUnitPerhour == 'km/h') ? 1 : 0.62137;
return [speedUnitPerhour, speedUnitFactor, units];
},
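        // Hedged usage sketch (athlete preference value is assumed):
        //   var speedData = this.getSpeedUnitData();
        //   // 'meters' preference -> ['km/h', 1, 'km']
        //   // anything else       -> ['mi/h', 0.62137, 'mi']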
}
});<|fim▁end|>
|
}
summaryGrid += '</table>';
summaryGrid += '</div>';
summaryGrid += '</div>';
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
# Register your models here.
from polls.models import Question,Choice
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fields = ["question_text", "pub_date"]<|fim▁hole|> inlines = [ChoiceInline]
list_display = ('question_text', 'pub_date', 'was_published_recently')
search_fields = ['question_text']
list_filter = ['pub_date']
admin.site.register(Question, QuestionAdmin)<|fim▁end|>
| |
<|file_name|>LiDAR_tools.py<|end_file_name|><|fim▁begin|>import numpy as np
import laspy as las
# Determine if a point is inside a given polygon or not
# Polygon is a list of (x,y) pairs. This function
# returns True or False. The algorithm is called
# the "Ray Casting Method".
# the point_in_poly algorithm was found here:
# http://geospatialpython.com/2011/01/point-in-polygon.html
def point_in_poly(x,y,poly):
n = len(poly)
inside = False
p1x,p1y = poly[0]<|fim▁hole|> if x <= max(p1x,p2x):
if p1y != p2y:
xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x,p1y = p2x,p2y
return inside
# This one is my own version of the ray-trace algorithm which utilises the numpy arrays so that a list of x and y coordinates can be processed in one call and only points inside polygon are returned alongside the indices in case required for future referencing. This saves a fair bit of looping.
def points_in_poly(x,y,poly):
n = len(poly)
inside=np.zeros(x.size,dtype=bool)
xints=np.zeros(x.size)
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y=poly[i % n]
if p1y!=p2y:
xints[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x)],axis=0)] = (y[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x)],axis=0)]-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x==p2x:
inside[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x)],axis=0)] = np.invert(inside[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x)],axis=0)])
else:
inside[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x),x<=xints],axis=0)] = np.invert(inside[np.all([y>min(p1y,p2y), y<=max(p1y,p2y), x<=max(p1x,p2x),x<=xints],axis=0)])
p1x,p1y = p2x,p2y
return x[inside],y[inside], inside
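# A minimal sketch of the vectorised call above; the square and test points
# below are hypothetical, not from any LiDAR tile.
#   square = [(0., 0.), (1., 0.), (1., 1.), (0., 1.)]
#   xs = np.array([0.5, 2.0, 0.1])
#   ys = np.array([0.5, 2.0, 0.9])
#   x_in, y_in, mask = points_in_poly(xs, ys, square)
#   # mask -> array([ True, False,  True]); x_in -> array([ 0.5,  0.1])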
# This retrieves all points within circular neighbourhood, Terget point is the location around which the neighbourhood search is conducted, for a specified search radius. x and y are vectors with the x and y coordinates of the test points
def points_in_radius(x,y,target_x, target_y,radius):
inside=np.zeros(x.size,dtype=bool)
d2=(x-target_x)**2+(y-target_y)**2
inside = d2<=radius**2
return x[inside],y[inside], inside
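# e.g. (coordinates are illustrative): keep returns within 10 m of a plot centre
#   x_in, y_in, mask = points_in_radius(xs, ys, 350412.0, 801233.0, 10.0)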
# filter lidar wth polygon
# This function has been updated to include an option to filter by first return location.
# The reason for this is so full collections of returns associated with each LiDAR pulse
# can be retrieved, which can be an issue at edges in multi-return analyses
def filter_lidar_data_by_polygon(in_pts,polygon,filter_by_first_return_location = False):
pts = np.zeros((0,in_pts.shape[1]))
if in_pts.shape[0]>0:
if filter_by_first_return_location:
# find first returns
mask = in_pts[:,3]==1
x_temp, y_temp, inside_temp = points_in_poly(in_pts[mask,0],in_pts[mask,1],polygon)
shots = np.unique(in_pts[mask,6][inside_temp]) # index 6 refers to GPS time
inside = np.in1d(in_pts[:,6],shots) # this function retrieves all points corresponding to this GPS time
x = in_pts[inside,0]
y = in_pts[inside,1]
x_temp=None
y_temp=None
inside_temp=None
else:
x,y,inside = points_in_poly(in_pts[:,0],in_pts[:,1],polygon)
pts = in_pts[inside,:]
else:
print("\t\t\t no points in polygon")
return pts
# filter lidar by circular neighbourhood
def filter_lidar_data_by_neighbourhood(in_pts,target_xy,radius):
pts = np.zeros((0,in_pts.shape[1]))
if in_pts.shape[0]>0:
x,y,inside = points_in_radius(in_pts[:,0],in_pts[:,1],target_xy[0],target_xy[1],radius)
pts = in_pts[inside,:]
else:
print( "\t\t\t no points in neighbourhood")
return pts<|fim▁end|>
|
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from show_latent import LatentView
|
<|file_name|>TankMessageCodes.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2020 by Stefan Schubert under the MIT License (MIT).
* See project LICENSE file for the detailed terms and conditions.
*/
package de.bluewhale.sabi.webclient.rest.exceptions;
import de.bluewhale.sabi.exception.ExceptionCode;
import de.bluewhale.sabi.exception.MessageCode;
import de.bluewhale.sabi.exception.TankExceptionCodes;
/**
 * MessageCodes that may arise when using the Tank REST service
*
* @author schubert
*/
public enum TankMessageCodes implements MessageCode {
NO_SUCH_TANK(TankExceptionCodes.TANK_NOT_FOUND_OR_DOES_NOT_BELONG_TO_USER);
// ------------------------------ FIELDS ------------------------------
private TankExceptionCodes exceptionCode;
<|fim▁hole|>// --------------------------- CONSTRUCTORS ---------------------------
TankMessageCodes() {
exceptionCode = null;
}
TankMessageCodes(TankExceptionCodes pExceptionCode) {
exceptionCode = pExceptionCode;
}
// --------------------- GETTER / SETTER METHODS ---------------------
@Override
public ExceptionCode getExceptionCode() {
return exceptionCode;
}
}<|fim▁end|>
| |
<|file_name|>translate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import codecs
import csv
import fnmatch
import inspect
import locale
import os
import openerp.sql_db as sql_db
import re
import logging
import tarfile
import tempfile
import threading
from babel.messages import extract
from collections import defaultdict
from datetime import datetime
from lxml import etree
from os.path import join
import config
import misc
from misc import SKIPPED_ELEMENT_TYPES
import osutil
import openerp
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
# used to notify web client that these translations should be loaded in the UI
WEB_TRANSLATION_COMMENT = "openerp-web"
SKIPPED_ELEMENTS = ('script', 'style')
_LOCALE2WIN32 = {
'af_ZA': 'Afrikaans_South Africa',
'sq_AL': 'Albanian_Albania',
'ar_SA': 'Arabic_Saudi Arabia',
'eu_ES': 'Basque_Spain',
'be_BY': 'Belarusian_Belarus',
'bs_BA': 'Bosnian_Bosnia and Herzegovina',
'bg_BG': 'Bulgarian_Bulgaria',
'ca_ES': 'Catalan_Spain',
'hr_HR': 'Croatian_Croatia',
'zh_CN': 'Chinese_China',
'zh_TW': 'Chinese_Taiwan',
'cs_CZ': 'Czech_Czech Republic',
'da_DK': 'Danish_Denmark',
'nl_NL': 'Dutch_Netherlands',
'et_EE': 'Estonian_Estonia',
'fa_IR': 'Farsi_Iran',
'ph_PH': 'Filipino_Philippines',
'fi_FI': 'Finnish_Finland',
'fr_FR': 'French_France',
'fr_BE': 'French_France',
'fr_CH': 'French_France',
'fr_CA': 'French_France',
'ga': 'Scottish Gaelic',
'gl_ES': 'Galician_Spain',
'ka_GE': 'Georgian_Georgia',
'de_DE': 'German_Germany',
'el_GR': 'Greek_Greece',
'gu': 'Gujarati_India',
'he_IL': 'Hebrew_Israel',
'hi_IN': 'Hindi',
'hu': 'Hungarian_Hungary',
'is_IS': 'Icelandic_Iceland',
'id_ID': 'Indonesian_indonesia',
'it_IT': 'Italian_Italy',
'ja_JP': 'Japanese_Japan',
'kn_IN': 'Kannada',
'km_KH': 'Khmer',
'ko_KR': 'Korean_Korea',
'lo_LA': 'Lao_Laos',
'lt_LT': 'Lithuanian_Lithuania',
'lat': 'Latvian_Latvia',
'ml_IN': 'Malayalam_India',
'mi_NZ': 'Maori',
'mn': 'Cyrillic_Mongolian',
'no_NO': 'Norwegian_Norway',
'nn_NO': 'Norwegian-Nynorsk_Norway',
'pl': 'Polish_Poland',
'pt_PT': 'Portuguese_Portugal',
'pt_BR': 'Portuguese_Brazil',
'ro_RO': 'Romanian_Romania',
'ru_RU': 'Russian_Russia',
'sr_CS': 'Serbian (Cyrillic)_Serbia and Montenegro',
'sk_SK': 'Slovak_Slovakia',
'sl_SI': 'Slovenian_Slovenia',
#should find more specific locales for spanish countries,
#but better than nothing
'es_AR': 'Spanish_Spain',
'es_BO': 'Spanish_Spain',
'es_CL': 'Spanish_Spain',
'es_CO': 'Spanish_Spain',
'es_CR': 'Spanish_Spain',
'es_DO': 'Spanish_Spain',
'es_EC': 'Spanish_Spain',
'es_ES': 'Spanish_Spain',
'es_GT': 'Spanish_Spain',
'es_HN': 'Spanish_Spain',
'es_MX': 'Spanish_Spain',
'es_NI': 'Spanish_Spain',
'es_PA': 'Spanish_Spain',
'es_PE': 'Spanish_Spain',
'es_PR': 'Spanish_Spain',
'es_PY': 'Spanish_Spain',
'es_SV': 'Spanish_Spain',
'es_UY': 'Spanish_Spain',
'es_VE': 'Spanish_Spain',
'sv_SE': 'Swedish_Sweden',
'ta_IN': 'English_Australia',
'th_TH': 'Thai_Thailand',
'tr_TR': 'Turkish_Turkey',
'uk_UA': 'Ukrainian_Ukraine',
'vi_VN': 'Vietnamese_Viet Nam',
'tlh_TLH': 'Klingon',
}
# These are not all english small words, just those that could potentially be isolated within views
ENGLISH_SMALL_WORDS = set("as at by do go if in me no of ok on or to up us we".split())
class UNIX_LINE_TERMINATOR(csv.excel):
lineterminator = '\n'
csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
#
# Warning: better use self.pool.get('ir.translation')._get_source if you can
#
def translate(cr, name, source_type, lang, source=None):
if source and name:
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s and src=%s', (lang, source_type, str(name), source))
elif name:
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s', (lang, source_type, str(name)))
elif source:
cr.execute('select value from ir_translation where lang=%s and type=%s and src=%s', (lang, source_type, source))
res_trans = cr.fetchone()
res = res_trans and res_trans[0] or False
return res
class GettextAlias(object):
def _get_db(self):
# find current DB based on thread/worker db name (see netsvc)
db_name = getattr(threading.currentThread(), 'dbname', None)
if db_name:
return sql_db.db_connect(db_name)
def _get_cr(self, frame, allow_create=True):
# try, in order: cr, cursor, self.env.cr, self.cr,
# request.env.cr
if 'cr' in frame.f_locals:
return frame.f_locals['cr'], False
if 'cursor' in frame.f_locals:
return frame.f_locals['cursor'], False
s = frame.f_locals.get('self')
if hasattr(s, 'env'):
return s.env.cr, False
if hasattr(s, 'cr'):
return s.cr, False
try:
from openerp.http import request
return request.env.cr, False
except RuntimeError:
pass
if allow_create:
# create a new cursor
db = self._get_db()
if db is not None:
return db.cursor(), True
return None, False
def _get_uid(self, frame):
# try, in order: uid, user, self.env.uid
if 'uid' in frame.f_locals:
return frame.f_locals['uid']
if 'user' in frame.f_locals:
return int(frame.f_locals['user']) # user may be a record
s = frame.f_locals.get('self')
return s.env.uid
def _get_lang(self, frame):
# try, in order: context.get('lang'), kwargs['context'].get('lang'),
# self.env.lang, self.localcontext.get('lang'), request.env.lang
lang = None
if frame.f_locals.get('context'):
lang = frame.f_locals['context'].get('lang')
if not lang:
kwargs = frame.f_locals.get('kwargs', {})
if kwargs.get('context'):
lang = kwargs['context'].get('lang')
if not lang:
s = frame.f_locals.get('self')
if hasattr(s, 'env'):
lang = s.env.lang
if not lang:
if hasattr(s, 'localcontext'):
lang = s.localcontext.get('lang')
if not lang:
try:
from openerp.http import request
lang = request.env.lang
except RuntimeError:
pass
if not lang:
# Last resort: attempt to guess the language of the user
# Pitfall: some operations are performed in sudo mode, and we
# don't know the originial uid, so the language may
# be wrong when the admin language differs.
pool = getattr(s, 'pool', None)
(cr, dummy) = self._get_cr(frame, allow_create=False)
uid = self._get_uid(frame)
if pool and cr and uid:
lang = pool['res.users'].context_get(cr, uid)['lang']
return lang
def __call__(self, source):
res = source
cr = None
is_new_cr = False
try:
frame = inspect.currentframe()
if frame is None:
return source
frame = frame.f_back
if not frame:
return source
lang = self._get_lang(frame)
if lang:
cr, is_new_cr = self._get_cr(frame)
if cr:
# Try to use ir.translation to benefit from global cache if possible
registry = openerp.registry(cr.dbname)
res = registry['ir.translation']._get_source(cr, SUPERUSER_ID, None, ('code','sql_constraint'), lang, source)
else:
_logger.debug('no context cursor detected, skipping translation for "%r"', source)
else:
_logger.debug('no translation language detected, skipping translation for "%r" ', source)
except Exception:
_logger.debug('translation went wrong for "%r", skipped', source)
# if so, double-check the root/base translations filenames
finally:
if cr and is_new_cr:
cr.close()
return res
_ = GettextAlias()
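# Hedged sketch of typical use (the exception class and message are placeholders):
# the alias infers the cursor and language from the caller's frame, so a bare
# _('...') works inside model methods without passing a context explicitly.
#   from openerp.tools.translate import _
#   raise openerp.exceptions.Warning(_('Quantity must be positive.'))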
def quote(s):
"""Returns quoted PO term string, with special PO characters escaped"""
assert r"\n" not in s, "Translation terms may not include escaped newlines ('\\n'), please use only literal newlines! (in '%s')" % s
return '"%s"' % s.replace('\\','\\\\') \
.replace('"','\\"') \
.replace('\n', '\\n"\n"')
re_escaped_char = re.compile(r"(\\.)")
re_escaped_replacements = {'n': '\n', }
def _sub_replacement(match_obj):
return re_escaped_replacements.get(match_obj.group(1)[1], match_obj.group(1)[1])
def unquote(str):
"""Returns unquoted PO term string, with special PO characters unescaped"""
return re_escaped_char.sub(_sub_replacement, str[1:-1])
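# Round-trip sketch (terms are made up): quote() escapes backslashes, double
# quotes and newlines for PO output; unquote() reverses the escaping.
#   quote('say "hi"')           # -> '"say \\"hi\\""'
#   unquote('"line1\\nline2"')  # -> 'line1\nline2'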
# class to handle po files
class TinyPoFile(object):
def __init__(self, buffer):
self.buffer = buffer
def warn(self, msg, *args):
_logger.warning(msg, *args)
def __iter__(self):
self.buffer.seek(0)
self.lines = self._get_lines()
self.lines_count = len(self.lines)
self.first = True
self.extra_lines= []
return self
def _get_lines(self):
lines = self.buffer.readlines()
# remove the BOM (Byte Order Mark):
if len(lines):
lines[0] = unicode(lines[0], 'utf8').lstrip(unicode( codecs.BOM_UTF8, "utf8"))
lines.append('') # ensure that the file ends with at least an empty line
return lines
def cur_line(self):
return self.lines_count - len(self.lines)
def next(self):
trans_type = name = res_id = source = trad = None
if self.extra_lines:
trans_type, name, res_id, source, trad, comments = self.extra_lines.pop(0)
if not res_id:
res_id = '0'
else:
comments = []
targets = []
line = None
fuzzy = False
while not line:
if 0 == len(self.lines):
raise StopIteration()
line = self.lines.pop(0).strip()
while line.startswith('#'):
if line.startswith('#~ '):
break
if line.startswith('#.'):
line = line[2:].strip()
if not line.startswith('module:'):
comments.append(line)
elif line.startswith('#:'):
# Process the `reference` comments. Each line can specify
# multiple targets (e.g. model, view, code, selection,
# ...). For each target, we will return an additional
# entry.
for lpart in line[2:].strip().split(' '):
trans_info = lpart.strip().split(':',2)
if trans_info and len(trans_info) == 2:
# looks like the translation trans_type is missing, which is not
# unexpected because it is not a GetText standard. Default: 'code'
trans_info[:0] = ['code']
if trans_info and len(trans_info) == 3:
# this is a ref line holding the destination info (model, field, record)
targets.append(trans_info)
elif line.startswith('#,') and (line[2:].strip() == 'fuzzy'):
fuzzy = True
line = self.lines.pop(0).strip()
if not self.lines:
raise StopIteration()
while not line:
# allow empty lines between comments and msgid
line = self.lines.pop(0).strip()
if line.startswith('#~ '):
while line.startswith('#~ ') or not line.strip():
if 0 == len(self.lines):
raise StopIteration()
line = self.lines.pop(0)
# This has been a deprecated entry, don't return anything
return self.next()
if not line.startswith('msgid'):
raise Exception("malformed file: bad line: %s" % line)
source = unquote(line[6:])
line = self.lines.pop(0).strip()
if not source and self.first:
self.first = False
            # if the source is "" and it's the first msgid, it's the special
            # msgstr with the information about the translation and the
            # translator; we skip it
self.extra_lines = []
while line:
line = self.lines.pop(0).strip()
return self.next()
while not line.startswith('msgstr'):
if not line:
raise Exception('malformed file at %d'% self.cur_line())
source += unquote(line)
line = self.lines.pop(0).strip()
trad = unquote(line[7:])
line = self.lines.pop(0).strip()
while line:
trad += unquote(line)
line = self.lines.pop(0).strip()
if targets and not fuzzy:
# Use the first target for the current entry (returned at the
# end of this next() call), and keep the others to generate
# additional entries (returned the next next() calls).
trans_type, name, res_id = targets.pop(0)
for t, n, r in targets:
if t == trans_type == 'code': continue
self.extra_lines.append((t, n, r, source, trad, comments))
if name is None:
if not fuzzy:
                self.warn('Missing "#:" formatted comment at line %d for the following source:\n\t%s',
self.cur_line(), source[:30])
return self.next()
return trans_type, name, res_id, source, trad, '\n'.join(comments)
def write_infos(self, modules):
import openerp.release as release
self.buffer.write("# Translation of %(project)s.\n" \
"# This file contains the translation of the following modules:\n" \
"%(modules)s" \
"#\n" \
"msgid \"\"\n" \
"msgstr \"\"\n" \
'''"Project-Id-Version: %(project)s %(version)s\\n"\n''' \
'''"Report-Msgid-Bugs-To: \\n"\n''' \
'''"POT-Creation-Date: %(now)s\\n"\n''' \
'''"PO-Revision-Date: %(now)s\\n"\n''' \
'''"Last-Translator: <>\\n"\n''' \
'''"Language-Team: \\n"\n''' \
'''"MIME-Version: 1.0\\n"\n''' \
'''"Content-Type: text/plain; charset=UTF-8\\n"\n''' \
'''"Content-Transfer-Encoding: \\n"\n''' \
'''"Plural-Forms: \\n"\n''' \
"\n"
% { 'project': release.description,
'version': release.version,
'modules': reduce(lambda s, m: s + "#\t* %s\n" % m, modules, ""),
'now': datetime.utcnow().strftime('%Y-%m-%d %H:%M')+"+0000",
}
)
def write(self, modules, tnrs, source, trad, comments=None):
        plural = len(modules) > 1 and 's' or ''
        self.buffer.write("#. module%s: %s\n" % (plural, ', '.join(modules)))
if comments:
self.buffer.write(''.join(('#. %s\n' % c for c in comments)))
code = False
for typy, name, res_id in tnrs:
self.buffer.write("#: %s:%s:%s\n" % (typy, name, res_id))
if typy == 'code':
code = True
if code:
# only strings in python code are python formated
self.buffer.write("#, python-format\n")
if not isinstance(trad, unicode):
trad = unicode(trad, 'utf8')
if not isinstance(source, unicode):
source = unicode(source, 'utf8')
msg = "msgid %s\n" \
"msgstr %s\n\n" \
% (quote(source), quote(trad))
self.buffer.write(msg.encode('utf8'))
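# Illustrative iteration over a PO file with the reader above (path is assumed):
#   with open('i18n/fr.po') as pofile:
#       for type_, name, res_id, src, value, comments in TinyPoFile(pofile):
#           print type_, name, src, '->', value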
# Methods to export the translation file
def trans_export(lang, modules, buffer, format, cr):
def _process(format, modules, rows, buffer, lang):
if format == 'csv':
writer = csv.writer(buffer, 'UNIX')
# write header first
writer.writerow(("module","type","name","res_id","src","value"))
for module, type, name, res_id, src, trad, comments in rows:
# Comments are ignored by the CSV writer
writer.writerow((module, type, name, res_id, src, trad))
elif format == 'po':
writer = TinyPoFile(buffer)
writer.write_infos(modules)
# we now group the translations by source. That means one translation per source.
grouped_rows = {}
for module, type, name, res_id, src, trad, comments in rows:
row = grouped_rows.setdefault(src, {})
row.setdefault('modules', set()).add(module)
if not row.get('translation') and trad != src:
row['translation'] = trad
row.setdefault('tnrs', []).append((type, name, res_id))
row.setdefault('comments', set()).update(comments)
for src, row in sorted(grouped_rows.items()):
if not lang:
# translation template, so no translation value
row['translation'] = ''
elif not row.get('translation'):
row['translation'] = src
writer.write(row['modules'], row['tnrs'], src, row['translation'], row['comments'])
elif format == 'tgz':
rows_by_module = {}<|fim▁hole|> rows_by_module.setdefault(module, []).append(row)
tmpdir = tempfile.mkdtemp()
for mod, modrows in rows_by_module.items():
tmpmoddir = join(tmpdir, mod, 'i18n')
os.makedirs(tmpmoddir)
pofilename = (lang if lang else mod) + ".po" + ('t' if not lang else '')
buf = file(join(tmpmoddir, pofilename), 'w')
_process('po', [mod], modrows, buf, lang)
buf.close()
tar = tarfile.open(fileobj=buffer, mode='w|gz')
tar.add(tmpdir, '')
tar.close()
else:
raise Exception(_('Unrecognized extension: must be one of '
'.csv, .po, or .tgz (received .%s).' % format))
translations = trans_generate(lang, modules, cr)
modules = set(t[0] for t in translations)
_process(format, modules, translations, buffer, lang)
del translations
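# Hedged usage sketch (the cursor and module names are placeholders):
#   import cStringIO
#   buf = cStringIO.StringIO()
#   trans_export('fr_FR', ['sale'], buf, 'po', cr)  # PO for one language
#   trans_export(None, ['sale'], buf, 'po', cr)     # POT template when lang is None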
def trans_parse_xsl(de):
return list(set(trans_parse_xsl_aux(de, False)))
def trans_parse_xsl_aux(de, t):
res = []
for n in de:
t = t or n.get("t")
if t:
if isinstance(n, SKIPPED_ELEMENT_TYPES) or n.tag.startswith('{http://www.w3.org/1999/XSL/Transform}'):
continue
if n.text:
l = n.text.strip().replace('\n',' ')
if len(l):
res.append(l.encode("utf8"))
if n.tail:
l = n.tail.strip().replace('\n',' ')
if len(l):
res.append(l.encode("utf8"))
res.extend(trans_parse_xsl_aux(n, t))
return res
def trans_parse_rml(de):
res = []
for n in de:
for m in n:
if isinstance(m, SKIPPED_ELEMENT_TYPES) or not m.text:
continue
string_list = [s.replace('\n', ' ').strip() for s in re.split('\[\[.+?\]\]', m.text)]
for s in string_list:
if s:
res.append(s.encode("utf8"))
res.extend(trans_parse_rml(n))
return res
def _push(callback, term, source_line):
""" Sanity check before pushing translation terms """
term = (term or "").strip().encode('utf8')
# Avoid non-char tokens like ':' '...' '.00' etc.
if len(term) > 8 or any(x.isalpha() for x in term):
callback(term, source_line)
def trans_parse_view(element, callback):
""" Helper method to recursively walk an etree document representing a
regular view and call ``callback(term)`` for each translatable term
that is found in the document.
:param ElementTree element: root of etree document to extract terms from
:param callable callback: a callable in the form ``f(term, source_line)``,
that will be called for each extracted term.
"""
for el in element.iter():
if (not isinstance(el, SKIPPED_ELEMENT_TYPES)
and el.tag.lower() not in SKIPPED_ELEMENTS
and el.get("translation", '').strip() != "off"
and el.text):
_push(callback, el.text, el.sourceline)
if el.tail:
_push(callback, el.tail, el.sourceline)
for attr in ('string', 'help', 'sum', 'confirm', 'placeholder'):
value = el.get(attr)
if value:
_push(callback, value, el.sourceline)
# tests whether an object is in a list of modules
def in_modules(object_name, modules):
if 'all' in modules:
return True
module_dict = {
'ir': 'base',
'res': 'base',
'workflow': 'base',
}
module = object_name.split('.')[0]
module = module_dict.get(module, module)
return module in modules
def _extract_translatable_qweb_terms(element, callback):
""" Helper method to walk an etree document representing
a QWeb template, and call ``callback(term)`` for each
translatable term that is found in the document.
:param etree._Element element: root of etree document to extract terms from
:param Callable callback: a callable in the form ``f(term, source_line)``,
that will be called for each extracted term.
"""
# not using elementTree.iterparse because we need to skip sub-trees in case
# the ancestor element had a reason to be skipped
for el in element:
if isinstance(el, SKIPPED_ELEMENT_TYPES): continue
if (el.tag.lower() not in SKIPPED_ELEMENTS
and "t-js" not in el.attrib
and not ("t-jquery" in el.attrib and "t-operation" not in el.attrib)
and el.get("t-translation", '').strip() != "off"):
_push(callback, el.text, el.sourceline)
for att in ('title', 'alt', 'label', 'placeholder'):
if att in el.attrib:
_push(callback, el.attrib[att], el.sourceline)
_extract_translatable_qweb_terms(el, callback)
_push(callback, el.tail, el.sourceline)
def babel_extract_qweb(fileobj, keywords, comment_tags, options):
"""Babel message extractor for qweb template files.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should
be recognized as translation functions
:param comment_tags: a list of translator tags to search for and
include in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)``
tuples
:rtype: Iterable
"""
result = []
def handle_text(text, lineno):
result.append((lineno, None, text, []))
tree = etree.parse(fileobj)
_extract_translatable_qweb_terms(tree.getroot(), handle_text)
return result
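# Minimal direct-call sketch (the path is hypothetical; babel normally drives
# this extractor via the dotted method name used in babel_extract_terms below):
#   with open('static/src/xml/base.xml') as fobj:
#       for lineno, funcname, message, comments in babel_extract_qweb(
#               fobj, None, None, {}):
#           print lineno, message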
def trans_generate(lang, modules, cr):
dbname = cr.dbname
registry = openerp.registry(dbname)
trans_obj = registry['ir.translation']
model_data_obj = registry['ir.model.data']
uid = 1
query = 'SELECT name, model, res_id, module' \
' FROM ir_model_data'
query_models = """SELECT m.id, m.model, imd.module
FROM ir_model AS m, ir_model_data AS imd
WHERE m.id = imd.res_id AND imd.model = 'ir.model' """
if 'all_installed' in modules:
query += ' WHERE module IN ( SELECT name FROM ir_module_module WHERE state = \'installed\') '
query_models += " AND imd.module in ( SELECT name FROM ir_module_module WHERE state = 'installed') "
query_param = None
if 'all' not in modules:
query += ' WHERE module IN %s'
query_models += ' AND imd.module in %s'
query_param = (tuple(modules),)
query += ' ORDER BY module, model, name'
query_models += ' ORDER BY module, model'
cr.execute(query, query_param)
_to_translate = set()
def push_translation(module, type, name, id, source, comments=None):
# empty and one-letter terms are ignored, they probably are not meant to be
# translated, and would be very hard to translate anyway.
if not source or len(source.strip()) <= 1:
return
tnx = (module, source, name, id, type, tuple(comments or ()))
_to_translate.add(tnx)
def encode(s):
if isinstance(s, unicode):
return s.encode('utf8')
return s
def push(mod, type, name, res_id, term):
term = (term or '').strip()
if len(term) > 2 or term in ENGLISH_SMALL_WORDS:
push_translation(mod, type, name, res_id, term)
def get_root_view(xml_id):
view = model_data_obj.xmlid_to_object(cr, uid, xml_id)
if view:
while view.mode != 'primary':
view = view.inherit_id
xml_id = view.get_external_id(cr, uid).get(view.id, xml_id)
return xml_id
for (xml_name,model,res_id,module) in cr.fetchall():
module = encode(module)
model = encode(model)
xml_name = "%s.%s" % (module, encode(xml_name))
if model not in registry:
_logger.error("Unable to find object %r", model)
continue
Model = registry[model]
if not Model._translate:
# explicitly disabled
continue
obj = Model.browse(cr, uid, res_id)
if not obj.exists():
_logger.warning("Unable to find object %r with id %d", model, res_id)
continue
if model=='ir.ui.view':
d = etree.XML(encode(obj.arch))
if obj.type == 'qweb':
view_id = get_root_view(xml_name)
push_qweb = lambda t,l: push(module, 'view', 'website', view_id, t)
_extract_translatable_qweb_terms(d, push_qweb)
else:
push_view = lambda t,l: push(module, 'view', obj.model, xml_name, t)
trans_parse_view(d, push_view)
elif model=='ir.actions.wizard':
pass # TODO Can model really be 'ir.actions.wizard' ?
elif model=='ir.model.fields':
try:
field_name = encode(obj.name)
except AttributeError, exc:
_logger.error("name error in %s: %s", xml_name, str(exc))
continue
objmodel = registry.get(obj.model)
if (objmodel is None or field_name not in objmodel._columns
or not objmodel._translate):
continue
field_def = objmodel._columns[field_name]
name = "%s,%s" % (encode(obj.model), field_name)
push_translation(module, 'field', name, 0, encode(field_def.string))
if field_def.help:
push_translation(module, 'help', name, 0, encode(field_def.help))
if field_def.translate:
ids = objmodel.search(cr, uid, [])
obj_values = objmodel.read(cr, uid, ids, [field_name])
for obj_value in obj_values:
res_id = obj_value['id']
if obj.name in ('ir.model', 'ir.ui.menu'):
res_id = 0
model_data_ids = model_data_obj.search(cr, uid, [
('model', '=', model),
('res_id', '=', res_id),
])
if not model_data_ids:
push_translation(module, 'model', name, 0, encode(obj_value[field_name]))
if hasattr(field_def, 'selection') and isinstance(field_def.selection, (list, tuple)):
for dummy, val in field_def.selection:
push_translation(module, 'selection', name, 0, encode(val))
elif model=='ir.actions.report.xml':
name = encode(obj.report_name)
fname = ""
if obj.report_rml:
fname = obj.report_rml
parse_func = trans_parse_rml
report_type = "report"
elif obj.report_xsl:
fname = obj.report_xsl
parse_func = trans_parse_xsl
report_type = "xsl"
if fname and obj.report_type in ('pdf', 'xsl'):
try:
report_file = misc.file_open(fname)
try:
d = etree.parse(report_file)
for t in parse_func(d.iter()):
push_translation(module, report_type, name, 0, t)
finally:
report_file.close()
except (IOError, etree.XMLSyntaxError):
_logger.exception("couldn't export translation for report %s %s %s", name, report_type, fname)
for field_name, field_def in obj._columns.items():
if model == 'ir.model' and field_name == 'name' and obj.name == obj.model:
# ignore model name if it is the technical one, nothing to translate
continue
if field_def.translate:
name = model + "," + field_name
try:
term = obj[field_name] or ''
except:
term = ''
push_translation(module, 'model', name, xml_name, encode(term))
# End of data for ir.model.data query results
cr.execute(query_models, query_param)
def push_constraint_msg(module, term_type, model, msg):
if not hasattr(msg, '__call__'):
push_translation(encode(module), term_type, encode(model), 0, encode(msg))
def push_local_constraints(module, model, cons_type='sql_constraints'):
"""Climb up the class hierarchy and ignore inherited constraints
from other modules"""
term_type = 'sql_constraint' if cons_type == 'sql_constraints' else 'constraint'
msg_pos = 2 if cons_type == 'sql_constraints' else 1
for cls in model.__class__.__mro__:
if getattr(cls, '_module', None) != module:
continue
constraints = getattr(cls, '_local_' + cons_type, [])
for constraint in constraints:
push_constraint_msg(module, term_type, model._name, constraint[msg_pos])
for (_, model, module) in cr.fetchall():
if model not in registry:
_logger.error("Unable to find object %r", model)
continue
model_obj = registry[model]
if model_obj._constraints:
push_local_constraints(module, model_obj, 'constraints')
if model_obj._sql_constraints:
push_local_constraints(module, model_obj, 'sql_constraints')
installed_modules = map(
lambda m: m['name'],
registry['ir.module.module'].search_read(cr, uid, [('state', '=', 'installed')], fields=['name']))
path_list = [(path, True) for path in openerp.modules.module.ad_paths]
# Also scan these non-addon paths
for bin_path in ['osv', 'report', 'modules', 'service', 'tools']:
path_list.append((os.path.join(config.config['root_path'], bin_path), True))
# non-recursive scan for individual files in root directory but without
# scanning subdirectories that may contain addons
path_list.append((config.config['root_path'], False))
_logger.debug("Scanning modules at paths: %s", path_list)
def get_module_from_path(path):
for (mp, rec) in path_list:
if rec and path.startswith(mp) and os.path.dirname(path) != mp:
path = path[len(mp)+1:]
return path.split(os.path.sep)[0]
return 'base' # files that are not in a module are considered as being in 'base' module
def verified_module_filepaths(fname, path, root):
fabsolutepath = join(root, fname)
frelativepath = fabsolutepath[len(path):]
display_path = "addons%s" % frelativepath
module = get_module_from_path(fabsolutepath)
if ('all' in modules or module in modules) and module in installed_modules:
if os.path.sep != '/':
display_path = display_path.replace(os.path.sep, '/')
return module, fabsolutepath, frelativepath, display_path
return None, None, None, None
def babel_extract_terms(fname, path, root, extract_method="python", trans_type='code',
extra_comments=None, extract_keywords={'_': None}):
module, fabsolutepath, _, display_path = verified_module_filepaths(fname, path, root)
extra_comments = extra_comments or []
if not module: return
src_file = open(fabsolutepath, 'r')
try:
for extracted in extract.extract(extract_method, src_file,
keywords=extract_keywords):
# Babel 0.9.6 yields lineno, message, comments
# Babel 1.3 yields lineno, message, comments, context
lineno, message, comments = extracted[:3]
push_translation(module, trans_type, display_path, lineno,
encode(message), comments + extra_comments)
except Exception:
_logger.exception("Failed to extract terms from %s", fabsolutepath)
finally:
src_file.close()
for (path, recursive) in path_list:
_logger.debug("Scanning files of modules at %s", path)
for root, dummy, files in osutil.walksymlinks(path):
for fname in fnmatch.filter(files, '*.py'):
babel_extract_terms(fname, path, root)
# mako provides a babel extractor: http://docs.makotemplates.org/en/latest/usage.html#babel
for fname in fnmatch.filter(files, '*.mako'):
babel_extract_terms(fname, path, root, 'mako', trans_type='report')
# Javascript source files in the static/src/js directory, rest is ignored (libs)
if fnmatch.fnmatch(root, '*/static/src/js*'):
for fname in fnmatch.filter(files, '*.js'):
babel_extract_terms(fname, path, root, 'javascript',
extra_comments=[WEB_TRANSLATION_COMMENT],
extract_keywords={'_t': None, '_lt': None})
# QWeb template files
if fnmatch.fnmatch(root, '*/static/src/xml*'):
for fname in fnmatch.filter(files, '*.xml'):
babel_extract_terms(fname, path, root, 'openerp.tools.translate:babel_extract_qweb',
extra_comments=[WEB_TRANSLATION_COMMENT])
if not recursive:
# due to topdown, first iteration is in first level
break
out = []
# translate strings marked as to be translated
for module, source, name, id, type, comments in sorted(_to_translate):
trans = '' if not lang else trans_obj._get_source(cr, uid, name, type, lang, source)
out.append((module, type, name, id, source, encode(trans) or '', comments))
return out
def trans_load(cr, filename, lang, verbose=True, module_name=None, context=None):
try:
fileobj = misc.file_open(filename)
_logger.info("loading %s", filename)
fileformat = os.path.splitext(filename)[-1][1:].lower()
result = trans_load_data(cr, fileobj, fileformat, lang, verbose=verbose, module_name=module_name, context=context)
fileobj.close()
return result
except IOError:
if verbose:
_logger.error("couldn't read translation file %s", filename)
return None
def trans_load_data(cr, fileobj, fileformat, lang, lang_name=None, verbose=True, module_name=None, context=None):
"""Populates the ir_translation table."""
if verbose:
_logger.info('loading translation file for language %s', lang)
if context is None:
context = {}
db_name = cr.dbname
registry = openerp.registry(db_name)
lang_obj = registry.get('res.lang')
trans_obj = registry.get('ir.translation')
iso_lang = misc.get_iso_codes(lang)
try:
ids = lang_obj.search(cr, SUPERUSER_ID, [('code','=', lang)])
if not ids:
# lets create the language with locale information
lang_obj.load_lang(cr, SUPERUSER_ID, lang=lang, lang_name=lang_name)
# Parse also the POT: it will possibly provide additional targets.
# (Because the POT comments are correct on Launchpad but not the
# PO comments due to a Launchpad limitation. See LP bug 933496.)
pot_reader = []
# now, the serious things: we read the language file
fileobj.seek(0)
if fileformat == 'csv':
reader = csv.reader(fileobj, quotechar='"', delimiter=',')
# read the first line of the file (it contains columns titles)
for row in reader:
fields = row
break
elif fileformat == 'po':
reader = TinyPoFile(fileobj)
fields = ['type', 'name', 'res_id', 'src', 'value', 'comments']
# Make a reader for the POT file and be somewhat defensive for the
# stable branch.
if fileobj.name.endswith('.po'):
try:
# Normally the path looks like /path/to/xxx/i18n/lang.po
# and we try to find the corresponding
# /path/to/xxx/i18n/xxx.pot file.
# (Sometimes we have 'i18n_extra' instead of just 'i18n')
addons_module_i18n, _ = os.path.split(fileobj.name)
addons_module, i18n_dir = os.path.split(addons_module_i18n)
addons, module = os.path.split(addons_module)
pot_handle = misc.file_open(os.path.join(
addons, module, i18n_dir, module + '.pot'))
pot_reader = TinyPoFile(pot_handle)
except:
pass
else:
_logger.error('Bad file format: %s', fileformat)
raise Exception(_('Bad file format'))
# Read the POT references, and keep them indexed by source string.
class Target(object):
def __init__(self):
self.value = None
self.targets = set() # set of (type, name, res_id)
self.comments = None
pot_targets = defaultdict(Target)
for type, name, res_id, src, _, comments in pot_reader:
if type is not None:
target = pot_targets[src]
target.targets.add((type, name, res_id))
target.comments = comments
# read the rest of the file
irt_cursor = trans_obj._get_import_cursor(cr, SUPERUSER_ID, context=context)
def process_row(row):
"""Process a single PO (or POT) entry."""
# dictionary which holds values for this line of the csv file
# {'lang': ..., 'type': ..., 'name': ..., 'res_id': ...,
# 'src': ..., 'value': ..., 'module':...}
dic = dict.fromkeys(('type', 'name', 'res_id', 'src', 'value',
'comments', 'imd_model', 'imd_name', 'module'))
dic['lang'] = lang
dic.update(zip(fields, row))
# discard the target from the POT targets.
src = dic['src']
if src in pot_targets:
target = pot_targets[src]
target.value = dic['value']
target.targets.discard((dic['type'], dic['name'], dic['res_id']))
# This would skip terms that fail to specify a res_id
res_id = dic['res_id']
if not res_id:
return
if isinstance(res_id, (int, long)) or \
(isinstance(res_id, basestring) and res_id.isdigit()):
dic['res_id'] = int(res_id)
dic['module'] = module_name
else:
# res_id is an xml id
dic['res_id'] = None
dic['imd_model'] = dic['name'].split(',')[0]
if '.' in res_id:
dic['module'], dic['imd_name'] = res_id.split('.', 1)
else:
dic['module'], dic['imd_name'] = False, res_id
irt_cursor.push(dic)
# First process the entries from the PO file (doing so also fills/removes
# the entries from the POT file).
for row in reader:
process_row(row)
# Then process the entries implied by the POT file (which is more
# correct w.r.t. the targets) if some of them remain.
pot_rows = []
for src, target in pot_targets.iteritems():
if target.value:
for type, name, res_id in target.targets:
pot_rows.append((type, name, res_id, src, target.value, target.comments))
pot_targets.clear()
for row in pot_rows:
process_row(row)
irt_cursor.finish()
trans_obj.clear_caches()
if verbose:
            _logger.info("translation file loaded successfully")
except IOError:
filename = '[lang: %s][format: %s]' % (iso_lang or 'new', fileformat)
_logger.exception("couldn't read translation file %s", filename)
def get_locales(lang=None):
if lang is None:
lang = locale.getdefaultlocale()[0]
if os.name == 'nt':
lang = _LOCALE2WIN32.get(lang, lang)
def process(enc):
ln = locale._build_localename((lang, enc))
yield ln
nln = locale.normalize(ln)
if nln != ln:
yield nln
for x in process('utf8'): yield x
prefenc = locale.getpreferredencoding()
if prefenc:
for x in process(prefenc): yield x
prefenc = {
'latin1': 'latin9',
'iso-8859-1': 'iso8859-15',
'cp1252': '1252',
}.get(prefenc.lower())
if prefenc:
for x in process(prefenc): yield x
yield lang
def resetlocale():
# locale.resetlocale is bugged with some locales.
for ln in get_locales():
try:
return locale.setlocale(locale.LC_ALL, ln)
except locale.Error:
continue
def load_language(cr, lang):
"""Loads a translation terms for a language.
Used mainly to automate language loading at db initialization.
:param lang: language ISO code with optional _underscore_ and l10n flavor (ex: 'fr', 'fr_BE', but not 'fr-BE')
:type lang: str
"""
registry = openerp.registry(cr.dbname)
language_installer = registry['base.language.install']
oid = language_installer.create(cr, SUPERUSER_ID, {'lang': lang})
language_installer.lang_install(cr, SUPERUSER_ID, [oid], context=None)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
for row in rows:
module = row[0]
|
<|file_name|>GPsim.py<|end_file_name|><|fim▁begin|>import sys
import time
import logging
import threading
import GPy
import numpy as np
import matplotlib.pyplot as plt
import pdb
from GPhelpers import *
from IPython.display import display
from poap.strategy import FixedSampleStrategy
from poap.strategy import InputStrategy
from poap.tcpserve import ThreadedTCPServer
from poap.tcpserve import SimpleSocketWorker
from scipy.stats import norm
class GPsim:
def __init__(self, batchsize=100, prunerate=.2, timebound=10, money=1000, fevalcost=1):
self.batchsize = batchsize
self.prunerate = prunerate
self.timebound = timebound
self.money = money
self.fevalcost = fevalcost
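    # Hedged usage sketch; the objective below is a stand-in, and GPhelpers
    # must supply calcGP/batchNewEvals_EI/plotGP.
    #   sim = GPsim(batchsize=50, prunerate=0.25, money=500)
    #   sim.run(lambda x: np.sin(3 * x) + 0.1 * x ** 2, bounds=(-2.0, 2.0))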
def run(self, f, bounds):
breakcond = 1e-5
# run initial batch, deduct money
self.money = self.money - self.batchsize*self.fevalcost
eval_logX = np.random.uniform(bounds[0], bounds[1], self.batchsize)
eval_logY = f(eval_logX)
ybest = np.amin(eval_logY)
while(self.money > 0):
# calc Gaussian Process
m = calcGP(eval_logX, eval_logY)
# calc batchsize, break if necessary
self.batchsize = np.floor(self.batchsize*(1-self.prunerate))
if(self.batchsize < 2):
print "Batch Size reached Minimum"
break
# Deduct Money, evaluate new batch
self.money = self.money - self.batchsize*self.fevalcost
X = batchNewEvals_EI(m, bounds=1, batchsize=self.batchsize, fidelity=1000)
Y = f(X)
eval_logY = np.concatenate([eval_logY, Y])
eval_logX = np.concatenate([eval_logX, X])
ynew = np.amin(eval_logY)
if(np.absolute(ynew - ybest) < breakcond):
print "Break Condition Reached, Improvement Halted"
print "Num evals:", eval_logY.size
break
plotGP(m)<|fim▁hole|> print<|fim▁end|>
| |
<|file_name|>console.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2016 Alberto Gacías <[email protected]>
# Copyright (c) 2015-2016 Jose Antonio Chavarría <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gettext
_ = gettext.gettext
from gi.repository import Gtk
class Console(Gtk.Window):
def __init__(self):
super(Console, self).__init__()
sw = Gtk.ScrolledWindow()
sw.set_policy(
Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC
)
self.textview = Gtk.TextView()
self.textbuffer = self.textview.get_buffer()
self.textview.set_editable(False)
self.textview.set_wrap_mode(Gtk.WrapMode.WORD)
sw.add(self.textview)
self.set_title(_('Migasfree Console'))
self.set_icon_name('migasfree')
self.resize(640, 420)
self.set_decorated(True)
self.set_border_width(10)
self.connect('delete-event', self.on_click_hide)
box = Gtk.Box(spacing=6, orientation='vertical')
box.pack_start(sw, expand=True, fill=True, padding=0)
self.progress = Gtk.ProgressBar()
self.progress.set_pulse_step(0.02)
progress_box = Gtk.Box(False, 0, orientation='vertical')
progress_box.pack_start(self.progress, False, True, 0)
box.pack_start(progress_box, expand=False, fill=True, padding=0)
self.add(box)
def on_timeout(self, user_data):
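        # GLib repeats a timeout callback for as long as it returns True, so
        # the bar keeps pulsing until the timeout source is removed.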
self.progress.pulse()
return True
def on_click_hide(self, widget, data=None):
self.hide()
return True<|fim▁end|>
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
<|file_name|>_convertTemplate.go<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, Shinya Yagyu
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package main
import (
"bufio"
"log"
"os"
"path/filepath"
"strings"
)
func main() {
f, err := os.Open(os.Args[1])
if err != nil {
log.Fatal(err)
}
defer f.Close()
fout, err := os.Create(filepath.Base(os.Args[1]))
if err != nil {
log.Fatal(err)
}
defer fout.Close()
fout.WriteString("{{$root=.}}\n")
scanner := bufio.NewScanner(f)
for i := 0; scanner.Scan(); i++ {
line := scanner.Text()
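		// Crude Jinja -> Go template conversion by textual replacement:
		// "{{ x }}" gains a leading dot, "{% ... %}" becomes "{{ ... }}",
		// endif/endfor collapse to "end", for/in loops become "range" over
		// $root, and lowercase names are capitalised into exported fields.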
line = strings.Replace(line, "{{", "{{.", -1)
line = strings.Replace(line, "{%", "{{", -1)
line = strings.Replace(line, "%}", "}}", -1)
line = strings.Replace(line, "endif", "end", -1)
line = strings.Replace(line, "endfor", "end", -1)
line = strings.Replace(line, " for ", " range $", -1)
line = strings.Replace(line, " in ", ":=", -1)
for i := 'a'; i <= 'z'; i++ {
b := i - 'a' + 'A'
line = strings.Replace(line, "."+string(i), "."+string(b), -1)
line = strings.Replace(line, ":= "+string(i), ":=$root."+string(b), -1)
}<|fim▁hole|> fout.WriteString(line + "\n")
}
if err := scanner.Err(); err != nil {
log.Fatal(err)
}
}<|fim▁end|>
| |
<|file_name|>jquery.pngFix.js<|end_file_name|><|fim▁begin|>/* $Id: jquery.pngFix.js,v 1.1.1.5 2009/02/04 19:23:30 gibbozer Exp $ */
/**
* --------------------------------------------------------------------
* jQuery-Plugin "pngFix"
* Version: 1.1, 11.09.2007
* by Andreas Eberhard, [email protected]
* http://jquery.andreaseberhard.de/
*
* Copyright (c) 2007 Andreas Eberhard
* Licensed under GPL (http://www.opensource.org/licenses/gpl-license.php)
*<|fim▁hole|> * - removed noConflict
* - added png-support for input type=image
* - 01.08.2007 CSS background-image support extension added by Scott Jehl, [email protected], http://www.filamentgroup.com
* 31.05.2007 initial Version 1.0
* --------------------------------------------------------------------
* @example $(function(){$(document).pngFix();});
* @desc Fixes all PNG's in the document on document.ready
*
* jQuery(function(){jQuery(document).pngFix();});
* @desc Fixes all PNG's in the document on document.ready when using noConflict
*
* @example $(function(){$('div.examples').pngFix();});
* @desc Fixes all PNG's within div with class examples
*
* @example $(function(){$('div.examples').pngFix( { blankgif:'ext.gif' } );});
* @desc Fixes all PNG's within div with class examples, provides blank gif for input with png
* --------------------------------------------------------------------
*/
(function($) {
jQuery.fn.pngFix = function(settings) {
// Settings
settings = jQuery.extend({
blankgif: 'blank.gif'
}, settings);
var ie55 = (navigator.appName == "Microsoft Internet Explorer" && parseInt(navigator.appVersion) == 4 && navigator.appVersion.indexOf("MSIE 5.5") != -1);
var ie6 = (navigator.appName == "Microsoft Internet Explorer" && parseInt(navigator.appVersion) == 4 && navigator.appVersion.indexOf("MSIE 6.0") != -1);
if (jQuery.browser.msie && (ie55 || ie6)) {
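        //IE 5.5/6 cannot alpha-blend PNGs natively, so each matched <img> is
        //replaced by a <span> painted via the AlphaImageLoader DirectX filter.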
//fix images with png-source
jQuery(this).find("img[@src$=.png]").each(function() {
jQuery(this).attr('width',jQuery(this).width());
jQuery(this).attr('height',jQuery(this).height());
var prevStyle = '';
var strNewHTML = '';
var imgId = (jQuery(this).attr('id')) ? 'id="' + jQuery(this).attr('id') + '" ' : '';
var imgClass = (jQuery(this).attr('class')) ? 'class="' + jQuery(this).attr('class') + '" ' : '';
var imgTitle = (jQuery(this).attr('title')) ? 'title="' + jQuery(this).attr('title') + '" ' : '';
var imgAlt = (jQuery(this).attr('alt')) ? 'alt="' + jQuery(this).attr('alt') + '" ' : '';
var imgAlign = (jQuery(this).attr('align')) ? 'float:' + jQuery(this).attr('align') + ';' : '';
var imgHand = (jQuery(this).parent().attr('href')) ? 'cursor:hand;' : '';
if (this.style.border) {
prevStyle += 'border:'+this.style.border+';';
this.style.border = '';
}
if (this.style.padding) {
prevStyle += 'padding:'+this.style.padding+';';
this.style.padding = '';
}
if (this.style.margin) {
prevStyle += 'margin:'+this.style.margin+';';
this.style.margin = '';
}
var imgStyle = (this.style.cssText);
strNewHTML += '<span '+imgId+imgClass+imgTitle+imgAlt;
strNewHTML += 'style="position:relative;white-space:pre-line;display:inline-block;background:transparent;'+imgAlign+imgHand;
strNewHTML += 'width:' + jQuery(this).width() + 'px;' + 'height:' + jQuery(this).height() + 'px;';
strNewHTML += 'filter:progid:DXImageTransform.Microsoft.AlphaImageLoader' + '(src=\'' + jQuery(this).attr('src') + '\', sizingMethod=\'scale\');';
strNewHTML += imgStyle+'"></span>';
if (prevStyle != ''){
strNewHTML = '<span style="position:relative;display:inline-block;'+prevStyle+imgHand+'width:' + jQuery(this).width() + 'px;' + 'height:' + jQuery(this).height() + 'px;'+'">' + strNewHTML + '</span>';
}
jQuery(this).hide();
jQuery(this).after(strNewHTML);
});
// fix css background pngs
jQuery(this).find("*").each(function(){
var bgIMG = jQuery(this).css('background-image');
if(bgIMG.indexOf(".png")!=-1){
var iebg = bgIMG.split('url("')[1].split('")')[0];
jQuery(this).css('background-image', 'none');
jQuery(this).get(0).runtimeStyle.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + iebg + "',sizingMethod='scale')";
}
});
//fix input with png-source
jQuery(this).find("input[@src$=.png]").each(function() {
var bgIMG = jQuery(this).attr('src');
jQuery(this).get(0).runtimeStyle.filter = 'progid:DXImageTransform.Microsoft.AlphaImageLoader' + '(src=\'' + bgIMG + '\', sizingMethod=\'scale\');';
            jQuery(this).attr('src', settings.blankgif);
});
}
return jQuery;
};
})(jQuery);<|fim▁end|>
|
* Changelog:
* 11.09.2007 Version 1.1
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
import ConfigParser
class SettingsIO(object):
""" ConfigParser abstraction """
def __init__(self, config_file):
self.config_file = config_file
self.config = ConfigParser.ConfigParser()
if os.path.exists(self.config_file):
self.config.read([self.config_file])
def read_setting(self, key, section='lutris'):
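        # Missing options or sections fall back to None instead of raising,
        # so callers can treat unset values as defaults.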
try:
value = self.config.get(section, key)
except ConfigParser.NoOptionError:
value = None<|fim▁hole|> return value
def write_setting(self, key, value, section='lutris'):
if not self.config.has_section(section):
self.config.add_section(section)
self.config.set(section, key, str(value))
with open(self.config_file, 'wb') as config_file:
self.config.write(config_file)<|fim▁end|>
|
except ConfigParser.NoSectionError:
value = None
|
<|file_name|>sq.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'image2', 'sq', {
alt: 'Tekst Alternativ',
btnUpload: 'Dërgo në server',
<|fim▁hole|> captionPlaceholder: 'Caption', // MISSING
infoTab: 'Informacione mbi Fotografinë',
lockRatio: 'Mbyll Racionin',
menu: 'Karakteristikat e Fotografisë',
pathName: 'foto',
pathNameCaption: 'caption', // MISSING
resetSize: 'Rikthe Madhësinë',
resizer: 'Click and drag to resize', // MISSING
title: 'Karakteristikat e Fotografisë',
uploadTab: 'Ngarko',
urlMissing: 'Mungon URL e burimit të fotografisë.',
altMissing: 'Alternative text is missing.' // MISSING
} );<|fim▁end|>
|
captioned: 'Captioned image', // MISSING
|
<|file_name|>test_nsxmlnodeoptions.py<|end_file_name|><|fim▁begin|>from Foundation import *
from PyObjCTools.TestSupport import *
class TestNSXMLNodeOptions (TestCase):
def testConstants(self):
self.assertEqual(NSXMLNodeOptionsNone, 0)
self.assertEqual(NSXMLNodeIsCDATA, 1 << 0)
self.assertEqual(NSXMLNodeExpandEmptyElement, 1 << 1)
self.assertEqual(NSXMLNodeCompactEmptyElement, 1 << 2)
self.assertEqual(NSXMLNodeUseSingleQuotes, 1 << 3)
self.assertEqual(NSXMLNodeUseDoubleQuotes, 1 << 4)
self.assertEqual(NSXMLDocumentTidyHTML, 1 << 9)
self.assertEqual(NSXMLDocumentTidyXML, 1 << 10)
self.assertEqual(NSXMLDocumentValidate, 1 << 13)
self.assertEqual(NSXMLNodeLoadExternalEntitiesAlways, 1 << 14)
self.assertEqual(NSXMLNodeLoadExternalEntitiesSameOriginOnly, 1 << 15)
self.assertEqual(NSXMLNodeLoadExternalEntitiesNever, 1 << 19)
self.assertEqual(NSXMLDocumentXInclude, 1 << 16)
self.assertEqual(NSXMLNodePrettyPrint, 1 << 17)
self.assertEqual(NSXMLDocumentIncludeContentTypeDeclaration, 1 << 18)
self.assertEqual(NSXMLNodePreserveNamespaceOrder, 1 << 20)
self.assertEqual(NSXMLNodePreserveAttributeOrder, 1 << 21)
self.assertEqual(NSXMLNodePreserveEntities, 1 << 22)
self.assertEqual(NSXMLNodePreservePrefixes, 1 << 23)
self.assertEqual(NSXMLNodePreserveCDATA, 1 << 24)
self.assertEqual(NSXMLNodePreserveWhitespace, 1 << 25)
self.assertEqual(NSXMLNodePreserveDTD, 1 << 26)
self.assertEqual(NSXMLNodePreserveCharacterReferences, 1 << 27)
self.assertEqual(NSXMLNodePreserveEmptyElements, (
NSXMLNodeExpandEmptyElement | NSXMLNodeCompactEmptyElement))
self.assertEqual(NSXMLNodePreserveQuotes, (NSXMLNodeUseSingleQuotes | NSXMLNodeUseDoubleQuotes))
self.assertEqual(NSXMLNodePreserveAll & 0xFFFFFFFF, 0xFFFFFFFF & (
NSXMLNodePreserveNamespaceOrder |
NSXMLNodePreserveAttributeOrder |
NSXMLNodePreserveEntities |
NSXMLNodePreservePrefixes |
NSXMLNodePreserveCDATA |
NSXMLNodePreserveEmptyElements |
NSXMLNodePreserveQuotes |
NSXMLNodePreserveWhitespace |
NSXMLNodePreserveDTD |<|fim▁hole|>
if __name__ == "__main__":
main()<|fim▁end|>
|
NSXMLNodePreserveCharacterReferences |
0xFFF00000))
|
<|file_name|>sess_file.go<|end_file_name|><|fim▁begin|>package session
import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"sync"
"time"
"github.com/insionng/macross"
)
var (
filepder = &FileProvider{}
gcMaxLifetime int64
)
// FileSessionStore File session store
type FileSessionStore struct {
sid string
lock sync.RWMutex
values map[interface{}]interface{}
}
// Set value to file session
func (fs *FileSessionStore) Set(key, value interface{}) error {
fs.lock.Lock()
defer fs.lock.Unlock()
fs.values[key] = value
return nil
}
// Get value from file session
func (fs *FileSessionStore) Get(key interface{}) interface{} {
fs.lock.RLock()
defer fs.lock.RUnlock()
if v, ok := fs.values[key]; ok {
return v
}
return nil
}
// Delete value in file session by given key
func (fs *FileSessionStore) Delete(key interface{}) error {
fs.lock.Lock()
defer fs.lock.Unlock()
delete(fs.values, key)
return nil
}
// Flush Clean all values in file session
func (fs *FileSessionStore) Flush() error {
fs.lock.Lock()
defer fs.lock.Unlock()
fs.values = make(map[interface{}]interface{})
return nil
}
// ID Get file session store id
func (fs *FileSessionStore) ID() string {
return fs.sid
}
// SessionRelease Write file session to local file with Gob string
func (fs *FileSessionStore) Release(ctx *macross.Context) (err error) {
var b []byte
b, err = EncodeGob(fs.values)
if err != nil {
return
}
_, err = os.Stat(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid))<|fim▁hole|> if err == nil {
f, err = os.OpenFile(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid), os.O_RDWR, 0777)
} else if os.IsNotExist(err) {
f, err = os.Create(path.Join(filepder.savePath, string(fs.sid[0]), string(fs.sid[1]), fs.sid))
} else {
return
}
f.Truncate(0)
f.Seek(0, 0)
f.Write(b)
f.Close()
return
}
// FileProvider File session provider
type FileProvider struct {
lock sync.RWMutex
maxLifetime int64
savePath string
}
// Init Init file session provider.
// savePath sets the session files path.
func (fp *FileProvider) Init(maxLifetime int64, savePath string) error {
fp.maxLifetime = maxLifetime
fp.savePath = savePath
return nil
}
// Read Read file session by sid.
// if file is not exist, create it.
// the file path is generated from sid string.
func (fp *FileProvider) Read(sid string) (macross.RawStore, error) {
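	// A session is stored at savePath/<sid[0]>/<sid[1]>/<sid>; the first two
	// characters of the sid shard sessions across directories.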
filepder.lock.Lock()
defer filepder.lock.Unlock()
err := os.MkdirAll(path.Join(fp.savePath, string(sid[0]), string(sid[1])), 0777)
if err != nil {
println(err.Error())
}
_, err = os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
var f *os.File
if err == nil {
f, err = os.OpenFile(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid), os.O_RDWR, 0777)
} else if os.IsNotExist(err) {
f, err = os.Create(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
} else {
return nil, err
}
os.Chtimes(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid), time.Now(), time.Now())
var kv map[interface{}]interface{}
b, err := ioutil.ReadAll(f)
if err != nil {
return nil, err
}
if len(b) == 0 {
kv = make(map[interface{}]interface{})
} else {
kv, err = DecodeGob(b)
if err != nil {
return nil, err
}
}
f.Close()
ss := &FileSessionStore{sid: sid, values: kv}
return ss, nil
}
// Exist Check file session exist.
// it checkes the file named from sid exist or not.
func (fp *FileProvider) Exist(sid string) bool {
filepder.lock.Lock()
defer filepder.lock.Unlock()
_, err := os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
if err == nil {
return true
}
return false
}
// Destory Remove all files in this save path
func (fp *FileProvider) Destory(sid string) error {
filepder.lock.Lock()
defer filepder.lock.Unlock()
os.Remove(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
return nil
}
// GC Recycle files in save path
func (fp *FileProvider) GC() {
filepder.lock.Lock()
defer filepder.lock.Unlock()
gcMaxLifetime = fp.maxLifetime
filepath.Walk(fp.savePath, gcpath)
}
// SessionCount Get active file session number.
// it walks save path to count files.
func (fp *FileProvider) Count() int {
a := &activeSession{}
err := filepath.Walk(fp.savePath, func(path string, f os.FileInfo, err error) error {
return a.visit(path, f, err)
})
if err != nil {
fmt.Printf("filepath.Walk() returned %v\n", err)
return 0
}
return a.total
}
// Regenerate Generate new sid for file session.
// it delete old file and create new file named from new sid.
func (fp *FileProvider) Regenerate(oldsid, sid string) (macross.RawStore, error) {
filepder.lock.Lock()
defer filepder.lock.Unlock()
err := os.MkdirAll(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1])), 0777)
if err != nil {
println(err.Error())
}
err = os.MkdirAll(path.Join(fp.savePath, string(sid[0]), string(sid[1])), 0777)
if err != nil {
println(err.Error())
}
_, err = os.Stat(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
var newf *os.File
if err == nil {
return nil, errors.New("newsid exist")
} else if os.IsNotExist(err) {
newf, err = os.Create(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
}
_, err = os.Stat(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1]), oldsid))
var f *os.File
if err == nil {
f, err = os.OpenFile(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1]), oldsid), os.O_RDWR, 0777)
io.Copy(newf, f)
} else if os.IsNotExist(err) {
newf, err = os.Create(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid))
} else {
return nil, err
}
f.Close()
os.Remove(path.Join(fp.savePath, string(oldsid[0]), string(oldsid[1])))
os.Chtimes(path.Join(fp.savePath, string(sid[0]), string(sid[1]), sid), time.Now(), time.Now())
var kv map[interface{}]interface{}
b, err := ioutil.ReadAll(newf)
if err != nil {
return nil, err
}
if len(b) == 0 {
kv = make(map[interface{}]interface{})
} else {
kv, err = DecodeGob(b)
if err != nil {
return nil, err
}
}
ss := &FileSessionStore{sid: sid, values: kv}
return ss, nil
}
// remove file in save path if expired
func gcpath(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
if (info.ModTime().Unix() + gcMaxLifetime) < time.Now().Unix() {
os.Remove(path)
}
return nil
}
type activeSession struct {
total int
}
func (as *activeSession) visit(paths string, f os.FileInfo, err error) error {
if err != nil {
return err
}
if f.IsDir() {
return nil
}
as.total = as.total + 1
return nil
}
func init() {
Register("file", filepder)
}<|fim▁end|>
|
var f *os.File
|
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010, 2degrees Limited <[email protected]>.
# All Rights Reserved.
#
# This file is part of djangoaudit <https://launchpad.net/django-audit/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests for djangoaudit"""
from datetime import datetime, timedelta, date
from decimal import Decimal
import os
# Have to set this here to ensure this is Django-like
os.environ['DJANGO_SETTINGS_MODULE'] = "tests.fixtures.sampledjango.settings"
from django.conf import settings
from django.db.models import Sum
from nose.tools import (eq_, ok_, assert_false, assert_not_equal, assert_raises,
raises)
from pymongo.errors import PyMongoError
from fixture.django_testcase import FixtureTestCase
#from mongofixture import MongoFixtureTestCase
from djangoaudit.models import (_coerce_data_to_model_types, _audit_model,
_coerce_to_bson_compatible, AuditedModel)
from djangoaudit.connection import MONGO_CONNECTION
from tests.fixtures.sampledjango.bsg.models import *
from tests.fixtures.sampledjango.bsg.fixtures import *
class TestEnsureBSONCompatible(object):
"""Test for :func:`_coerce_to_bson_compatible`"""
def test_decimal_to_float(self):
"""Ensure that :class:`Decimal` is converted to :class:`float`"""
got = _coerce_to_bson_compatible(Decimal('1234.5678'))
expected = 1234.5678
eq_(got, expected,
"Expected %r, got %r for Decimal to float conversion" %
(expected, got))
def test_date_to_datetime(self):
"""Ensure that :class:`date` is converted to :class:`datetime`"""
got = _coerce_to_bson_compatible(date(2001, 9, 11))
expected = datetime(2001, 9, 11)
eq_(got, expected,
"Expected %r, got %r for date to datetime conversion" %
(expected, got))
class MockModelMeta(object):
""" Mock of :class:`django.db.options.Options` """
def __init__(self, app_label, model_name):
self.app_label = app_label
self.object_name = model_name
class MockModel(object):
""" Mock of :class:`django.db.models.base.Model` """
def __init__(self, app_label, model_name, pk):
self._meta = MockModelMeta(app_label, model_name)
self.pk = pk
class TestAuditModel(object):
""" Tests for :func:`djangoaudit.models.audit_model` """
def setup(self):
self.audit_collection_name = "audit_data"
self.auditing_collection = MONGO_CONNECTION\
.get_collection(self.audit_collection_name)
self.profile = MockModel("profiles", "Profile", 123)
def fetch_record_by_id(self, id):
return self.auditing_collection.find_one({"_id":id})
def test_no_changes_empty_dicts(self):
"""Check that passing two empty value dicts results in a no-op"""
result = _audit_model(self.profile, {}, {})
eq_(result, None, "No changes should not result in anything being "
"written to the database")
def test_no_changes_same_values(self):
"""Check that passing two identical dicts results in a no-op"""
result = _audit_model(self.profile,
{'foo': 1, 'bar': 'wibble', 'empty': None,
'my_date': datetime(2001, 1, 1, 9, 12)},
{'foo': 1, 'bar': 'wibble', 'empty': None,
'my_date': datetime(2001, 1, 1, 9, 12)})
eq_(result, None, "No changes should not result in anything being "
"written to the database")
def test_single_change_no_other_diff(self):
"""Check that a single changed value is correctly recorded"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a single difference key")
def test_model_data_write_out(self):
"""Check the correct data is written out for the model"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['object_app'], self.profile._meta.app_label)
eq_(saved_record['object_model'], self.profile._meta.object_name)
eq_(saved_record['object_pk'], self.profile.pk)
def test_date_stamping(self):
"""Check that a date stamp is stored in along with the record"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
record_date_stamp = saved_record['audit_date_stamp']
now = datetime.utcnow()
ok_((now - timedelta(seconds=1)) < record_date_stamp < now,
"Date stamp should be almost the same as now (now: %s, got: %s"
% (now, record_date_stamp))
def test_addition_parameter_write_out(self):
"""Check that additional parameters are correctly stored"""
result = _audit_model(self.profile, dict(foo=None), dict(foo='bar'))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
def test_single_change_others_same(self):
"""Check that a single changed value is correctly recorded when there are no other differences"""
result = _audit_model(self.profile, dict(foo=None, wibble=0),
dict(foo='bar', wibble=0))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a single difference key")
ok_('wibble' not in saved_record, "There should be no "
"record of changes to the `wibble` key")
def test_multi_change_no_others(self):
"""Check that multiple changed values are correctly recorded when there are no other items"""
result = _audit_model(self.profile, dict(foo=None, wibble=0),
dict(foo='bar', wibble=1))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a difference for key `foo`")
eq_(saved_record['wibble'], 1,
"The saved record should contain a difference for key `wibble`")
def test_multi_change_others_same(self):
"""Check that multiple changed values are correctly recorded when there are no other differences"""
result = _audit_model(self.profile, dict(foo=None, wibble=0, body_count=1.00),
dict(foo='bar', wibble=1, body_count=1.00))
assert_not_equal(result, None,
"A change should result in a database object being "
"created")
saved_record = self.fetch_record_by_id(result)
eq_(saved_record['foo'], 'bar',
"The saved record should contain a difference for key `foo`")
eq_(saved_record['wibble'], 1,
"The saved record should contain a difference for key `wibble`")
ok_('body_count' not in saved_record, "There should be no "
"record of changes to the `body_count` key")
class TestCoerceDataToModelTypes(object):
"""Tests for :func:`_coerce_data_to_model_types`"""
def setup(self):
checks = (
('age', '40', 40),
('last_flight', date(2010, 1, 1), datetime(2010, 1, 1)),
('fastest_landing',71.10, Decimal("71.10")),
('is_cylon', 0, False),
)
self.initial_data, self.final_data = {}, {}
for key, initial, final in checks:
self.initial_data[key] = initial
self.final_data[key] = final
def test_for_instance(self):
"""Test _coerce_data_to_model_types for model instances"""
pilot = Pilot()
result = _coerce_data_to_model_types(pilot, self.initial_data)
eq_(result, self.final_data,
"Expected to get: %r, got %r" % (result, self.final_data))
def test_for_class(self):
"""Test _coerce_data_to_model_types for the model itself"""
result = _coerce_data_to_model_types(Pilot, self.initial_data)
eq_(result, self.final_data,
"Expected to get: %r, got %r" % (result, self.final_data))
class TestAuditedModel(FixtureTestCase):
"""Tests for AuditedModel"""
datasets = [PilotData, VesselData]
def setUp(self):
self.audit_collection_name = "audit_data"
self.auditing_collection = MONGO_CONNECTION\
.get_collection(self.audit_collection_name)
# Now set up the records:
self.helo = Pilot.objects.filter(call_sign="Helo")[0] # wtf - no idea why fixture seems to be putting two of these in the DB
self.athena = Pilot.objects.get(call_sign="Athena")
self.starbuck = Pilot.objects.get(call_sign="Starbuck")
self.apollo = Pilot.objects.get(call_sign="Apollo")
self.longshot = Pilot.objects.get(call_sign="Longshot")
self.raptor259 = Vessel.objects.get(name=VesselData.Raptor259.name)
@raises(AttributeError)
def test_meta_class(self):
"""Check that any values specified in log_fields which are no fields on the AuditedModel class cause an AttributeError to be raised"""
class NaughtyAuditedModel(AuditedModel):<|fim▁hole|>
def test_no_changes_no_extra(self):
"""Check that when there are no changes to a AuditedModel instance, no changes are recorded"""
# Set up the operator and some notes:
self.helo.set_audit_info(operator='me',
notes='This should not be recorded')
# Save a model with no changes:
self.helo.save()
# Now read back the log to see whether anything was put in there:
num_log_items = len(list(self.helo.get_audit_log()))
eq_(num_log_items, 1, "There should be only be one log entry for this "
"object - the creation log (found %d log entries)." % num_log_items)
def test_change_non_logger_field(self):
"""Check that altering non-logged fields doesn't result in a log entry being generated"""
self.helo.craft = 0
# Set up the operator and some notes:
self.helo.set_audit_info(operator='me',
notes='This should not be recorded')
self.helo.save()
# Now read back the log to see whether anything was put in there:
num_log_items = len(list(self.helo.get_audit_log()))
eq_(num_log_items, 1, "There should be one log entry for this object - "
"the creation log (found %d log entries)." % num_log_items)
def test_create_fresh_record(self):
"""Check that creation of a record logs all the fields correctly"""
self.athena.delete()
params = dict(first_name="Sharon",
last_name="Agathon",
call_sign="Athena",
age=29,
last_flight=datetime(2000, 3, 4, 7, 18),
craft=1,
is_cylon=True,
fastest_landing=Decimal("77.90"))
new_athena = Pilot(**params)
new_athena.save()
log = list(new_athena.get_audit_log())
# Check we've only got one log entry:
eq_(len(log), 1, "There should only be one entry for this object (found"
" %d)" % len(log))
entry = log[0]
# Now verify that we've only got the correct keys in the log, once we've
# popped off the extra ones:
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, new_athena.pk, "object_pk should be %r, got %r" %
(new_athena.pk, object_pk))
# Our resulting entry should have only the audit_changes key as there is
# only audited_data remaining:
expected_keys = set(('audit_changes',))#set(new_athena.log_fields)
found_keys = set(entry.keys())
eq_(expected_keys, found_keys, "Mismatch between expected fields in the"
" log. Expected %r, got %r" % (expected_keys, found_keys))
# Now verify that what's on the new model is what was logged:
for key, value in entry['audit_changes'].iteritems():
expected = (None, getattr(new_athena, key))
eq_(value, expected, "Expected to find %r with value: %r, got %r" %
(key, expected, value))
def test_partial_update(self):
"""Check that partial data updates are recorded correctly"""
orig_name = self.longshot.last_name
self.longshot.last_name = "New name"
orig_age = self.longshot.age
self.longshot.age = 30
orig_fastest_landing = self.longshot.fastest_landing
self.longshot.fastest_landing = Decimal("75.00")
# Ensure we've got some operator testing too:
operator, notes = "me", "This record should be updated"
self.longshot.set_audit_info(operator=operator,notes=notes)
# Now do the save:
self.longshot.save()
# Read back the log:
log = list(self.longshot.get_audit_log())
eq_(len(log), 2, "There should only be two entires for this object ("
"found %d)" % len(log))
entry = log[-1]
# Now verify that we've only got the correct keys in the log, once we've
# popped off the extra ones:
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
audit_operator = entry.pop('audit_operator')
audit_notes = entry.pop('audit_notes')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, self.longshot.pk, "object_pk should be %r, got %r" %
(self.longshot.pk, object_pk))
eq_(audit_operator, operator,
"operator should be %r, got %r" % (operator, audit_operator))
eq_(audit_notes, notes,
"notes should be %r, got %r" % (notes, audit_notes))
# Check we've only got one key left (audit_changes):
expected_keys = ['audit_changes']
found_keys = entry.keys()
eq_(expected_keys, found_keys, "Expected to find keys: %r, gor %r" %
(expected_keys, found_keys))
# Ensure that the new values were correctly recorded:
changes= entry['audit_changes']
eq_(changes['last_name'], (orig_name, self.longshot.last_name))
eq_(changes['age'], (orig_age, self.longshot.age))
eq_(changes['fastest_landing'], (orig_fastest_landing,
self.longshot.fastest_landing))
def test_dual_update(self):
"""Test that two log entries are generated for dual updates"""
self.apollo.age = 40
self.apollo.save()
self.apollo.age = 30
self.apollo.save()
log = list(self.apollo.get_audit_log())
eq_(len(log), 3, "There should be three entries in the log, got %d" %
len(log))
expected_ages = [(28, 40), (40, 30)]
for entry, age in zip(log[1:], expected_ages):
eq_(entry['audit_changes']['age'], age,
"Expected age to be %r, got %r" % (entry['audit_changes']['age'], age))
def test_delete(self):
"""Check that delete() records the final state of the model prior to deletion"""
# Define the lookup key we'll need parameters to look up the record:
pk = self.starbuck.pk
self.starbuck.delete()
# Delete another to make sure we don't get log cross-over:
apollo_pk = self.apollo.pk
self.apollo.set_audit_info(notes="Extra note")
self.apollo.delete()
# Get hold of the delete log:
log = list(Pilot.get_deleted_log(pk))
# Make sure there's only one entry:
eq_(len(log), 1,
"There should only be one deleted item for this pk (found %d)" %
len(log))
entry = log[0]
for field in Pilot.log_fields:
expected = getattr(PilotData.Starbuck, field)
found = entry[field]
eq_(expected, found,
"For field %r, expected %r, got %r" % (field, expected, found))
delete_note = "Object deleted. These are the attributes at delete time."
eq_(entry['audit_notes'], delete_note,
"Expected to find notes as: %r, got %r" %
(delete_note, entry['audit_notes']))
# Get hold of the delete log for apollo to check the delete note:
entry = list(Pilot.get_deleted_log(apollo_pk))[0]
got = entry['audit_notes']
expected = "%s\nExtra note" % delete_note
eq_(expected, got, "Expected note: %r, got %r" % (expected, got))
# Since we've deleted two items we can check that we've got the log for
# both of these:
log = list(Pilot.get_deleted_log())
eq_(len(log), 2,
"There should be two deleted log entries for this class (found %d)"
% len(log))
def test_arbitrary_audit(self):
"""Test the arbitrary auditing of data against a model"""
data = dict(hair_colour="Blond",
children=0,
kill_percentage=Decimal('98.7'))
self.starbuck.set_audit_info(**data)
self.starbuck.save()
log = list(self.starbuck.get_audit_log())
eq_(len(log), 2,
"There should only be two entries in the log (found %d)" % len(log))
entry = log[-1]
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
eq_(object_app, "bsg",
"object_app should be 'bsg', got %r" % object_app)
eq_(object_model, "Pilot",
"object_model should be 'Pilot', got %r" % object_model)
eq_(object_pk, self.starbuck.pk, "object_pk should be %r, got %r" %
(self.starbuck.pk, object_pk))
# Mongo stores Decimals as floats, so coerce what we expect:
data['kill_percentage'] = float(data['kill_percentage'])
eq_(entry, data, "Expecting %r, got %r" % (data, entry))
def test_foreign_keys(self):
"""Test the foreign keyed fields don't interfere with AuditedModel"""
# Due to a call in the metaclass of AuditedModel, the
# _meta.get_all_field_names does not behave correctly unless the cache
# is cleared after this call. Aggregation is one area where this
# manifests itself - here we're ensuring this doesn't fail:
field_names = Pilot._meta.get_all_field_names()
ok_("vessels" in field_names,
"The field names for the Pilot model should contain 'vessels', got "
"%s" % field_names)
# Now verify in aggregation this works:
vessel_sum = Pilot.objects.aggregate(Sum('vessels'))['vessels__sum']
eq_(vessel_sum, 1, "There should only be one vessel, got %r"
% vessel_sum)
def test_get_creation_log(self):
"""Test that the creation log can be retrieved correctly"""
# Create a new object:
hot_dog = Pilot(
first_name="Brendan",
last_name="Costanza",
call_sign="Hot Dog",
age=25,
last_flight=datetime(2000, 6, 4, 23, 01),
craft=1,
is_cylon=False,
fastest_landing=Decimal("101.67")
)
hot_dog.set_audit_info(operator="Admin",
flight_deck="Port side")
hot_dog.save()
# Retrieve the log as a check:
initial_log = hot_dog.get_creation_log()
# Make another entry:
hot_dog.fastest_landing = Decimal("99.98")
hot_dog.save()
# Check we've got two items in the log now:
found_logs = len(list(hot_dog.get_audit_log()))
eq_(2, found_logs, "Expected to find 2 logs, got %d" % found_logs)
# Now check the creation log:
creation_log = hot_dog.get_creation_log()
eq_(creation_log, initial_log, "Expecting initial log entry to be the "
"same as the creation log. Expected:\n%r,\n\ngot\n%r" %
(initial_log, creation_log))
# Test that fail gracefully when no creation log exists:
for item in hot_dog.get_audit_log():
self.auditing_collection.remove(item['_id'])
empty_log = hot_dog.get_creation_log()
eq_(empty_log, None, "The creation log should be None")
def test_get_deletion_log(self):
"""Test that deleted data can be retrieved"""
pre_delete_data = {}
for field in self.apollo.log_fields:
pre_delete_data[field] = getattr(self.apollo, field)
pk = self.apollo.pk
self.apollo.delete()
# Get the deletion log:
entry = list(Pilot.get_deleted_log(pk))[0]
object_app = entry.pop('object_app')
object_model = entry.pop('object_model')
object_pk = entry.pop('object_pk')
id = entry.pop('_id')
audit_date_stamp = entry.pop('audit_date_stamp')
audit_is_delete = entry.pop('audit_is_delete')
audit_notes = entry.pop('audit_notes')
ok_(audit_is_delete, "Should have audit_is_delete is True")
eq_(audit_notes,
'Object deleted. These are the attributes at delete time.')
eq_(pre_delete_data, entry,
"Expected to find deletion log as: %r, got %r" %
(pre_delete_data, entry))<|fim▁end|>
|
log_fields = ['foo', 'bar', 'wibble']
|
<|file_name|>protocol.rs<|end_file_name|><|fim▁begin|>//============================================================================
//
// A simple Mandelbrot image generator in Rust
//
// Protocol for communicating with Engine task
//
// Copyright (c) 2014 Gavin Baker <[email protected]>
// Published under the MIT license
//
//============================================================================
#![allow(dead_code)]
use std::vec::Vec;
//----------------------------------------------------------------------------
pub static PREVIEW_WIDTH: i32 = 256;
pub static PREVIEW_HEIGHT: i32 = 256;
//----------------------------------------------------------------------------
<|fim▁hole|> FullRender,
}
//----------------------------------------------------------------------------
#[derive(Debug)]
pub enum EngineStatus {
Startup,
Processing(u32),
RenderComplete(RenderType, Vec<u8>),
Error(u32)
}
//----------------------------------------------------------------------------
#[derive(Debug)]
pub enum EngineCommand {
UpdateRegion(f32, f32, f32, f32),
ZoomIn,
ZoomOut,
PanLeft,
PanRight,
PanUp,
PanDown,
Render(RenderType),
Shutdown,
}
//----------------------------------------------------------------------------<|fim▁end|>
|
#[derive(Debug)]
pub enum RenderType {
PreviewRender,
|
<|file_name|>0014_auto__chg_field_field_choices.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Field.choices'
db.alter_column(u'forms_field', 'choices', self.gf('django.db.models.fields.CharField')(max_length=5000))
def backwards(self, orm):
# Changing field 'Field.choices'
db.alter_column(u'forms_field', 'choices', self.gf('django.db.models.fields.CharField')(max_length=1000))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),<|fim▁hole|> },
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'forms.field': {
'Meta': {'ordering': "(u'order',)", 'object_name': 'Field'},
'choices': ('django.db.models.fields.CharField', [], {'max_length': '5000', 'blank': 'True'}),
'condition': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'dependency': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'blank': 'True'}),
'field_type': ('django.db.models.fields.IntegerField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fields'", 'to': u"orm['forms.Form']"}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'merge': ('django.db.models.fields.CharField', [], {'default': "u'0'", 'max_length': '100', 'blank': 'True'}),
'meta': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'placeholder_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "u''", 'max_length': '100', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'forms.fieldentry': {
'Meta': {'object_name': 'FieldEntry'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fields'", 'to': u"orm['forms.FormEntry']"}),
'field_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True'})
},
u'forms.form': {
'Meta': {'object_name': 'Form'},
'button_text': ('django.db.models.fields.CharField', [], {'default': "u'Submit'", 'max_length': '50'}),
'email_copies': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'email_from': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_subject': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'redirect_url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[2]', 'related_name': "u'forms_form_forms'", 'symmetrical': 'False', 'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'forms.formentry': {
'Meta': {'object_name': 'FormEntry'},
'entry_time': ('django.db.models.fields.DateTimeField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'entries'", 'to': u"orm['forms.Form']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['forms']<|fim▁end|>
|
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
|
<|file_name|>ptv.js<|end_file_name|><|fim▁begin|>// config/ptv.js<|fim▁hole|>};<|fim▁end|>
|
module.exports.ptv = {
devId: 'xxx',
devSecret: 'xxx',
|
<|file_name|>errors.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Thomas Quintana <[email protected]>
CLASS_REGISTRATION_CONFLICT = "The actor %s has already been registered as " \
"a singleton actor."
INSTANCE_REGISTRATION_CONFLICT = "The actor %s has already been registered " \
"as a non singleton actor."
INVALID_ACTOR_CLASS = "The class %s is not a subclass of " \
"'freepy.lib.actors.actor.Actor'."<|fim▁end|>
|
# with the License. You may obtain a copy of the License at
#
|
<|file_name|>Rx.KitchenSink.d.ts<|end_file_name|><|fim▁begin|>import { Subject } from './Subject';
import { Observable } from './Observable';
import { CoreOperators } from './CoreOperators';
import { Scheduler as IScheduler } from './Scheduler';
export interface KitchenSinkOperators<T> extends CoreOperators<T> {
isEmpty?: () => Observable<boolean>;
elementAt?: (index: number, defaultValue?: any) => Observable<T>;
distinctUntilKeyChanged?: (key: string, compare?: (x: any, y: any) => boolean) => Observable<T>;
find?: (predicate: (value: T, index: number, source: Observable<T>) => boolean, thisArg?: any) => Observable<T>;
findIndex?: (predicate: (value: T, index: number, source: Observable<T>) => boolean, thisArg?: any) => Observable<number>;
max?: <T, R>(comparer?: (x: R, y: T) => R) => Observable<R>;
min?: <T, R>(comparer?: (x: R, y: T) => R) => Observable<R>;
pairwise?: <R>() => Observable<R>;
timeInterval?: <T>(scheduler?: IScheduler) => Observable<T>;
mergeScan?: <T, R>(project: (acc: R, x: T) => Observable<R>, seed: R, concurrent?: number) => Observable<R>;
exhaust?: () => Observable<T>;
exhaustMap?: <R>(project: ((x: T, ix: number) => Observable<any>), projectResult?: (x: T, y: any, ix: number, iy: number) => R) => Observable<R>;
}
import { Observer } from './Observer';
import { Subscription } from './Subscription';
import { Subscriber } from './Subscriber';
import { AsyncSubject } from './subject/AsyncSubject';
import { ReplaySubject } from './subject/ReplaySubject';
import { BehaviorSubject } from './subject/BehaviorSubject';
import { ConnectableObservable } from './observable/ConnectableObservable';<|fim▁hole|>import { Notification } from './Notification';
import { EmptyError } from './util/EmptyError';
import { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';
import { ArgumentOutOfRangeError } from './util/ArgumentOutOfRangeError';
import { AsapScheduler } from './scheduler/AsapScheduler';
import { QueueScheduler } from './scheduler/QueueScheduler';
import { TimeInterval } from './operator/timeInterval';
import { TestScheduler } from './testing/TestScheduler';
import { VirtualTimeScheduler } from './scheduler/VirtualTimeScheduler';
declare var Scheduler: {
asap: AsapScheduler;
queue: QueueScheduler;
};
declare var Symbol: {
rxSubscriber: any;
};
export { Subject, Scheduler, Observable, Observer, Subscriber, Subscription, AsyncSubject, ReplaySubject, BehaviorSubject, ConnectableObservable, Notification, EmptyError, ArgumentOutOfRangeError, ObjectUnsubscribedError, TestScheduler, VirtualTimeScheduler, TimeInterval, Symbol };<|fim▁end|>
| |
<|file_name|>01_starttime_duration.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
Show how to use `dur` and `delay` parameters of play() and out()
methods to sequence events over time.
<|fim▁hole|>"""
from pyo import *
import random
s = Server(duplex=0).boot()
num = 70
freqs = [random.uniform(100, 1000) for i in range(num)]
start1 = [i * 0.5 for i in range(num)]
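# Lists passed to `dur`/`delay` schedule each stream separately: voice i
# starts i * 0.5 seconds in and plays (and fades) for 5 seconds.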
fade1 = Fader([1] * num, 1, 5, mul=0.03).play(dur=5, delay=start1)
a = SineLoop(freqs, feedback=0.05, mul=fade1).out(dur=5, delay=start1)
start2 = 30
dur2 = 40
snds = [
"../snds/alum1.wav",
"../snds/alum2.wav",
"../snds/alum3.wav",
"../snds/alum4.wav",
]
tabs = SndTable(snds)
fade2 = Fader(0.05, 10, dur2, mul=0.7).play(dur=dur2, delay=start2)
b = Beat(time=0.125, w1=[90, 30, 30, 20], w2=[30, 90, 50, 40], w3=[0, 30, 30, 40], poly=1).play(dur=dur2, delay=start2)
out = TrigEnv(b, tabs, b["dur"], mul=b["amp"] * fade2).out(dur=dur2, delay=start2)
start3 = 45
dur3 = 30
fade3 = Fader(15, 15, dur3, mul=0.02).play(dur=dur3, delay=start3)
fm = FM(carrier=[149, 100, 151, 50] * 3, ratio=[0.2499, 0.501, 0.75003], index=10, mul=fade3).out(
dur=dur3, delay=start3
)
s.gui(locals())<|fim▁end|>
| |
<|file_name|>block_where_pred.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
where
Ipsum: Eq,
Dolor: Eq,
Sit: Eq,
Amet: Eq,
{
// body
}<|fim▁end|>
|
// rustfmt-indent_style: Block
// Where predicate indent
|
<|file_name|>kill.rs<|end_file_name|><|fim▁begin|>#![crate_name = "kill"]
#![feature(collections, core, old_io, rustc_private, unicode)]
<|fim▁hole|> *
* For the full copyright and license information, please view the LICENSE file
* that was distributed with this source code.
*/
extern crate getopts;
extern crate libc;
extern crate collections;
extern crate serialize;
#[macro_use] extern crate log;
use std::process::Child;
use getopts::{
getopts,
optopt,
optflag,
optflagopt,
usage,
};
use signals::ALL_SIGNALS;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
#[path = "signals.rs"]
mod signals;
static NAME: &'static str = "kill";
static VERSION: &'static str = "0.0.1";
static EXIT_OK: i32 = 0;
static EXIT_ERR: i32 = 1;
#[derive(Clone)]
pub enum Mode {
Kill,
Table,
List,
Help,
Version,
}
impl Copy for Mode {}
pub fn main(args: Vec<String>) -> i32 {
let opts = [
optflag("h", "help", "display this help and exit"),
optflag("V", "version", "output version information and exit"),
optopt("s", "signal", "specify the <signal> to be sent", "SIGNAL"),
optflagopt("l", "list", "list all signal names, or convert one to a name", "LIST"),
optflag("L", "table", "list all signal names in a nice table"),
];
let usage = usage("[options] <pid> [...]", &opts);
let (args, obs_signal) = handle_obsolete(args);
let matches = match getopts(args.tail(), &opts) {
Ok(m) => m,
Err(e) => {
show_error!("{}\n{}", e, get_help_text(NAME, usage.as_slice()));
return EXIT_ERR;
},
};
let mode = if matches.opt_present("version") {
Mode::Version
} else if matches.opt_present("help") {
Mode::Help
} else if matches.opt_present("table") {
Mode::Table
} else if matches.opt_present("list") {
Mode::List
} else {
Mode::Kill
};
match mode {
Mode::Kill => return kill(matches.opt_str("signal").unwrap_or(obs_signal.unwrap_or("9".to_string())).as_slice(), matches.free),
Mode::Table => table(),
Mode::List => list(matches.opt_str("list")),
Mode::Help => help(NAME, usage.as_slice()),
Mode::Version => version(),
}
0
}
fn version() {
println!("{} {}", NAME, VERSION);
}
fn handle_obsolete(mut args: Vec<String>) -> (Vec<String>, Option<String>) {
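    // Supports the historical `kill -9 <pid>` form: find a leading
    // `-<number>` naming a valid signal, strip it from argv, and return the
    // signal separately so getopts never sees it.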
let mut i = 0;
while i < args.len() {
// this is safe because slice is valid when it is referenced
let slice: &str = unsafe { std::mem::transmute(args[i].as_slice()) };
if slice.char_at(0) == '-' && slice.len() > 1 && slice.char_at(1).is_digit(10) {
let val = &slice[1..];
match val.parse() {
Ok(num) => {
if signals::is_signal(num) {
args.remove(i);
return (args, Some(val.to_string()));
}
}
Err(_)=> break /* getopts will error out for us */
}
}
i += 1;
}
(args, None)
}
fn table() {
let mut name_width = 0;
/* Compute the maximum width of a signal name. */
for s in ALL_SIGNALS.iter() {
if s.name.len() > name_width {
name_width = s.name.len()
}
}
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
print!("{0: >#2} {1: <#8}", idx+1, signal.name);
//TODO: obtain max signal width here
if (idx+1) % 7 == 0 {
println!("");
}
}
}
fn print_signal(signal_name_or_value: &str) {
for signal in ALL_SIGNALS.iter() {
if signal.name == signal_name_or_value || (format!("SIG{}", signal.name).as_slice()) == signal_name_or_value {
println!("{}", signal.value);
exit!(EXIT_OK as i32)
} else if signal_name_or_value == signal.value.as_slice() {
println!("{}", signal.name);
exit!(EXIT_OK as i32)
}
}
crash!(EXIT_ERR, "unknown signal name {}", signal_name_or_value)
}
fn print_signals() {
let mut pos = 0;
for (idx, signal) in ALL_SIGNALS.iter().enumerate() {
pos += signal.name.len();
print!("{}", signal.name);
if idx > 0 && pos > 73 {
println!("");
pos = 0;
} else {
pos += 1;
print!(" ");
}
}
}
fn list(arg: Option<String>) {
match arg {
Some(x) => print_signal(x.as_slice()),
None => print_signals(),
};
}
fn get_help_text(progname: &str, usage: &str) -> String {
format!("Usage: \n {0} {1}", progname, usage)
}
fn help(progname: &str, usage: &str) {
println!("{}", get_help_text(progname, usage));
}
fn kill(signalname: &str, pids: std::vec::Vec<String>) -> i32 {
let mut status = 0;
let optional_signal_value = signals::signal_by_name_or_value(signalname);
let signal_value = match optional_signal_value {
Some(x) => x,
None => crash!(EXIT_ERR, "unknown signal name {}", signalname)
};
for pid in pids.iter() {
match pid.as_slice().parse() {
Ok(x) => {
let result = Child::kill(x, signal_value as isize);
match result {
Ok(_) => (),
Err(f) => {
show_error!("{}", f);
status = 1;
}
};
},
Err(e) => crash!(EXIT_ERR, "failed to parse argument {}: {}", pid, e)
};
}
status
}<|fim▁end|>
|
/*
* This file is part of the uutils coreutils package.
*
* (c) Maciej Dziardziel <[email protected]>
|
<|file_name|>merch-display.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Location } from '@angular/common';
import { Merch } from '../data/merch';
import { MerchService } from '../data/merch.service';
@Component({
selector: 'app-merch-display',
templateUrl: './merch-display.component.html',
styleUrls: ['./merch-display.component.scss'],
})
export class MerchDisplayComponent implements OnInit {
merch: Merch[] = [];
private _serviceWorker: ServiceWorker|null = null;
constructor(
private route: ActivatedRoute,
private merchService: MerchService,
private location: Location
) {}
ngOnInit(): void {
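    // Cache the active service worker so route changes can notify it below.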
    navigator.serviceWorker.ready.then((registration) => {
this._serviceWorker = registration.active;
});
this.route.params.subscribe((routeParams) => {
this.getMerch(routeParams.category);
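      // The cached worker may still be null here if the `ready` promise has
      // not resolved before the first route emission.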
if (this._serviceWorker) {
this._serviceWorker.postMessage({ page: routeParams.category });
}
});
}
getMerch(category: string): void {<|fim▁hole|> this.merchService
.getMerchList(category)
.then((merch) => (this.merch = merch));
}
goBack(): void {
this.location.back();
}
}<|fim▁end|>
|
|
<|file_name|>issue-54462-mutable-noalias-correctness.rs<|end_file_name|><|fim▁begin|>// run-pass
//
// compile-flags: -Ccodegen-units=1 -O
fn linidx(row: usize, col: usize) -> usize {
row * 1 + col * 3
}
fn main() {
let mut mat = [1.0f32, 5.0, 9.0, 2.0, 6.0, 10.0, 3.0, 7.0, 11.0, 4.0, 8.0, 12.0];
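    // `mat` is a 3x4 matrix stored column-major (see `linidx`); the loops
    // order the rows by their last column, swapping row elements through two
    // disjoint mutable borrows obtained from `split_at_mut`.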
for i in 0..2 {
for j in i+1..3 {
if mat[linidx(j, 3)] > mat[linidx(i, 3)] {
for k in 0..4 {<|fim▁hole|> let a = x.last_mut().unwrap();
let b = rest.get_mut(linidx(j, k) - linidx(i, k) - 1).unwrap();
::std::mem::swap(a, b);
}
}
}
}
assert_eq!([9.0, 5.0, 1.0, 10.0, 6.0, 2.0, 11.0, 7.0, 3.0, 12.0, 8.0, 4.0], mat);
}<|fim▁end|>
|
let (x, rest) = mat.split_at_mut(linidx(i, k) + 1);
|
<|file_name|>getters.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from pootle.core.delegate import config
from pootle.core.plugin import getter
from .exceptions import ConfigurationError
from .models import Config
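# Resolve configuration at three scopes: site-wide when no sender is given,
# model-wide when only `sender` is given, and per-object when an `instance`
# of `sender` is supplied as well.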
@getter(config)
def config_getter(**kwargs):
sender = kwargs["sender"]
instance = kwargs.get("instance")
key = kwargs.get("key")
if sender:
if instance is not None and not isinstance(instance, sender):
raise ConfigurationError(
"'instance' must be an instance of 'sender', when specified")
conf = Config.objects.get_config_queryset(instance or sender)
elif instance:
raise ConfigurationError(
"'sender' must be defined when 'instance' is specified")
else:
conf = Config.objects.site_config()<|fim▁hole|> if isinstance(key, (list, tuple)):
return conf.list_config(key)
try:
return conf.get_config(key)
except Config.MultipleObjectsReturned as e:
raise ConfigurationError(e)<|fim▁end|>
|
if key is None:
return conf
|
<|file_name|>createproduct.js<|end_file_name|><|fim▁begin|>/* jshint node: true */
'use strict';
var util = require('util');
var _ = require('underscore');
var defaults = require('../defaults');
var options = require('../options');
var descriptor = defaults.defaultDescriptor({
'productName': {
name: 'Product Name',
required: true
},
'productDesc': {
name: 'Description'
},
'proxies': {
name: 'API Proxies',
required: true
},
'environments':{
name: 'Environments',
required: true
},
'approvalType': {
name: 'Approval Type',
required: true
},
'quota' : {
name: 'Quota',
},
'quotaInterval':{
name: 'Quota Interval'
},
'quotaTimeUnit': {
name:'Quota Time Unit'
},
'scopes': {
name: "Scope",
}
});
module.exports.descriptor = descriptor;
module.exports.run = function(opts, cb) {
options.validateSync(opts, descriptor);
if (opts.debug) {
console.log('createProduct: %j', opts);
}
var request = defaults.defaultRequest(opts);
createProduct(opts, request, function(err, results) {
if (err) {
cb(err);
} else {
if (opts.debug) {
console.log('results: %j', results);
}
cb(undefined, results);
}
});
};
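// Assemble the API product payload from the parsed options and POST it to
// /v1/o/{org}/apiproducts on the management API; `done` receives an Error or
// the parsed response body.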
function createProduct(opts, request, done) {
  var product = {
    "approvalType": "auto",
    "attributes": [ {"name": "access", "value": "public"} ],
    "scopes": []
  };
  product.name = opts.productName;
  product.displayName = opts.productName;
  product.description = opts.productDesc;
  product.proxies = [];
  if (opts.proxies) {
    var split = opts.proxies.split(',');
    split.forEach(function(s) {
      if (s && s.trim() != '') {
        product.proxies.push(s.trim());
      }
    });
  }
  product.apiResources = [];
  if (opts.apiResources) {
    var split = opts.apiResources.split(',');
    split.forEach(function(s) {
      if (s && s.trim() != '') {
        product.apiResources.push(s.trim());
      }
    });
  }
  if (opts.scopes) {
    var split = opts.scopes.split(',');
    split.forEach(function(s) {
      if (s && s.trim() != '') {
        product.scopes.push(s.trim());
      }
    });
  }
  product.environments = [];
if(opts.environments){<|fim▁hole|> product.environments.push(s.trim())
      }
    });
  }
  if (opts.quota && opts.quotaInterval && opts.quotaTimeUnit) {
    product.quota = opts.quota;
    product.quotaInterval = opts.quotaInterval;
    product.quotaTimeUnit = opts.quotaTimeUnit;
  }
var uri = util.format('%s/v1/o/%s/apiproducts', opts.baseuri, opts.organization);
  request({
    uri: uri,
    method: 'POST',
    body: product,
    json: true
  }, function(err, res, body) {
    var jsonBody = body;
if (opts.debug) {
console.log('Error occured %s', err);
}
done(err)
}else if (res.statusCode === 201) {
if (opts.verbose) {
console.log('Create successful');
}
if (opts.debug) {
console.log('%s', body);
}
done(undefined, jsonBody);
}else {
if (opts.verbose) {
console.error('Create Product result: %j', body);
}
var errMsg;
if (jsonBody && (jsonBody.message)) {
errMsg = jsonBody.message;
} else {
errMsg = util.format('Create Product failed with status code %d', res.statusCode);
}
done(new Error(errMsg));
}
    }
  });
}<|fim▁end|>
|
var split = opts.environments.split(',')
split.forEach(function(s){
if(s && s.trim()!= '') {
|