prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---
<|file_name|>jquery.elastislide.js<|end_file_name|><|fim▁begin|>(function( window, $, undefined ) {
// http://www.netcu.de/jquery-touchwipe-iphone-ipad-library
$.fn.touchwipe = function(settings) {
var config = {
min_move_x: 20,
min_move_y: 20,
wipeLeft: function() { },
wipeRight: function() { },
wipeUp: function() { },
wipeDown: function() { },
preventDefaultEvents: true
};
if (settings) $.extend(config, settings);
this.each(function() {
var startX;
var startY;
var isMoving = false;
function cancelTouch() {
this.removeEventListener('touchmove', onTouchMove);
startX = null;
isMoving = false;
}
function onTouchMove(e) {
if(config.preventDefaultEvents) {
e.preventDefault();
}
if(isMoving) {
var x = e.touches[0].pageX;
var y = e.touches[0].pageY;
var dx = startX - x;
var dy = startY - y;
if(Math.abs(dx) >= config.min_move_x) {
cancelTouch();
if(dx > 0) {
config.wipeLeft();
}
else {
config.wipeRight();
}
}
else if(Math.abs(dy) >= config.min_move_y) {
cancelTouch();
if(dy > 0) {
config.wipeDown();
}
else {
config.wipeUp();
}
}
}
}
function onTouchStart(e)
{
if (e.touches.length == 1) {
startX = e.touches[0].pageX;
startY = e.touches[0].pageY;
isMoving = true;
this.addEventListener('touchmove', onTouchMove, false);
}
}
if ('ontouchstart' in document.documentElement) {
this.addEventListener('touchstart', onTouchStart, false);
}
});
return this;
};
$.elastislide = function( options, element ) {
this.$el = $( element );
this._init( options );
};
$.elastislide.defaults = {
speed : 450, // animation speed
easing : '', // animation easing effect
imageW : 190, // the images width
margin : 3, // image margin right
border : 2, // image border
minItems : 1, // the minimum number of items to show.
// when we resize the window, this will make sure minItems are always shown
// (unless of course minItems is higher than the total number of elements)
current : 0, // index of the current item
// when we resize the window, the carousel will make sure this item is visible
navPrev :'<span class="es-nav-prev">Prev</span>',
navNext :'<span class="es-nav-next">Next</span>',
onClick : function() { return false; } // click item callback
};
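// Illustrative usage (a sketch based on the defaults above; the markup selector is assumed):
// $( '#carousel' ).elastislide({
// speed : 250,
// minItems : 2,
// onClick : function( $item ) { console.log( 'clicked item', $item.index() ); }
// });
// Later calls dispatch by method name, e.g. $( '#carousel' ).elastislide( 'setCurrent', 3 );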
$.elastislide.prototype = {
_init : function( options ) {
this.options = $.extend( true, {}, $.elastislide.defaults, options );
// <ul>
this.$slider = this.$el.find('ul');
// <li>
this.$items = this.$slider.children('li');
// total number of elements / images
this.itemsCount = this.$items.length;
// cache the <ul>'s parent, since we will eventually need to recalculate its width on window resize
this.$esCarousel = this.$slider.parent();
// validate options
this._validateOptions();
// set sizes and initialize some vars...
this._configure();
// add navigation buttons
this._addControls();
// initialize the events
this._initEvents();
// show the <ul>
this.$slider.show();
// slide to current's position
this._slideToCurrent( false );
},
_validateOptions : function() {
if( this.options.speed < 0 )
this.options.speed = 450;
if( this.options.margin < 0 )
this.options.margin = 4;
if( this.options.border < 0 )
this.options.border = 1;
if( this.options.minItems < 1 || this.options.minItems > this.itemsCount )
this.options.minItems = 1;
if( this.options.current > this.itemsCount - 1 )
this.options.current = 0;
},
_configure : function() {
// current item's index
this.current = this.options.current;
// the ul's parent's (div.es-carousel) width is the "visible" width
this.visibleWidth = this.$esCarousel.width();
// test to see if we need to initially resize the items
if( this.visibleWidth < this.options.minItems * ( this.options.imageW + 2 * this.options.border ) + ( this.options.minItems - 1 ) * this.options.margin ) {
this._setDim( ( this.visibleWidth - ( this.options.minItems - 1 ) * this.options.margin ) / this.options.minItems );
this._setCurrentValues();
// how many items fit with the current width
this.fitCount = this.options.minItems;
}
else {
this._setDim();
this._setCurrentValues();
}
// set the <ul> width
this.$slider.css({
width : this.sliderW
});
},
_setDim : function( elW ) {
// <li> style
this.$items.css({
marginRight : this.options.margin,
width : ( elW ) ? elW : this.options.imageW + 2 * this.options.border
}).children('a').css({ // <a> style
borderWidth : this.options.border
});
},
_setCurrentValues : function() {
// the total space occupied by one item
this.itemW = this.$items.outerWidth(true);
// total width of the slider / <ul>
// this will eventually change on window resize
this.sliderW = this.itemW * this.itemsCount;
// the ul parent's (div.es-carousel) width is the "visible" width
this.visibleWidth = this.$esCarousel.width();
// how many items fit with the current width
this.fitCount = Math.floor( this.visibleWidth / this.itemW );
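// e.g., with the defaults above (imageW 190, border 2, margin 3) each <li> occupies
// 190 + 2*2 + 3 = 197px, so a 600px-wide carousel fits Math.floor( 600 / 197 ) = 3 items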
},
_addControls : function() {
this.$navNext = $(this.options.navNext);
this.$navPrev = $(this.options.navPrev);
$('<div class="es-nav"/>')
.append( this.$navPrev )
.append( this.$navNext )
.appendTo( this.$el );
//this._toggleControls();
},
_toggleControls : function( dir, status ) {
// show / hide navigation buttons
if( dir && status ) {
if( status === 1 )
( dir === 'right' ) ? this.$navNext.show() : this.$navPrev.show();
else
( dir === 'right' ) ? this.$navNext.hide() : this.$navPrev.hide();
}
else if( this.current === this.itemsCount - 1 || this.fitCount >= this.itemsCount )
this.$navNext.hide();
},
_initEvents : function() {
var instance = this;
// window resize
$(window).on('resize.elastislide', function( event ) {
instance._reload();
// slide to the current element
clearTimeout( instance.resetTimeout );
instance.resetTimeout = setTimeout(function() {
instance._slideToCurrent();
}, 200);
});
// navigation buttons events
this.$navNext.on('click.elastislide', function( event ) {
instance._slide('right');
});
this.$navPrev.on('click.elastislide', function( event ) {
instance._slide('left');
});
// item click event
this.$slider.on('click.elastislide', 'li', function( event ) {
instance.options.onClick( $(this) );
return false;
});
// touch events
instance.$slider.touchwipe({
wipeLeft : function() {
instance._slide('right');
},
wipeRight : function() {
instance._slide('left');
}
});
},
reload : function( callback ) {
this._reload();
if ( callback ) callback.call();
},
_reload : function() {
var instance = this;
// set values again
instance._setCurrentValues();
// need to resize items
if( instance.visibleWidth < instance.options.minItems * ( instance.options.imageW + 2 * instance.options.border ) + ( instance.options.minItems - 1 ) * instance.options.margin ) {
instance._setDim( ( instance.visibleWidth - ( instance.options.minItems - 1 ) * instance.options.margin ) / instance.options.minItems );
instance._setCurrentValues();
instance.fitCount = instance.options.minItems;
}
else{
instance._setDim();
instance._setCurrentValues();
}
instance.$slider.css({
width : instance.sliderW + 10 // TODO: +10px seems to solve a firefox "bug" :S
});
},
_slide : function( dir, val, anim, callback ) {
// if animating return
//if( this.$slider.is(':animated') )
//return false;
// current margin left
var ml = parseFloat( this.$slider.css('margin-left') );
// val is just passed when we want an exact value for the margin left (used in the _slideToCurrent function)
if( val === undefined ) {
// how much to slide?
var amount = this.fitCount * this.itemW;
if( amount < 0 ) return false;
// make sure not to leave a space between the last item / first item and the end / beginning of the slider available width
if( dir === 'right' && this.sliderW - ( Math.abs( ml ) + amount ) < this.visibleWidth ) {
amount = this.sliderW - ( Math.abs( ml ) + this.visibleWidth ) - this.options.margin; // decrease the margin left
// show / hide navigation buttons
this._toggleControls( 'right', -1 );
this._toggleControls( 'left', 1 );
}
else if( dir === 'left' && Math.abs( ml ) - amount < 0 ) {
amount = Math.abs( ml );
// show / hide navigation buttons
this._toggleControls( 'left', -1 );
this._toggleControls( 'right', 1 );
}
else {
var fml; // future margin left
( dir === 'right' )
? fml = Math.abs( ml ) + this.options.margin + Math.abs( amount )
: fml = Math.abs( ml ) - this.options.margin - Math.abs( amount );<|fim▁hole|>
// show / hide navigation buttons
if( fml > 0 )
this._toggleControls( 'left', 1 );
else
this._toggleControls( 'left', -1 );
if( fml < this.sliderW - this.visibleWidth )
this._toggleControls( 'right', 1 );
else
this._toggleControls( 'right', -1 );
}
( dir === 'right' ) ? val = '-=' + amount : val = '+=' + amount;
}
else {
var fml = Math.abs( val ); // future margin left
if( Math.max( this.sliderW, this.visibleWidth ) - fml < this.visibleWidth ) {
val = - ( Math.max( this.sliderW, this.visibleWidth ) - this.visibleWidth );
if( val !== 0 )
val += this.options.margin; // decrease the margin left if not on the first position
// show / hide navigation buttons
this._toggleControls( 'right', -1 );
fml = Math.abs( val );
}
// show / hide navigation buttons
if( fml > 0 )
this._toggleControls( 'left', 1 );
else
this._toggleControls( 'left', -1 );
if( Math.max( this.sliderW, this.visibleWidth ) - this.visibleWidth > fml + this.options.margin )
this._toggleControls( 'right', 1 );
else
this._toggleControls( 'right', -1 );
}
$.fn.applyStyle = ( anim === undefined ) ? $.fn.animate : $.fn.css;
var sliderCSS = { marginLeft : val };
var instance = this;
this.$slider.stop().applyStyle( sliderCSS, $.extend( true, [], { duration : this.options.speed, easing : this.options.easing, complete : function() {
if( callback ) callback.call();
} } ) );
},
_slideToCurrent : function( anim ) {
// how much to slide?
var amount = this.current * this.itemW;
this._slide('', -amount, anim );
},
add : function( $newelems, callback ) {
// adds new items to the carousel
this.$items = this.$items.add( $newelems );
this.itemsCount = this.$items.length;
this._setDim();
this._setCurrentValues();
this.$slider.css({
width : this.sliderW
});
this._slideToCurrent();
if ( callback ) callback.call( $newelems );
},
setCurrent : function( idx, callback ) {
this.current = idx;
var ml = Math.abs( parseFloat( this.$slider.css('margin-left') ) ),
posR = ml + this.visibleWidth,
fml = Math.abs( this.current * this.itemW );
if( fml + this.itemW > posR || fml < ml ) {
this._slideToCurrent();
}
if ( callback ) callback.call();
},
destroy : function( callback ) {
this._destroy( callback );
},
_destroy : function( callback ) {
this.$el.off('.elastislide').removeData('elastislide');
$(window).off('.elastislide');
if ( callback ) callback.call();
}
};
var logError = function( message ) {
if ( this.console ) {
console.error( message );
}
};
$.fn.elastislide = function( options ) {
if ( typeof options === 'string' ) {
var args = Array.prototype.slice.call( arguments, 1 );
this.each(function() {
var instance = $.data( this, 'elastislide' );
if ( !instance ) {
logError( "cannot call methods on elastislide prior to initialization; " +
"attempted to call method '" + options + "'" );
return;
}
if ( !$.isFunction( instance[options] ) || options.charAt(0) === "_" ) {
logError( "no such method '" + options + "' for elastislide instance" );
return;
}
instance[ options ].apply( instance, args );
});
}
else {
this.each(function() {
var instance = $.data( this, 'elastislide' );
if ( !instance ) {
$.data( this, 'elastislide', new $.elastislide( options, this ) );
}
});
}
return this;
};
})( window, jQuery );<|fim▁end|> | |
<|file_name|>script.py<|end_file_name|><|fim▁begin|>"""
None of the functions/objects in this module need be passed `db`.
Naming convention: a `pub` is either a pubkey or a pubkeyhash
"""
import hashlib
import bitcoin as bitcoinlib
import binascii
from bitcoin.core.key import CPubKey
from counterpartylib.lib import util
from counterpartylib.lib import config
from counterpartylib.lib import exceptions
b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
class InputError (Exception):
pass
class AddressError(Exception):
pass
class MultiSigAddressError(AddressError):
pass
class VersionByteError (AddressError):
pass
class Base58Error (AddressError):
pass
class Base58ChecksumError (Base58Error):
pass
def validate(address):
"""Make sure the address is valid.
May throw `AddressError`.
"""
# Get array of pubkeyhashes to check.
if is_multisig(address):
pubkeyhashes = pubkeyhash_array(address)
else:
pubkeyhashes = [address]
# Check validity by attempting to decode.
for pubkeyhash in pubkeyhashes:
base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
def base58_encode(binary):
"""Encode the address in base58."""
# Convert big‐endian bytes to integer
n = int('0x0' + util.hexlify(binary), 16)
# Divide that integer into base58
res = []
while n > 0:
n, r = divmod(n, 58)
res.append(b58_digits[r])
res = ''.join(res[::-1])
return res
def base58_check_encode(original, version):
"""Check if base58 encoding is valid."""
b = binascii.unhexlify(bytes(original, 'utf-8'))
d = version + b
binary = d + util.dhash(d)[:4]
res = base58_encode(binary)
# Encode leading zeros as base58 zeros
czero = 0
pad = 0
for c in d:
if c == czero:
pad += 1
else:
break
address = b58_digits[0] * pad + res
if original != util.hexlify(base58_check_decode(address, version)):
raise AddressError('encoded address does not decode properly')
return address
def base58_check_decode(s, version):
"""Decode from base58."""
# Convert the string to an integer
n = 0
for c in s:
n *= 58
if c not in b58_digits:
raise Base58Error('Not a valid Base58 character: ‘{}’'.format(c))
digit = b58_digits.index(c)
n += digit
# Convert the integer to bytes
h = '%x' % n
if len(h) % 2:
h = '0' + h
res = binascii.unhexlify(h.encode('utf8'))
# Add padding back.
pad = 0
for c in s[:-1]:
if c == b58_digits[0]:
pad += 1
else:
break
k = version * pad + res
addrbyte, data, chk0 = k[0:1], k[1:-4], k[-4:]
if addrbyte != version:
raise VersionByteError('incorrect version byte')
chk1 = util.dhash(addrbyte + data)[:4]
if chk0 != chk1:
raise Base58ChecksumError('Checksum mismatch: 0x{} ≠ 0x{}'.format(util.hexlify(chk0), util.hexlify(chk1)))
return data
def is_multisig(address):
"""Check if the address is multi‐signature."""
array = address.split('_')
return len(array) > 1
def is_fully_valid(pubkey_bin):
"""Check if the public key is valid."""
cpubkey = CPubKey(pubkey_bin)
return cpubkey.is_fullyvalid
def make_canonical(address):
"""Return canonical version of the address."""
if is_multisig(address):
signatures_required, pubkeyhashes, signatures_possible = extract_array(address)
try:
[base58_check_decode(pubkeyhash, config.ADDRESSVERSION) for pubkeyhash in pubkeyhashes]
except Base58Error:
raise MultiSigAddressError('Multi‐signature address must use PubKeyHashes, not public keys.')
return construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
return address
def test_array(signatures_required, pubs, signatures_possible):
"""Check if multi‐signature data is valid."""
try:
signatures_required, signatures_possible = int(signatures_required), int(signatures_possible)
except (ValueError, TypeError):
raise MultiSigAddressError('Signature values not integers.')
if signatures_required < 1 or signatures_required > 3:
raise MultiSigAddressError('Invalid signatures_required.')
if signatures_possible < 2 or signatures_possible > 3:
raise MultiSigAddressError('Invalid signatures_possible.')
for pubkey in pubs:
if '_' in pubkey:
raise MultiSigAddressError('Invalid characters in pubkeys/pubkeyhashes.')
if signatures_possible != len(pubs):
raise InputError('Incorrect number of pubkeys/pubkeyhashes in multi‐signature address.')
def construct_array(signatures_required, pubs, signatures_possible):
"""Create a multi‐signature address."""
test_array(signatures_required, pubs, signatures_possible)
address = '_'.join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])
return address
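# Illustrative: construct_array(1, ['addrB', 'addrA'], 2) returns '1_addrA_addrB_2'
# (pubs are sorted, so the component order does not affect the canonical form)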
def extract_array(address):
"""Extract data from multi‐signature address."""
assert is_multisig(address)
array = address.split('_')
signatures_required, pubs, signatures_possible = array[0], sorted(array[1:-1]), array[-1]
test_array(signatures_required, pubs, signatures_possible)
return int(signatures_required), pubs, int(signatures_possible)
def pubkeyhash_array(address):
"""Return PubKeyHashes from an address."""
signatures_required, pubs, signatures_possible = extract_array(address)
if not all([is_pubkeyhash(pub) for pub in pubs]):
raise MultiSigAddressError('Invalid PubKeyHashes. Multi‐signature address must use PubKeyHashes, not public keys.')
pubkeyhashes = pubs
return pubkeyhashes
def hash160(x):
x = hashlib.sha256(x).digest()
m = hashlib.new('ripemd160')
m.update(x)
return m.digest()
def pubkey_to_pubkeyhash(pubkey):
"""Convert public key to PubKeyHash."""
pubkeyhash = hash160(pubkey)
pubkey = base58_check_encode(binascii.hexlify(pubkeyhash).decode('utf-8'), config.ADDRESSVERSION)
return pubkey
def get_asm(scriptpubkey):
# TODO: When is an exception thrown here? Can this `try` block be tighter? Can it be replaced by a conditional?
try:
asm = []
# TODO: This should be `for element in scriptpubkey`.
for op in scriptpubkey:
if type(op) == bitcoinlib.core.script.CScriptOp:
# TODO: `op = element`
asm.append(str(op))
else:
# TODO: `data = element` (?)
asm.append(op)
except bitcoinlib.core.script.CScriptTruncatedPushDataError:
raise exceptions.DecodeError('invalid pushdata due to truncation')
if not asm:
raise exceptions.DecodeError('empty output')
return asm
def get_checksig(asm):
if len(asm) == 5 and asm[0] == 'OP_DUP' and asm[1] == 'OP_HASH160' and asm[3] == 'OP_EQUALVERIFY' and asm[4] == 'OP_CHECKSIG':
pubkeyhash = asm[2]
if type(pubkeyhash) == bytes:
return pubkeyhash
raise exceptions.DecodeError('invalid OP_CHECKSIG')
def get_checkmultisig(asm):
# N‐of‐2
if len(asm) == 5 and asm[3] == 2 and asm[4] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = asm[1:3], asm[0]
if all([type(pubkey) == bytes for pubkey in pubkeys]):
return pubkeys, signatures_required
# N‐of‐3
if len(asm) == 6 and asm[4] == 3 and asm[5] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = asm[1:4], asm[0]
if all([type(pubkey) == bytes for pubkey in pubkeys]):
return pubkeys, signatures_required
raise exceptions.DecodeError('invalid OP_CHECKMULTISIG')
def scriptpubkey_to_address(scriptpubkey):
asm = get_asm(scriptpubkey)
if asm[-1] == 'OP_CHECKSIG':
try:
checksig = get_checksig(asm)
except exceptions.DecodeError: # coinbase
return None
return base58_check_encode(binascii.hexlify(checksig).decode('utf-8'), config.ADDRESSVERSION)
elif asm[-1] == 'OP_CHECKMULTISIG':
pubkeys, signatures_required = get_checkmultisig(asm)
pubkeyhashes = [pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys]
return construct_array(signatures_required, pubkeyhashes, len(pubkeyhashes))
return None
# TODO: Use `python-bitcointools` instead. (Get rid of `pycoin` dependency.)
from pycoin.encoding import wif_to_tuple_of_secret_exponent_compressed, public_pair_to_sec, EncodingError
from pycoin.ecdsa import generator_secp256k1, public_pair_for_secret_exponent
class AltcoinSupportError (Exception): pass
def private_key_to_public_key(private_key_wif):
"""Convert private key to public key."""
if config.TESTNET:
allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_TESTNET]
else:
allowable_wif_prefixes = [config.PRIVATEKEY_VERSION_MAINNET]
try:
secret_exponent, compressed = wif_to_tuple_of_secret_exponent_compressed(
private_key_wif, allowable_wif_prefixes=allowable_wif_prefixes)
except EncodingError:
raise AltcoinSupportError('pycoin: unsupported WIF prefix')
public_pair = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent)
public_key = public_pair_to_sec(public_pair, compressed=compressed)
public_key_hex = binascii.hexlify(public_key).decode('utf-8')
return public_key_hex
def is_pubkeyhash(monosig_address):
"""Check if PubKeyHash is valid. """
assert not is_multisig(monosig_address)
try:
base58_check_decode(monosig_address, config.ADDRESSVERSION)
return True
except (Base58Error, VersionByteError):
return False
def make_pubkeyhash(address):
"""Create a new PubKeyHash."""
if is_multisig(address):
signatures_required, pubs, signatures_possible = extract_array(address)
pubkeyhashes = []
for pub in pubs:<|fim▁hole|> pubkeyhash = pub
else:
pubkeyhash = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(pub, 'utf-8')))
pubkeyhashes.append(pubkeyhash)
pubkeyhash_address = construct_array(signatures_required, pubkeyhashes, signatures_possible)
else:
if is_pubkeyhash(address):
pubkeyhash_address = address
else:
pubkeyhash_address = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(address, 'utf-8')))
return pubkeyhash_address
def extract_pubkeys(pub):
"""Assume pubkey if not pubkeyhash. (Check validity later.)"""
pubkeys = []
if is_multisig(pub):
_, pubs, _ = extract_array(pub)
for pub in pubs:
if not is_pubkeyhash(pub):
pubkeys.append(pub)
else:
if not is_pubkeyhash(pub):
pubkeys.append(pub)
return pubkeys
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4<|fim▁end|> | if is_pubkeyhash(pub): |
<|file_name|>PRNG.js<|end_file_name|><|fim▁begin|>import { modulo } from './Math.js'
export function random(x) {
return modulo(Math.sin(x) * 43758.5453123, 1)<|fim▁hole|><|fim▁end|> | } |
<|file_name|>lru_cache.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement # use the features of python 3
import collections
import logging
import time
# this LRUCache is optimized for concurrency, not QPS
# n: concurrency, keys stored in the cache
# m: visits not timed out, proportional to QPS * timeout
# get & set is O(1), not O(n). thus we can support very large n
# TODO: if timeout or QPS is too large, then this cache is not very efficient,
# as sweep() causes long pause
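# Illustrative usage (a sketch; the connection-closing callback is an assumption):
# cache = LRUCache(timeout=300, close_callback=lambda conn: conn.close())
# cache['fd'] = connection # O(1) set
# conn = cache['fd'] # O(1) get; also refreshes the key's last-visit time
# cache.sweep() # O(m) sweep of entries idle longer than `timeout`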
class LRUCache(collections.MutableMapping): # ABCs for read-only and mutable mappings.
"""This class is not thread safe"""
def __init__(self, timeout=60, close_callback=None, *args, **kwargs):
self.timeout = timeout # the cache expire time
self.close_callback = close_callback # called when value will be swept from cache
self._store = {} # dict<key, value>: store cache data key value
self._time_to_keys = collections.defaultdict(list) # defaultdict<time, list<key>>
# defaultdict: dict subclass that calls a factory function to supply missing values
self._keys_to_last_time = {} # dict<key, time> stores the last time of one key visited.
self._last_visits = collections.deque() # deque<time> store all the time once key is visited.
self.update(dict(*args, **kwargs)) # use the free update to set keys
def __getitem__(self, key):
# O(1)
t = time.time()
self._keys_to_last_time[key] = t
self._time_to_keys[t].append(key)
self._last_visits.append(t)
return self._store[key]
def __setitem__(self, key, value):
# O(1)
t = time.time()
self._keys_to_last_time[key] = t
self._store[key] = value
self._time_to_keys[t].append(key)<|fim▁hole|> def __delitem__(self, key):
# O(1)
del self._store[key]
del self._keys_to_last_time[key]
def __iter__(self):
return iter(self._store)
def __len__(self):
return len(self._store)
def sweep(self):
# O(m)
now = time.time()
c = 0 # use to log how many keys has been swept.
while len(self._last_visits) > 0:
least = self._last_visits[0] # fetch the oldest time point
if now - least <= self.timeout: # the oldest time point hasn't expire
break
if self.close_callback is not None: # callback function has been set
for key in self._time_to_keys[least]: # fetch each key visited on the oldest time
if key in self._store: # finded the cache key
if now - self._keys_to_last_time[key] > self.timeout:
value = self._store[key] # get the key of the last time and check expire or yet.
self.close_callback(value) # call callback
for key in self._time_to_keys[least]:
self._last_visits.popleft() # drop one visit record for each key logged at this time point
# (every append to _last_visits has a matching entry in _time_to_keys, so the counts stay in sync)
if key in self._store:
if now - self._keys_to_last_time[key] > self.timeout:
del self._store[key]
del self._keys_to_last_time[key]
c += 1
del self._time_to_keys[least]
if c:
logging.debug('%d keys swept' % c)
def test():
c = LRUCache(timeout=0.3)
c['a'] = 1
assert c['a'] == 1
time.sleep(0.5)
c.sweep()
assert 'a' not in c
c['a'] = 2
c['b'] = 3
time.sleep(0.2)
c.sweep()
assert c['a'] == 2
assert c['b'] == 3
time.sleep(0.2)
c.sweep()
c['b']
time.sleep(0.2)
c.sweep()
assert 'a' not in c
assert c['b'] == 3
time.sleep(0.5)
c.sweep()
assert 'a' not in c
assert 'b' not in c
if __name__ == '__main__':
test()<|fim▁end|> | self._last_visits.append(t)
|
<|file_name|>dining_philosophers.rs<|end_file_name|><|fim▁begin|>// http://rosettacode.org/wiki/Dining_philosophers
//! A Rust implementation of a solution for the Dining Philosophers Problem. We prevent a deadlock
//! by using Dijkstra's solution of making a single diner "left-handed." That is, all diners except
//! one pick up the chopstick "to their left" and then the chopstick "to their right." The
//! remaining diner performs this in reverse.
use std::thread;
use std::time::Duration;
use std::sync::{Mutex, Arc};
struct Philosopher {
name: String,
left: usize,
right: usize,
}
impl Philosopher {
fn new(name: &str, left: usize, right: usize) -> Philosopher {
Philosopher {
name: name.to_string(),
left: left,
right: right,
}
}
fn eat(&self, table: &Table) {
let _left = table.forks[self.left].lock().unwrap();
let _right = table.forks[self.right].lock().unwrap();
println!("{} is eating.", self.name);
thread::sleep(Duration::from_secs(1));
println!("{} is done eating.", self.name);
}
}
struct Table {
forks: Vec<Mutex<()>>,
}
fn main() {
let table = Arc::new(Table {
forks: vec![
Mutex::new(()),
Mutex::new(()),
Mutex::new(()),
Mutex::new(()),
Mutex::new(()),
],
});
let philosophers = vec![
Philosopher::new("Baruch Spinoza", 0, 1),
Philosopher::new("Gilles Deleuze", 1, 2),
Philosopher::new("Karl Marx", 2, 3),<|fim▁hole|> Philosopher::new("Friedrich Nietzsche", 3, 4),
Philosopher::new("Michel Foucault", 0, 4),
];
let handles: Vec<_> = philosophers.into_iter()
.map(|p| {
let table = table.clone();
thread::spawn(move || {
p.eat(&table);
})
})
.collect();
for h in handles {
h.join().unwrap();
}
}<|fim▁end|> | |
<|file_name|>repl.py<|end_file_name|><|fim▁begin|>import code
import sys
from awesomestream.jsonrpc import Client
def main():
try:
host = sys.argv[1]
except IndexError:
host = 'http://localhost:9997/'
banner = """>>> from awesomestream.jsonrpc import Client<|fim▁hole|> c = Client(host)
code.interact(banner, local={'Client': Client, 'c': c})
if __name__ == '__main__':
main()<|fim▁end|> | >>> c = Client('%s')""" % (host,) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate failure;
#[macro_use]
extern crate lazy_static;
extern crate chrono;
extern crate mp3_duration;
extern crate rayon;
extern crate regex;
extern crate reqwest;
extern crate rss;
extern crate select;
extern crate url;
mod diecast;
mod spodcast;
mod util;
mod vortex_theatre;
use std::collections::HashMap;
use util::*;
use chrono::DateTime;
use failure::ResultExt;<|fim▁hole|>use select::document::Document;
use std::borrow::ToOwned;
use std::convert::From;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::prelude::*;
use std::io::BufReader;
fn get_urls(podcast: &str) -> Result<HashMap<String, Option<Item>>> {
let urls = File::open(format!("{}.urls", podcast))
.context(format_err!("Error opening {}.urls for reading", podcast))?;
let mut buf_reader = BufReader::new(urls);
let mut contents = String::new();
buf_reader
.read_to_string(&mut contents)
.context(format_err!("Error reading {}.urls", podcast))?;
let mut result: HashMap<String, Option<Item>> =
contents.lines().map(|x| (x.to_owned(), None)).collect();
let new_urls = match podcast {
"diecast" => diecast::get_urls(&result)?,
"vortex_theatre" => vortex_theatre::get_urls(&result)?,
_ => HashMap::new(),
};
if !new_urls.is_empty() {
for (url, item) in new_urls {
result.insert(url, item);
}
// Add the new urls to the results and write it out.
let mut keys: Vec<String> = result.keys().cloned().collect();
keys.sort();
keys.reverse();
let mut urls = OpenOptions::new()
.write(true)
.truncate(true)
.open(format!("{}.urls", podcast))
.context(format_err!("Error opening {}.urls for writing", podcast))?;
urls.write_all(&keys.join("\n").as_bytes())?;
}
Ok(result)
}
fn get_rss(podcast: &str) -> Result<Channel> {
let xml = File::open(format!("{}.xml", podcast))
.context(format_err!("Error opening {}.xml", podcast))?;
Channel::read_from(BufReader::new(xml))
.context(format_err!("Error opening {}.xml", podcast))
.map_err(From::from)
}
fn process_document(url: &str, document: &Document) -> Result<Item> {
match url {
x if spodcast::matches(x) => spodcast::get_item(url, document),
x if diecast::matches(x) => diecast::get_item(url, document),
x if vortex_theatre::matches(x) => vortex_theatre::get_item(url, document),
_ => Err(format_err!("Unknown podcast: {}", url)),
}
}
fn get_item(url: &str) -> Result<Item> {
// Get the html and build an Item.
let response = reqwest::blocking::get(url)?;
let body = response.text()?;
let document = Document::from(body.as_str());
process_document(url, &document)
}
pub fn handle(podcast: &str) {
// Read podcast.urls and podcast.xml
let urls = match get_urls(podcast) {
Err(ref e) => {
print_error(e);
return;
}
Ok(urls) => urls,
};
let mut rss_data = match get_rss(podcast) {
Err(ref e) => {
print_error(e);
return;
}
Ok(rss_data) => rss_data,
};
println!("{}: {}/{}", podcast, rss_data.items().len(), urls.len());
let mut keys: Vec<String> = urls.keys().cloned().collect();
keys.sort();
keys.reverse();
let mut items: Vec<_> = keys
.par_iter()
.map(|url| {
if url.ends_with('*') {
None
} else if let Some(found) = rss_data
.items()
.iter()
.find(|item| item.link() == Some(url))
{
Some(found.clone())
} else {
// Find any missing urls.
// println!("Missing {}", url);
let item = urls[url].clone().ok_or(|| ()).or_else(|_| get_item(url));
// println!("{}: {:?}, {:?}", url, urls[url], item);
if let Err(ref e) = item {
// println!("Error in {}", url);
print_error(e);
}
item.ok()
}
})
.filter_map(|x| x)
.collect();
// Write out the new podcast.xml
items.sort_by(|a, b| {
let a_date = DateTime::parse_from_rfc2822(a.pub_date().unwrap()).unwrap();
let b_date = DateTime::parse_from_rfc2822(b.pub_date().unwrap()).unwrap();
a_date.partial_cmp(&b_date).unwrap()
});
items.reverse();
rss_data.set_items(items);
let output = File::create(format!("{}.xml", podcast)).unwrap();
rss_data.pretty_write_to(output, b' ', 2).unwrap();
}<|fim▁end|> | use rayon::prelude::*;
use rss::Channel;
use rss::Item; |
<|file_name|>tokens.rs<|end_file_name|><|fim▁begin|>use files::dump;
pub struct Token<'a>
{
pub name: &'a str,
pub range: (usize, usize),
pub value: String,
pub line_number: usize,
}
impl <'a> Token<'a>
{
#[allow(dead_code)]
pub fn clone(&self) -> Token<'a>
{
Token
{
name: self.name.clone(),
range: self.range.clone(),
value: self.value.clone(),
line_number: self.line_number.clone(),
}
}
}
pub trait Printable
{
fn print_to(&self, file_path: &str, console_out: bool);
fn format_for_print(&self, console_out: bool) -> String;
}
impl <'a> Printable for Vec<Token<'a>>
{
fn print_to(&self, file_path: &str, console_out: bool)
{
let lexed_token_string = self.format_for_print(console_out);
if file_path.len() > 0 { dump(file_path, lexed_token_string); }
}
fn format_for_print(&self, console_out: bool) -> String
{
// Create a String of 100 consecutive spaces
let mut spaces = String::new();
for _ in 0..100
{
spaces.push_str(" ");
}
// Spacing definitions
let name_column_width = 30;
let line_number_column_width = 20;
let mut lexed_token_string: String = String::new();
for token in self.iter()
{
let name_pad_length = name_column_width - token.name.len();
let line_pad_length = line_number_column_width - token.line_number.to_string().len() - 3;
<|fim▁hole|> else { lexed_token_string.push_str(" "); }
// Line number
lexed_token_string.push_str("ln:");
lexed_token_string.push_str(&*token.line_number.to_string());
if line_pad_length > 0 { lexed_token_string.push_str(&spaces[..line_pad_length]); }
else { lexed_token_string.push_str(" "); }
// Token value
lexed_token_string.push_str(&*token.value);
lexed_token_string.push_str("\n");
if console_out { println!("\t{}\t{:?}\t{}", token.name, token.range, token.value); }
}
lexed_token_string
}
}<|fim▁end|> | // Token name
lexed_token_string.push_str(token.name);
if name_pad_length > 0 { lexed_token_string.push_str(&spaces[..name_pad_length]); } |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from errors import *
from manager import SchemaManager |
<|file_name|>initGlobalShortcuts.js<|end_file_name|><|fim▁begin|><|fim▁hole|>function initGlobalShortcuts () {
globalShortcut.register('MediaNextTrack', playbackControls.clickNextSong)
globalShortcut.register('MediaPreviousTrack', playbackControls.clickPreviousSong)
globalShortcut.register('MediaStop', playbackControls.clickPlayPause)
globalShortcut.register('MediaPlayPause', playbackControls.clickPlayPause)
}
export default initGlobalShortcuts<|fim▁end|> | import { globalShortcut } from 'electron'
import playbackControls from '../actions/playbackControls'
|
<|file_name|>EventPattern.py<|end_file_name|><|fim▁begin|>import uuid
from uqbar.objects import new
from supriya.patterns.Pattern import Pattern
class EventPattern(Pattern):
### CLASS VARIABLES ###
__slots__ = ()
<|fim▁hole|>
if not isinstance(expr, supriya.patterns.Event):
expr = supriya.patterns.NoteEvent(**expr)
if expr.get("uuid") is None:
expr = new(expr, uuid=uuid.uuid4())
return expr
### PUBLIC METHODS ###
def play(self, clock=None, server=None):
import supriya.patterns
import supriya.realtime
event_player = supriya.patterns.RealtimeEventPlayer(
self, clock=clock, server=server or supriya.realtime.Server.default()
)
event_player.start()
return event_player
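# Illustrative: on a concrete subclass with a booted default server,
# `player = pattern.play()` starts and returns the RealtimeEventPlayer.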
def with_bus(self, calculation_rate="audio", channel_count=None, release_time=0.25):
import supriya.patterns
return supriya.patterns.Pbus(
self,
calculation_rate=calculation_rate,
channel_count=channel_count,
release_time=release_time,
)
def with_effect(self, synthdef, release_time=0.25, **settings):
import supriya.patterns
return supriya.patterns.Pfx(
self, synthdef=synthdef, release_time=release_time, **settings
)
def with_group(self, release_time=0.25):
import supriya.patterns
return supriya.patterns.Pgroup(self, release_time=release_time)<|fim▁end|> | ### SPECIAL METHODS ###
def _coerce_iterator_output(self, expr, state=None):
import supriya.patterns |
<|file_name|>date.js<|end_file_name|><|fim▁begin|>/**
* Globalize v1.4.0-alpha.2
*
* http://github.com/jquery/globalize
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license
* http://jquery.org/license
*
* Date: 2018-03-09T13:51Z
*/
/*!
* Globalize v1.4.0-alpha.2 2018-03-09T13:51Z Released under the MIT license
* http://git.io/TrdQbw
*/
(function( root, factory ) {
// UMD returnExports
if ( typeof define === "function" && define.amd ) {
// AMD
define([
"cldr",
"../globalize",
"./number",
"cldr/event",
"cldr/supplemental"
], factory );
} else if ( typeof exports === "object" ) {
// Node, CommonJS
module.exports = factory( require( "cldrjs" ), require( "../globalize" ) );
} else {
// Extend global
factory( root.Cldr, root.Globalize );
}
}(this, function( Cldr, Globalize ) {
var createError = Globalize._createError,
createErrorUnsupportedFeature = Globalize._createErrorUnsupportedFeature,
formatMessage = Globalize._formatMessage,
isPlainObject = Globalize._isPlainObject,
looseMatching = Globalize._looseMatching,
numberNumberingSystemDigitsMap = Globalize._numberNumberingSystemDigitsMap,
numberSymbol = Globalize._numberSymbol,
regexpEscape = Globalize._regexpEscape,
removeLiteralQuotes = Globalize._removeLiteralQuotes,
runtimeBind = Globalize._runtimeBind,
stringPad = Globalize._stringPad,
validate = Globalize._validate,
validateCldr = Globalize._validateCldr,
validateDefaultLocale = Globalize._validateDefaultLocale,
validateParameterPresence = Globalize._validateParameterPresence,
validateParameterType = Globalize._validateParameterType,
validateParameterTypePlainObject = Globalize._validateParameterTypePlainObject,
validateParameterTypeString = Globalize._validateParameterTypeString;
var validateParameterTypeDate = function( value, name ) {
validateParameterType( value, name, value === undefined || value instanceof Date, "Date" );
};
var createErrorInvalidParameterValue = function( name, value ) {
return createError( "E_INVALID_PAR_VALUE", "Invalid `{name}` value ({value}).", {
name: name,
value: value
});
};
/**
* Create a map between the skeleton fields and their positions, e.g.,
* {<|fim▁hole|> * ...
* }
*/
var validateSkeletonFieldsPosMap = "GyYuUrQqMLlwWEecdDFghHKkmsSAzZOvVXx".split( "" ).reduce(function( memo, item, i ) {
memo[ item ] = i;
return memo;
}, {});
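// e.g., { G: 0, y: 1, Y: 2, ..., x: 34 }; the positions encode the canonical field order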
/**
* validateSkeleton( skeleton )
*
* skeleton: Assume `j` has already been converted into a localized hour field.
*/
var validateSkeleton = function validateSkeleton( skeleton ) {
var last,
// Using easier to read variable.
fieldsPosMap = validateSkeletonFieldsPosMap;
// "The fields are from the Date Field Symbol Table in Date Format Patterns"
// Ref: http://www.unicode.org/reports/tr35/tr35-dates.html#availableFormats_appendItems
// I.e., check for invalid characters.
skeleton.replace( /[^GyYuUrQqMLlwWEecdDFghHKkmsSAzZOvVXx]/, function( field ) {
throw createError(
"E_INVALID_OPTIONS", "Invalid field `{invalidField}` of skeleton `{value}`",
{
invalidField: field,
type: "skeleton",
value: skeleton
}
);
});
// "The canonical order is from top to bottom in that table; that is, yM not My".
// http://www.unicode.org/reports/tr35/tr35-dates.html#availableFormats_appendItems
// I.e., check for invalid order.
skeleton.split( "" ).every(function( field ) {
if ( fieldsPosMap[ field ] < last ) {
throw createError(
"E_INVALID_OPTIONS", "Invalid order `{invalidField}` of skeleton `{value}`",
{
invalidField: field,
type: "skeleton",
value: skeleton
}
);
}
last = fieldsPosMap[ field ];
return true;
});
};
/**
* Returns a new object created by using `object`'s values as keys, and the keys as values.
*/
var objectInvert = function( object, fn ) {
fn = fn || function( object, key, value ) {
object[ value ] = key;
return object;
};
return Object.keys( object ).reduce(function( newObject, key ) {
return fn( newObject, key, object[ key ] );
}, {});
};
// Invert key and values, e.g., {"e": "eEc"} ==> {"e": "e", "E": "e", "c": "e"}.
var dateExpandPatternSimilarFieldsMap = objectInvert({
"e": "eEc",
"L": "ML"
}, function( object, key, value ) {
value.split( "" ).forEach(function( field ) {
object[ field ] = key;
});
return object;
});
var dateExpandPatternNormalizePatternType = function( character ) {
return dateExpandPatternSimilarFieldsMap[ character ] || character;
};
var datePatternRe = ( /([a-z])\1*|'([^']|'')+'|''|./ig );
var stringRepeat = function( str, count ) {
var i, result = "";
for ( i = 0; i < count; i++ ) {
result = result + str;
}
return result;
};
var dateExpandPatternAugmentFormat = function( requestedSkeleton, bestMatchFormat ) {
var i, j, matchedType, matchedLength, requestedType, requestedLength,
// Using an easier to read variable.
normalizePatternType = dateExpandPatternNormalizePatternType;
requestedSkeleton = requestedSkeleton.match( datePatternRe );
bestMatchFormat = bestMatchFormat.match( datePatternRe );
for ( i = 0; i < bestMatchFormat.length; i++ ) {
matchedType = bestMatchFormat[i].charAt( 0 );
matchedLength = bestMatchFormat[i].length;
for ( j = 0; j < requestedSkeleton.length; j++ ) {
requestedType = requestedSkeleton[j].charAt( 0 );
requestedLength = requestedSkeleton[j].length;
if ( normalizePatternType( matchedType ) === normalizePatternType( requestedType ) &&
matchedLength < requestedLength
) {
bestMatchFormat[i] = stringRepeat( matchedType, requestedLength );
}
}
}
return bestMatchFormat.join( "" );
};
var dateExpandPatternCompareFormats = function( formatA, formatB ) {
var a, b, distance, lenA, lenB, typeA, typeB, i, j,
// Using easier to read variables.
normalizePatternType = dateExpandPatternNormalizePatternType;
if ( formatA === formatB ) {
return 0;
}
formatA = formatA.match( datePatternRe );
formatB = formatB.match( datePatternRe );
if ( formatA.length !== formatB.length ) {
return -1;
}
distance = 1;
for ( i = 0; i < formatA.length; i++ ) {
a = formatA[ i ].charAt( 0 );
typeA = normalizePatternType( a );
typeB = null;
for ( j = 0; j < formatB.length; j++ ) {
b = formatB[ j ].charAt( 0 );
typeB = normalizePatternType( b );
if ( typeA === typeB ) {
break;
} else {
typeB = null;
}
}
if ( typeB === null ) {
return -1;
}
lenA = formatA[ i ].length;
lenB = formatB[ j ].length;
distance = distance + Math.abs( lenA - lenB );
// Most symbols have a small distance from each other, e.g., M ≅ L; E ≅ c; a ≅ b ≅ B;
// H ≅ k ≅ h ≅ K; ...
if ( a !== b ) {
distance += 1;
}
// Numeric (l<3) and text fields (l>=3) are given a larger distance from each other.
if ( ( lenA < 3 && lenB >= 3 ) || ( lenA >= 3 && lenB < 3 ) ) {
distance += 20;
}
}
return distance;
};
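// Illustrative: compareFormats( "yMMM", "yMMMd" ) === -1 (field counts differ),
// while "MMM" vs. "MM" stays comparable but pays the +20 text/numeric penalty.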
var dateExpandPatternGetBestMatchPattern = function( cldr, askedSkeleton ) {
var availableFormats, pattern, ratedFormats, skeleton,
path = "dates/calendars/gregorian/dateTimeFormats/availableFormats",
// Using easier to read variables.
augmentFormat = dateExpandPatternAugmentFormat,
compareFormats = dateExpandPatternCompareFormats;
pattern = cldr.main([ path, askedSkeleton ]);
if ( askedSkeleton && !pattern ) {
availableFormats = cldr.main([ path ]);
ratedFormats = [];
for ( skeleton in availableFormats ) {
ratedFormats.push({
skeleton: skeleton,
pattern: availableFormats[ skeleton ],
rate: compareFormats( askedSkeleton, skeleton )
});
}
ratedFormats = ratedFormats
.filter( function( format ) {
return format.rate > -1;
} )
.sort( function( formatA, formatB ) {
return formatA.rate - formatB.rate;
});
if ( ratedFormats.length ) {
pattern = augmentFormat( askedSkeleton, ratedFormats[0].pattern );
}
}
return pattern;
};
/**
* expandPattern( options, cldr )
*
* @options [Object] if String, it's considered a skeleton. Object accepts:
* - skeleton: [String] lookup availableFormat;
* - date: [String] ( "full" | "long" | "medium" | "short" );
* - time: [String] ( "full" | "long" | "medium" | "short" );
* - datetime: [String] ( "full" | "long" | "medium" | "short" );
* - raw: [String] For more info see datetime/format.js.
*
* @cldr [Cldr instance].
*
* Return the corresponding pattern.
* Eg for "en":
* - "GyMMMd" returns "MMM d, y G";
* - { skeleton: "GyMMMd" } returns "MMM d, y G";
* - { date: "full" } returns "EEEE, MMMM d, y";
* - { time: "full" } returns "h:mm:ss a zzzz";
* - { datetime: "full" } returns "EEEE, MMMM d, y 'at' h:mm:ss a zzzz";
* - { raw: "dd/mm" } returns "dd/mm";
*/
var dateExpandPattern = function( options, cldr ) {
var dateSkeleton, result, skeleton, timeSkeleton, type,
// Using easier to read variables.
getBestMatchPattern = dateExpandPatternGetBestMatchPattern;
function combineDateTime( type, datePattern, timePattern ) {
return formatMessage(
cldr.main([
"dates/calendars/gregorian/dateTimeFormats",
type
]),
[ timePattern, datePattern ]
);
}
switch ( true ) {
case "skeleton" in options:
skeleton = options.skeleton;
// Preferred hour (j).
skeleton = skeleton.replace( /j/g, function() {
return cldr.supplemental.timeData.preferred();
});
validateSkeleton( skeleton );
// Try direct map (note that getBestMatchPattern handles it).
// ... or, try to "best match" the whole skeleton.
result = getBestMatchPattern(
cldr,
skeleton
);
if ( result ) {
break;
}
// ... or, try to "best match" the date and time parts individually.
timeSkeleton = skeleton.split( /[^hHKkmsSAzZOvVXx]/ ).slice( -1 )[ 0 ];
dateSkeleton = skeleton.split( /[^GyYuUrQqMLlwWdDFgEec]/ )[ 0 ];
dateSkeleton = getBestMatchPattern(
cldr,
dateSkeleton
);
timeSkeleton = getBestMatchPattern(
cldr,
timeSkeleton
);
if ( /(MMMM|LLLL).*[Ec]/.test( dateSkeleton ) ) {
type = "full";
} else if ( /MMMM|LLLL/.test( dateSkeleton ) ) {
type = "long";
} else if ( /MMM|LLL/.test( dateSkeleton ) ) {
type = "medium";
} else {
type = "short";
}
if ( dateSkeleton && timeSkeleton ) {
result = combineDateTime( type, dateSkeleton, timeSkeleton );
} else {
result = dateSkeleton || timeSkeleton;
}
break;
case "date" in options:
case "time" in options:
result = cldr.main([
"dates/calendars/gregorian",
"date" in options ? "dateFormats" : "timeFormats",
( options.date || options.time )
]);
break;
case "datetime" in options:
result = combineDateTime( options.datetime,
cldr.main([ "dates/calendars/gregorian/dateFormats", options.datetime ]),
cldr.main([ "dates/calendars/gregorian/timeFormats", options.datetime ])
);
break;
case "raw" in options:
result = options.raw;
break;
default:
throw createErrorInvalidParameterValue({
name: "options",
value: options
});
}
return result;
};
var dateWeekDays = [ "sun", "mon", "tue", "wed", "thu", "fri", "sat" ];
/**
* firstDayOfWeek
*/
var dateFirstDayOfWeek = function( cldr ) {
return dateWeekDays.indexOf( cldr.supplemental.weekData.firstDay() );
};
/**
* getTimeZoneName( length, type )
*/
var dateGetTimeZoneName = function( length, type, timeZone, cldr ) {
var metaZone, result;
if ( !timeZone ) {
return;
}
result = cldr.main([
"dates/timeZoneNames/zone",
timeZone,
length < 4 ? "short" : "long",
type
]);
if ( result ) {
return result;
}
// The latest metazone data of the metazone array.
// TODO expand to support the historic metazones based on the given date.
metaZone = cldr.supplemental([
"metaZones/metazoneInfo/timezone", timeZone, 0,
"usesMetazone/_mzone"
]);
return cldr.main([
"dates/timeZoneNames/metazone",
metaZone,
length < 4 ? "short" : "long",
type
]);
};
/**
* timezoneHourFormatShortH( hourFormat )
*
* @hourFormat [String]
*
* Unofficial deduction of the short hourFormat given time zone `hourFormat` element.
* Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
*
* Example:
* - "+HH.mm;-HH.mm" => "+H;-H"
* - "+HH:mm;-HH:mm" => "+H;-H"
* - "+HH:mm;−HH:mm" => "+H;−H" (Note MINUS SIGN \u2212)
* - "+HHmm;-HHmm" => "+H:-H"
*/
var dateTimezoneHourFormatH = function( hourFormat ) {
return hourFormat
.split( ";" )
.map(function( format ) {
return format.slice( 0, format.indexOf( "H" ) + 1 );
})
.join( ";" );
};
/**
* timezoneHourFormatLongHm( hourFormat )
*
* @hourFormat [String]
*
* Unofficial deduction of the short hourFormat given time zone `hourFormat` element.
* Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
*
* Example (hFormat === "H"): (used for short Hm)
* - "+HH.mm;-HH.mm" => "+H.mm;-H.mm"
* - "+HH:mm;-HH:mm" => "+H:mm;-H:mm"
* - "+HH:mm;−HH:mm" => "+H:mm;−H:mm" (Note MINUS SIGN \u2212)
* - "+HHmm;-HHmm" => "+Hmm:-Hmm"
*
* Example (hFormat === "HH": (used for long Hm)
* - "+HH.mm;-HH.mm" => "+HH.mm;-HH.mm"
* - "+HH:mm;-HH:mm" => "+HH:mm;-HH:mm"
* - "+H:mm;-H:mm" => "+HH:mm;-HH:mm"
* - "+HH:mm;−HH:mm" => "+HH:mm;−HH:mm" (Note MINUS SIGN \u2212)
* - "+HHmm;-HHmm" => "+HHmm:-HHmm"
*/
var dateTimezoneHourFormatHm = function( hourFormat, hFormat ) {
return hourFormat
.split( ";" )
.map(function( format ) {
var parts = format.split( /H+/ );
parts.splice( 1, 0, hFormat );
return parts.join( "" );
})
.join( ";" );
};
var runtimeCacheDataBind = function( key, data ) {
var fn = function() {
return data;
};
fn.dataCacheKey = key;
return fn;
};
/**
* properties( pattern, cldr )
*
* @pattern [String] raw pattern.
* ref: http://www.unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
*
* @cldr [Cldr instance].
*
* Return the properties given the pattern and cldr.
*
* TODO Support other calendar types.
*/
var dateFormatProperties = function( pattern, cldr, timeZone ) {
var properties = {
numberFormatters: {},
pattern: pattern,
timeSeparator: numberSymbol( "timeSeparator", cldr )
},
widths = [ "abbreviated", "wide", "narrow" ];
function setNumberFormatterPattern( pad ) {
properties.numberFormatters[ pad ] = stringPad( "", pad );
}
if ( timeZone ) {
properties.timeZoneData = runtimeCacheDataBind( "iana/" + timeZone, {
offsets: cldr.get([ "globalize-iana/zoneData", timeZone, "offsets" ]),
untils: cldr.get([ "globalize-iana/zoneData", timeZone, "untils" ]),
isdsts: cldr.get([ "globalize-iana/zoneData", timeZone, "isdsts" ])
});
}
pattern.replace( datePatternRe, function( current ) {
var aux, chr, daylightTzName, formatNumber, genericTzName, length, standardTzName;
chr = current.charAt( 0 );
length = current.length;
if ( chr === "j" ) {
// Locale preferred hHKk.
// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
properties.preferredTime = chr = cldr.supplemental.timeData.preferred();
}
// ZZZZ: same as "OOOO".
if ( chr === "Z" && length === 4 ) {
chr = "O";
length = 4;
}
// z...zzz: "{shortRegion}", eg. "PST" or "PDT".
// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "z" ) {
standardTzName = dateGetTimeZoneName( length, "standard", timeZone, cldr );
daylightTzName = dateGetTimeZoneName( length, "daylight", timeZone, cldr );
if ( standardTzName ) {
properties.standardTzName = standardTzName;
}
if ( daylightTzName ) {
properties.daylightTzName = daylightTzName;
}
// Fall through the "O" format in case one name is missing.
if ( !standardTzName || !daylightTzName ) {
chr = "O";
if ( length < 4 ) {
length = 1;
}
}
}
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}" or "{regionName} {Time}",
// e.g., "Pacific Time"
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "v" ) {
genericTzName = dateGetTimeZoneName( length, "generic", timeZone, cldr );
// Fall back to "V" format.
if ( !genericTzName ) {
chr = "V";
length = 4;
}
}
switch ( chr ) {
// Era
case "G":
properties.eras = cldr.main([
"dates/calendars/gregorian/eras",
length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
]);
break;
// Year
case "y":
// Plain year.
formatNumber = true;
break;
case "Y":
// Year in "Week of Year"
properties.firstDay = dateFirstDayOfWeek( cldr );
properties.minDays = cldr.supplemental.weekData.minDays();
formatNumber = true;
break;
case "u": // Extended year. Need to be implemented.
case "U": // Cyclic year name. Need to be implemented.
throw createErrorUnsupportedFeature({
feature: "year pattern `" + chr + "`"
});
// Quarter
case "Q":
case "q":
if ( length > 2 ) {
if ( !properties.quarters ) {
properties.quarters = {};
}
if ( !properties.quarters[ chr ] ) {
properties.quarters[ chr ] = {};
}
properties.quarters[ chr ][ length ] = cldr.main([
"dates/calendars/gregorian/quarters",
chr === "Q" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
} else {
formatNumber = true;
}
break;
// Month
case "M":
case "L":
if ( length > 2 ) {
if ( !properties.months ) {
properties.months = {};
}
if ( !properties.months[ chr ] ) {
properties.months[ chr ] = {};
}
properties.months[ chr ][ length ] = cldr.main([
"dates/calendars/gregorian/months",
chr === "M" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
} else {
formatNumber = true;
}
break;
// Week - Week of Year (w) or Week of Month (W).
case "w":
case "W":
properties.firstDay = dateFirstDayOfWeek( cldr );
properties.minDays = cldr.supplemental.weekData.minDays();
formatNumber = true;
break;
// Day
case "d":
case "D":
case "F":
formatNumber = true;
break;
case "g":
// Modified Julian day. Need to be implemented.
throw createErrorUnsupportedFeature({
feature: "Julian day pattern `g`"
});
// Week day
case "e":
case "c":
if ( length <= 2 ) {
properties.firstDay = dateFirstDayOfWeek( cldr );
formatNumber = true;
break;
}
/* falls through */
case "E":
if ( !properties.days ) {
properties.days = {};
}
if ( !properties.days[ chr ] ) {
properties.days[ chr ] = {};
}
if ( length === 6 ) {
// If short day names are not explicitly specified, abbreviated day names are
// used instead.
// http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
// http://unicode.org/cldr/trac/ticket/6790
properties.days[ chr ][ length ] = cldr.main([
"dates/calendars/gregorian/days",
chr === "c" ? "stand-alone" : "format",
"short"
]) || cldr.main([
"dates/calendars/gregorian/days",
chr === "c" ? "stand-alone" : "format",
"abbreviated"
]);
} else {
properties.days[ chr ][ length ] = cldr.main([
"dates/calendars/gregorian/days",
chr === "c" ? "stand-alone" : "format",
widths[ length < 3 ? 0 : length - 3 ]
]);
}
break;
// Period (AM or PM)
case "a":
properties.dayPeriods = {
am: cldr.main(
"dates/calendars/gregorian/dayPeriods/format/wide/am"
),
pm: cldr.main(
"dates/calendars/gregorian/dayPeriods/format/wide/pm"
)
};
break;
// Hour
case "h": // 1-12
case "H": // 0-23
case "K": // 0-11
case "k": // 1-24
// Minute
case "m":
// Second
case "s":
case "S":
case "A":
formatNumber = true;
break;
// Zone
case "v":
if ( length !== 1 && length !== 4 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
properties.genericTzName = genericTzName;
break;
case "V":
if ( length === 1 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
if ( timeZone ) {
if ( length === 2 ) {
properties.timeZoneName = timeZone;
break;
}
var timeZoneName,
exemplarCity = cldr.main([
"dates/timeZoneNames/zone", timeZone, "exemplarCity"
]);
if ( length === 3 ) {
if ( !exemplarCity ) {
exemplarCity = cldr.main([
"dates/timeZoneNames/zone/Etc/Unknown/exemplarCity"
]);
}
timeZoneName = exemplarCity;
}
if ( exemplarCity && length === 4 ) {
timeZoneName = formatMessage(
cldr.main(
"dates/timeZoneNames/regionFormat"
),
[ exemplarCity ]
);
}
if ( timeZoneName ) {
properties.timeZoneName = timeZoneName;
break;
}
}
if ( current === "v" ) {
length = 1;
}
/* falls through */
case "O":
// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
properties.gmtFormat = cldr.main( "dates/timeZoneNames/gmtFormat" );
properties.gmtZeroFormat = cldr.main( "dates/timeZoneNames/gmtZeroFormat" );
// Unofficial deduction of the hourFormat variations.
// Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
aux = cldr.main( "dates/timeZoneNames/hourFormat" );
properties.hourFormat = length < 4 ?
[ dateTimezoneHourFormatH( aux ), dateTimezoneHourFormatHm( aux, "H" ) ] :
dateTimezoneHourFormatHm( aux, "HH" );
/* falls through */
case "Z":
case "X":
case "x":
setNumberFormatterPattern( 1 );
setNumberFormatterPattern( 2 );
break;
}
if ( formatNumber ) {
setNumberFormatterPattern( length );
}
});
return properties;
};
var dateFormatterFn = function( dateToPartsFormatter ) {
return function dateFormatter( value ) {
return dateToPartsFormatter( value ).map( function( part ) {
return part.value;
}).join( "" );
};
};
/**
* parseProperties( cldr )
*
* @cldr [Cldr instance].
*
* @timeZone [String] FIXME.
*
* Return parser properties.
*/
var dateParseProperties = function( cldr, timeZone ) {
var properties = {
preferredTimeData: cldr.supplemental.timeData.preferred()
};
if ( timeZone ) {
properties.timeZoneData = runtimeCacheDataBind( "iana/" + timeZone, {
offsets: cldr.get([ "globalize-iana/zoneData", timeZone, "offsets" ]),
untils: cldr.get([ "globalize-iana/zoneData", timeZone, "untils" ]),
isdsts: cldr.get([ "globalize-iana/zoneData", timeZone, "isdsts" ])
});
}
return properties;
};
var ZonedDateTime = (function() {
function definePrivateProperty(object, property, value) {
Object.defineProperty(object, property, {
value: value
});
}
function getUntilsIndex(original, untils) {
var index = 0;
var originalTime = original.getTime();
// TODO Should we do binary search for improved performance?
while (index < untils.length - 1 && originalTime >= untils[index]) {
index++;
}
return index;
}
function setWrap(fn) {
var offset1 = this.getTimezoneOffset();
var ret = fn();
this.original.setTime(new Date(this.getTime()));
var offset2 = this.getTimezoneOffset();
if (offset2 - offset1) {
this.original.setMinutes(this.original.getMinutes() + offset2 - offset1);
}
return ret;
}
var ZonedDateTime = function(date, timeZoneData) {
definePrivateProperty(this, "original", new Date(date.getTime()));
definePrivateProperty(this, "local", new Date(date.getTime()));
definePrivateProperty(this, "timeZoneData", timeZoneData);
definePrivateProperty(this, "setWrap", setWrap);
if (!(timeZoneData.untils && timeZoneData.offsets && timeZoneData.isdsts)) {
throw new Error("Invalid IANA data");
}
this.setTime(this.local.getTime() - this.getTimezoneOffset() * 60 * 1000);
};
ZonedDateTime.prototype.clone = function() {
return new ZonedDateTime(this.original, this.timeZoneData);
};
// Date field getters.
["getFullYear", "getMonth", "getDate", "getDay", "getHours", "getMinutes",
"getSeconds", "getMilliseconds"].forEach(function(method) {
// Corresponding UTC method, e.g., "getUTCFullYear" if method === "getFullYear".
var utcMethod = "getUTC" + method.substr(3);
ZonedDateTime.prototype[method] = function() {
return this.local[utcMethod]();
};
});
// Note: Define .valueOf = .getTime for arithmetic operations like date1 - date2.
ZonedDateTime.prototype.valueOf =
ZonedDateTime.prototype.getTime = function() {
return this.local.getTime() + this.getTimezoneOffset() * 60 * 1000;
};
ZonedDateTime.prototype.getTimezoneOffset = function() {
var index = getUntilsIndex(this.original, this.timeZoneData.untils);
return this.timeZoneData.offsets[index];
};
// Date field setters.
["setFullYear", "setMonth", "setDate", "setHours", "setMinutes", "setSeconds", "setMilliseconds"].forEach(function(method) {
// Corresponding UTC method, e.g., "setUTCFullYear" if method === "setFullYear".
var utcMethod = "setUTC" + method.substr(3);
ZonedDateTime.prototype[method] = function(value) {
var local = this.local;
// Note setWrap is needed for seconds and milliseconds just because
// abs(value) could be >= a minute.
return this.setWrap(function() {
return local[utcMethod](value);
});
};
});
ZonedDateTime.prototype.setTime = function(time) {
return this.local.setTime(time);
};
ZonedDateTime.prototype.isDST = function() {
var index = getUntilsIndex(this.original, this.timeZoneData.untils);
return Boolean(this.timeZoneData.isdsts[index]);
};
ZonedDateTime.prototype.inspect = function() {
var index = getUntilsIndex(this.original, this.timeZoneData.untils);
var abbrs = this.timeZoneData.abbrs;
return this.local.toISOString().replace(/Z$/, "") + " " +
(abbrs && abbrs[index] + " " || (this.getTimezoneOffset() * -1) + " ") +
(this.isDST() ? "(daylight savings)" : "");
};
ZonedDateTime.prototype.toDate = function() {
return new Date(this.getTime());
};
// Type cast getters.
["toISOString", "toJSON", "toUTCString"].forEach(function(method) {
ZonedDateTime.prototype[method] = function() {
return this.toDate()[method]();
};
});
return ZonedDateTime;
}());
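// Illustrative usage sketch (not part of the original source); `timeZoneData` stands
// in for an IANA offsets/untils/isdsts object, as bound in dateParseProperties above:
// var zoned = new ZonedDateTime( new Date(), timeZoneData );
// zoned.getHours(); // hours in that zone, regardless of the runtime's local zone.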
/**
* isLeapYear( year )
*
* @year [Number]
*
* Returns whether the specified year is a leap year.
*/
var dateIsLeapYear = function( year ) {
return new Date( year, 1, 29 ).getMonth() === 1;
};
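// For example: dateIsLeapYear( 2000 ) === true; dateIsLeapYear( 1900 ) === false.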
/**
* lastDayOfMonth( date )
*
* @date [Date]
*
* Return the last day of the given date's month
*/
var dateLastDayOfMonth = function( date ) {
return new Date( date.getFullYear(), date.getMonth() + 1, 0 ).getDate();
};
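// For example: dateLastDayOfMonth( new Date( 2016, 1, 10 ) ) === 29 (leap-year Feb).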
/**
* startOf changes the input to the beginning of the given unit.
*
* For example, starting at the start of a day, resets hours, minutes
* seconds and milliseconds to 0. Starting at the month does the same, but
* also sets the date to 1.
*
* Returns the modified date
*/
var dateStartOf = function( date, unit ) {
date = date instanceof ZonedDateTime ? date.clone() : new Date( date.getTime() );
switch ( unit ) {
case "year":
date.setMonth( 0 );
/* falls through */
case "month":
date.setDate( 1 );
/* falls through */
case "day":
date.setHours( 0 );
/* falls through */
case "hour":
date.setMinutes( 0 );
/* falls through */
case "minute":
date.setSeconds( 0 );
/* falls through */
case "second":
date.setMilliseconds( 0 );
}
return date;
};
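// For example: dateStartOf( new Date( 2015, 6, 9, 23, 59 ), "day" ) returns a
// Jul 9 2015 00:00:00.000 date (local time).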
/**
* Differently from native date.setDate(), this function returns a date whose
* day remains inside the month boundaries. For example:
*
* setDate( FebDate, 31 ): a "Feb 28" date.
* setDate( SepDate, 31 ): a "Sep 30" date.
*/
var dateSetDate = function( date, day ) {
var lastDay = new Date( date.getFullYear(), date.getMonth() + 1, 0 ).getDate();
date.setDate( day < 1 ? 1 : day < lastDay ? day : lastDay );
};
/**
* Differently from native date.setMonth(), this function adjusts date if
* needed, so final month is always the one set.
*
* setMonth( Jan31Date, 1 ): a "Feb 28" date.
* setMonth( Jan31Date, 8 ): a "Sep 30" date.
*/
var dateSetMonth = function( date, month ) {
var originalDate = date.getDate();
date.setDate( 1 );
date.setMonth( month );
dateSetDate( date, originalDate );
};
var outOfRange = function( value, low, high ) {
return value < low || value > high;
};
/**
* parse( value, tokens, properties )
*
* @value [String] string date.
*
* @tokens [Object] tokens returned by date/tokenizer.
*
* @properties [Object] output returned by date/tokenizer-properties.
*
* ref: http://www.unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
*/
var dateParse = function( value, tokens, properties ) {
var amPm, day, daysOfYear, month, era, hour, hour12, timezoneOffset, valid,
YEAR = 0,
MONTH = 1,
DAY = 2,
HOUR = 3,
MINUTE = 4,
SECOND = 5,
MILLISECONDS = 6,
date = new Date(),
truncateAt = [],
units = [ "year", "month", "day", "hour", "minute", "second", "milliseconds" ];
// Create globalize date with given timezone data.
if ( properties.timeZoneData ) {
date = new ZonedDateTime( date, properties.timeZoneData() );
}
if ( !tokens.length ) {
return null;
}
valid = tokens.every(function( token ) {
var century, chr, value, length;
if ( token.type === "literal" ) {
// continue
return true;
}
chr = token.type.charAt( 0 );
length = token.type.length;
if ( chr === "j" ) {
// Locale preferred hHKk.
// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
chr = properties.preferredTimeData;
}
switch ( chr ) {
// Era
case "G":
truncateAt.push( YEAR );
era = +token.value;
break;
// Year
case "y":
value = token.value;
if ( length === 2 ) {
if ( outOfRange( value, 0, 99 ) ) {
return false;
}
// mimic dojo/date/locale: choose century to apply, according to a sliding
// window of 80 years before and 20 years after present year.
century = Math.floor( date.getFullYear() / 100 ) * 100;
value += century;
if ( value > date.getFullYear() + 20 ) {
value -= 100;
}
}
date.setFullYear( value );
truncateAt.push( YEAR );
break;
case "Y": // Year in "Week of Year"
throw createErrorUnsupportedFeature({
feature: "year pattern `" + chr + "`"
});
// Quarter (skip)
case "Q":
case "q":
break;
// Month
case "M":
case "L":
if ( length <= 2 ) {
value = token.value;
} else {
value = +token.value;
}
if ( outOfRange( value, 1, 12 ) ) {
return false;
}
// Setting the month later so that we have the correct year and can determine
// the correct last day of February in case of leap year.
month = value;
truncateAt.push( MONTH );
break;
// Week (skip)
case "w": // Week of Year.
case "W": // Week of Month.
break;
// Day
case "d":
day = token.value;
truncateAt.push( DAY );
break;
case "D":
daysOfYear = token.value;
truncateAt.push( DAY );
break;
case "F":
// Day of Week in month. eg. 2nd Wed in July.
// Skip
break;
// Week day
case "e":
case "c":
case "E":
// Skip.
// value = arrayIndexOf( dateWeekDays, token.value );
break;
// Period (AM or PM)
case "a":
amPm = token.value;
break;
// Hour
case "h": // 1-12
value = token.value;
if ( outOfRange( value, 1, 12 ) ) {
return false;
}
hour = hour12 = true;
date.setHours( value === 12 ? 0 : value );
truncateAt.push( HOUR );
break;
case "K": // 0-11
value = token.value;
if ( outOfRange( value, 0, 11 ) ) {
return false;
}
hour = hour12 = true;
date.setHours( value );
truncateAt.push( HOUR );
break;
case "k": // 1-24
value = token.value;
if ( outOfRange( value, 1, 24 ) ) {
return false;
}
hour = true;
date.setHours( value === 24 ? 0 : value );
truncateAt.push( HOUR );
break;
case "H": // 0-23
value = token.value;
if ( outOfRange( value, 0, 23 ) ) {
return false;
}
hour = true;
date.setHours( value );
truncateAt.push( HOUR );
break;
// Minute
case "m":
value = token.value;
if ( outOfRange( value, 0, 59 ) ) {
return false;
}
date.setMinutes( value );
truncateAt.push( MINUTE );
break;
// Second
case "s":
value = token.value;
if ( outOfRange( value, 0, 59 ) ) {
return false;
}
date.setSeconds( value );
truncateAt.push( SECOND );
break;
case "A":
date.setHours( 0 );
date.setMinutes( 0 );
date.setSeconds( 0 );
/* falls through */
case "S":
value = Math.round( token.value * Math.pow( 10, 3 - length ) );
date.setMilliseconds( value );
truncateAt.push( MILLISECONDS );
break;
// Zone
case "z":
case "Z":
case "O":
case "v":
case "V":
case "X":
case "x":
if ( typeof token.value === "number" ) {
timezoneOffset = token.value;
}
break;
}
return true;
});
if ( !valid ) {
return null;
}
// 12-hour format needs AM or PM, 24-hour format doesn't, ie. return null
// if amPm && !hour12 || !amPm && hour12.
if ( hour && !( !amPm ^ hour12 ) ) {
return null;
}
if ( era === 0 ) {
// 1 BC = year 0
date.setFullYear( date.getFullYear() * -1 + 1 );
}
if ( month !== undefined ) {
dateSetMonth( date, month - 1 );
}
if ( day !== undefined ) {
if ( outOfRange( day, 1, dateLastDayOfMonth( date ) ) ) {
return null;
}
date.setDate( day );
} else if ( daysOfYear !== undefined ) {
if ( outOfRange( daysOfYear, 1, dateIsLeapYear( date.getFullYear() ) ? 366 : 365 ) ) {
return null;
}
date.setMonth( 0 );
date.setDate( daysOfYear );
}
if ( hour12 && amPm === "pm" ) {
date.setHours( date.getHours() + 12 );
}
if ( timezoneOffset !== undefined ) {
date.setMinutes( date.getMinutes() + timezoneOffset - date.getTimezoneOffset() );
}
// Truncate date at the most precise unit defined. Eg.
// If value is "12/31", and pattern is "MM/dd":
// => new Date( <current Year>, 12, 31, 0, 0, 0, 0 );
truncateAt = Math.max.apply( null, truncateAt );
date = dateStartOf( date, units[ truncateAt ] );
// Get date back from globalize date.
if ( date instanceof ZonedDateTime ) {
date = date.toDate();
}
return date;
};
/**
* tokenizer( value, numberParser, properties )
*
* @value [String] string date.
*
* @numberParser [Function]
*
* @properties [Object] output returned by date/tokenizer-properties.
*
* Returns an Array of tokens, eg. value "5 o'clock PM", pattern "h 'o''clock' a":
* [{
* type: "h",
* lexeme: "5"
* }, {
* type: "literal",
* lexeme: " "
* }, {
* type: "literal",
* lexeme: "o'clock"
* }, {
* type: "literal",
* lexeme: " "
* }, {
* type: "a",
* lexeme: "PM",
* value: "pm"
* }]
*
* OBS: lexemes are always String and may hold invalid ranges depending on the token type.
* Eg. "99" for month number.
*
* Return an empty Array when not successfully parsed.
*/
var dateTokenizer = function( value, numberParser, properties ) {
var digitsRe, valid,
tokens = [],
widths = [ "abbreviated", "wide", "narrow" ];
digitsRe = properties.digitsRe;
value = looseMatching( value );
valid = properties.pattern.match( datePatternRe ).every(function( current ) {
var aux, chr, length, numeric, tokenRe,
token = {};
function hourFormatParse( tokenRe, numberParser ) {
var aux, isPositive,
match = value.match( tokenRe );
numberParser = numberParser || function( value ) {
return +value;
};
if ( !match ) {
return false;
}
isPositive = match[ 1 ];
// hourFormat containing H only, e.g., `+H;-H`
if ( match.length < 6 ) {
aux = isPositive ? 1 : 3;
token.value = numberParser( match[ aux ] ) * 60;
// hourFormat containing H and m, e.g., `+HHmm;-HHmm`
} else if ( match.length < 10 ) {
aux = isPositive ? [ 1, 3 ] : [ 5, 7 ];
token.value = numberParser( match[ aux[ 0 ] ] ) * 60 +
numberParser( match[ aux[ 1 ] ] );
// hourFormat containing H, m, and s e.g., `+HHmmss;-HHmmss`
} else {
aux = isPositive ? [ 1, 3, 5 ] : [ 7, 9, 11 ];
token.value = numberParser( match[ aux[ 0 ] ] ) * 60 +
numberParser( match[ aux[ 1 ] ] ) +
numberParser( match[ aux[ 2 ] ] ) / 60;
}
if ( isPositive ) {
token.value *= -1;
}
return true;
}
function oneDigitIfLengthOne() {
if ( length === 1 ) {
// Unicode equivalent to /\d/
numeric = true;
return tokenRe = digitsRe;
}
}
function oneOrTwoDigitsIfLengthOne() {
if ( length === 1 ) {
// Unicode equivalent to /\d\d?/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
}
}
function oneOrTwoDigitsIfLengthOneOrTwo() {
if ( length === 1 || length === 2 ) {
// Unicode equivalent to /\d\d?/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
}
}
function twoDigitsIfLengthTwo() {
if ( length === 2 ) {
// Unicode equivalent to /\d\d/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){2}" );
}
}
// Brute-force test every locale entry in an attempt to match the given value.
// On a match, token.value and tokenRe are set as a side effect
// (note the function itself always returns null).
function lookup( path ) {
var array = properties[ path.join( "/" ) ];
if ( !array ) {
return null;
}
// array of pairs [key, value] sorted by desc value length.
array.some(function( item ) {
var valueRe = item[ 1 ];
if ( valueRe.test( value ) ) {
token.value = item[ 0 ];
tokenRe = item[ 1 ];
return true;
}
});
return null;
}
token.type = current;
chr = current.charAt( 0 );
length = current.length;
if ( chr === "Z" ) {
// Z..ZZZ: same as "xxxx".
if ( length < 4 ) {
chr = "x";
length = 4;
// ZZZZ: same as "OOOO".
} else if ( length < 5 ) {
chr = "O";
length = 4;
// ZZZZZ: same as "XXXXX"
} else {
chr = "X";
length = 5;
}
}
if ( chr === "z" ) {
if ( properties.standardOrDaylightTzName ) {
token.value = null;
tokenRe = properties.standardOrDaylightTzName;
}
}
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}",
// e.g., "Pacific Time".
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "v" ) {
if ( properties.genericTzName ) {
token.value = null;
tokenRe = properties.genericTzName;
// Fall back to "V" format.
} else {
chr = "V";
length = 4;
}
}
if ( chr === "V" && properties.timeZoneName ) {
token.value = length === 2 ? properties.timeZoneName : null;
tokenRe = properties.timeZoneNameRe;
}
switch ( chr ) {
// Era
case "G":
lookup([
"gregorian/eras",
length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
]);
break;
// Year
case "y":
case "Y":
numeric = true;
// number l=1:+, l=2:{2}, l=3:{3,}, l=4:{4,}, ...
if ( length === 1 ) {
// Unicode equivalent to /\d+/.
tokenRe = new RegExp( "^(" + digitsRe.source + ")+" );
} else if ( length === 2 ) {
// Lenient parsing: there's no year pattern to indicate non-zero-padded 2-digits
// year, so parser accepts both zero-padded and non-zero-padded for `yy`.
//
// Unicode equivalent to /\d\d?/
tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
} else {
// Unicode equivalent to /\d{length,}/
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + ",}" );
}
break;
// Quarter
case "Q":
case "q":
// number l=1:{1}, l=2:{2}.
// lookup l=3...
oneDigitIfLengthOne() || twoDigitsIfLengthTwo() ||
lookup([
"gregorian/quarters",
chr === "Q" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
break;
// Month
case "M":
case "L":
// number l=1:{1,2}, l=2:{2}.
// lookup l=3...
//
// Lenient parsing: skeleton "yMd" (i.e., one M) may include MM for the pattern,
// therefore parser accepts both zero-padded and non-zero-padded for M and MM.
// Similar for L.
oneOrTwoDigitsIfLengthOneOrTwo() || lookup([
"gregorian/months",
chr === "M" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
break;
// Day
case "D":
// number {l,3}.
if ( length <= 3 ) {
// Equivalent to /\d{length,3}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + ",3}" );
}
break;
case "W":
case "F":
// number l=1:{1}.
oneDigitIfLengthOne();
break;
// Week day
case "e":
case "c":
// number l=1:{1}, l=2:{2}.
// lookup for length >=3.
if ( length <= 2 ) {
oneDigitIfLengthOne() || twoDigitsIfLengthTwo();
break;
}
/* falls through */
case "E":
if ( length === 6 ) {
// Note: if short day names are not explicitly specified, abbreviated day
// names are used instead http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"short"
]) || lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"abbreviated"
]);
} else {
lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
widths[ length < 3 ? 0 : length - 3 ]
]);
}
break;
// Period (AM or PM)
case "a":
lookup([
"gregorian/dayPeriods/format/wide"
]);
break;
// Week
case "w":
// number l1:{1,2}, l2:{2}.
oneOrTwoDigitsIfLengthOne() || twoDigitsIfLengthTwo();
break;
// Day, Hour, Minute, or Second
case "d":
case "h":
case "H":
case "K":
case "k":
case "j":
case "m":
case "s":
// number l1:{1,2}, l2:{2}.
//
// Lenient parsing:
// - skeleton "hms" (i.e., one m) always includes mm for the pattern, i.e., it's
// impossible to use a different skeleton to parse non-zero-padded minutes,
// therefore parser accepts both zero-padded and non-zero-padded for m. Similar
// for seconds s.
// - skeleton "hms" (i.e., one h) may include h or hh for the pattern, i.e., it's
// impossible to use a different skeleton to parse non-zero-padded hours for some
// locales, therefore parser accepts both zero-padded and non-zero-padded for h.
// Similar for d (in skeleton yMd).
oneOrTwoDigitsIfLengthOneOrTwo();
break;
case "S":
// number {l}.
// Unicode equivalent to /\d{length}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + "}" );
break;
case "A":
// number {l+5}.
// Unicode equivalent to /\d{length+5}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + ( length + 5 ) + "}" );
break;
// Zone
case "v":
case "V":
case "z":
if ( tokenRe && tokenRe.test( value ) ) {
break;
}
if ( chr === "V" && length === 2 ) {
break;
}
/* falls through */
case "O":
// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
if ( value === properties[ "timeZoneNames/gmtZeroFormat" ] ) {
token.value = 0;
tokenRe = properties[ "timeZoneNames/gmtZeroFormatRe" ];
} else {
aux = properties[ "timeZoneNames/hourFormat" ].some(function( hourFormatRe ) {
if ( hourFormatParse( hourFormatRe, numberParser ) ) {
tokenRe = hourFormatRe;
return true;
}
});
if ( !aux ) {
return null;
}
}
break;
case "X":
// Same as x*, except it uses "Z" for zero offset.
if ( value === "Z" ) {
token.value = 0;
tokenRe = /^Z/;
break;
}
/* falls through */
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
aux = properties.x.some(function( hourFormatRe ) {
if ( hourFormatParse( hourFormatRe ) ) {
tokenRe = hourFormatRe;
return true;
}
});
if ( !aux ) {
return null;
}
break;
case "'":
token.type = "literal";
tokenRe = new RegExp( "^" + regexpEscape( removeLiteralQuotes( current ) ) );
break;
default:
token.type = "literal";
tokenRe = new RegExp( "^" + regexpEscape( current ) );
}
if ( !tokenRe ) {
return false;
}
// Get lexeme and consume it.
value = value.replace( tokenRe, function( lexeme ) {
token.lexeme = lexeme;
if ( numeric ) {
token.value = numberParser( lexeme );
}
return "";
});
if ( !token.lexeme ) {
return false;
}
if ( numeric && isNaN( token.value ) ) {
return false;
}
tokens.push( token );
return true;
});
if ( value !== "" ) {
valid = false;
}
return valid ? tokens : [];
};
var dateParserFn = function( numberParser, parseProperties, tokenizerProperties ) {
return function dateParser( value ) {
var tokens;
validateParameterPresence( value, "value" );
validateParameterTypeString( value, "value" );
tokens = dateTokenizer( value, numberParser, tokenizerProperties );
return dateParse( value, tokens, parseProperties ) || null;
};
};
var objectFilter = function( object, testRe ) {
var key,
copy = {};
for ( key in object ) {
if ( testRe.test( key ) ) {
copy[ key ] = object[ key ];
}
}
return copy;
};
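// For example: objectFilter( { am: "AM", pm: "PM", noon: "Noon" }, /^am|^pm/ )
// returns { am: "AM", pm: "PM" }.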
/**
* tokenizerProperties( pattern, cldr )
*
* @pattern [String] raw pattern.
*
* @cldr [Cldr instance].
*
* Return Object with data that will be used by tokenizer.
*/
var dateTokenizerProperties = function( pattern, cldr, timeZone ) {
var digitsReSource,
properties = {
pattern: looseMatching( pattern )
},
timeSeparator = numberSymbol( "timeSeparator", cldr ),
widths = [ "abbreviated", "wide", "narrow" ];
digitsReSource = numberNumberingSystemDigitsMap( cldr );
digitsReSource = digitsReSource ? "[" + digitsReSource + "]" : "\\d";
properties.digitsRe = new RegExp( digitsReSource );
// Transform:
// - "+H;-H" -> /\+(\d\d?)|-(\d\d?)/
// - "+HH;-HH" -> /\+(\d\d)|-(\d\d)/
// - "+HHmm;-HHmm" -> /\+(\d\d)(\d\d)|-(\d\d)(\d\d)/
// - "+HH:mm;-HH:mm" -> /\+(\d\d):(\d\d)|-(\d\d):(\d\d)/
//
// If gmtFormat is GMT{0}, the regexp must fill {0} in each side, e.g.:
// - "+H;-H" -> /GMT\+(\d\d?)|GMT-(\d\d?)/
function hourFormatRe( hourFormat, gmtFormat, digitsReSource, timeSeparator ) {
var re;
if ( !digitsReSource ) {
digitsReSource = "\\d";
}
if ( !gmtFormat ) {
gmtFormat = "{0}";
}
re = hourFormat
.replace( "+", "\\+" )
// Unicode equivalent to (\\d\\d)
.replace( /HH|mm|ss/g, "((" + digitsReSource + "){2})" )
// Unicode equivalent to (\\d\\d?)
.replace( /H|m/g, "((" + digitsReSource + "){1,2})" );
if ( timeSeparator ) {
re = re.replace( /:/g, timeSeparator );
}
re = re.split( ";" ).map(function( part ) {
return gmtFormat.replace( "{0}", part );
}).join( "|" );
return new RegExp( "^" + re );
}
function populateProperties( path, value ) {
// Skip
var skipRe = /(timeZoneNames\/zone|supplemental\/metaZones|timeZoneNames\/metazone|timeZoneNames\/regionFormat|timeZoneNames\/gmtFormat)/;
if ( skipRe.test( path ) ) {
return;
}
if ( !value ) {
return;
}
// Trimming the `dates` and `calendars` prefixes here only serves to shorten the properties' keys.
path = path.replace( /^.*\/dates\//, "" ).replace( /calendars\//, "" );
// Specific filter for "gregorian/dayPeriods/format/wide".
if ( path === "gregorian/dayPeriods/format/wide" ) {
value = objectFilter( value, /^am|^pm/ );
}
// Transform object into array of pairs [key, /value/], sort by desc value length.
if ( isPlainObject( value ) ) {
value = Object.keys( value ).map(function( key ) {
return [ key, new RegExp( "^" + regexpEscape( looseMatching( value[ key ] ) ) ) ];
}).sort(function( a, b ) {
return b[ 1 ].source.length - a[ 1 ].source.length;
});
// If typeof value === "string".
} else {
value = looseMatching( value );
}
properties[ path ] = value;
}
function regexpSourceSomeTerm( terms ) {
return "(" + terms.filter(function( item ) {
return item;
}).reduce(function( memo, item ) {
return memo + "|" + item;
}) + ")";
}
cldr.on( "get", populateProperties );
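// Note: the bare cldr.main() calls below are made for their side effect only — the
// "get" listener registered above captures each fetched path/value into `properties`.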
pattern.match( datePatternRe ).forEach(function( current ) {
var aux, chr, daylightTzName, gmtFormat, length, standardTzName;
chr = current.charAt( 0 );
length = current.length;
if ( chr === "Z" ) {
if ( length < 5 ) {
chr = "O";
length = 4;
} else {
chr = "X";
length = 5;
}
}
// z...zzz: "{shortRegion}", eg. "PST" or "PDT".
// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "z" ) {
standardTzName = dateGetTimeZoneName( length, "standard", timeZone, cldr );
daylightTzName = dateGetTimeZoneName( length, "daylight", timeZone, cldr );
if ( standardTzName ) {
standardTzName = regexpEscape( looseMatching( standardTzName ) );
}
if ( daylightTzName ) {
daylightTzName = regexpEscape( looseMatching( daylightTzName ) );
}
if ( standardTzName || daylightTzName ) {
properties.standardOrDaylightTzName = new RegExp(
"^" + regexpSourceSomeTerm([ standardTzName, daylightTzName ])
);
}
// Fall through the "O" format in case one name is missing.
if ( !standardTzName || !daylightTzName ) {
chr = "O";
if ( length < 4 ) {
length = 1;
}
}
}
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}",
// e.g., "Pacific Time"
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "v" ) {
if ( length !== 1 && length !== 4 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
var genericTzName = dateGetTimeZoneName( length, "generic", timeZone, cldr );
if ( genericTzName ) {
properties.genericTzName = new RegExp(
"^" + regexpEscape( looseMatching( genericTzName ) )
);
chr = "O";
// Fall back to "V" format.
} else {
chr = "V";
length = 4;
}
}
switch ( chr ) {
// Era
case "G":
cldr.main([
"dates/calendars/gregorian/eras",
length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
]);
break;
// Year
case "u": // Extended year. Need to be implemented.
case "U": // Cyclic year name. Need to be implemented.
throw createErrorUnsupportedFeature({
feature: "year pattern `" + chr + "`"
});
// Quarter
case "Q":
case "q":
if ( length > 2 ) {
cldr.main([
"dates/calendars/gregorian/quarters",
chr === "Q" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
}
break;
// Month
case "M":
case "L":
// number l=1:{1,2}, l=2:{2}.
// lookup l=3...
if ( length > 2 ) {
cldr.main([
"dates/calendars/gregorian/months",
chr === "M" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
}
break;
// Day
case "g":
// Modified Julian day. Needs to be implemented.
throw createErrorUnsupportedFeature({
feature: "Julian day pattern `g`"
});
// Week day
case "e":
case "c":
// lookup for length >=3.
if ( length <= 2 ) {
break;
}
/* falls through */
case "E":
if ( length === 6 ) {
// Note: if short day names are not explicitly specified, abbreviated day
// names are used instead http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"short"
]) || cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"abbreviated"
]);
} else {
cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
widths[ length < 3 ? 0 : length - 3 ]
]);
}
break;
// Period (AM or PM)
case "a":
cldr.main(
"dates/calendars/gregorian/dayPeriods/format/wide"
);
break;
// Zone
case "V":
if ( length === 1 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
if ( timeZone ) {
if ( length === 2 ) {
// Skip looseMatching processing since timeZone is a canonical posix value.
properties.timeZoneName = timeZone;
properties.timeZoneNameRe = new RegExp( "^" + regexpEscape( timeZone ) );
break;
}
var timeZoneName,
exemplarCity = cldr.main([
"dates/timeZoneNames/zone", timeZone, "exemplarCity"
]);
if ( length === 3 ) {
if ( !exemplarCity ) {
exemplarCity = cldr.main([
"dates/timeZoneNames/zone/Etc/Unknown/exemplarCity"
]);
}
timeZoneName = exemplarCity;
}
if ( exemplarCity && length === 4 ) {
timeZoneName = formatMessage(
cldr.main(
"dates/timeZoneNames/regionFormat"
),
[ exemplarCity ]
);
}
if ( timeZoneName ) {
timeZoneName = looseMatching( timeZoneName );
properties.timeZoneName = timeZoneName;
properties.timeZoneNameRe = new RegExp(
"^" + regexpEscape( timeZoneName )
);
}
}
if ( current === "v" ) {
length = 1;
}
/* falls through */
case "z":
case "O":
gmtFormat = cldr.main( "dates/timeZoneNames/gmtFormat" );
cldr.main( "dates/timeZoneNames/gmtZeroFormat" );
cldr.main( "dates/timeZoneNames/hourFormat" );
properties[ "timeZoneNames/gmtZeroFormatRe" ] =
new RegExp( "^" + regexpEscape( properties[ "timeZoneNames/gmtZeroFormat" ] ) );
aux = properties[ "timeZoneNames/hourFormat" ];
properties[ "timeZoneNames/hourFormat" ] = (
length < 4 ?
[ dateTimezoneHourFormatHm( aux, "H" ), dateTimezoneHourFormatH( aux ) ] :
[ dateTimezoneHourFormatHm( aux, "HH" ) ]
).map(function( hourFormat ) {
return hourFormatRe(
hourFormat,
gmtFormat,
digitsReSource,
timeSeparator
);
});
/* falls through */
case "X":
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
properties.x = [
[ "+HHmm;-HHmm", "+HH;-HH" ],
[ "+HHmm;-HHmm" ],
[ "+HH:mm;-HH:mm" ],
[ "+HHmmss;-HHmmss", "+HHmm;-HHmm" ],
[ "+HH:mm:ss;-HH:mm:ss", "+HH:mm;-HH:mm" ]
][ length - 1 ].map(function( hourFormat ) {
return hourFormatRe( hourFormat );
});
}
});
cldr.off( "get", populateProperties );
return properties;
};
/**
* dayOfWeek( date, firstDay )
*
* @date
*
* @firstDay the result of `dateFirstDayOfWeek( cldr )`
*
* Return the day of the week normalized by the territory's firstDay [0-6].
* Eg for "mon":
* - return 0 if territory is GB, or BR, or DE, or FR (week starts on "mon");
* - return 1 if territory is US (week starts on "sun");
* - return 2 if territory is EG (week starts on "sat");
*/
var dateDayOfWeek = function( date, firstDay ) {
return ( date.getDay() - firstDay + 7 ) % 7;
};
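// For example, for a date falling on a Monday ( date.getDay() === 1 ):
// dateDayOfWeek( date, 1 ) === 0 (week starts "mon", e.g., GB);
// dateDayOfWeek( date, 0 ) === 1 (week starts "sun", e.g., US).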
/**
* distanceInDays( from, to )
*
* Return the distance in days between from and to Dates.
*/
var dateDistanceInDays = function( from, to ) {
var inDays = 864e5;
return ( to.getTime() - from.getTime() ) / inDays;
};
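// For example: dateDistanceInDays( new Date( 2016, 0, 1 ), new Date( 2016, 0, 31 ) )
// === 30 (the result may be fractional across a DST transition).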
/**
* dayOfYear
*
* Return the distance in days from the beginning of the year to the date [0-d].
*/
var dateDayOfYear = function( date ) {
return Math.floor( dateDistanceInDays( dateStartOf( date, "year" ), date ) );
};
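// For example: dateDayOfYear( new Date( 2016, 1, 1 ) ) === 31, since the result is
// zero-based (Feb 1 is the 32nd day of the year).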
// Invert key and values, e.g., {"year": "yY"} ==> {"y": "year", "Y": "year"}
var dateFieldsMap = objectInvert({
"era": "G",
"year": "yY",
"quarter": "qQ",
"month": "ML",
"week": "wW",
"day": "dDF",
"weekday": "ecE",
"dayperiod": "a",
"hour": "hHkK",
"minute": "m",
"second": "sSA",
"zone": "zvVOxX"
}, function( object, key, value ) {
value.split( "" ).forEach(function( symbol ) {
object[ symbol ] = key;
});
return object;
});
/**
* millisecondsInDay
*/
var dateMillisecondsInDay = function( date ) {
// TODO Handle daylight savings discontinuities
return date - dateStartOf( date, "day" );
};
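// For example, at 00:00:10.500 local time this returns 10500.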
/**
* hourFormat( date, format, timeSeparator, formatNumber )
*
* Return date's timezone offset according to the format passed.
* Eg for format when timezone offset is 180:
* - "+H;-H": -3
* - "+HHmm;-HHmm": -0300
* - "+HH:mm;-HH:mm": -03:00
* - "+HH:mm:ss;-HH:mm:ss": -03:00:00
*/
var dateTimezoneHourFormat = function( date, format, timeSeparator, formatNumber ) {
var absOffset,
offset = date.getTimezoneOffset();
absOffset = Math.abs( offset );
formatNumber = formatNumber || {
1: function( value ) {
return stringPad( value, 1 );
},
2: function( value ) {
return stringPad( value, 2 );
}
};
return format
// Pick the correct sign side (+ or -).
.split( ";" )[ offset > 0 ? 1 : 0 ]
// Localize time separator
.replace( ":", timeSeparator )
// Update hours offset.
.replace( /HH?/, function( match ) {
return formatNumber[ match.length ]( Math.floor( absOffset / 60 ) );
})
// Update minutes offset and return.
.replace( /mm/, function() {
return formatNumber[ 2 ]( Math.floor( absOffset % 60 ) );
})
// Update minutes offset and return.
.replace( /ss/, function() {
return formatNumber[ 2 ]( Math.floor( absOffset % 1 * 60 ) );
});
};
/**
* format( date, properties )
*
* @date [Date instance].
*
* @properties
*
* TODO Support other calendar types.
*
* Disclosure: this function borrows excerpts of dojo/date/locale.
*/
var dateFormat = function( date, numberFormatters, properties ) {
var parts = [];
var timeSeparator = properties.timeSeparator;
// create globalize date with given timezone data
if ( properties.timeZoneData ) {
date = new ZonedDateTime( date, properties.timeZoneData() );
}
properties.pattern.replace( datePatternRe, function( current ) {
var aux, dateField, type, value,
chr = current.charAt( 0 ),
length = current.length;
if ( chr === "j" ) {
// Locale preferred hHKk.
// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
chr = properties.preferredTime;
}
if ( chr === "Z" ) {
// Z..ZZZ: same as "xxxx".
if ( length < 4 ) {
chr = "x";
length = 4;
// ZZZZ: same as "OOOO".
} else if ( length < 5 ) {
chr = "O";
length = 4;
// ZZZZZ: same as "XXXXX"
} else {
chr = "X";
length = 5;
}
}
// z...zzz: "{shortRegion}", e.g., "PST" or "PDT".
// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
if ( chr === "z" ) {
if ( date.isDST ) {
value = date.isDST() ? properties.daylightTzName : properties.standardTzName;
}
// Fall back to "O" format.
if ( !value ) {
chr = "O";
if ( length < 4 ) {
length = 1;
}
}
}
switch ( chr ) {
// Era
case "G":
value = properties.eras[ date.getFullYear() < 0 ? 0 : 1 ];
break;
// Year
case "y":
// Plain year.
// The length specifies the padding, but for two letters it also specifies the
// maximum length.
value = date.getFullYear();
if ( length === 2 ) {
value = String( value );
value = +value.substr( value.length - 2 );
}
break;
case "Y":
// Year in "Week of Year"
// The length specifies the padding, but for two letters it also specifies the
// maximum length.
// yearInWeekofYear = date + DaysInAWeek - (dayOfWeek - firstDay) - minDays
value = new Date( date.getTime() );
value.setDate(
value.getDate() + 7 -
dateDayOfWeek( date, properties.firstDay ) -
properties.firstDay -
properties.minDays
);
value = value.getFullYear();
if ( length === 2 ) {
value = String( value );
value = +value.substr( value.length - 2 );
}
break;
// Quarter
case "Q":
case "q":
value = Math.ceil( ( date.getMonth() + 1 ) / 3 );
if ( length > 2 ) {
value = properties.quarters[ chr ][ length ][ value ];
}
break;
// Month
case "M":
case "L":
value = date.getMonth() + 1;
if ( length > 2 ) {
value = properties.months[ chr ][ length ][ value ];
}
break;
// Week
case "w":
// Week of Year.
// woy = ceil( ( doy + dow of 1/1 ) / 7 ) - minDaysStuff ? 1 : 0.
// TODO should pad on ww? Not documented, but I guess so.
value = dateDayOfWeek( dateStartOf( date, "year" ), properties.firstDay );
value = Math.ceil( ( dateDayOfYear( date ) + value ) / 7 ) -
( 7 - value >= properties.minDays ? 0 : 1 );
break;
case "W":
// Week of Month.
// wom = ceil( ( dom + dow of `1/month` ) / 7 ) - minDaysStuff ? 1 : 0.
value = dateDayOfWeek( dateStartOf( date, "month" ), properties.firstDay );
value = Math.ceil( ( date.getDate() + value ) / 7 ) -
( 7 - value >= properties.minDays ? 0 : 1 );
break;
// Day
case "d":
value = date.getDate();
break;
case "D":
value = dateDayOfYear( date ) + 1;
break;
case "F":
// Day of Week in month. eg. 2nd Wed in July.
value = Math.floor( date.getDate() / 7 ) + 1;
break;
// Week day
case "e":
case "c":
if ( length <= 2 ) {
// Range is [1-7] (deduced from the example provided in the documentation)
// TODO Should pad with zeros (not specified in the docs)?
value = dateDayOfWeek( date, properties.firstDay ) + 1;
break;
}
/* falls through */
case "E":
value = dateWeekDays[ date.getDay() ];
value = properties.days[ chr ][ length ][ value ];
break;
// Period (AM or PM)
case "a":
value = properties.dayPeriods[ date.getHours() < 12 ? "am" : "pm" ];
break;
// Hour
case "h": // 1-12
value = ( date.getHours() % 12 ) || 12;
break;
case "H": // 0-23
value = date.getHours();
break;
case "K": // 0-11
value = date.getHours() % 12;
break;
case "k": // 1-24
value = date.getHours() || 24;
break;
// Minute
case "m":
value = date.getMinutes();
break;
// Second
case "s":
value = date.getSeconds();
break;
case "S":
value = Math.round( date.getMilliseconds() * Math.pow( 10, length - 3 ) );
break;
case "A":
value = Math.round( dateMillisecondsInDay( date ) * Math.pow( 10, length - 3 ) );
break;
// Zone
case "z":
break;
case "v":
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}",
// e.g., "Pacific Time".
if ( properties.genericTzName ) {
value = properties.genericTzName;
break;
}
/* falls through */
case "V":
// VVVV: "{exemplarCity} {Time}", e.g., "Los Angeles Time"
if ( properties.timeZoneName ) {
value = properties.timeZoneName;
break;
}
if ( current === "v" ) {
length = 1;
}
/* falls through */
case "O":
// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
if ( date.getTimezoneOffset() === 0 ) {
value = properties.gmtZeroFormat;
} else {
// If O..OOO and timezone offset has non-zero minutes, show minutes.
if ( length < 4 ) {
aux = date.getTimezoneOffset();
aux = properties.hourFormat[ aux % 60 - aux % 1 === 0 ? 0 : 1 ];
} else {
aux = properties.hourFormat;
}
value = dateTimezoneHourFormat(
date,
aux,
timeSeparator,
numberFormatters
);
value = properties.gmtFormat.replace( /\{0\}/, value );
}
break;
case "X":
// Same as x*, except it uses "Z" for zero offset.
if ( date.getTimezoneOffset() === 0 ) {
value = "Z";
break;
}
/* falls through */
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
aux = date.getTimezoneOffset();
// If x and timezone offset has non-zero minutes, use xx (i.e., show minutes).
if ( length === 1 && aux % 60 - aux % 1 !== 0 ) {
length += 1;
}
// If (xxxx or xxxxx) and timezone offset has zero seconds, use xx or xxx
// respectively (i.e., don't show optional seconds).
if ( ( length === 4 || length === 5 ) && aux % 1 === 0 ) {
length -= 2;
}
value = [
"+HH;-HH",
"+HHmm;-HHmm",
"+HH:mm;-HH:mm",
"+HHmmss;-HHmmss",
"+HH:mm:ss;-HH:mm:ss"
][ length - 1 ];
value = dateTimezoneHourFormat( date, value, ":" );
break;
// timeSeparator
case ":":
value = timeSeparator;
break;
// ' literals.
case "'":
value = removeLiteralQuotes( current );
break;
// Anything else is considered a literal, including [ ,:/.@#], Chinese, Japanese, and
// Arabic characters.
default:
value = current;
}
if ( typeof value === "number" ) {
value = numberFormatters[ length ]( value );
}
dateField = dateFieldsMap[ chr ];
type = dateField ? dateField : "literal";
// Concat two consecutive literals
if ( type === "literal" && parts.length && parts[ parts.length - 1 ].type === "literal" ) {
parts[ parts.length - 1 ].value += value;
return;
}
parts.push( { type: type, value: value } );
});
return parts;
};
var dateToPartsFormatterFn = function( numberFormatters, properties ) {
return function dateToPartsFormatter( value ) {
validateParameterPresence( value, "value" );
validateParameterTypeDate( value, "value" );
return dateFormat( value, numberFormatters, properties );
};
};
function optionsHasStyle( options ) {
return options.skeleton !== undefined ||
options.date !== undefined ||
options.time !== undefined ||
options.datetime !== undefined ||
options.raw !== undefined;
}
function validateRequiredCldr( path, value ) {
validateCldr( path, value, {
skip: [
/dates\/calendars\/gregorian\/dateTimeFormats\/availableFormats/,
/dates\/calendars\/gregorian\/days\/.*\/short/,
/dates\/timeZoneNames\/zone/,
/dates\/timeZoneNames\/metazone/,
/globalize-iana/,
/supplemental\/metaZones/,
/supplemental\/timeData\/(?!001)/,
/supplemental\/weekData\/(?!001)/
]
});
}
function validateOptionsPreset( options ) {
validateOptionsPresetEach( "date", options );
validateOptionsPresetEach( "time", options );
validateOptionsPresetEach( "datetime", options );
}
function validateOptionsPresetEach( type, options ) {
var value = options[ type ];
validate(
"E_INVALID_OPTIONS",
"Invalid `{{type}: \"{value}\"}`.",
value === undefined || [ "short", "medium", "long", "full" ].indexOf( value ) !== -1,
{ type: type, value: value }
);
}
function validateOptionsSkeleton( pattern, skeleton ) {
validate(
"E_INVALID_OPTIONS",
"Invalid `{skeleton: \"{value}\"}` based on provided CLDR.",
skeleton === undefined || ( typeof pattern === "string" && pattern ),
{ type: "skeleton", value: skeleton }
);
}
function validateRequiredIana( timeZone ) {
return function( path, value ) {
if ( !/globalize-iana/.test( path ) ) {
return;
}
validate(
"E_MISSING_IANA_TZ",
"Missing required IANA timezone content for `{timeZone}`: `{path}`.",
value,
{
path: path.replace( /globalize-iana\//, "" ),
timeZone: timeZone
}
);
};
}
/**
* .loadTimeZone( json )
*
* @json [JSON]
*
* Load IANA timezone data.
*/
Globalize.loadTimeZone = function( json ) {
var customData = {
"globalize-iana": json
};
validateParameterPresence( json, "json" );
validateParameterTypePlainObject( json, "json" );
Cldr.load( customData );
};
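// Illustrative usage (the `iana-tz-data` npm package is the companion data source
// suggested by Globalize's docs): Globalize.loadTimeZone( require( "iana-tz-data" ) );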
/**
* .dateFormatter( options )
*
* @options [Object] see date/expand_pattern for more info.
*
* Return a date formatter function (of the form below) according to the given options and the
* default/instance locale.
*
* fn( value )
*
* @value [Date]
*
* Return a function that formats a date according to the given `format` and the default/instance
* locale.
*/
Globalize.dateFormatter =
Globalize.prototype.dateFormatter = function( options ) {
var args, dateToPartsFormatter, returnFn;
validateParameterTypePlainObject( options, "options" );
options = options || {};
if ( !optionsHasStyle( options ) ) {
options.skeleton = "yMd";
}
args = [ options ];
dateToPartsFormatter = this.dateToPartsFormatter( options );
returnFn = dateFormatterFn( dateToPartsFormatter );
runtimeBind( args, this.cldr, returnFn, [ dateToPartsFormatter ] );
return returnFn;
};
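// Illustrative usage sketch (assumes the needed CLDR data was loaded and "en" set up):
// var formatter = Globalize( "en" ).dateFormatter({ skeleton: "yMd" });
// formatter( new Date( 2010, 8, 15 ) ); // "9/15/2010" in "en".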
/**
* .dateToPartsFormatter( options )
*
* @options [Object] see date/expand_pattern for more info.
*
* Return a date formatter function (of the form below) according to the given options and the
* default/instance locale.
*
* fn( value )
*
* @value [Date]
*
* Return a function that formats a date to parts according to the given `format`
* and the default/instance
* locale.
*/
Globalize.dateToPartsFormatter =
Globalize.prototype.dateToPartsFormatter = function( options ) {
var args, cldr, numberFormatters, pad, pattern, properties, returnFn,
timeZone, ianaListener;
validateParameterTypePlainObject( options, "options" );
cldr = this.cldr;
options = options || {};
if ( !optionsHasStyle( options ) ) {
options.skeleton = "yMd";
}
validateOptionsPreset( options );
validateDefaultLocale( cldr );
timeZone = options.timeZone;
validateParameterTypeString( timeZone, "options.timeZone" );
args = [ options ];
cldr.on( "get", validateRequiredCldr );
if ( timeZone ) {
ianaListener = validateRequiredIana( timeZone );
cldr.on( "get", ianaListener );
}
pattern = dateExpandPattern( options, cldr );
validateOptionsSkeleton( pattern, options.skeleton );
properties = dateFormatProperties( pattern, cldr, timeZone );
cldr.off( "get", validateRequiredCldr );
if ( ianaListener ) {
cldr.off( "get", ianaListener );
}
// Create needed number formatters.
numberFormatters = properties.numberFormatters;
delete properties.numberFormatters;
for ( pad in numberFormatters ) {
numberFormatters[ pad ] = this.numberFormatter({
raw: numberFormatters[ pad ]
});
}
returnFn = dateToPartsFormatterFn( numberFormatters, properties );
runtimeBind( args, cldr, returnFn, [ numberFormatters, properties ] );
return returnFn;
};
/**
* .dateParser( options )
*
* @options [Object] see date/expand_pattern for more info.
*
* Return a function that parses a string date according to the given `formats` and the
* default/instance locale.
*/
Globalize.dateParser =
Globalize.prototype.dateParser = function( options ) {
var args, cldr, numberParser, parseProperties, pattern, returnFn, timeZone,
tokenizerProperties;
validateParameterTypePlainObject( options, "options" );
cldr = this.cldr;
options = options || {};
if ( !optionsHasStyle( options ) ) {
options.skeleton = "yMd";
}
validateOptionsPreset( options );
validateDefaultLocale( cldr );
timeZone = options.timeZone;
validateParameterTypeString( timeZone, "options.timeZone" );
args = [ options ];
cldr.on( "get", validateRequiredCldr );
if ( timeZone ) {
cldr.on( "get", validateRequiredIana( timeZone ) );
}
pattern = dateExpandPattern( options, cldr );
validateOptionsSkeleton( pattern, options.skeleton );
tokenizerProperties = dateTokenizerProperties( pattern, cldr, timeZone );
parseProperties = dateParseProperties( cldr, timeZone );
cldr.off( "get", validateRequiredCldr );
if ( timeZone ) {
cldr.off( "get", validateRequiredIana( timeZone ) );
}
numberParser = this.numberParser({ raw: "0" });
returnFn = dateParserFn( numberParser, parseProperties, tokenizerProperties );
runtimeBind( args, cldr, returnFn, [ numberParser, parseProperties, tokenizerProperties ] );
return returnFn;
};
/**
* .formatDate( value, options )
*
* @value [Date]
*
* @options [Object] see date/expand_pattern for more info.
*
* Formats a date or number according to the given options string and the default/instance locale.
*/
Globalize.formatDate =
Globalize.prototype.formatDate = function( value, options ) {
validateParameterPresence( value, "value" );
validateParameterTypeDate( value, "value" );
return this.dateFormatter( options )( value );
};
/**
* .formatDateToParts( value, options )
*
* @value [Date]
*
* @options [Object] see date/expand_pattern for more info.
*
* Formats a date or number to parts according to the given options and the default/instance locale.
*/
Globalize.formatDateToParts =
Globalize.prototype.formatDateToParts = function( value, options ) {
validateParameterPresence( value, "value" );
validateParameterTypeDate( value, "value" );
return this.dateToPartsFormatter( options )( value );
};
/**
* .parseDate( value, options )
*
* @value [String]
*
* @options [Object] see date/expand_pattern for more info.
*
* Return a Date instance or null.
*/
Globalize.parseDate =
Globalize.prototype.parseDate = function( value, options ) {
validateParameterPresence( value, "value" );
validateParameterTypeString( value, "value" );
return this.dateParser( options )( value );
};
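// Illustrative usage sketch (same assumptions as the formatter example above):
// Globalize( "en" ).parseDate( "9/15/2010", { skeleton: "yMd" } ); // Date, or null.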
return Globalize;
}));<|fim▁end|> | * G: 0
* y: 1 |
<|file_name|>sleepcr.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `SLEEPCR` reader"]
pub struct R(crate::R<SLEEPCR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SLEEPCR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SLEEPCR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SLEEPCR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `SLEEPCR` writer"]
pub struct W(crate::W<SLEEPCR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SLEEPCR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SLEEPCR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SLEEPCR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "System Clock Selection Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSSEL_A {
#[doc = "0: fOFI clock"]
VALUE1 = 0,
#[doc = "1: fPLL clock"]
VALUE2 = 1,
}
impl From<SYSSEL_A> for bool {
#[inline(always)]
fn from(variant: SYSSEL_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SYSSEL` reader - System Clock Selection Value"]
pub struct SYSSEL_R(crate::FieldReader<bool, SYSSEL_A>);
impl SYSSEL_R {
pub(crate) fn new(bits: bool) -> Self {
SYSSEL_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SYSSEL_A {
match self.bits {
false => SYSSEL_A::VALUE1,
true => SYSSEL_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == SYSSEL_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == SYSSEL_A::VALUE2
}
}
impl core::ops::Deref for SYSSEL_R {
type Target = crate::FieldReader<bool, SYSSEL_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SYSSEL` writer - System Clock Selection Value"]
pub struct SYSSEL_W<'a> {
w: &'a mut W,
}
impl<'a> SYSSEL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYSSEL_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "fOFI clock"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(SYSSEL_A::VALUE1)
}
#[doc = "fPLL clock"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(SYSSEL_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
#[doc = "USB Clock Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBCR_A {
#[doc = "0: Disable"]
VALUE1 = 0,
#[doc = "1: Enable"]
VALUE2 = 1,
}
impl From<USBCR_A> for bool {
#[inline(always)]
fn from(variant: USBCR_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBCR` reader - USB Clock Control"]
pub struct USBCR_R(crate::FieldReader<bool, USBCR_A>);
impl USBCR_R {
pub(crate) fn new(bits: bool) -> Self {
USBCR_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USBCR_A {
match self.bits {
false => USBCR_A::VALUE1,
true => USBCR_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == USBCR_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == USBCR_A::VALUE2
}
}
impl core::ops::Deref for USBCR_R {
type Target = crate::FieldReader<bool, USBCR_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `USBCR` writer - USB Clock Control"]
pub struct USBCR_W<'a> {
w: &'a mut W,
}
impl<'a> USBCR_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: USBCR_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(USBCR_A::VALUE1)
}
#[doc = "Enable"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(USBCR_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16);
self.w
}
}
#[doc = "CCU Clock Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CCUCR_A {
#[doc = "0: Disable"]
VALUE1 = 0,
#[doc = "1: Enable"]
VALUE2 = 1,
}
impl From<CCUCR_A> for bool {
#[inline(always)]
fn from(variant: CCUCR_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `CCUCR` reader - CCU Clock Control"]
pub struct CCUCR_R(crate::FieldReader<bool, CCUCR_A>);
impl CCUCR_R {
pub(crate) fn new(bits: bool) -> Self {
CCUCR_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CCUCR_A {
match self.bits {
false => CCUCR_A::VALUE1,
true => CCUCR_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == CCUCR_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == CCUCR_A::VALUE2
}
}
impl core::ops::Deref for CCUCR_R {
type Target = crate::FieldReader<bool, CCUCR_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CCUCR` writer - CCU Clock Control"]
pub struct CCUCR_W<'a> {
w: &'a mut W,
}
impl<'a> CCUCR_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CCUCR_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(CCUCR_A::VALUE1)
}
#[doc = "Enable"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(CCUCR_A::VALUE2)<|fim▁hole|> #[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | ((value as u32 & 0x01) << 20);
self.w
}
}
#[doc = "WDT Clock Control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum WDTCR_A {
#[doc = "0: Disable"]
VALUE1 = 0,
#[doc = "1: Enable"]
VALUE2 = 1,
}
impl From<WDTCR_A> for bool {
#[inline(always)]
fn from(variant: WDTCR_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `WDTCR` reader - WDT Clock Control"]
pub struct WDTCR_R(crate::FieldReader<bool, WDTCR_A>);
impl WDTCR_R {
pub(crate) fn new(bits: bool) -> Self {
WDTCR_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WDTCR_A {
match self.bits {
false => WDTCR_A::VALUE1,
true => WDTCR_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == WDTCR_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == WDTCR_A::VALUE2
}
}
impl core::ops::Deref for WDTCR_R {
type Target = crate::FieldReader<bool, WDTCR_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `WDTCR` writer - WDT Clock Control"]
pub struct WDTCR_W<'a> {
w: &'a mut W,
}
impl<'a> WDTCR_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WDTCR_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(WDTCR_A::VALUE1)
}
#[doc = "Enable"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(WDTCR_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | ((value as u32 & 0x01) << 21);
self.w
}
}
impl R {
#[doc = "Bit 0 - System Clock Selection Value"]
#[inline(always)]
pub fn syssel(&self) -> SYSSEL_R {
SYSSEL_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 16 - USB Clock Control"]
#[inline(always)]
pub fn usbcr(&self) -> USBCR_R {
USBCR_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 20 - CCU Clock Control"]
#[inline(always)]
pub fn ccucr(&self) -> CCUCR_R {
CCUCR_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 21 - WDT Clock Control"]
#[inline(always)]
pub fn wdtcr(&self) -> WDTCR_R {
WDTCR_R::new(((self.bits >> 21) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - System Clock Selection Value"]
#[inline(always)]
pub fn syssel(&mut self) -> SYSSEL_W {
SYSSEL_W { w: self }
}
#[doc = "Bit 16 - USB Clock Control"]
#[inline(always)]
pub fn usbcr(&mut self) -> USBCR_W {
USBCR_W { w: self }
}
#[doc = "Bit 20 - CCU Clock Control"]
#[inline(always)]
pub fn ccucr(&mut self) -> CCUCR_W {
CCUCR_W { w: self }
}
#[doc = "Bit 21 - WDT Clock Control"]
#[inline(always)]
pub fn wdtcr(&mut self) -> WDTCR_W {
WDTCR_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
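// Illustrative usage sketch (hypothetical `scu` peripheral handle from the generated
// crate; the register and field accessors match the definitions above):
// scu.sleepcr.modify(|_, w| w.syssel().value2().wdtcr().value2());
// let pll_selected = scu.sleepcr.read().syssel().is_value2();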
#[doc = "Sleep Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sleepcr](index.html) module"]
pub struct SLEEPCR_SPEC;
impl crate::RegisterSpec for SLEEPCR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [sleepcr::R](R) reader structure"]
impl crate::Readable for SLEEPCR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [sleepcr::W](W) writer structure"]
impl crate::Writable for SLEEPCR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets SLEEPCR to value 0"]
impl crate::Resettable for SLEEPCR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | }
#[doc = r"Sets the field bit"] |
<|file_name|>ios.py<|end_file_name|><|fim▁begin|>"""
ios.py
Handle arguments, configuration file
@author: K.Edeline
"""
import sys
import argparse
import configparser
import logging
import shutil
class IOManager(object):
"""
Base I/O manager: handles arguments, configuration file, and logging. Extend me.
"""
#DEFAULT_CONFIG_LOC="/tmp/deploypl.ini"
PKG_FILE = "packages.txt"
def __init__(self, child=None, **kwargs):
super().__init__(**kwargs)
if child == None:
raise IOSException("Child class not found")
self.child = child
self.args = None
self.config = None
self.logger = None
def load_inputs(self):
self.arguments()
if "start" in self.args.cmd:
self.configuration()
def load_outputs(self, decoy=False):
self.log(decoy=decoy)
########################################################
# ARGPARSE
########################################################
def arguments(self):
"""
Parse arguments
Used mostly to provide the location of the config file.
"""
parser = argparse.ArgumentParser(description='PlanetLab C&C server')
parser.add_argument('cmd', type=str,
choices=["start", "stop", "restart", "status"])
parser.add_argument('-l' , '--log-file', type=str, default="deploypl.log",
help='log file location (default: deploypl.log)')
parser.add_argument('-c' , '--config', type=str,
#default=IOManager.DEFAULT_CONFIG_LOC,
help='configuration file location')
parser.add_argument('-d' , '--debug', action='store_true',
help='increase log output level')
parser.add_argument('-v' , '--verbose', action='store_true',
help='with status: print node descriptions')
parser.add_argument('-vv' , '--vverbose', action='store_true',
help='print info about non-usable nodes')
parser.add_argument('-n' , '--names', action='store_true',
help='with status: print node names instead of addresses')
self.args = parser.parse_args()
return self.args
########################################################
# CONFIGPARSER
########################################################
def configuration(self):
"""
Parse configuration file
"""
if self.args is None or self.args.config is None:
raise IOManagerException("Arguments not found")
self.config = configparser.ConfigParser()
parsed = self.config.read(self.args.config)
if not parsed:
print("Configuration file not found:", self.args.config)
sys.exit(1)
# copy cfg file to /tmp/
#if self.args.config != IOManager.DEFAULT_CONFIG_LOC:
# shutil.copyfile(self.args.config, IOManager.DEFAULT_CONFIG_LOC)
# Load config
self._load_config()
return self.config
def _load_config(self):
"""
Load configuration
"""
self.slice = self.config["core"]["slice"]
self.user = self.config["core"]["user"]
# PL settings
self._nodedir = self._to_absolute(self.config["core"]["nodes_dir"])
self._datadir = self._to_absolute(self.config["core"]["data_dir"])
self._logdir = self._to_absolute(self.config["core"]["log_dir"])
self._rawfile = self._to_absolute(self.config["core"]["raw_nodes"],
root=self._nodedir)
self.userdir = self._to_absolute(self.user, root=self._logdir)
self.pkgfile = self._to_absolute(IOManager.PKG_FILE, root=self.userdir)
self.threadlimit = int(self.config["core"]["thread_limit"])
self.sshlimit = int(self.config["core"]["ssh_limit"])
self.sshkeyloc = self.config["core"]["ssh_keyloc"]
self.period = int(self.config["core"]["probing_period"])
self.initialdelay = (self.config["core"]["initial_delay"] == 'yes')
self._package_list()
def _package_list(self):
"""
load pkg list from file
"""
self.pkglist = []
if not self.userdir:
return
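# predicate: keep non-empty lines that are not ';'-prefixed comments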
def pkgs(line):
return (line and not line.startswith(';'))
with open(self.pkgfile, 'r') as f:
lines = map(str.rstrip, f.readlines())
self.pkglist = list(filter(pkgs, lines))
def _to_absolute(self, path, root=None):
"""
Convert path to absolute if it's not already
"""
if not path:
return None
if path.startswith("/"):
return path
if not root:
root = self.cwd
return "/".join([root, path])
########################################################
# LOGGING
########################################################
def log(self, decoy=False, console=False, logfile=True, errfile=False):
"""
load logging facility
"""
if decoy:
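# decoy mode: swap every log method for a no-op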
decoy_logger = lambda _: None
self.debug = self.info \
= self.warn \
= self.error \
= self.critical \
= decoy_logger
return
if self.args is None:
raise IOManagerException("Arguments not found")
if self.config is None:
raise IOManagerException("Configuration not found")
# create logger
self.logger = logging.getLogger(self.child.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
# console handler and set level to debug
if console:
ch = logging.StreamHandler()
ch.setLevel(logging.INFO if self.args.debug else logging.ERROR)
# XXX
#filehandler = logging.handlers.TimedRotatingFileHandler('/tmp/daemon.log',
# when='midnight',interval=1,backupCount=10)
# log file handler
if logfile:
fh = logging.FileHandler(self._to_absolute(self.args.log_file,
root=self._logdir))
fh.setLevel(logging.DEBUG if self.args.debug else logging.INFO)
# error file handler
if errfile:
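# note: expects an --error-file argument that arguments() does not define yet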
eh = logging.FileHandler(self._to_absolute(self.args.error_file,
root=self._logdir))
eh.setLevel(logging.ERROR)
# add formatter to handlers & handlers to logger
formatter = logging.Formatter("%(asctime)s : %(levelname)-5s : %(message)s",
"%Y-%m-%d %H:%M:%S")
if console:
ch.setFormatter(formatter)
self.logger.addHandler(ch)
if logfile:
fh.setFormatter(formatter)
self.logger.addHandler(fh)
if errfile:
eh.setFormatter(formatter)
self.logger.addHandler(eh)
# log functions
self.debug = self.logger.debug
self.info = self.logger.info
self.warn = self.logger.warning
self.error = self.logger.error
self.critical = self.logger.critical
return self.logger
class IOManagerException(Exception):
"""
IOManagerException(Exception)
"""
def __init__(self, value):
self.value = value
def __str__(self):<|fim▁hole|> return repr(self.value)<|fim▁end|> | |
<|file_name|>rc.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! Task-local reference-counted boxes (`Rc` type)
The `Rc` type provides shared ownership of an immutable value. Destruction is
deterministic, and will occur as soon as the last owner is gone. It is marked
as non-sendable because it avoids the overhead of atomic reference counting.
The `downgrade` method can be used to create a non-owning `Weak` pointer to the
box. A `Weak` pointer can be upgraded to an `Rc` pointer, but will return
`None` if the value has already been freed.
For example, a tree with parent pointers can be represented by putting the
nodes behind strong `Rc` pointers, and then storing the parent pointers as
`Weak` pointers.
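For instance, a minimal sketch of that layout (a hypothetical `Node` type, in
the same pre-1.0 style as the examples below) could be:
```rust
use std::rc::{Rc, Weak};
use std::cell::RefCell;
struct Node {
value: int,
parent: RefCell<Option<Weak<Node>>>, // Weak: does not keep the parent alive
children: RefCell<Vec<Rc<Node>>> // Rc: keeps the children alive
}
```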
## Examples
Consider a scenario where a set of Gadgets are owned by a given Owner. We want
to have our Gadgets point to their Owner. We can't do this with unique
ownership, because more than one gadget may belong to the same Owner. Rc
allows us to share an Owner between multiple Gadgets, and have the Owner kept
alive as long as any Gadget points at it.
```rust
use std::rc::Rc;
struct Owner {
name: String
// ...other fields
}
struct Gadget {
id: int,
owner: Rc<Owner>
// ...other fields
}
fn main() {
// Create a reference counted Owner.
let gadget_owner : Rc<Owner> = Rc::new(
Owner { name: String::from_str("Gadget Man") }
);
// Create Gadgets belonging to gadget_owner. To increment the reference
// count we clone the Rc object.
let gadget1 = Gadget { id: 1, owner: gadget_owner.clone() };
let gadget2 = Gadget { id: 2, owner: gadget_owner.clone() };
drop(gadget_owner);
// Despite dropping gadget_owner, we're still able to print out the name of
// the Owner of the Gadgets. This is because we've only dropped the
// reference count object, not the Owner it wraps. As long as there are
// other Rc objects pointing at the same Owner, it will stay alive. Notice
// that the Rc wrapper around Gadget.owner gets automatically dereferenced
// for us.
println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
// At the end of the method, gadget1 and gadget2 get destroyed, and with
// them the last counted references to our Owner. Gadget Man now gets
// destroyed as well.
}
```
If our requirements change, and we also need to be able to traverse from
Owner->Gadget, we will run into problems: an Rc pointer from Owner->Gadget
introduces a cycle between the objects. This means that their reference counts
can never reach 0, and the objects will stay alive: a memory leak. In order to
get around this, we can use `Weak` pointers. These are reference counted
pointers that don't keep an object alive if there are no normal `Rc` (or
*strong*) pointers left.
Rust actually makes it somewhat difficult to produce this loop in the first
place: in order to end up with two objects that point at each other, one of
them needs to be mutable. This is problematic because Rc enforces memory
safety by only giving out shared references to the object it wraps, and these
don't allow direct mutation. We need to wrap the part of the object we wish to
mutate in a `RefCell`, which provides *interior mutability*: a method to
achieve mutability through a shared reference. `RefCell` enforces Rust's
borrowing rules at runtime. Read the `Cell` documentation for more details on
interior mutability.
```rust
use std::rc::Rc;
use std::rc::Weak;
use std::cell::RefCell;
struct Owner {
name: String,
gadgets: RefCell<Vec<Weak<Gadget>>>
// ...other fields
}
struct Gadget {
id: int,
owner: Rc<Owner>
// ...other fields
}
fn main() {
// Create a reference counted Owner. Note the fact that we've put the
// Owner's vector of Gadgets inside a RefCell so that we can mutate it
// through a shared reference.
let gadget_owner : Rc<Owner> = Rc::new(
Owner {
name: "Gadget Man".to_string(),
gadgets: RefCell::new(Vec::new())
}
);
// Create Gadgets belonging to gadget_owner as before.
let gadget1 = Rc::new(Gadget{id: 1, owner: gadget_owner.clone()});
let gadget2 = Rc::new(Gadget{id: 2, owner: gadget_owner.clone()});
// Add the Gadgets to their Owner. To do this we mutably borrow from
// the RefCell holding the Owner's Gadgets.
gadget_owner.gadgets.borrow_mut().push(gadget1.clone().downgrade());
gadget_owner.gadgets.borrow_mut().push(gadget2.clone().downgrade());
// Iterate over our Gadgets, printing their details out
for gadget_opt in gadget_owner.gadgets.borrow().iter() {
// gadget_opt is a Weak<Gadget>. Since weak pointers can't guarantee
// that their object is still alive, we need to call upgrade() on them
// to turn them into a strong reference. This returns an Option, which
// contains a reference to our object if it still exists.
let gadget = gadget_opt.upgrade().unwrap();
println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
}
// At the end of the method, gadget_owner, gadget1 and gadget2 get
// destroyed. There are now no strong (Rc) references to the gadgets.
// Once they get destroyed, the Gadgets get destroyed. This zeroes the
// reference count on Gadget Man, so he gets destroyed as well.
}
```
*/
use core::mem::transmute;
use core::cell::Cell;
use core::clone::Clone;
use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
use core::default::Default;
use core::kinds::marker;
use core::ops::{Deref, Drop};
use core::option::{Option, Some, None};
use core::ptr;
use core::ptr::RawPtr;
use core::mem::{min_align_of, size_of};
use core::fmt;
use heap::deallocate;
struct RcBox<T> {
value: T,
strong: Cell<uint>,
weak: Cell<uint>
}
/// Immutable reference counted pointer type
#[unsafe_no_drop_flag]
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: *mut RcBox<T>,
_nosend: marker::NoSend,
_noshare: marker::NoShare
}
impl<T> Rc<T> {
/// Construct a new reference-counted box
pub fn new(value: T) -> Rc<T> {
unsafe {
Rc {
// there is an implicit weak pointer owned by all the
// strong pointers, which ensures that the weak
// destructor never frees the allocation while the
// strong destructor is running, even if the weak
// pointer is stored inside the strong one.
_ptr: transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
}),
_nosend: marker::NoSend,
_noshare: marker::NoShare
}
}
}
}
impl<T> Rc<T> {
/// Downgrade the reference-counted pointer to a weak reference
pub fn downgrade(&self) -> Weak<T> {
self.inc_weak();
Weak {
_ptr: self._ptr,
_nosend: marker::NoSend,
_noshare: marker::NoShare
}
}
}
impl<T: Clone> Rc<T> {
/// Acquires a mutable pointer to the inner contents by guaranteeing that
/// the reference count is one (no sharing is possible).<|fim▁hole|> ///
/// This is also referred to as a copy-on-write operation because the inner
/// data is cloned if the reference count is greater than one.
#[inline]
#[experimental]
pub fn make_unique<'a>(&'a mut self) -> &'a mut T {
// Note that we hold a strong reference, which also counts as
// a weak reference, so we only clone if there is an
// additional reference of either kind.
if self.strong() != 1 || self.weak() != 1 {
*self = Rc::new(self.deref().clone())
}
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
// reference count is guaranteed to be 1 at this point, and we required
// the Rc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
let inner = unsafe { &mut *self._ptr };
&mut inner.value
}
}
impl<T> Deref<T> for Rc<T> {
/// Borrow the value contained in the reference-counted box
#[inline(always)]
fn deref<'a>(&'a self) -> &'a T {
&self.inner().value
}
}
#[unsafe_destructor]
impl<T> Drop for Rc<T> {
fn drop(&mut self) {
unsafe {
if !self._ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
ptr::read(self.deref()); // destroy the contained object
// remove the implicit "strong weak" pointer now
// that we've destroyed the contents.
self.dec_weak();
if self.weak() == 0 {
deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
}
}
}
}
#[unstable]
impl<T> Clone for Rc<T> {
#[inline]
fn clone(&self) -> Rc<T> {
self.inc_strong();
Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
}
}
impl<T: Default> Default for Rc<T> {
#[inline]
fn default() -> Rc<T> {
Rc::new(Default::default())
}
}
impl<T: PartialEq> PartialEq for Rc<T> {
#[inline(always)]
fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
#[inline(always)]
fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
}
impl<T: Eq> Eq for Rc<T> {}
impl<T: PartialOrd> PartialOrd for Rc<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
}
impl<T: Ord> Ord for Rc<T> {
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
}
impl<T: fmt::Show> fmt::Show for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
(**self).fmt(f)
}
}
/// Weak reference to a reference-counted box
#[unsafe_no_drop_flag]
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: *mut RcBox<T>,
_nosend: marker::NoSend,
_noshare: marker::NoShare
}
impl<T> Weak<T> {
/// Upgrade a weak reference to a strong reference
pub fn upgrade(&self) -> Option<Rc<T>> {
if self.strong() == 0 {
None
} else {
self.inc_strong();
Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
}
}
}
#[unsafe_destructor]
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
if !self._ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to
// zero if all the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
}
}
}
#[unstable]
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
self.inc_weak();
Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
}
}
#[doc(hidden)]
trait RcBoxPtr<T> {
fn inner<'a>(&'a self) -> &'a RcBox<T>;
#[inline]
fn strong(&self) -> uint { self.inner().strong.get() }
#[inline]
fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
#[inline]
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
#[inline]
fn weak(&self) -> uint { self.inner().weak.get() }
#[inline]
fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
#[inline]
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
}
impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
}
impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
}
#[cfg(test)]
#[allow(experimental)]
mod tests {
use super::{Rc, Weak};
use std::cell::RefCell;
use std::option::{Option, Some, None};
use std::mem::drop;
use std::clone::Clone;
#[test]
fn test_clone() {
let x = Rc::new(RefCell::new(5i));
let y = x.clone();
*x.borrow_mut() = 20;
assert_eq!(*y.borrow(), 20);
}
#[test]
fn test_simple() {
let x = Rc::new(5i);
assert_eq!(*x, 5);
}
#[test]
fn test_simple_clone() {
let x = Rc::new(5i);
let y = x.clone();
assert_eq!(*x, 5);
assert_eq!(*y, 5);
}
#[test]
fn test_destructor() {
let x = Rc::new(box 5i);
assert_eq!(**x, 5);
}
#[test]
fn test_live() {
let x = Rc::new(5i);
let y = x.downgrade();
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Rc::new(5i);
let y = x.downgrade();
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn gc_inside() {
// see issue #11532
use std::gc::GC;
let a = Rc::new(RefCell::new(box(GC) 1i));
assert!(a.try_borrow_mut().is_some());
}
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: RefCell<Option<Weak<Cycle>>>
}
let a = Rc::new(Cycle { x: RefCell::new(None) });
let b = a.clone().downgrade();
*a.x.borrow_mut() = Some(b);
// hopefully we don't double-free (or leak)...
}
#[test]
fn test_cowrc_clone_make_unique() {
let mut cow0 = Rc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 != *cow2);
}
#[test]
fn test_cowrc_clone_unique2() {
let mut cow0 = Rc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
}
#[test]
fn test_cowrc_clone_weak() {
let mut cow0 = Rc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
}
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod file; |
<|file_name|>directive.d.ts<|end_file_name|><|fim▁begin|>import { OnInit, SimpleChanges, OnChanges } from '@angular/core';
import { Validator, AbstractControl } from '@angular/forms';
export declare class NotEqualValidator implements Validator, OnInit, OnChanges {
notEqual: any;<|fim▁hole|> ngOnChanges(changes: SimpleChanges): void;
validate(c: AbstractControl): {
[key: string]: any;
};
registerOnValidatorChange(fn: () => void): void;
}<|fim▁end|> | private validator;
private onChange;
ngOnInit(): void; |
<|file_name|>test_order.py<|end_file_name|><|fim▁begin|>import string
import unittest
import datetime
import collections
from unittest import mock
from flumine.order.order import (
BaseOrder,
BetfairOrder,
ExchangeType,
OrderTypes,
OrderStatus,
VALID_BETFAIR_CUSTOMER_ORDER_REF_CHARACTERS,
LIVE_STATUS,
COMPLETE_STATUS,
)
from flumine.exceptions import OrderUpdateError
class BaseOrderTest(unittest.TestCase):
def setUp(self) -> None:
mock_client = mock.Mock(paper_trade=False)
self.mock_trade = mock.Mock(
client=mock_client, market_id="1.1", selection_id=123, info={}
)
self.mock_order_type = mock.Mock(info={})
self.order = BaseOrder(
self.mock_trade, "BACK", self.mock_order_type, 1, context={1: 2}
)
def test_init(self):
self.assertIsNotNone(self.order.id)
self.assertEqual(self.order.trade, self.mock_trade)
self.assertEqual(self.order.side, "BACK")
self.assertEqual(self.order.order_type, self.mock_order_type)
self.assertEqual(self.order.selection_id, self.mock_trade.selection_id)
self.assertEqual(self.order.handicap, 1)
self.assertEqual(
self.order.lookup,
(self.order.market_id, self.order.selection_id, self.order.handicap),
)
self.assertIsNone(self.order.runner_status)
self.assertIsNone(self.order.market_type)
self.assertEqual(self.order.each_way_divisor, 1)
self.assertIsNone(self.order.status)
self.assertFalse(self.order.complete)
self.assertEqual(self.order.status_log, [])
self.assertIsNone(self.order.violation_msg)
self.assertEqual(self.order.context, {1: 2})
self.assertEqual(self.order.notes, {})
self.assertIsNone(self.order.market_notes)
self.assertIsNone(self.order.bet_id)
self.assertIsNone(self.order.EXCHANGE)
self.assertEqual(self.order.update_data, {})
self.assertIsNone(self.order.publish_time)
self.assertIsNone(self.order.market_version)
self.assertIsNone(self.order.async_)
self.assertIsNotNone(self.order.date_time_created)
self.assertIsNone(self.order.date_time_execution_complete)
self.assertFalse(self.order.simulated)
self.assertFalse(self.order._simulated)
self.assertEqual(
LIVE_STATUS,
[
OrderStatus.PENDING,
OrderStatus.CANCELLING,
OrderStatus.UPDATING,
OrderStatus.REPLACING,
OrderStatus.EXECUTABLE,
],
)
self.assertEqual(
COMPLETE_STATUS,
[
OrderStatus.EXECUTION_COMPLETE,
OrderStatus.EXPIRED,
OrderStatus.VIOLATION,
],
)
@mock.patch("flumine.order.order.BaseOrder._is_complete")
@mock.patch("flumine.order.order.BaseOrder.info")
def test__update_status(self, mock_info, mock__is_complete):
self.mock_trade.complete = True
self.order._update_status(OrderStatus.EXECUTION_COMPLETE)
self.assertEqual(self.order.status_log, [OrderStatus.EXECUTION_COMPLETE])
self.assertEqual(self.order.status, OrderStatus.EXECUTION_COMPLETE)
self.mock_trade.complete_trade.assert_called()
mock__is_complete.assert_called()
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_placing(self, mock__update_status):
self.order.placing()
mock__update_status.assert_called_with(OrderStatus.PENDING)
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_executable(self, mock__update_status):
self.order.update_data = {123: 456}
self.order.executable()
mock__update_status.assert_called_with(OrderStatus.EXECUTABLE)
self.assertEqual(self.order.update_data, {})
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_execution_complete(self, mock__update_status):
self.order.update_data = {123: 456}
self.order.execution_complete()
mock__update_status.assert_called_with(OrderStatus.EXECUTION_COMPLETE)
self.assertIsNotNone(self.order.date_time_execution_complete)
self.assertEqual(self.order.update_data, {})
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_cancelling(self, mock__update_status):
self.order.cancelling()
mock__update_status.assert_called_with(OrderStatus.CANCELLING)
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_updating(self, mock__update_status):
self.order.updating()
mock__update_status.assert_called_with(OrderStatus.UPDATING)
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_replacing(self, mock__update_status):
self.order.replacing()
mock__update_status.assert_called_with(OrderStatus.REPLACING)
@mock.patch("flumine.order.order.BaseOrder._update_status")
def test_violation(self, mock__update_status):
self.order.update_data = {123: 456}
self.order.violation("the murder capital")
mock__update_status.assert_called_with(OrderStatus.VIOLATION)
self.assertEqual(self.order.update_data, {})
self.assertEqual(self.order.violation_msg, "the murder capital")
def test_place(self):
with self.assertRaises(NotImplementedError):
self.order.place(123, 456, False)
def test_cancel(self):
with self.assertRaises(NotImplementedError):
self.order.cancel()
def test_update(self):
with self.assertRaises(NotImplementedError):
self.order.update("PERSIST")
def test_replace(self):
with self.assertRaises(NotImplementedError):
self.order.replace(20.0)
def test_create_place_instruction(self):
with self.assertRaises(NotImplementedError):
self.order.create_place_instruction()
def test_create_cancel_instruction(self):
with self.assertRaises(NotImplementedError):
self.order.create_cancel_instruction()
def test_create_update_instruction(self):
with self.assertRaises(NotImplementedError):
self.order.create_update_instruction()
def test_create_replace_instruction(self):
with self.assertRaises(NotImplementedError):
self.order.create_replace_instruction()
def test_update_current_order(self):
mock_current_order = mock.Mock()
self.order.update_current_order(mock_current_order)
self.assertEqual(self.order.responses.current_order, mock_current_order)
def test_current_order(self):
self.assertIsNone(self.order.current_order)
mock_responses = mock.Mock()
mock_responses.current_order = None
self.order.responses = mock_responses
self.assertEqual(self.order.current_order, mock_responses.place_response)
mock_responses.current_order = 1
self.assertEqual(self.order.current_order, 1)
@mock.patch("flumine.backtest.simulated.config")
def test_current_order_simulated(self, mock_config):
mock_config.simulated = True
order = BaseOrder(mock.Mock(), "", mock.Mock())
self.assertTrue(order.simulated)
self.assertTrue(order._simulated)
def test__is_complete(self):
self.order.status = None
self.assertFalse(self.order._is_complete())
for s in [
OrderStatus.PENDING,
OrderStatus.CANCELLING,
OrderStatus.UPDATING,
OrderStatus.REPLACING,
OrderStatus.EXECUTABLE,
]:
self.order.status = s
self.assertFalse(self.order._is_complete())
for s in [
OrderStatus.EXECUTION_COMPLETE,
OrderStatus.EXPIRED,
OrderStatus.VIOLATION,
]:
self.order.status = s
self.assertTrue(self.order._is_complete())
def test_average_price_matched(self):
with self.assertRaises(NotImplementedError):
assert self.order.average_price_matched
def test_size_matched(self):
with self.assertRaises(NotImplementedError):
assert self.order.size_matched
def test_size_remaining(self):
with self.assertRaises(NotImplementedError):
assert self.order.size_remaining
def test_size_cancelled(self):
with self.assertRaises(NotImplementedError):
assert self.order.size_cancelled
def test_size_lapsed(self):
with self.assertRaises(NotImplementedError):
assert self.order.size_lapsed
def test_size_voided(self):
with self.assertRaises(NotImplementedError):
assert self.order.size_voided
def test_elapsed_seconds(self):
self.assertIsNone(self.order.elapsed_seconds)
mock_responses = mock.Mock()
mock_responses.date_time_placed = datetime.datetime.utcnow()
self.order.responses = mock_responses
self.assertGreaterEqual(self.order.elapsed_seconds, 0)
def test_elapsed_seconds_created(self):
self.assertGreaterEqual(self.order.elapsed_seconds_created, 0)
def test_elapsed_seconds_executable(self):
self.assertIsNone(self.order.elapsed_seconds_executable)
mock_responses = mock.Mock()
mock_responses.date_time_placed = datetime.datetime.utcnow()
self.order.responses = mock_responses
self.order.date_time_execution_complete = datetime.datetime.utcnow()
self.assertGreaterEqual(self.order.elapsed_seconds_executable, 0)
def test_market_id(self):
self.assertEqual(self.order.market_id, self.mock_trade.market_id)
def test_lookup(self):
self.assertEqual(
self.order.lookup,
(self.mock_trade.market_id, self.mock_trade.selection_id, 1),
)
def test_repr(self):
self.assertEqual(repr(self.order), "Order None: None")
def test_set_and_get_sep(self):
self.order.sep = "a"
self.assertEqual("a", self.order.sep)
def test_customer_order_ref(self):
self.order.trade.strategy.name_hash = "my_name_hash"
self.order.id = 1234
self.assertEqual("my_name_hash-1234", self.order.customer_order_ref)
self.order.sep = "I"
self.assertEqual("my_name_hashI1234", self.order.customer_order_ref)
self.order.sep = "O"
self.assertEqual("my_name_hashO1234", self.order.customer_order_ref)
def test_notes_str(self):
self.order.notes = collections.OrderedDict({"1": 1, 2: "2", 3: 3, 4: "four"})
self.assertEqual(self.order.notes_str, "1,2,3,four")
self.order.notes = collections.OrderedDict()
self.assertEqual(self.order.notes_str, "")
class BetfairOrderTest(unittest.TestCase):
def setUp(self) -> None:
mock_client = mock.Mock(paper_trade=False)
self.mock_trade = mock.Mock(
client=mock_client, market_id="1.1", selection_id=123, info={}
)
self.mock_status = mock.Mock()
self.mock_order_type = mock.Mock(info={}, size=2.0, liability=2.0)
self.order = BetfairOrder(self.mock_trade, "BACK", self.mock_order_type)
def test_init(self):
self.assertEqual(self.order.EXCHANGE, ExchangeType.BETFAIR)
@mock.patch("flumine.order.order.BetfairOrder.placing")
def test_place(self, mock_placing):
self.order.place(123, 456, False)
mock_placing.assert_called_with()
self.assertEqual(self.order.publish_time, 123)
self.assertEqual(self.order.market_version, 456)
self.assertFalse(self.order.async_)
@mock.patch(
"flumine.order.order.BetfairOrder.size_remaining",
new_callable=mock.PropertyMock,
)
@mock.patch("flumine.order.order.BetfairOrder.cancelling")
def test_cancel(self, mock_cancelling, mock_size_remaining):
mock_size_remaining.return_value = 20
self.order.bet_id = 123
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.cancel(12)
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.order.cancel(0.01)
self.assertEqual(self.order.update_data, {"size_reduction": 0.01})
mock_cancelling.assert_called_with()
self.order.cancel()
self.assertEqual(self.order.update_data, {"size_reduction": None})
def test_cancel_bet_id(self):
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.cancel(12)
@mock.patch(
"flumine.order.order.BetfairOrder.size_remaining",
new_callable=mock.PropertyMock,
)
@mock.patch("flumine.order.order.BetfairOrder.cancelling")
def test_cancel_error_size(self, mock_cancelling, mock_size_remaining):
mock_size_remaining.return_value = 20
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.cancel(12)
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
with self.assertRaises(OrderUpdateError):
self.order.cancel(21)
@mock.patch(
"flumine.order.order.BetfairOrder.size_remaining",
new_callable=mock.PropertyMock,
)
def test_cancel_error(self, mock_size_remaining):
mock_size_remaining.return_value = 20
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.order.status = OrderStatus.PENDING
with self.assertRaises(OrderUpdateError):
self.order.cancel(12)
@mock.patch("flumine.order.order.BetfairOrder.updating")
def test_update(self, mock_updating):
self.order.bet_id = 123
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.update("PERSIST")
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.mock_order_type.persistence_type = "LAPSE"
self.order.update("PERSIST")
self.assertEqual(self.mock_order_type.persistence_type, "PERSIST")
mock_updating.assert_called_with()
with self.assertRaises(OrderUpdateError):
self.order.update("PERSIST")
def test_update_bet_id(self):
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.update("PERSIST")
def test_update_error(self):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.mock_order_type.persistence_type = "LAPSE"
self.order.status = OrderStatus.PENDING
with self.assertRaises(OrderUpdateError):
self.order.update("PERSIST")
@mock.patch("flumine.order.order.BetfairOrder.replacing")
def test_replace(self, mock_replacing):
self.order.bet_id = 123
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.replace(1.01)
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.mock_order_type.price = 2.02
self.order.replace(1.01)
self.assertEqual(self.order.update_data, {"new_price": 1.01})
mock_replacing.assert_called_with()
with self.assertRaises(OrderUpdateError):
self.order.replace(2.02)
def test_replace_bet_id(self):
self.order.status = OrderStatus.EXECUTABLE
with self.assertRaises(OrderUpdateError):
self.order.replace(1.01)
def test_replace_error(self):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.order.status = OrderStatus.PENDING
with self.assertRaises(OrderUpdateError):
self.order.replace(1.52)
def test_create_place_instruction(self):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.assertEqual(
self.order.create_place_instruction(),
{
"customerOrderRef": self.order.customer_order_ref,
"handicap": 0,
"limitOrder": self.mock_order_type.place_instruction(),
"orderType": "LIMIT",
"selectionId": self.mock_trade.selection_id,
"side": "BACK",
},
)
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT_ON_CLOSE
self.assertEqual(
self.order.create_place_instruction(),
{
"customerOrderRef": self.order.customer_order_ref,
"handicap": 0,
"limitOnCloseOrder": self.mock_order_type.place_instruction(),
"orderType": "LIMIT_ON_CLOSE",
"selectionId": self.mock_trade.selection_id,
"side": "BACK",
},
)
self.mock_order_type.ORDER_TYPE = OrderTypes.MARKET_ON_CLOSE
self.assertEqual(
self.order.create_place_instruction(),
{
"customerOrderRef": self.order.customer_order_ref,
"handicap": 0,
"marketOnCloseOrder": self.mock_order_type.place_instruction(),
"orderType": "MARKET_ON_CLOSE",
"selectionId": self.mock_trade.selection_id,
"side": "BACK",
},
)
def test_create_cancel_instruction(self):
self.order.update_data = {"size_reduction": 0.02}
self.assertEqual(
self.order.create_cancel_instruction(), {"sizeReduction": 0.02}
)
def test_create_update_instruction(self):
self.mock_order_type.persistence_type = "PERSIST"
self.assertEqual(
self.order.create_update_instruction(), {"newPersistenceType": "PERSIST"}
)
def test_create_replace_instruction(self):
self.order.update_data = {"new_price": 2.02}
self.assertEqual(self.order.create_replace_instruction(), {"newPrice": 2.02})
def test_average_price_matched(self):
self.assertEqual(self.order.average_price_matched, 0)
mock_current_order = mock.Mock(average_price_matched=12.3)
self.order.responses.current_order = mock_current_order
self.assertEqual(
self.order.average_price_matched, mock_current_order.average_price_matched
)
def test_size_matched(self):
self.assertEqual(self.order.size_matched, 0)
mock_current_order = mock.Mock(size_matched=10)
self.order.responses.current_order = mock_current_order
self.assertEqual(self.order.size_matched, mock_current_order.size_matched)
def test_size_remaining(self):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.mock_order_type.size = 0
self.mock_order_type.bet_target_size = 0
self.assertEqual(self.order.size_remaining, 0)
self.mock_order_type.size = 10
mock_current_order = mock.Mock(size_remaining=10)
self.order.responses.current_order = mock_current_order
self.assertEqual(self.order.size_remaining, mock_current_order.size_remaining)
def test_size_remaining_missing(self):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
self.mock_order_type.size = 2.51
self.assertEqual(self.order.size_remaining, 2.51)
@mock.patch(
"flumine.order.order.BetfairOrder.size_matched",
new_callable=mock.PropertyMock,
)
def test_size_remaining_missing_partial_match(self, mock_size_matched):
self.mock_order_type.ORDER_TYPE = OrderTypes.LIMIT
mock_size_matched.return_value = 2
self.mock_order_type.size = 10
self.assertEqual(self.order.size_remaining, 8)
def test_size_remaining_market_on_close(self):
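# with no current order and no numeric size, size_remaining should
# fall back to the order type's liability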
self.mock_order_type.ORDER_TYPE = OrderTypes.MARKET_ON_CLOSE
self.mock_order_type.size = ValueError
self.order.responses.current_order = None
self.assertEqual(self.order.size_remaining, self.mock_order_type.liability)
def test_size_cancelled(self):
self.assertEqual(self.order.size_cancelled, 0)
mock_current_order = mock.Mock(size_cancelled=10)
self.order.responses.current_order = mock_current_order
self.assertEqual(self.order.size_cancelled, mock_current_order.size_cancelled)
def test_size_lapsed(self):
self.assertEqual(self.order.size_lapsed, 0)
mock_current_order = mock.Mock(size_lapsed=10)
self.order.responses.current_order = mock_current_order
self.assertEqual(self.order.size_lapsed, mock_current_order.size_lapsed)
def test_size_voided(self):
self.assertEqual(self.order.size_voided, 0)
mock_current_order = mock.Mock(size_voided=10)
self.order.responses.current_order = mock_current_order
self.assertEqual(self.order.size_voided, mock_current_order.size_voided)
def test_info(self):
self.order.status_log = [OrderStatus.PENDING, OrderStatus.EXECUTION_COMPLETE]
self.assertEqual(
self.order.info,
{
"bet_id": None,
"handicap": self.order.handicap,
"id": self.order.id,
"date_time_created": str(self.order.date_time_created),
"market_id": self.mock_trade.market_id,
"selection_id": self.mock_trade.selection_id,
"publish_time": None,
"market_version": None,
"async": None,
"status": None,
"status_log": "Pending, Execution complete",
"trade": self.mock_trade.info,
"order_type": self.mock_order_type.info,
"info": {
"side": self.order.side,
"size_matched": self.order.size_matched,
"size_remaining": self.order.size_remaining,
"size_cancelled": self.order.size_cancelled,
"size_lapsed": self.order.size_lapsed,
"size_voided": self.order.size_voided,
"average_price_matched": self.order.average_price_matched,
},
"customer_order_ref": self.order.customer_order_ref,
"simulated": {
"profit": 0.0,
"piq": 0.0,
"matched": [],
},
"violation_msg": self.order.violation_msg,
"responses": {
"date_time_placed": None,
"elapsed_seconds_executable": None,
},
"runner_status": self.order.runner_status,
"market_notes": None,
"notes": "",
},
)
def test_json(self):
self.assertTrue(isinstance(self.order.json(), str))
def test_set_invalid_sep(self):
with self.assertRaises(ValueError):
self.order.sep = "@"
class IsValidCustomerOrderRefTestCase(unittest.TestCase):
def test_letters_True(self):
# ascii_letters contains a-z and A-Z
for c in string.ascii_letters:
self.assertTrue(BetfairOrder.is_valid_customer_order_ref_character(c))
def test_2letters_False(self):
self.assertFalse(BetfairOrder.is_valid_customer_order_ref_character("aB"))
self.assertFalse(BetfairOrder.is_valid_customer_order_ref_character("CD"))
def test_digits_True(self):
# string.digits contains digits 0-9<|fim▁hole|> self.assertTrue(BetfairOrder.is_valid_customer_order_ref_character(c))
def test_special_characters_True(self):
for c in VALID_BETFAIR_CUSTOMER_ORDER_REF_CHARACTERS:
self.assertTrue(BetfairOrder.is_valid_customer_order_ref_character(c))
def test_special_characters_False(self):
for c in list('!"£$%'):
self.assertFalse(BetfairOrder.is_valid_customer_order_ref_character((c)))<|fim▁end|> | for c in string.digits: |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from operator import attrgetter
from django.db import models
from django.test import TestCase
from .models import Answer, Dimension, Entity, Post, Question
<|fim▁hole|> def setUpTestData(cls):
cls.q1 = Question.objects.create(text="Which Beatle starts with the letter 'R'?")
Answer.objects.create(text="John", question=cls.q1)
Answer.objects.create(text="Paul", question=cls.q1)
Answer.objects.create(text="George", question=cls.q1)
Answer.objects.create(text="Ringo", question=cls.q1)
def test_default_to_insertion_order(self):
# Answers will always be ordered in the order they were inserted.
self.assertQuerysetEqual(
self.q1.answer_set.all(), [
"John", "Paul", "George", "Ringo",
],
attrgetter("text"),
)
def test_previous_and_next_in_order(self):
# We can retrieve the answers related to a particular object, in the
# order they were created, once we have a particular object.
a1 = Answer.objects.filter(question=self.q1)[0]
self.assertEqual(a1.text, "John")
self.assertEqual(a1.get_next_in_order().text, "Paul")
a2 = list(Answer.objects.filter(question=self.q1))[-1]
self.assertEqual(a2.text, "Ringo")
self.assertEqual(a2.get_previous_in_order().text, "George")
def test_item_ordering(self):
# We can retrieve the ordering of the queryset from a particular item.
a1 = Answer.objects.filter(question=self.q1)[1]
id_list = [o.pk for o in self.q1.answer_set.all()]
self.assertEqual(a1.question.get_answer_order(), id_list)
# It doesn't matter which answer we use to check the order, it will
# always be the same.
a2 = Answer.objects.create(text="Number five", question=self.q1)
self.assertEqual(
a1.question.get_answer_order(), a2.question.get_answer_order()
)
def test_change_ordering(self):
# The ordering can be altered
a = Answer.objects.create(text="Number five", question=self.q1)
# Swap the last two items in the order list
id_list = [o.pk for o in self.q1.answer_set.all()]
x = id_list.pop()
id_list.insert(-1, x)
# By default, the ordering is different from the swapped version
self.assertNotEqual(a.question.get_answer_order(), id_list)
# Change the ordering to the swapped version -
# this changes the ordering of the queryset.
a.question.set_answer_order(id_list)
self.assertQuerysetEqual(
self.q1.answer_set.all(), [
"John", "Paul", "George", "Number five", "Ringo"
],
attrgetter("text")
)
class OrderWithRespectToTests2(TestCase):
def test_recursive_ordering(self):
p1 = Post.objects.create(title='1')
p2 = Post.objects.create(title='2')
p1_1 = Post.objects.create(title="1.1", parent=p1)
p1_2 = Post.objects.create(title="1.2", parent=p1)
Post.objects.create(title="2.1", parent=p2)
p1_3 = Post.objects.create(title="1.3", parent=p1)
self.assertEqual(p1.get_post_order(), [p1_1.pk, p1_2.pk, p1_3.pk])
def test_duplicate_order_field(self):
class Bar(models.Model):
pass
class Foo(models.Model):
bar = models.ForeignKey(Bar)
order = models.OrderWrt()
class Meta:
order_with_respect_to = 'bar'
count = 0
for field in Foo._meta.local_fields:
if isinstance(field, models.OrderWrt):
count += 1
self.assertEqual(count, 1)
class TestOrderWithRespectToOneToOnePK(TestCase):
def test_set_order(self):
e = Entity.objects.create()
d = Dimension.objects.create(entity=e)
c1 = d.component_set.create()
c2 = d.component_set.create()
d.set_component_order([c1.id, c2.id])
self.assertQuerysetEqual(d.component_set.all(), [c1.id, c2.id], attrgetter('pk'))<|fim▁end|> |
class OrderWithRespectToTests(TestCase):
@classmethod |
<|file_name|>bitcoin_zh_TW.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="zh_TW" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Litecoin</source>
<translation>關於莱特幣</translation>
</message>
<message>
<location line="+39"/>
<source><b>Litecoin</b> version</source>
<translation><b>莱特幣</b>版本</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
這是一套實驗性的軟體.
此軟體是依據 MIT/X11 軟體授權條款散布, 詳情請見附帶的 COPYING 檔案, 或是以下網站: http://www.opensource.org/licenses/mit-license.php.
此產品也包含了由 OpenSSL Project 所開發的 OpenSSL Toolkit (http://www.openssl.org/) 軟體, 由 Eric Young ([email protected]) 撰寫的加解密軟體, 以及由 Thomas Bernard 所撰寫的 UPnP 軟體.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>版權</translation>
</message>
<message>
<location line="+0"/>
<source>The Litecoin developers</source>
<translation>莱特幣開發人員</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>位址簿</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>點兩下來修改位址或標記</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>產生新位址</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>複製目前選取的位址到系統剪貼簿</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>新增位址</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Litecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>這些是你用來收款的莱特幣位址. 你可以提供不同的位址給不同的付款人, 來追蹤是誰支付給你.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>複製位址</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>顯示 &QR 條碼</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Litecoin address</source>
<translation>簽署訊息是用來證明莱特幣位址是你的</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>訊息簽署</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>從列表中刪除目前選取的位址</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>將目前分頁的資料匯出存成檔案</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>匯出</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Litecoin address</source>
<translation>驗證訊息是用來確認訊息是用指定的莱特幣位址簽署的</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>訊息驗證</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>刪除</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Litecoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>這是你用來付款的莱特幣位址. 在付錢之前, 務必要檢查金額和收款位址是否正確.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>複製標記</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>編輯</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>付錢</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>匯出位址簿資料</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>逗號區隔資料檔 (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>匯出失敗</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>無法寫入檔案 %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>標記</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>位址</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(沒有標記)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>密碼對話視窗</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>輸入密碼</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>新的密碼</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>重複新密碼</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>輸入錢包的新密碼.<br/>請用<b>10個以上的字元</b>, 或是<b>8個以上的單字</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>錢包加密</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>這個動作需要用你的錢包密碼來解鎖</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>錢包解鎖</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>這個動作需要用你的錢包密碼來解密</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>錢包解密</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>變更密碼</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>輸入錢包的新舊密碼.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>錢包加密確認</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>警告: 如果將錢包加密後忘記密碼, 你會<b>失去其中所有的莱特幣</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>你確定要將錢包加密嗎?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>重要: 請改用新產生有加密的錢包檔, 來取代之前錢包檔的備份. 為了安全性的理由, 當你開始使用新的有加密的錢包時, 舊錢包的備份就不能再使用了.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>警告: 大寫字母鎖定作用中!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>錢包已加密</translation>
</message>
<message>
<location line="-56"/>
<source>Litecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your litecoins from being stolen by malware infecting your computer.</source>
<translation>莱特幣現在要關閉以完成加密程序. 請記住, 加密錢包無法完全防止入侵電腦的惡意程式偷取你的莱特幣.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>錢包加密失敗</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>錢包加密因程式內部錯誤而失敗. 你的錢包還是沒有加密.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>提供的密碼不符.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>錢包解鎖失敗</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>用來解密錢包的密碼輸入錯誤.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>錢包解密失敗</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>錢包密碼變更成功.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>訊息簽署...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>網路同步中...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>總覽</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>顯示錢包一般總覽</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>交易</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>瀏覽交易紀錄</translation>
</message>
<message><|fim▁hole|> </message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>顯示收款位址的列表</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>結束</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>結束應用程式</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Litecoin</source>
<translation>顯示莱特幣相關資訊</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>關於 &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>顯示有關於 Qt 的資訊</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>選項...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>錢包加密...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>錢包備份...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>密碼變更...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>從磁碟匯入區塊中...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>重建磁碟區塊索引中...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Litecoin address</source>
<translation>付錢到莱特幣位址</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Litecoin</source>
<translation>修改莱特幣的設定選項</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>將錢包備份到其它地方</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>變更錢包加密用的密碼</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>除錯視窗</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>開啓除錯與診斷主控台</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>驗證訊息...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Litecoin</source>
<translation>莱特幣</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>錢包</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>付出</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>收受</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>位址</translation>
</message>
<message>
<location line="+22"/>
<source>&About Litecoin</source>
<translation>關於莱特幣</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>顯示或隱藏</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>顯示或隱藏主視窗</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>將屬於你的錢包的密鑰加密</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Litecoin addresses to prove you own them</source>
<translation>用莱特幣位址簽署訊息來證明那是你的</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Litecoin addresses</source>
<translation>驗證訊息來確認是用指定的莱特幣位址簽署的</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>檔案</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>設定</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>求助</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>分頁工具列</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Litecoin client</source>
<translation>莱特幣客戶端軟體</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Litecoin network</source>
<translation><numerusform>與莱特幣網路有 %n 個連線在使用中</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation>目前沒有區塊來源...</translation>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>已處理了估計全部 %2 個中的 %1 個區塊的交易紀錄.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>已處理了 %1 個區塊的交易紀錄.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n 個小時</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n 天</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n 個星期</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>落後 %1</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>最近收到的區塊是在 %1 之前生產出來.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>在這之後的交易還不會顯示出來.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>錯誤</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>資訊</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>這筆交易的資料大小超過限制了. 你還是可以付出 %1 的費用來傳送, 這筆費用會付給處理你的交易的節點, 並幫助維持整個網路. 你願意支付這項費用嗎?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>最新狀態</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>進度追趕中...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>確認交易手續費</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>付款交易</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>收款交易</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>日期: %1
金額: %2
類別: %3
位址: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>URI 處理</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Litecoin address or malformed URI parameters.</source>
<translation>無法解析 URI! 也許莱特幣位址無效或 URI 參數有誤.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>錢包<b>已加密</b>並且正<b>解鎖中</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>錢包<b>已加密</b>並且正<b>上鎖中</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Litecoin can no longer continue safely and will quit.</source>
<translation>發生了致命的錯誤. 莱特幣程式無法再繼續安全執行, 只好結束.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>網路警報</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>編輯位址</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>標記</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>與這個位址簿項目關聯的標記</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>位址</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>與這個位址簿項目關聯的位址. 付款位址才能被更改.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>新收款位址</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>新增付款位址</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>編輯收款位址</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>編輯付款位址</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>輸入的位址 "%1" 已存在於位址簿中.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Litecoin address.</source>
<translation>輸入的位址 "%1" 並不是有效的莱特幣位址.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>無法將錢包解鎖.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>新密鑰產生失敗.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Litecoin-Qt</source>
<translation>莱特幣-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>版本</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>用法:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>命令列選項</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>使用界面選項</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>設定語言, 比如說 "de_DE" (預設: 系統語系)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>啓動時最小化</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>顯示啓動畫面 (預設: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>選項</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>主要</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation>非必要的交易手續費, 以 kB 為計費單位, 且有助於縮短你的交易處理時間. 大部份交易資料的大小是 1 kB.</translation>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>付交易手續費</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Litecoin after logging in to the system.</source>
<translation>在登入系統後自動啓動莱特幣.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Litecoin on system login</source>
<translation>系統登入時啟動莱特幣</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>回復所有客戶端軟體選項成預設值.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>選項回復</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>網路</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Litecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>自動在路由器上開啟莱特幣的客戶端通訊埠. 只有在你的路由器支援 UPnP 且開啟時才有作用.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>用 &UPnP 設定通訊埠對應</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Litecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>透過 SOCKS 代理伺服器連線至莱特幣網路 (比如說要透過 Tor 連線).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>透過 SOCKS 代理伺服器連線:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>代理伺服器位址:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>代理伺服器的網際網路位址 (比如說 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>通訊埠:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>代理伺服器的通訊埠 (比如說 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS 協定版本:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>代理伺服器的 SOCKS 協定版本 (比如說 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>視窗</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>最小化視窗後只在通知區域顯示圖示.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>最小化至通知區域而非工作列</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>當視窗關閉時將其最小化, 而非結束應用程式. 當勾選這個選項時, 應用程式只能用選單中的結束來停止執行.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>關閉時最小化</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>顯示</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>使用界面語言:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Litecoin.</source>
<translation>可以在這裡設定使用者介面的語言. 這個設定在莱特幣程式重啓後才會生效.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>金額顯示單位:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>選擇操作界面與付錢時預設顯示的細分單位.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Litecoin addresses in the transaction list or not.</source>
<translation>是否要在交易列表中顯示莱特幣位址.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>在交易列表顯示位址</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>好</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>取消</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>套用</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>預設</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>確認回復選項</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>有些設定可能需要重新啓動客戶端軟體才會生效.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>你想要繼續進行嗎?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Litecoin.</source>
<translation>這個設定會在莱特幣程式重啓後生效.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>提供的代理伺服器位址無效.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>表單</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Litecoin network after a connection is established, but this process has not completed yet.</source>
<translation>顯示的資訊可能是過期的. 與莱特幣網路的連線建立後, 你的錢包會自動和網路同步, 但這個步驟還沒完成.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>餘額:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>未確認額:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>錢包</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>未熟成:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>尚未熟成的開採金額</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>最近交易</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>目前餘額</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>尚未確認之交易的總額, 不包含在目前餘額中</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>沒同步</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start litecoin: click-to-pay handler</source>
<translation>無法啟動 litecoin: 隨按隨付處理器</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>QR 條碼對話視窗</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>付款單</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>金額:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>標記:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>訊息:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>儲存為...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>將 URI 編碼成 QR 條碼失敗.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>輸入的金額無效, 請檢查看看.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>產生的 URI 太長了, 請把標籤或訊息的文字縮短再試看看.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>儲存 QR 條碼</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>PNG 圖檔 (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>客戶端軟體名稱</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>無</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>客戶端軟體版本</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>資訊</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>使用 OpenSSL 版本</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>啓動時間</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>網路</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>連線數</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>位於測試網路</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>區塊鎖鏈</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>目前區塊數</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>估計總區塊數</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>最近區塊時間</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>開啓</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>命令列選項</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Litecoin-Qt help message to get a list with possible Litecoin command-line options.</source>
<translation>顯示莱特幣-Qt的求助訊息, 來取得可用的命令列選項列表.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>顯示</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>主控台</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>建置日期</translation>
</message>
<message>
<location line="-104"/>
<source>Litecoin - Debug window</source>
<translation>莱特幣 - 除錯視窗</translation>
</message>
<message>
<location line="+25"/>
<source>Litecoin Core</source>
<translation>莱特幣核心</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>除錯紀錄檔</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Litecoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>從目前的資料目錄下開啓莱特幣的除錯紀錄檔. 當紀錄檔很大時可能要花好幾秒的時間.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>清空主控台</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Litecoin RPC console.</source>
<translation>歡迎使用莱特幣 RPC 主控台.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>請用上下游標鍵來瀏覽歷史指令, 且可用 <b>Ctrl-L</b> 來清理畫面.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>請打 <b>help</b> 來看可用指令的簡介.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>付錢</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>一次付給多個人</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>加收款人</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>移除所有交易欄位</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>全部清掉</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>餘額:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>確認付款動作</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>付出</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> 給 %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>確認要付錢</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>確定要付出 %1 嗎?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> 和 </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>無效的收款位址, 請再檢查看看.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>付款金額必須大於 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>金額超過餘額了.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>包含 %1 的交易手續費後, 總金額超過你的餘額了.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>發現有重複的位址. 每個付款動作中, 只能付給個別的位址一次.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>錯誤: 交易產生失敗!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>錯誤: 交易被拒絕. 有時候會發生這種錯誤, 是因為你錢包中的一些錢已經被花掉了. 比如說你複製了錢包檔 wallet.dat, 然後用複製的錢包花掉了錢, 你現在所用的原來的錢包中卻沒有該筆交易紀錄.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>表單</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>金額:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>付給:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>付款的目標位址 (比如說 Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>輸入一個標記給這個位址, 並加到位址簿中</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>標記:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>從位址簿中選一個位址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>從剪貼簿貼上位址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>去掉這個收款人</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Litecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>輸入莱特幣位址 (比如說 Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>簽章 - 簽署或驗證訊息</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>訊息簽署</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>你可以用自己的位址來簽署訊息, 以證明你對它的所有權. 但是請小心, 不要簽署語意含糊不清的內容, 因為釣魚式詐騙可能會用騙你簽署的手法來冒充是你. 只有在語句中的細節你都同意時才簽署.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>用來簽署訊息的位址 (比如說 Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>從位址簿選一個位址</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>從剪貼簿貼上位址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>在這裡輸入你想簽署的訊息</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>簽章</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>複製目前的簽章到系統剪貼簿</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Litecoin address</source>
<translation>簽署訊息是用來證明這個莱特幣位址是你的</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>訊息簽署</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>重置所有訊息簽署欄位</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>全部清掉</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>訊息驗證</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>請在下面輸入簽署的位址, 訊息(請確認完整複製了所包含的換行, 空格, 跳位符號等等), 與簽章, 以驗證該訊息. 請小心, 除了訊息內容外, 不要對簽章本身過度解讀, 以避免被用"中間人攻擊法"詐騙.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>簽署該訊息的位址 (比如說 Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Litecoin address</source>
<translation>驗證訊息是用來確認訊息是用指定的莱特幣位址簽署的</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>訊息驗證</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>重置所有訊息驗證欄位</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Litecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>輸入莱特幣位址 (比如說 Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>按"訊息簽署"來產生簽章</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Litecoin signature</source>
<translation>輸入莱特幣簽章</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>輸入的位址無效.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>請檢查位址是否正確後再試一次.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>輸入的位址沒有指到任何密鑰.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>錢包解鎖已取消.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>沒有所輸入位址的密鑰.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>訊息簽署失敗.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>訊息已簽署.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>無法將這個簽章解碼.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>請檢查簽章是否正確後再試一次.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>這個簽章與訊息的數位摘要不符.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>訊息驗證失敗.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>訊息已驗證.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Litecoin developers</source>
<translation>莱特幣開發人員</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>在 %1 前未定</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/離線中</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/未確認</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>經確認 %1 次</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>狀態</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, 已公告至 %n 個節點</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>來源</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>生產出</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>來處</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>目的</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>自己的位址</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>標籤</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>入帳</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>將在 %n 個區塊產出後熟成</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>不被接受</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>出帳</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>交易手續費</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>淨額</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>訊息</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>附註</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>交易識別碼</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>生產出來的錢要再等 120 個區塊熟成之後, 才能夠花用. 當你產出區塊時, 它會被公布到網路上, 以被串連至區塊鎖鏈. 如果串連失敗了, 它的狀態就會變成"不被接受", 且不能被花用. 當你產出區塊的幾秒鐘內, 也有其他節點產出區塊的話, 有時候就會發生這種情形.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>除錯資訊</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>交易</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>輸入</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>金額</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>是</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>否</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, 尚未成功公告出去</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>在接下來 %n 個區塊產出前未定</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>未知</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>交易明細</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>此版面顯示交易的詳細說明</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>種類</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>位址</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>金額</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>在接下來 %n 個區塊產出前未定</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>在 %1 前未定</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>離線中 (經確認 %1 次)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>未確認 (經確認 %1 次, 應確認 %2 次)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>已確認 (經確認 %1 次)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>開採金額將在 %n 個區塊熟成後才可用</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>沒有其他節點收到這個區塊, 也許它不被接受!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>生產出但不被接受</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>收受於</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>收受自</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>付出至</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>付給自己</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>開採所得</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(不適用)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>交易狀態. 移動游標至欄位上方來顯示確認次數.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>收到交易的日期與時間.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>交易的種類.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>交易的目標位址.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>減去或加入至餘額的金額.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>全部</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>今天</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>這週</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>這個月</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>上個月</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>今年</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>指定範圍...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>收受於</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>付出至</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>給自己</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>開採所得</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>其他</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>輸入位址或標記來搜尋</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>最小金額</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>複製位址</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>複製標記</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>複製金額</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>複製交易識別碼</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>編輯標記</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>顯示交易明細</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>匯出交易資料</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>逗號分隔資料檔 (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>已確認</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>種類</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>標記</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>位址</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>金額</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>識別碼</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>匯出失敗</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>無法寫入至 %1 檔案.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>範圍:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>至</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>付錢</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>匯出</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>將目前分頁的資料匯出存成檔案</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>錢包備份</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>錢包資料檔 (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>備份失敗</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>儲存錢包資料到新的地方失敗.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>備份成功</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>錢包的資料已經成功儲存到新的地方了.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Litecoin version</source>
<translation>莱特幣版本</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>用法:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or litecoind</source>
<translation>送指令給 -server 或 litecoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>列出指令</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>取得指令說明</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>選項:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: litecoin.conf)</source>
<translation>指定設定檔 (預設: litecoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: litecoind.pid)</source>
<translation>指定行程識別碼檔案 (預設: litecoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>指定資料目錄</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>設定資料庫快取大小為多少百萬位元組(MB, 預設: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>在通訊埠 <port> 聽候連線 (預設: 9333, 或若為測試網路: 19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>維持與節點連線數的上限為 <n> 個 (預設: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>連線到某個節點以取得其它節點的位址, 然後斷線</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>指定自己公開的位址</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>與亂搞的節點斷線的臨界值 (預設: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>避免與亂搞的節點連線的秒數 (預設: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>在 IPv4 網路上以通訊埠 %u 聽取 RPC 連線時發生錯誤: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation>在通訊埠 <port> 聽候 JSON-RPC 連線 (預設: 9332, 或若為測試網路: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>接受命令列與 JSON-RPC 指令</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>以背景程式執行並接受指令</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>使用測試網路</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>是否接受外來連線 (預設: 當沒有 -proxy 或 -connect 時預設為 1)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=litecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Litecoin Alert" [email protected]
</source>
<translation>%s, 你必須要在以下設定檔中設定 RPC 密碼(rpcpassword):
%s
建議你使用以下隨機產生的密碼:
rpcuser=litecoinrpc
rpcpassword=%s
(你不用記住這個密碼)
使用者名稱(rpcuser)和密碼(rpcpassword)不可以相同!
如果設定檔還不存在, 請在新增時, 設定檔案權限為"只有主人才能讀取".
也建議你設定警示通知, 發生問題時你才會被通知到;
比如說設定為:
alertnotify=echo %%s | mail -s "Litecoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>設定在 IPv6 網路的通訊埠 %u 上聽候 RPC 連線失敗, 退而改用 IPv4 網路: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>和指定的位址繫結, 並總是在該位址聽候連線. IPv6 請用 "[主機]:通訊埠" 這種格式</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Litecoin is probably already running.</source>
<translation>無法鎖定資料目錄 %s. 也許莱特幣已經在執行了.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>錯誤: 交易被拒絕了! 有時候會發生這種錯誤, 是因為你錢包中的一些錢已經被花掉了. 比如說你複製了錢包檔 wallet.dat, 然後用複製的錢包花掉了錢, 你現在所用的原來的錢包中卻沒有該筆交易紀錄.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>錯誤: 這筆交易需要至少 %s 的手續費! 因為它的金額太大, 或複雜度太高, 或是使用了最近才剛收到的款項.</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>當收到相關警示時所要執行的指令 (指令中的 %s 會被取代為警示訊息)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>當錢包有交易改變時所要執行的指令 (指令中的 %s 會被取代為交易識別碼)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>設定高優先權或低手續費的交易資料大小上限為多少位元組 (預設: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>這是尚未發表的測試版本 - 使用請自負風險 - 請不要用於開採或商業應用</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>警告: -paytxfee 設定了很高的金額! 這可是你交易付款所要付的手續費.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>警告: 顯示的交易可能不正確! 你可能需要升級, 或者需要等其它的節點升級.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Litecoin will not work properly.</source>
<translation>警告: 請檢查電腦時間與日期是否正確! 莱特幣無法在時鐘不準的情況下正常運作.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>警告: 讀取錢包檔 wallet.dat 失敗了! 所有的密鑰都正確讀取了, 但是交易資料或位址簿資料可能會缺少或不正確.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>警告: 錢包檔 wallet.dat 壞掉, 但資料被拯救回來了! 原來的 wallet.dat 會改儲存在 %s, 檔名為 wallet.{timestamp}.bak. 如果餘額或交易資料有誤, 你應該要用備份資料復原回來.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>嘗試從壞掉的錢包檔 wallet.dat 復原密鑰</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>區塊產生選項:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>只連線至指定節點(可多個)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>發現區塊資料庫壞掉了</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>找出自己的網際網路位址 (預設: 當有聽候連線且沒有 -externalip 時為 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>你要現在重建區塊資料庫嗎?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>初始化區塊資料庫失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>錢包資料庫環境 %s 初始化錯誤!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>載入區塊資料庫失敗</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>打開區塊資料庫檔案失敗</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>錯誤: 磁碟空間很少!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>錯誤: 錢包被上鎖了, 無法產生新的交易!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>錯誤: 系統錯誤:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>在任意的通訊埠聽候失敗. 如果你想的話可以用 -listen=0.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>讀取區塊資訊失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>讀取區塊失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>同步區塊索引失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>寫入區塊索引失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>寫入區塊資訊失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>寫入區塊失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>寫入檔案資訊失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>寫入莱特幣資料庫失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>寫入交易索引失敗</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>寫入回復資料失敗</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>是否允許在找節點時使用域名查詢 (預設: 當沒用 -connect 時為 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation>生產莱特幣 (預設: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>啓動時檢查的區塊數 (預設: 288, 指定 0 表示全部)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>區塊檢查的仔細程度 (0 至 4, 預設: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation>檔案描述器不足.</translation>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>從目前的區塊檔 blk000??.dat 重建鎖鏈索引</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>設定處理 RPC 服務請求的執行緒數目 (預設為 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>驗證區塊資料中...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>驗證錢包資料中...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>從其它來源的 blk000??.dat 檔匯入區塊</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation>設定指令碼驗證的執行緒數目 (最多為 16, 若為 0 表示程式自動決定, 小於 0 表示保留不用的處理器核心數目, 預設為 0)</translation>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>資訊</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>無效的 -tor 位址: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>設定 -minrelaytxfee=<金額> 的金額無效: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>設定 -mintxfee=<金額> 的金額無效: '%s'</translation>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>維護全部交易的索引 (預設為 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>每個連線的接收緩衝區大小上限為 <n>*1000 個位元組 (預設: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>每個連線的傳送緩衝區大小上限為 <n>*1000 位元組 (預設: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>只接受與內建的檢查點吻合的區塊鎖鏈 (預設: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>只和 <net> 網路上的節點連線 (IPv4, IPv6, 或 Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>輸出額外的除錯資訊. 包含了其它所有的 -debug* 選項</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>輸出額外的網路除錯資訊</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>在除錯輸出內容前附加時間</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Litecoin Wiki for SSL setup instructions)</source>
<translation>SSL 選項: (SSL 設定程序請見 InstaMineNuggets Wiki)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>選擇 SOCKS 代理伺服器的協定版本 (4 或 5, 預設: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>在終端機顯示追蹤或除錯資訊, 而非寫到 debug.log 檔</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>輸出追蹤或除錯資訊給除錯器</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>設定區塊大小上限為多少位元組 (預設: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>設定區塊大小下限為多少位元組 (預設: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>客戶端軟體啓動時將 debug.log 檔縮小 (預設: 當沒有 -debug 時為 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation>簽署交易失敗</translation>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>指定連線在幾毫秒後逾時 (預設: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>系統錯誤:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation>交易金額太小</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation>交易金額必須是正的</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation>交易位元量太大</translation>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>是否使用通用即插即用(UPnP)協定來設定聽候連線的通訊埠 (預設: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>是否使用通用即插即用(UPnP)協定來設定聽候連線的通訊埠 (預設: 當有聽候連線時為 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>透過代理伺服器來使用 Tor 隱藏服務 (預設: 同 -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>JSON-RPC 連線使用者名稱</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>警告: 這個版本已經被淘汰掉了, 必須要升級!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>改變 -txindex 參數後, 必須要用 -reindex 參數來重建資料庫</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>錢包檔 wallet.dat 壞掉了, 拯救失敗</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>JSON-RPC 連線密碼</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>只允許從指定網路位址來的 JSON-RPC 連線</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>送指令給在 <ip> 的節點 (預設: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>當最新區塊改變時所要執行的指令 (指令中的 %s 會被取代為區塊的雜湊值)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>將錢包升級成最新的格式</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>設定密鑰池大小為 <n> (預設: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>重新掃描區塊鎖鏈, 以尋找錢包所遺漏的交易.</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>於 JSON-RPC 連線使用 OpenSSL (https)</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>伺服器憑證檔 (預設: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>伺服器密鑰檔 (預設: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>可以接受的加密法 (預設: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>此協助訊息</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>無法和這台電腦上的 %s 繫結 (繫結回傳錯誤 %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>透過 SOCKS 代理伺服器連線</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>允許對 -addnode, -seednode, -connect 的參數使用域名查詢 </translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>載入位址中...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>載入檔案 wallet.dat 失敗: 錢包壞掉了</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Litecoin</source>
<translation>載入檔案 wallet.dat 失敗: 此錢包需要新版的 InstaMineNuggets</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Litecoin to complete</source>
<translation>錢包需要重寫: 請重啟萊特幣來完成</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>載入檔案 wallet.dat 失敗</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>無效的 -proxy 位址: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>在 -onlynet 指定了不明的網路別: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>在 -socks 指定了不明的代理協定版本: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>無法解析 -bind 位址: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>無法解析 -externalip 位址: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>設定 -paytxfee=<金額> 的金額無效: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>無效的金額</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>累積金額不足</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>載入區塊索引中...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>加入一個要連線的節點, 並試著保持對它的連線暢通</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Litecoin is probably already running.</source>
<translation>無法和這台電腦上的 %s 繫結. 也許萊特幣已經在執行了.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>交易付款時每 KB 的交易手續費</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>載入錢包中...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>無法將錢包格式降級</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>無法寫入預設位址</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>重新掃描中...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>載入完成</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>為了要使用 %s 選項</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>錯誤</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>你必須在下列設定檔中設定 RPC 密碼(rpcpassword=<password>):
%s
如果這個檔案還不存在, 請在新增時, 設定檔案權限為"只有主人才能讀取".</translation>
</message>
</context>
</TS><|fim▁end|> | <location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>編輯位址與標記的儲存列表</translation> |
<|file_name|>mylistwidget.py<|end_file_name|><|fim▁begin|>__author__ = 'snake'
from PyQt4 import QtGui, QtCore
class SiteItems(QtGui.QListWidget):
<|fim▁hole|>
def startDrag(self, dropAction):
# create mime data object
#get all selected items
selitems = ""
for i in self.selectedItems():
selitems += i.text() + ","
mime = QtCore.QMimeData()
mime.setText(str(selitems).strip(","))
# start drag
drag = QtGui.QDrag(self)
drag.setMimeData(mime)
        drag.start(QtCore.Qt.CopyAction)
def dragEnterEvent(self, event):
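        # accept the drag only if it carries plain text (the format packed by startDrag)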
if event.mimeData().hasText():
event.accept()
else:
event.ignore()
def dragMoveEvent(self, event):
if event.mimeData().hasText():
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()
def dropEvent(self, event):
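        # unpack the comma-separated site list built in startDrag and add each entry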
if event.mimeData().hasText():
sites = event.mimeData().text()
for site in sites.split(","):
self.addItem(site)
event.setDropAction(QtCore.Qt.CopyAction)
event.accept()
else:
event.ignore()<|fim▁end|> | def __init__(self):
super(SiteItems, self).__init__() |
<|file_name|>linux_command_runner_test.go<|end_file_name|><|fim▁begin|>package linux_command_runner_test
import (
"os"
"os/exec"
"syscall"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"code.cloudfoundry.org/commandrunner/linux_command_runner"
)
var _ = Describe("Running commands", func() {
It("runs the command and returns nil", func() {
runner := linux_command_runner.New()
cmd := exec.Command("ls")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Run(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).ToNot(BeNil())
})
Context("when the command fails", func() {
It("returns an error", func() {
runner := linux_command_runner.New()<|fim▁hole|> err := runner.Run(exec.Command("/bin/bash", "-c", "exit 1"))
Expect(err).To(HaveOccurred())
})
})
})
var _ = Describe("Starting commands", func() {
It("starts the command and does not block on it", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "read foo")
Expect(cmd.ProcessState).To(BeNil())
in, err := cmd.StdinPipe()
Expect(err).To(BeNil())
err = runner.Start(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).To(BeNil())
in.Write([]byte("hello\n"))
cmd.Wait()
Expect(cmd.ProcessState).ToNot(BeNil())
})
})
var _ = Describe("Spawning background processes", func() {
It("does not propagate signals to the child", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "exit 0")
err := runner.Start(cmd)
Expect(err).ToNot(HaveOccurred())
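        // Setpgid places the child in its own process group, so signals sent to
        // the parent's group (e.g. an interactive Ctrl-C) are not delivered to it.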
Expect(cmd.SysProcAttr).ToNot(BeNil())
Expect(cmd.SysProcAttr.Setpgid).To(BeTrue())
})
})
var _ = Describe("Waiting on commands", func() {
It("blocks on the command's completion", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "sleep 0.1")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Start(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).To(BeNil())
err = runner.Wait(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).ToNot(BeNil())
})
})
var _ = Describe("Killing commands", func() {
It("terminates the command's process", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "sleep 10")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Start(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).To(BeNil())
err = runner.Kill(cmd)
Expect(err).ToNot(HaveOccurred())
err = cmd.Wait()
Expect(err).To(HaveOccurred())
Expect(cmd.ProcessState).ToNot(BeNil())
})
Context("when the command is not running", func() {
It("returns an error", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "sleep 10")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Kill(cmd)
Expect(err).To(HaveOccurred())
})
})
})
var _ = Describe("Signalling commands", func() {
It("sends the given signal to the process", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "sleep 10")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Start(cmd)
Expect(err).ToNot(HaveOccurred())
Expect(cmd.ProcessState).To(BeNil())
err = runner.Signal(cmd, os.Interrupt)
Expect(err).ToNot(HaveOccurred())
err = cmd.Wait()
Expect(err).To(HaveOccurred())
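        // Wait returns an error because the process was terminated by a signal;
        // the recorded wait status identifies which signal it was.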
Expect(cmd.ProcessState.Sys().(syscall.WaitStatus).Signal()).To(Equal(os.Interrupt))
})
Context("when the command is not running", func() {
It("returns an error", func() {
runner := linux_command_runner.New()
cmd := exec.Command("bash", "-c", "read foo")
Expect(cmd.ProcessState).To(BeNil())
err := runner.Signal(cmd, os.Interrupt)
Expect(err).To(HaveOccurred())
})
})
})<|fim▁end|> | |
<|file_name|>optimisation.py<|end_file_name|><|fim▁begin|>"""
optimisation.py: Optimisations for worldview solving
Copyright (C) 2014 Michael Kelly
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# def evaluation_skip(optimisation, stat_struct, valuator_string, debug=0):
# """
# Retrofitting for statistic structure
# Evaluation Skip: Looks at possible optimisations from previous function
# and sorts through epistemic atoms and the current valuation string
#     and determines if it is a valuation worth pursuing; here valuations
# containing conflicting values will be considered removable
# Pre :- * epistemic atoms and their negation status need to be processed.
# * The valuation binary string must be calculated.
# * An evaluation of optimisations to check for must also be looked
# for.
# Post :- * A binary value will be outputted deciding whether this set of
#             atoms is worth pursuing as an interpretation of the original
# subjective program.
# """
# temp = 0
# if self.modal_operator_count(stat_struct) == 1 or \
# not self.modal_operator_count(stat_struct):
# return True
# # make a copy of the original queue to not lose original value set
# copysStat = copy.copy(stat_struct)
# count = self.modOpCount(stat_struct)
# countb = len(stat_struct.keys())
# while countb:
# counta = len(stat_struct[stat_struct.keys()[countb-1]])
# while counta:
# temp = valuator_string & 0x1
# valuator_string >>= 1
# if not temp:
# remove_item = stat_struct[stat_struct.keys()[countb-1]][counta-1]
# stat_struct[stat_struct.keys()[countb-1]].remove(remove_item)
# counta -= 1
# countb -= 1
# count = len(stat_struct)
# while count:
# if not stat_struct[stat_struct.keys()[count-1]]:
# del stat_struct[count]
# count -= 1
# for linea in stat_struct:
# for lineb in stat_struct:
# comparison_modals = combinations(stat_struct[lineb] + stat_struct[linea], 2)
# for modal_pair in comparison_modals:
# mod_a, mod_b = comparison_modals
# if mod_a.label != mod_b.label:
# continue
# if not check_optimisation(optimisation, mod_a, mod_b):
# return False
# return True
# def check_optimisation(optimisation, mod_a, mod_b):
# """
# Analysing old code
# 0: epistemic negation
# 1: modality K|B
# 2: atom negation
# """
# if optimisation & 0x1 == 1:
# if ((mod_a[1] & 0x2) != (mod_b[1] & 0x2)) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) == (mod_b[1] & 0x4)) and \
# ((mod_a[1] & 0x4) == 0):
# # if modal operators are different
# # atom negation is different, and there is no atom negation
# return False
# elif (optimisation & 0x2) == 2:
# if ((mod_a[1] & 0x6) == (mod_b[1] & 0x6)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x4) == 0) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)):
# # if both mod negation and mod are the same (K and no negation)
# return False
# elif (optimisation & 0x4) == 4:
# if ((mod_a[1] & 0x6) != (mod_b[1] & 0x6)) and \<|fim▁hole|># ((mod_a[1] & 0x1) == (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) != (mod_a[1] & 0x2)) and \
# ((mod_b[1] & 0x4) != (mod_b[1] & 0x2)):
# return False
# elif (optimisation & 0x8) == 8:
# # look for cases where the epistemic atoms are the same.
# if (mod_a[1] == mod_b[1]):
# return False
# elif (optimisation & 0x10) == 16:
# if ((mod_a[1] & 0x2) != (mod_b[1] & 0x2)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x1) == (mod_b[1] & 0x1)) and \
# ((mod_a[1] & 0x4) == (mod_b[1] & 0x4)) and \
# ((mod_a[1] & 0x4) == 0):
# return False
# elif (optimisation & 0x20) == 32:
# if ((mod_a[1] & 0x3) == (mod_b[1] & 0x3)) and \
# ((mod_b[1] & 0x4) != (mod_a[1] & 0x4)):
# return False
# def optimisation_present(e_atom_a, e_atom_b):
# if e_atom_a == e_atom_b:
# return True
# # if different modality but same negations on same label
# # optimisation exists
# if not e_atom_a.same_modal_token(e_atom_b) and e_atom_a.know() and \
# e_atom_b.same_atom_negation(e_atom_b) and \
# e_atom_b.same_epistemic_negation(e_atom_b)
# not e_atom_a.atom_negation:
# return True
# if e_atom_a.same_modal(e_atom_b) and \
# not e_atom_a.same_atom_negation(e_atom_b):
# return True
# if e_atom_a.same_modal(e_atom_b) and e_atom_a.know()
# elif (optimisation & 0x2) == 2:
# if ((mod_a[1] & 0x6) == (mod_b[1] & 0x6)) and \
# ((mod_a[1] & 0x2) == 1) and \
# ((mod_a[1] & 0x4) == 0) and \
# ((mod_a[1] & 0x1) != (mod_b[1] & 0x1)):<|fim▁end|> | |
<|file_name|>generate_date_table.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__prog_name__ = 'generate_date_table.py'
__prog_desc__ = 'Generate table with LPSN year of priority for species and subspecies names.'
__author__ = 'Pierre Chaumeil'
__copyright__ = 'Copyright 2018'
__credits__ = ['Pierre Chaumeil']
__license__ = 'GPL3'
__version__ = '0.0.2'
__maintainer__ = 'Pierre Chaumeil'
__email__ = '[email protected]'
__status__ = 'Development'
import os
import sys
import csv
import argparse
import re
import datetime
import logging
from biolib.logger import logger_setup
class DateEditor(object):
"""Main class
"""
def __init__(self):
"""Initialization."""
self.logger = logging.getLogger('timestamp')
def parse_lpsn_scraped_priorities(self, lpsn_scraped_species_info):
"""Parse year of priority from references scraped from LPSN."""
priorities = {}
with open(lpsn_scraped_species_info) as lsi:
lsi.readline()
for line in lsi:
infos = line.rstrip('\n').split('\t')
sp = infos[0]
if sp == 's__':
# *** hack to skip bad case in file
# Pierre to fix
continue
species_authority = infos[2]
reference_str = species_authority.split(', ')[0]
references = reference_str.replace('(', '').replace(')', '')
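                # strip 'emend.' and 'ex' citations so that only the original
                # naming reference contributes a year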
years = re.sub(r'emend\.[^\d]*\d{4}', '', references)
years = re.sub(r'ex [^\d]*\d{4}', ' ', years)
years = re.findall('[1-3][0-9]{3}', years, re.DOTALL)
years = [int(y) for y in years if int(y) <= datetime.datetime.now().year]
if len(years) == 0:
# assume this name is validated through ICN and just take the first
# date given as the year of priority
years = re.findall('[1-3][0-9]{3}', references, re.DOTALL)
years = [int(y) for y in years if int(y) <= datetime.datetime.now().year]
priorities[sp.replace('s__', '')] = years[0]
# We make sure that species and subspecies type species have the same date
        # i.e. Photorhabdus luminescens and Photorhabdus luminescens
        # subsp. luminescens
for k, v in priorities.items():
infos_name = k.split(' ')
if len(infos_name) == 2 and '{0} {1} subsp. {1}'.format(infos_name[0], infos_name[1]) in priorities:
priorities[k] = min(int(v), int(priorities.get(
'{0} {1} subsp. {1}'.format(infos_name[0], infos_name[1]))))
elif len(infos_name) == 4 and infos_name[1] == infos_name[3] and '{} {}'.format(infos_name[0], infos_name[1]) in priorities:
priorities[k] = min(int(v), int(priorities.get(
'{} {}'.format(infos_name[0], infos_name[1]))))
return priorities
def parse_lpsn_gss_priorities(self, lpsn_gss_file):
"""Get priority of species and usbspecies from LPSN GSS file."""
<|fim▁hole|>
for line_num, tokens in enumerate(csv_reader):
if line_num == 0:
genus_idx = tokens.index('genus_name')
specific_idx = tokens.index('sp_epithet')
subsp_idx = tokens.index('subsp_epithet')
status_idx = tokens.index('status')
author_idx = tokens.index('authors')
else:
generic = tokens[genus_idx].strip().replace('"', '')
specific = tokens[specific_idx].strip().replace('"', '')
subsp = tokens[subsp_idx].strip().replace('"', '')
if subsp:
taxon = '{} {} subsp. {}'.format(generic, specific, subsp)
elif specific:
taxon = '{} {}'.format(generic, specific)
else:
# skip genus entries
continue
status = tokens[status_idx].strip().replace('"', '')
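                    # the status field is a list of nomenclatural flags separated by ';' and ','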
status_tokens = [t.strip() for t in status.split(';')]
status_tokens = [tt.strip() for t in status_tokens for tt in t.split(',') ]
if 'illegitimate name' in status_tokens:
illegitimate_names.add(taxon)
if taxon in priorities:
continue
# get priority references, ignoring references if they are
                    # marked as being a revised name, as indicated by an 'ex' or 'emend'
# (e.g. Holospora (ex Hafkine 1890) Gromov and Ossipov 1981)
ref_str = tokens[author_idx]
references = ref_str.replace('(', '').replace(')', '')
years = re.sub(r'emend\.[^\d]*\d{4}', '', references)
years = re.sub(r'ex [^\d]*\d{4}', ' ', years)
years = re.findall('[1-3][0-9]{3}', years, re.DOTALL)
years = [int(y) for y in years if int(y) <= datetime.datetime.now().year]
if (taxon not in illegitimate_names
and taxon in priorities
and years[0] != priorities[taxon]):
# conflict that can't be attributed to one of the entries being
# considered an illegitimate name
self.logger.error('Conflicting priority references for {}: {} {}'.format(
taxon, years, priorities[taxon]))
priorities[taxon] = years[0]
return priorities
def run(self, lpsn_scraped_species_info, lpsn_gss_file, out_dir):
"""Parse priority year from LPSN data."""
        self.logger.info('Reading priority references scraped from LPSN.')
scraped_sp_priority = self.parse_lpsn_scraped_priorities(lpsn_scraped_species_info)
self.logger.info(' - read priority for {:,} species.'.format(len(scraped_sp_priority)))
self.logger.info('Reading priority references from LPSN GSS file.')
gss_sp_priority = self.parse_lpsn_gss_priorities(lpsn_gss_file)
self.logger.info(' - read priority for {:,} species.'.format(len(gss_sp_priority)))
        self.logger.info('Scraped priority information for {:,} species not in GSS file.'.format(
len(set(scraped_sp_priority) - set(gss_sp_priority))))
self.logger.info('Parsed priority information for {:,} species not on LPSN website.'.format(
len(set(gss_sp_priority) - set(scraped_sp_priority))))
self.logger.info('Writing out year of priority for species giving preference to GSS file.')
output_file = open(os.path.join(out_dir, 'year_table.tsv'), 'w')
same_year = 0
diff_year = 0
for sp in sorted(set(scraped_sp_priority).union(gss_sp_priority)):
if sp in gss_sp_priority:
output_file.write('{}\t{}\n'.format(sp, gss_sp_priority[sp]))
else:
output_file.write('{}\t{}\n'.format(sp, scraped_sp_priority[sp]))
if sp in gss_sp_priority and sp in scraped_sp_priority:
if gss_sp_priority[sp] == scraped_sp_priority[sp]:
same_year += 1
else:
diff_year += 1
self.logger.info(' - same priority year in GSS file and website: {:,}'.format(same_year))
self.logger.info(' - different priority year in GSS file and website: {:,}'.format(diff_year))
output_file.close()
if __name__ == '__main__':
print(__prog_name__ + ' v' + __version__ + ': ' + __prog_desc__)
print(' by ' + __author__ + ' (' + __email__ + ')' + '\n')
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--lpsn_scraped_species_info',
help='LPSN species file created by LPSN website parsing.',
required=True)
parser.add_argument('--lpsn_gss_file',
help="table from lpsn.dsmz.de with nomenclature information (lpsn_gss_<date>.csv)",
required=True)
parser.add_argument('--out_dir',
help='Output directory.',
required=True)
args = parser.parse_args()
logger_setup(args.out_dir,
__prog_name__.replace('.py', '.log'),
__prog_name__,
__version__,
False)
try:
dateeditor = DateEditor()
dateeditor.run(args.lpsn_scraped_species_info,
args.lpsn_gss_file,
args.out_dir)
except SystemExit:
print("\nControlled exit resulting from an unrecoverable error or warning.")
raise
except:
print("\nUnexpected error:", sys.exc_info()[0])
raise<|fim▁end|> | priorities = {}
illegitimate_names = set()
with open(lpsn_gss_file, encoding='utf-8', errors='ignore') as f:
csv_reader = csv.reader(f)
|
<|file_name|>FtpRobotRulesParser.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.protocol.ftp;
import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.nutch.crawl.CrawlDatum;
import org.apache.nutch.protocol.Protocol;
import org.apache.nutch.protocol.ProtocolOutput;
import org.apache.nutch.protocol.ProtocolStatus;
import org.apache.nutch.protocol.RobotRulesParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import crawlercommons.robots.BaseRobotRules;
import crawlercommons.robots.SimpleRobotRules;
/**
* This class is used for parsing robots for urls belonging to FTP protocol.
* It extends the generic {@link RobotRulesParser} class and contains
* Ftp protocol specific implementation for obtaining the robots file.
*/
public class FtpRobotRulesParser extends RobotRulesParser {
private static final String CONTENT_TYPE = "text/plain";
public static final Logger LOG = LoggerFactory.getLogger(FtpRobotRulesParser.class);
FtpRobotRulesParser() { }
public FtpRobotRulesParser(Configuration conf) {
super(conf);
}
/**
   * For hosts whose robots rules are not yet cached, this method sends an
   * FTP request to the host corresponding to the {@link URL} passed, gets
   * the robots file, parses the rules, and caches the rules object to
   * avoid re-work in the future.
*
* @param ftp The {@link Protocol} object
* @param url URL
*
* @return robotRules A {@link BaseRobotRules} object for the rules
*/
public BaseRobotRules getRobotRulesSet(Protocol ftp, URL url) {
String protocol = url.getProtocol().toLowerCase(); // normalize to lower case
String host = url.getHost().toLowerCase(); // normalize to lower case
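    // rules are cached per "protocol:host" pair, since robots.txt rules apply host-wide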
BaseRobotRules robotRules = (SimpleRobotRules) CACHE.get(protocol + ":" + host);
boolean cacheRule = true;
if (robotRules == null) { // cache miss
if (LOG.isTraceEnabled())
LOG.trace("cache miss " + url);
try {
Text robotsUrl = new Text(new URL(url, "/robots.txt").toString());
ProtocolOutput output = ((Ftp)ftp).getProtocolOutput(robotsUrl, new CrawlDatum());
ProtocolStatus status = output.getStatus();
<|fim▁hole|> } else {
robotRules = EMPTY_RULES; // use default rules
}
} catch (Throwable t) {
if (LOG.isInfoEnabled()) {
LOG.info("Couldn't get robots.txt for " + url + ": " + t.toString());
}
cacheRule = false;
robotRules = EMPTY_RULES;
}
if (cacheRule)
CACHE.put(protocol + ":" + host, robotRules); // cache rules for host
}
return robotRules;
}
}<|fim▁end|> | if (status.getCode() == ProtocolStatus.SUCCESS) {
robotRules = parseRules(url.toString(), output.getContent().getContent(),
CONTENT_TYPE, agentNames); |
<|file_name|>httpbasics.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
#
# Test rpc http basics
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
def setup_nodes(self):
return start_nodes(4, self.options.tmpdir)
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
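        #HTTP basic auth: base64("user:password") is sent in the Authorization header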
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)<|fim▁hole|> urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because bitcoind uses keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()<|fim▁end|> | out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on |
<|file_name|>test_ordinary_fields.py<|end_file_name|><|fim▁begin|>import unittest
import uuid
from django.core.checks import Error, Warning as DjangoWarning
from django.db import connection, models
from django.test import (
SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import isolate_apps, override_settings
from django.utils.functional import lazy
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django.utils.version import get_docs_version
@isolate_apps('invalid_models_tests')
class AutoFieldTests(SimpleTestCase):
def test_valid_case(self):
class Model(models.Model):
id = models.AutoField(primary_key=True)
field = Model._meta.get_field('id')
self.assertEqual(field.check(), [])
def test_primary_key(self):
# primary_key must be True. Refs #12467.
class Model(models.Model):
field = models.AutoField(primary_key=False)
# Prevent Django from autocreating `id` AutoField, which would
# result in an error, because a model must have exactly one
# AutoField.
another = models.IntegerField(primary_key=True)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
'AutoFields must set primary_key=True.',
obj=field,
id='fields.E100',
),
])
def test_max_length_warning(self):
class Model(models.Model):
auto = models.AutoField(primary_key=True, max_length=2)
field = Model._meta.get_field('auto')
self.assertEqual(field.check(), [
DjangoWarning(
"'max_length' is ignored when used with %s."
% field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id='fields.W122',
),
])
@isolate_apps('invalid_models_tests')
class BinaryFieldTests(SimpleTestCase):
def test_valid_default_value(self):
class Model(models.Model):
field1 = models.BinaryField(default=b'test')
field2 = models.BinaryField(default=None)
for field_name in ('field1', 'field2'):
field = Model._meta.get_field(field_name)
self.assertEqual(field.check(), [])
def test_str_default_value(self):
class Model(models.Model):
field = models.BinaryField(default='test')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"BinaryField's default cannot be a string. Use bytes content "
"instead.",
obj=field,
id='fields.E170',
),
])
@isolate_apps('invalid_models_tests')
class CharFieldTests(TestCase):
def test_valid_field(self):
class Model(models.Model):
field = models.CharField(
max_length=255,
choices=[
('1', 'item1'),
('2', 'item2'),
],
db_index=True,
)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [])
def test_missing_max_length(self):
class Model(models.Model):
field = models.CharField()
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"CharFields must define a 'max_length' attribute.",
obj=field,
id='fields.E120',
),
])
def test_negative_max_length(self):
class Model(models.Model):
field = models.CharField(max_length=-1)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
])
def test_bad_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="bad")
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
])
def test_str_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length='20')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121',
),
])
def test_str_max_length_type(self):
class Model(models.Model):
field = models.CharField(max_length=True)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'max_length' must be a positive integer.",
obj=field,
id='fields.E121'
),
])
def test_non_iterable_choices(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices='bad')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=field,
id='fields.E004',
),
])
def test_non_iterable_choices_two_letters(self):
"""Two letters isn't a valid choice pair."""
class Model(models.Model):
field = models.CharField(max_length=10, choices=['ab'])
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id='fields.E005',
),
])
def test_iterable_of_iterable_choices(self):
class ThingItem:
def __init__(self, value, display):
self.value = value
self.display = display
def __iter__(self):
return iter((self.value, self.display))
def __len__(self):
return 2
class Things:
def __iter__(self):
return iter((ThingItem(1, 2), ThingItem(3, 4)))
class ThingWithIterableChoices(models.Model):
thing = models.CharField(max_length=100, blank=True, choices=Things())
self.assertEqual(ThingWithIterableChoices._meta.get_field('thing').check(), [])
def test_choices_containing_non_pairs(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])
class Model2(models.Model):
field = models.IntegerField(choices=[0])
for model in (Model, Model2):
with self.subTest(model.__name__):
field = model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'choices' must be an iterable containing (actual "
"value, human readable name) tuples.",
obj=field,
id='fields.E005',
),
])
def test_choices_containing_lazy(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=[['1', _('1')], ['2', _('2')]])
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_lazy_choices(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=lazy(lambda: [[1, '1'], [2, '2']], tuple)())
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_choices_named_group(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=[
['knights', [['L', 'Lancelot'], ['G', 'Galahad']]],
['wizards', [['T', 'Tim the Enchanter']]],
['R', 'Random character'],
],
)
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_choices_named_group_non_pairs(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[['knights', [['L', 'Lancelot', 'Du Lac']]]],
)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id='fields.E005',
),
])
def test_choices_named_group_bad_structure(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=[
['knights', [
['Noble', [['G', 'Galahad']]],
['Combative', [['L', 'Lancelot']]],
]],
],
)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id='fields.E005',
),
])
def test_choices_named_group_lazy(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=[
[_('knights'), [['L', _('Lancelot')], ['G', _('Galahad')]]],
['R', _('Random character')],
],
)
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_choices_in_max_length(self):
class Model(models.Model):
field = models.CharField(
max_length=2, choices=[
('ABC', 'Value Too Long!'), ('OK', 'Good')
],
)
group = models.CharField(
max_length=2, choices=[
('Nested', [('OK', 'Good'), ('Longer', 'Longer')]),
('Grouped', [('Bad', 'Bad')]),
],
)
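        # longest choice values: 'ABC' (3 chars) in field; 'Longer' (6 chars) in the nested group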
for name, choice_max_length in (('field', 3), ('group', 6)):
with self.subTest(name):
field = Model._meta.get_field(name)
self.assertEqual(field.check(), [
Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=field,
id='fields.E009',
),
])
def test_bad_db_index_value(self):
class Model(models.Model):
field = models.CharField(max_length=10, db_index='bad')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'db_index' must be None, True or False.",
obj=field,
id='fields.E006',
),
])
def test_bad_validators(self):
class Model(models.Model):
field = models.CharField(max_length=10, validators=[True])
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"All 'validators' must be callable.",
hint=(
"validators[0] (True) isn't a function or instance of a "
"validator class."
),
obj=field,
id='fields.E008',
),
])
@unittest.skipUnless(connection.vendor == 'mysql',
"Test valid only for MySQL")
def test_too_long_char_field_under_mysql(self):
from django.db.backends.mysql.validation import DatabaseValidation
class Model(models.Model):
field = models.CharField(unique=True, max_length=256)
field = Model._meta.get_field('field')
validator = DatabaseValidation(connection=connection)
self.assertEqual(validator.check_field(field), [
DjangoWarning(
'%s may not allow unique CharFields to have a max_length > '
'255.' % connection.display_name,
hint=(
'See: https://docs.djangoproject.com/en/%s/ref/databases/'
'#mysql-character-fields' % get_docs_version()
),
obj=field,
id='mysql.W003',
)
])
def test_db_collation(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation='anything')
field = Model._meta.get_field('field')
error = Error(
'%s does not support a database collation on CharFields.'
% connection.display_name,
id='fields.E190',
obj=field,
)
expected = [] if connection.features.supports_collation_on_charfield else [error]
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation='anything')
class Meta:
required_db_features = {'supports_collation_on_charfield'}
field = Model._meta.get_field('field')
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps('invalid_models_tests')
class DateFieldTests(SimpleTestCase):
maxDiff = None
def test_auto_now_and_auto_now_add_raise_error(self):
class Model(models.Model):
field0 = models.DateTimeField(auto_now=True, auto_now_add=True, default=now)
field1 = models.DateTimeField(auto_now=True, auto_now_add=False, default=now)
field2 = models.DateTimeField(auto_now=False, auto_now_add=True, default=now)
field3 = models.DateTimeField(auto_now=True, auto_now_add=True, default=None)
expected = []
checks = []
for i in range(4):
field = Model._meta.get_field('field%d' % i)
expected.append(Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=field,
id='fields.E160',
))
checks.extend(field.check())
self.assertEqual(checks, expected)
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateField(default=now())
field_d = models.DateField(default=now().date())
field_now = models.DateField(default=now)
field_dt = Model._meta.get_field('field_dt')
field_d = Model._meta.get_field('field_d')
field_now = Model._meta.get_field('field_now')
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(errors, [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_dt,
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_d,
id='fields.W161',
)
])
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps('invalid_models_tests')
class DateTimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateTimeField(default=now())
field_d = models.DateTimeField(default=now().date())
field_now = models.DateTimeField(default=now)
field_dt = Model._meta.get_field('field_dt')
field_d = Model._meta.get_field('field_d')
field_now = Model._meta.get_field('field_now')
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(errors, [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_dt,
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=field_d,
id='fields.W161',
)
])
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps('invalid_models_tests')
class DecimalFieldTests(SimpleTestCase):
def test_required_attributes(self):
class Model(models.Model):
field = models.DecimalField()
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=field,
id='fields.E130',
),
Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=field,
id='fields.E132',
),
])
def test_negative_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits=-1, decimal_places=-1)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id='fields.E131',
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id='fields.E133',
),
])
def test_bad_values_of_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits="bad", decimal_places="bad")
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id='fields.E131',
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id='fields.E133',
),
])
def test_decimal_places_greater_than_max_digits(self):
class Model(models.Model):
field = models.DecimalField(max_digits=9, decimal_places=10)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=field,
id='fields.E134',
),
])
def test_valid_field(self):
class Model(models.Model):
field = models.DecimalField(max_digits=10, decimal_places=10)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [])
@isolate_apps('invalid_models_tests')
class FileFieldTests(SimpleTestCase):
def test_valid_default_case(self):
class Model(models.Model):
field = models.FileField()
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_valid_case(self):
class Model(models.Model):
field = models.FileField(upload_to='somewhere')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [])
def test_primary_key(self):
class Model(models.Model):
field = models.FileField(primary_key=False, upload_to='somewhere')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"'primary_key' is not a valid argument for a FileField.",
obj=field,
id='fields.E201',
)
])
def test_upload_to_starts_with_slash(self):
class Model(models.Model):
field = models.FileField(upload_to='/somewhere')
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"FileField's 'upload_to' argument must be a relative path, not "
"an absolute path.",
obj=field,
id='fields.E202',
hint='Remove the leading slash.',
)
])
def test_upload_to_callable_not_checked(self):
def callable(instance, filename):
return '/' + filename
class Model(models.Model):
field = models.FileField(upload_to=callable)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [])
@isolate_apps('invalid_models_tests')
class FilePathFieldTests(SimpleTestCase):
def test_forbidden_files_and_folders(self):
class Model(models.Model):
field = models.FilePathField(allow_files=False, allow_folders=False)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
obj=field,
id='fields.E140',
),
])
@isolate_apps('invalid_models_tests')
class GenericIPAddressFieldTests(SimpleTestCase):
def test_non_nullable_blank(self):
class Model(models.Model):
field = models.GenericIPAddressField(null=False, blank=True)
field = Model._meta.get_field('field')
self.assertEqual(field.check(), [
Error(
('GenericIPAddressFields cannot have blank=True if null=False, '
'as blank values are stored as nulls.'),
obj=field,
id='fields.E150',
),
])
@isolate_apps('invalid_models_tests')
class ImageFieldTests(SimpleTestCase):
def test_pillow_installed(self):
try:
from PIL import Image # NOQA
except ImportError:
pillow_installed = False
else:
pillow_installed = True
class Model(models.Model):
field = models.ImageField(upload_to='somewhere')
field = Model._meta.get_field('field')
errors = field.check()
expected = [] if pillow_installed else [
Error(
'Cannot use ImageField because Pillow is not installed.',
hint=('Get Pillow at https://pypi.org/project/Pillow/ '
'or run command "python -m pip install Pillow".'),
obj=field,
id='fields.E210',
),
]
self.assertEqual(errors, expected)
@isolate_apps('invalid_models_tests')
class IntegerFieldTests(SimpleTestCase):
def test_max_length_warning(self):
class Model(models.Model):
integer = models.IntegerField(max_length=2)
biginteger = models.BigIntegerField(max_length=2)
smallinteger = models.SmallIntegerField(max_length=2)
positiveinteger = models.PositiveIntegerField(max_length=2)
positivebiginteger = models.PositiveBigIntegerField(max_length=2)
positivesmallinteger = models.PositiveSmallIntegerField(max_length=2)
for field in Model._meta.get_fields():
if field.auto_created:
continue
with self.subTest(name=field.name):
self.assertEqual(field.check(), [
DjangoWarning(
"'max_length' is ignored when used with %s." % field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id='fields.W122',
)
])
@isolate_apps('invalid_models_tests')
class TimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.TimeField(default=now())
field_t = models.TimeField(default=now().time())
# Timezone-aware time object (when USE_TZ=True).
field_tz = models.TimeField(default=now().timetz())
field_now = models.DateField(default=now)
names = ['field_dt', 'field_t', 'field_tz', 'field_now']
fields = [Model._meta.get_field(name) for name in names]
errors = []
for field in fields:<|fim▁hole|>
self.assertEqual(errors, [
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=fields[0],
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint='It seems you set a fixed date / time / datetime '
'value as default for this field. This may not be '
'what you want. If you want to have the current date '
'as default, use `django.utils.timezone.now`',
obj=fields[1],
id='fields.W161',
),
DjangoWarning(
'Fixed default value provided.',
hint=(
'It seems you set a fixed date / time / datetime value as '
'default for this field. This may not be what you want. '
'If you want to have the current date as default, use '
'`django.utils.timezone.now`'
),
obj=fields[2],
id='fields.W161',
),
# field_now doesn't raise a warning.
])
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps('invalid_models_tests')
class TextFieldTests(TestCase):
@skipIfDBFeature('supports_index_on_text_field')
def test_max_length_warning(self):
class Model(models.Model):
value = models.TextField(db_index=True)
field = Model._meta.get_field('value')
field_type = field.db_type(connection)
self.assertEqual(field.check(databases=self.databases), [
DjangoWarning(
'%s does not support a database index on %s columns.'
% (connection.display_name, field_type),
hint=(
"An index won't be created. Silence this warning if you "
"don't care about it."
),
obj=field,
id='fields.W162',
)
])
def test_db_collation(self):
class Model(models.Model):
field = models.TextField(db_collation='anything')
field = Model._meta.get_field('field')
error = Error(
'%s does not support a database collation on TextFields.'
% connection.display_name,
id='fields.E190',
obj=field,
)
expected = [] if connection.features.supports_collation_on_textfield else [error]
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.TextField(db_collation='anything')
class Meta:
required_db_features = {'supports_collation_on_textfield'}
field = Model._meta.get_field('field')
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps('invalid_models_tests')
class UUIDFieldTests(TestCase):
def test_choices_named_group(self):
class Model(models.Model):
field = models.UUIDField(
choices=[
['knights', [
[uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'Lancelot'],
[uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'Galahad'],
]],
[uuid.UUID('25d405be-4895-4d50-9b2e-d6695359ce47'), 'Other'],
],
)
self.assertEqual(Model._meta.get_field('field').check(), [])
@isolate_apps('invalid_models_tests')
@skipUnlessDBFeature('supports_json_field')
class JSONFieldTests(TestCase):
def test_invalid_default(self):
class Model(models.Model):
field = models.JSONField(default={})
self.assertEqual(Model._meta.get_field('field').check(), [
DjangoWarning(
msg=(
"JSONField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint=(
'Use a callable instead, e.g., use `dict` instead of `{}`.'
),
obj=Model._meta.get_field('field'),
id='fields.E010',
)
])
def test_valid_default(self):
class Model(models.Model):
field = models.JSONField(default=dict)
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_valid_default_none(self):
class Model(models.Model):
field = models.JSONField(default=None)
self.assertEqual(Model._meta.get_field('field').check(), [])
def test_valid_callable_default(self):
def callable_default():
return {'it': 'works'}
class Model(models.Model):
field = models.JSONField(default=callable_default)
self.assertEqual(Model._meta.get_field('field').check(), [])<|fim▁end|> | errors.extend(field.check()) |
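# A minimal sketch (not from the original suite) of how these field checks are
# surfaced outside of tests; `ExampleModel` is hypothetical. `Model.check()`
# and `django.core.checks.run_checks()` are the entry points that
# `manage.py check` drives.
#
#   from django.core import checks
#   messages = checks.run_checks()                             # whole project
#   messages = ExampleModel.check()                            # one model
#   messages = ExampleModel._meta.get_field('field').check()   # one field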
<|file_name|>rst2html.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python<|fim▁hole|># Modified for Bazaar to accommodate options containing dots
#
# This file is in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
import docutils
from docutils.core import publish_cmdline, default_description
if True: # this is still required in the distutils trunk as-at June 2008.
from docutils.parsers.rst.states import Body
# we have some option names that contain a dot, which is not allowed by
# python-docutils 0.4-4 -- so monkeypatch in a better pattern
#
# This is a bit gross to patch because all this is built up at load time.
Body.pats['optname'] = r'[a-zA-Z0-9][a-zA-Z0-9._-]*'
Body.pats['longopt'] = r'(--|/)%(optname)s([ =]%(optarg)s)?' % Body.pats
Body.pats['option'] = r'(%(shortopt)s|%(longopt)s)' % Body.pats
Body.patterns['option_marker'] = r'%(option)s(, %(option)s)*( +| ?$)' % Body.pats
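# A minimal sketch, assuming the widened pattern above: an option list in our
# reST sources may now document dotted option names, e.g.
#
#   --log.level=LEVEL    set the logging level
#
# which the stock python-docutils 0.4-4 pattern would not recognise as an
# option-list item.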
description = ('Generates (X)HTML documents from standalone reStructuredText '
'sources. ' + default_description)
# workaround for bug with <xxx id="tags" name="tags"> in IE
from docutils.writers import html4css1
class IESafeHtmlTranslator(html4css1.HTMLTranslator):
def starttag(self, node, tagname, suffix='\n', empty=0, **attributes):
x = html4css1.HTMLTranslator.starttag(self, node, tagname, suffix,
empty, **attributes)
y = x.replace('id="tags"', 'id="tags_"')
y = y.replace('name="tags"', 'name="tags_"')
y = y.replace('href="#tags"', 'href="#tags_"')
return y
mywriter = html4css1.Writer()
mywriter.translator_class = IESafeHtmlTranslator
publish_cmdline(writer=mywriter, description=description)<|fim▁end|> |
# Originally by Dave Goodger, from the docutils distribution.
# |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
/* App Module */
var granuleApp = angular.module('granuleApp', [ 'ngRoute', 'angularBasicAuth', 'granuleControllers', 'granuleServices']);
granuleApp.config(['$routeProvider',
function($routeProvider) {
$routeProvider.
when('/login', {
templateUrl: 'partials/login.html',
controller: 'LoginCtrl'
}).
when('/activity', {
templateUrl: 'partials/activity-list.html',
controller: 'ActivityListCtrl'
}).<|fim▁hole|> when('/activity/:activityId', {
templateUrl: 'partials/activity-detail.html',
controller: 'ActivityDetailCtrl'
}).
otherwise({
redirectTo: '/login'
});
}]);<|fim▁end|> | |
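// A usage sketch, assuming the partials exist: navigating to "#/activity/42"
// renders partials/activity-detail.html under ActivityDetailCtrl, which can
// read $routeParams.activityId === "42"; any unmatched path redirects to
// "#/login".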
<|file_name|>_base.ctx.js<|end_file_name|><|fim▁begin|>module.exports = {
path: {<|fim▁hole|> scripts: '/assets/scripts',
styles: '/assets/styles',
images: '/assets/images'
},
site: {
url: require('./package.json').mylly.url,
name: 'My website',
lang: 'en',
charset: 'utf-8',
ua: '' // UA-XXXXXX-XX
},
page: {}
};<|fim▁end|> | |
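// A minimal sketch (assumed wiring, not shown in this file): the context is
// typically handed to the HTML templating step, so markup can interpolate
// e.g. site.name, site.lang, or path.styles + '/main.css'; site.url comes
// from the mylly.url field of the package.json required above.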
<|file_name|>dci.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `DCI` reader"]
pub struct R(crate::R<DCI_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DCI_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<DCI_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<DCI_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `DCI` writer"]
pub struct W(crate::W<DCI_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<DCI_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<DCI_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<DCI_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Value Selector input selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum SVIS_A {
#[doc = "0: HRPWMx.SyIA"]
VALUE1 = 0,
#[doc = "1: HRPWMx.SyIB"]
VALUE2 = 1,
#[doc = "2: HRPWMx.SyIC"]
VALUE3 = 2,
#[doc = "3: HRPWMx.SyID"]
VALUE4 = 3,
#[doc = "4: HRPWMx.SyIE"]
VALUE5 = 4,
#[doc = "5: HRPWMx.SyIF"]
VALUE6 = 5,
#[doc = "6: HRPWMx.SyIG"]
VALUE7 = 6,
#[doc = "7: HRPWMx.SyIH"]
VALUE8 = 7,
#[doc = "8: HRPWMx.SyII"]
VALUE9 = 8,
#[doc = "9: HRPWMx.SyIJ"]
VALUE10 = 9,
#[doc = "10: HRPWMx.SyIK"]
VALUE11 = 10,
#[doc = "11: HRPWMx.SyIL"]
VALUE12 = 11,
#[doc = "12: HRPWMx.SyIM"]
VALUE13 = 12,
#[doc = "13: HRPWMx.SyIN"]
VALUE14 = 13,
#[doc = "14: HRPWMx.SyIO"]
VALUE15 = 14,
#[doc = "15: HRPWMx.SyIP"]
VALUE16 = 15,
}
impl From<SVIS_A> for u8 {
#[inline(always)]
fn from(variant: SVIS_A) -> Self {
variant as _
}
}
#[doc = "Field `SVIS` reader - Value Selector input selection"]
pub struct SVIS_R(crate::FieldReader<u8, SVIS_A>);
impl SVIS_R {
pub(crate) fn new(bits: u8) -> Self {
SVIS_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SVIS_A {
match self.bits {
0 => SVIS_A::VALUE1,
1 => SVIS_A::VALUE2,
2 => SVIS_A::VALUE3,
3 => SVIS_A::VALUE4,
4 => SVIS_A::VALUE5,
5 => SVIS_A::VALUE6,
6 => SVIS_A::VALUE7,
7 => SVIS_A::VALUE8,
8 => SVIS_A::VALUE9,
9 => SVIS_A::VALUE10,
10 => SVIS_A::VALUE11,
11 => SVIS_A::VALUE12,
12 => SVIS_A::VALUE13,
13 => SVIS_A::VALUE14,
14 => SVIS_A::VALUE15,
15 => SVIS_A::VALUE16,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == SVIS_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == SVIS_A::VALUE2
}
#[doc = "Checks if the value of the field is `VALUE3`"]
#[inline(always)]
pub fn is_value3(&self) -> bool {
**self == SVIS_A::VALUE3
}
#[doc = "Checks if the value of the field is `VALUE4`"]
#[inline(always)]
pub fn is_value4(&self) -> bool {
**self == SVIS_A::VALUE4
}
#[doc = "Checks if the value of the field is `VALUE5`"]
#[inline(always)]
pub fn is_value5(&self) -> bool {
**self == SVIS_A::VALUE5
}
#[doc = "Checks if the value of the field is `VALUE6`"]
#[inline(always)]
pub fn is_value6(&self) -> bool {
**self == SVIS_A::VALUE6
}
#[doc = "Checks if the value of the field is `VALUE7`"]
#[inline(always)]
pub fn is_value7(&self) -> bool {
**self == SVIS_A::VALUE7
}
#[doc = "Checks if the value of the field is `VALUE8`"]
#[inline(always)]
pub fn is_value8(&self) -> bool {
**self == SVIS_A::VALUE8
}
#[doc = "Checks if the value of the field is `VALUE9`"]
#[inline(always)]
pub fn is_value9(&self) -> bool {
**self == SVIS_A::VALUE9
}
#[doc = "Checks if the value of the field is `VALUE10`"]
#[inline(always)]
pub fn is_value10(&self) -> bool {
**self == SVIS_A::VALUE10
}
#[doc = "Checks if the value of the field is `VALUE11`"]
#[inline(always)]
pub fn is_value11(&self) -> bool {
**self == SVIS_A::VALUE11
}
#[doc = "Checks if the value of the field is `VALUE12`"]
#[inline(always)]
pub fn is_value12(&self) -> bool {
**self == SVIS_A::VALUE12
}
#[doc = "Checks if the value of the field is `VALUE13`"]
#[inline(always)]
pub fn is_value13(&self) -> bool {
**self == SVIS_A::VALUE13
}
#[doc = "Checks if the value of the field is `VALUE14`"]
#[inline(always)]
pub fn is_value14(&self) -> bool {
**self == SVIS_A::VALUE14
}
#[doc = "Checks if the value of the field is `VALUE15`"]
#[inline(always)]
pub fn is_value15(&self) -> bool {
**self == SVIS_A::VALUE15
}
#[doc = "Checks if the value of the field is `VALUE16`"]
#[inline(always)]
pub fn is_value16(&self) -> bool {
**self == SVIS_A::VALUE16
}
}
impl core::ops::Deref for SVIS_R {
type Target = crate::FieldReader<u8, SVIS_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SVIS` writer - Value Selector input selection"]
pub struct SVIS_W<'a> {
w: &'a mut W,
}
impl<'a> SVIS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SVIS_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "HRPWMx.SyIA"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(SVIS_A::VALUE1)
}
#[doc = "HRPWMx.SyIB"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(SVIS_A::VALUE2)
}
#[doc = "HRPWMx.SyIC"]
#[inline(always)]
pub fn value3(self) -> &'a mut W {
self.variant(SVIS_A::VALUE3)
}
#[doc = "HRPWMx.SyID"]
#[inline(always)]
pub fn value4(self) -> &'a mut W {
self.variant(SVIS_A::VALUE4)
}
#[doc = "HRPWMx.SyIE"]
#[inline(always)]
pub fn value5(self) -> &'a mut W {
self.variant(SVIS_A::VALUE5)
}
#[doc = "HRPWMx.SyIF"]
#[inline(always)]
pub fn value6(self) -> &'a mut W {
self.variant(SVIS_A::VALUE6)
}
#[doc = "HRPWMx.SyIG"]
#[inline(always)]
pub fn value7(self) -> &'a mut W {
self.variant(SVIS_A::VALUE7)
}
#[doc = "HRPWMx.SyIH"]
#[inline(always)]
pub fn value8(self) -> &'a mut W {
self.variant(SVIS_A::VALUE8)
}
#[doc = "HRPWMx.SyII"]
#[inline(always)]
pub fn value9(self) -> &'a mut W {
self.variant(SVIS_A::VALUE9)
}
#[doc = "HRPWMx.SyIJ"]
#[inline(always)]
pub fn value10(self) -> &'a mut W {
self.variant(SVIS_A::VALUE10)
}
#[doc = "HRPWMx.SyIK"]
#[inline(always)]
pub fn value11(self) -> &'a mut W {
self.variant(SVIS_A::VALUE11)
}
#[doc = "HRPWMx.SyIL"]
#[inline(always)]
pub fn value12(self) -> &'a mut W {
self.variant(SVIS_A::VALUE12)
}
#[doc = "HRPWMx.SyIM"]
#[inline(always)]
pub fn value13(self) -> &'a mut W {
self.variant(SVIS_A::VALUE13)
}
#[doc = "HRPWMx.SyIN"]
#[inline(always)]
pub fn value14(self) -> &'a mut W {
self.variant(SVIS_A::VALUE14)
}
#[doc = "HRPWMx.SyIO"]
#[inline(always)]
pub fn value15(self) -> &'a mut W {
self.variant(SVIS_A::VALUE15)
}
#[doc = "HRPWMx.SyIP"]
#[inline(always)]
pub fn value16(self) -> &'a mut W {
self.variant(SVIS_A::VALUE16)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
#[doc = "Field `STRIS` reader - Slope generation start control input selection"]
pub struct STRIS_R(crate::FieldReader<u8, u8>);
impl STRIS_R {
pub(crate) fn new(bits: u8) -> Self {
STRIS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for STRIS_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `STRIS` writer - Slope generation start control input selection"]
pub struct STRIS_W<'a> {
w: &'a mut W,
}
impl<'a> STRIS_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 4)) | ((value as u32 & 0x0f) << 4);
self.w
}
}
#[doc = "Field `STPIS` reader - Slope generation stop control input selection"]
pub struct STPIS_R(crate::FieldReader<u8, u8>);
impl STPIS_R {
pub(crate) fn new(bits: u8) -> Self {
STPIS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for STPIS_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `STPIS` writer - Slope generation stop control input selection"]
pub struct STPIS_W<'a> {
w: &'a mut W,
}
impl<'a> STPIS_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 8)) | ((value as u32 & 0x0f) << 8);
self.w
}
}
#[doc = "Field `TRGIS` reader - External conversion trigger input selection"]
pub struct TRGIS_R(crate::FieldReader<u8, u8>);
impl TRGIS_R {
pub(crate) fn new(bits: u8) -> Self {
TRGIS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for TRGIS_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TRGIS` writer - External conversion trigger input selection"]
pub struct TRGIS_W<'a> {
w: &'a mut W,
}
impl<'a> TRGIS_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 12)) | ((value as u32 & 0x0f) << 12);
self.w
}
}
#[doc = "Field `STIS` reader - External shadow request enable input selection"]
pub struct STIS_R(crate::FieldReader<u8, u8>);
impl STIS_R {
pub(crate) fn new(bits: u8) -> Self {
STIS_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for STIS_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `STIS` writer - External shadow request enable input selection"]
pub struct STIS_W<'a> {
w: &'a mut W,
}
impl<'a> STIS_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 16)) | ((value as u32 & 0x0f) << 16);
self.w
}
}
#[doc = "Slope generation clock selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum SCS_A {
#[doc = "0: HRPWMx.MCLK (Module clock is used)"]
VALUE1 = 0,
#[doc = "1: HRPWMx.ECLKA (External clock is used)"]
VALUE2 = 1,
#[doc = "2: HRPWMx.ECLKB (External clock is used)"]
VALUE3 = 2,
#[doc = "3: HRPWMx.ECLKC (External clock is used)"]
VALUE4 = 3,
}
impl From<SCS_A> for u8 {
#[inline(always)]
fn from(variant: SCS_A) -> Self {
variant as _
}
}
#[doc = "Field `SCS` reader - Slope generation clock selection"]
pub struct SCS_R(crate::FieldReader<u8, SCS_A>);
impl SCS_R {
pub(crate) fn new(bits: u8) -> Self {
SCS_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SCS_A {
match self.bits {
0 => SCS_A::VALUE1,
1 => SCS_A::VALUE2,
2 => SCS_A::VALUE3,
3 => SCS_A::VALUE4,
_ => unreachable!(),
}<|fim▁hole|> #[inline(always)]
pub fn is_value1(&self) -> bool {
**self == SCS_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == SCS_A::VALUE2
}
#[doc = "Checks if the value of the field is `VALUE3`"]
#[inline(always)]
pub fn is_value3(&self) -> bool {
**self == SCS_A::VALUE3
}
#[doc = "Checks if the value of the field is `VALUE4`"]
#[inline(always)]
pub fn is_value4(&self) -> bool {
**self == SCS_A::VALUE4
}
}
impl core::ops::Deref for SCS_R {
type Target = crate::FieldReader<u8, SCS_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SCS` writer - Slope generation clock selection"]
pub struct SCS_W<'a> {
w: &'a mut W,
}
impl<'a> SCS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SCS_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "HRPWMx.MCLK (Module clock is used)"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(SCS_A::VALUE1)
}
#[doc = "HRPWMx.ECLKA (External clock is used)"]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(SCS_A::VALUE2)
}
#[doc = "HRPWMx.ECLKB (External clock is used)"]
#[inline(always)]
pub fn value3(self) -> &'a mut W {
self.variant(SCS_A::VALUE3)
}
#[doc = "HRPWMx.ECLKC (External clock is used)"]
#[inline(always)]
pub fn value4(self) -> &'a mut W {
self.variant(SCS_A::VALUE4)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 20)) | ((value as u32 & 0x03) << 20);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - Value Selector input selection"]
#[inline(always)]
pub fn svis(&self) -> SVIS_R {
SVIS_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bits 4:7 - Slope generation start control input selection"]
#[inline(always)]
pub fn stris(&self) -> STRIS_R {
STRIS_R::new(((self.bits >> 4) & 0x0f) as u8)
}
#[doc = "Bits 8:11 - Slope generation stop control input selection"]
#[inline(always)]
pub fn stpis(&self) -> STPIS_R {
STPIS_R::new(((self.bits >> 8) & 0x0f) as u8)
}
#[doc = "Bits 12:15 - External conversion trigger input selection"]
#[inline(always)]
pub fn trgis(&self) -> TRGIS_R {
TRGIS_R::new(((self.bits >> 12) & 0x0f) as u8)
}
#[doc = "Bits 16:19 - External shadow request enable input selection"]
#[inline(always)]
pub fn stis(&self) -> STIS_R {
STIS_R::new(((self.bits >> 16) & 0x0f) as u8)
}
#[doc = "Bits 20:21 - Slope generation clock selection"]
#[inline(always)]
pub fn scs(&self) -> SCS_R {
SCS_R::new(((self.bits >> 20) & 0x03) as u8)
}
}
impl W {
#[doc = "Bits 0:3 - Value Selector input selection"]
#[inline(always)]
pub fn svis(&mut self) -> SVIS_W {
SVIS_W { w: self }
}
#[doc = "Bits 4:7 - Slope generation start control input selection"]
#[inline(always)]
pub fn stris(&mut self) -> STRIS_W {
STRIS_W { w: self }
}
#[doc = "Bits 8:11 - Slope generation stop control input selection"]
#[inline(always)]
pub fn stpis(&mut self) -> STPIS_W {
STPIS_W { w: self }
}
#[doc = "Bits 12:15 - External conversion trigger input selection"]
#[inline(always)]
pub fn trgis(&mut self) -> TRGIS_W {
TRGIS_W { w: self }
}
#[doc = "Bits 16:19 - External shadow request enable input selection"]
#[inline(always)]
pub fn stis(&mut self) -> STIS_W {
STIS_W { w: self }
}
#[doc = "Bits 20:21 - Slope generation clock selection"]
#[inline(always)]
pub fn scs(&mut self) -> SCS_W {
SCS_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "External input selection\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dci](index.html) module"]
pub struct DCI_SPEC;
impl crate::RegisterSpec for DCI_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [dci::R](R) reader structure"]
impl crate::Readable for DCI_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [dci::W](W) writer structure"]
impl crate::Writable for DCI_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets DCI to value 0"]
impl crate::Resettable for DCI_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | }
#[doc = "Checks if the value of the field is `VALUE1`"] |
<|file_name|>modal-portfolio.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core'
import template from './modal-portfolio.component.html'
@Component({
selector: 'modal-portfolio',
template
})
export class ModalPortfolioComponent {<|fim▁hole|><|fim▁end|> | constructor() {}
} |
<|file_name|>send_summary_mails.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from django.template.loader import render_to_string
from django.conf import settings
from preferences.models import UserPreferences
from summaries.models import Unseen
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
class Command(BaseCommand):
args = 'daily | weekly | monthly'
help = 'Builds and sends summary mails for given period'
option_list = BaseCommand.option_list + (
make_option('--dry-run',
action='store_true',
dest='dry',
default=False,
help='Do not send the emails; write them to stdout instead'),
)
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Give a period please")
period = args[0]
if period not in ("daily", "weekly", "monthly"):
raise CommandError("Period must be daily, weekly or monthly.")
users = [preference.user for preference in
UserPreferences.objects.filter(summary_mails=period)]
for user in users:
unseen_models = Unseen.objects.filter(user=user)<|fim▁hole|> unseen_links = [unseen.link for unseen in unseen_models]
if unseen_links:
email_title = "%s new links for you:" % len(unseen_links)
email_body_txt = render_to_string("summaries/body.txt", {
"user": user,
"links": unseen_links,
"site": Site.objects.get_current()
})
email_body_html = render_to_string("summaries/body.html", {
"user": user,
"links": unseen_links,
"site": Site.objects.get_current()
})
email = EmailMultiAlternatives(
email_title,
email_body_txt,
"Linkfloyd %s" %settings.DEFAULT_FROM_EMAIL,
[user.email,])
email.attach_alternative(email_body_html, "text/html")
if not options['dry']:
    email.send()
    self.stdout.write("Summary email for %s sent\n" % user)
    unseen_models.delete()
else:
    # --dry-run: print what would have been sent instead of sending it
    self.stdout.write(email_body_txt)<|fim▁end|>
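# Typical cron-driven invocations (sketch, assuming the app is installed and
# mail settings are configured):
#
#   python manage.py send_summary_mails daily
#   python manage.py send_summary_mails weekly --dry-run   # print, don't send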
<|file_name|>InitialDock1.py<|end_file_name|><|fim▁begin|>import sys
from PyQt4 import QtGui, QtCore
class InitialDock1(QtGui.QWidget):
def __init__(self, parent=None, message=None, speciesDefault=None, contBtnFn=None, addBtnFn=None, speciesFn=None):
QtGui.QWidget.__init__(self,parent)
self.setWindowTitle('Data Processing')
vbox = QtGui.QVBoxLayout()
vbox.setAlignment(QtCore.Qt.AlignCenter)
hbox1 = QtGui.QHBoxLayout()
hbox1.setAlignment(QtCore.Qt.AlignCenter)
hbox2 = QtGui.QHBoxLayout()
hbox2.setAlignment(QtCore.Qt.AlignCenter)
hbox3 = QtGui.QHBoxLayout()
hbox3.setAlignment(QtCore.Qt.AlignCenter)
hbox4 = QtGui.QHBoxLayout()
hbox4.setAlignment(QtCore.Qt.AlignCenter)
self.speciesList = ['S. cerevisiae', 'M. musculus', 'H. sapiens']
## message
if message != None:
label = QtGui.QLabel(message)
hbox1.addWidget(label)
vbox.addLayout(hbox1)
## add gene list button
if addBtnFn != False:
self.addBtn = QtGui.QPushButton("Add Gene List")
self.addBtn.setMaximumWidth(100)
self.addBtn.setMinimumWidth(100)
hbox4.addWidget(self.addBtn)
vbox.addLayout(hbox4)
if addBtnFn != None:
self.connect(self.addBtn, QtCore.SIGNAL('clicked()'),addBtnFn)
## cont button
if contBtnFn != False:
self.contBtn = QtGui.QPushButton("Continue")
self.contBtn.setMaximumWidth(100)
self.contBtn.setMinimumWidth(100)
hbox3.addWidget(self.contBtn)
vbox.addLayout(hbox3)
if contBtnFn != None:
self.connect(self.contBtn, QtCore.SIGNAL('clicked()'),contBtnFn)
## species selector
if speciesFn != False:
hbox1.addWidget(QtGui.QLabel('Species Selector'))
hbox1.setAlignment(QtCore.Qt.AlignTop)
vbox.addLayout(hbox1)
self.speciesSelector = QtGui.QComboBox(self)
self.speciesSelector.setMaximumWidth(150)
for species in self.speciesList:
self.speciesSelector.addItem(species)
hbox2.addWidget(self.speciesSelector)
hbox2.setAlignment(QtCore.Qt.AlignCenter)
vbox.addLayout(hbox2)
if speciesDefault != None:
if self.speciesList.__contains__(speciesDefault):
self.speciesSelector.setCurrentIndex(self.speciesList.index(speciesDefault))
else:
print "ERROR: in dpd - bad specified speciesDefault"
if speciesFn != None:<|fim▁hole|> ## finalize layout
self.setLayout(vbox)
## color the background
palette = self.palette()
role = self.backgroundRole()
palette.setColor(role, QtGui.QColor('white'))
self.setPalette(palette)
def enable_add_btn(self,fn):
self.connect(self.addBtn, QtCore.SIGNAL('clicked()'),fn)
def enable_continue_btn(self,fn):
self.connect(self.contBtn, QtCore.SIGNAL('clicked()'),fn)
def disable_all(self):
pass
def enable_all(self):
pass
### Run the tests
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
id1 = InitialDock1()
id1.show()
sys.exit(app.exec_())<|fim▁end|> | self.connect(self.speciesSelector,QtCore.SIGNAL('activated(QString)'), speciesFn)
|
<|file_name|>ActualInvocationsSource.hpp<|end_file_name|><|fim▁begin|>//
// Created by eran on 01/04/2015.
//
#include <unordered_set>
#include "fakeit/Invocation.hpp"
namespace fakeit {
struct ActualInvocationsContainer {
virtual void clear() = 0;
virtual ~ActualInvocationsContainer() NO_THROWS { }
};
struct ActualInvocationsSource {
virtual void getActualInvocations(std::unordered_set<fakeit::Invocation *> &into) const = 0;
virtual ~ActualInvocationsSource() NO_THROWS { }
};<|fim▁hole|> struct InvocationsSourceProxy : public ActualInvocationsSource {
InvocationsSourceProxy(ActualInvocationsSource *inner) :
_inner(inner) {
}
void getActualInvocations(std::unordered_set<fakeit::Invocation *> &into) const override {
_inner->getActualInvocations(into);
}
private:
std::shared_ptr<ActualInvocationsSource> _inner;
};
struct UnverifiedInvocationsSource : public ActualInvocationsSource {
UnverifiedInvocationsSource(InvocationsSourceProxy decorated) : _decorated(decorated) {
}
void getActualInvocations(std::unordered_set<fakeit::Invocation *> &into) const override {
std::unordered_set<fakeit::Invocation *> all;
_decorated.getActualInvocations(all);
for (fakeit::Invocation *i : all) {
if (!i->isVerified()) {
into.insert(i);
}
}
}
private:
InvocationsSourceProxy _decorated;
};
struct AggregateInvocationsSource : public ActualInvocationsSource {
AggregateInvocationsSource(std::vector<ActualInvocationsSource *> &sources) : _sources(sources) {
}
void getActualInvocations(std::unordered_set<fakeit::Invocation *> &into) const override {
std::unordered_set<fakeit::Invocation *> tmp;
for (ActualInvocationsSource *source : _sources) {
source->getActualInvocations(tmp);
}
filter(tmp, into);
}
protected:
bool shouldInclude(fakeit::Invocation *) const {
return true;
}
private:
std::vector<ActualInvocationsSource *> _sources;
void filter(std::unordered_set<Invocation *> &source, std::unordered_set<Invocation *> &target) const {
for (Invocation *i:source) {
if (shouldInclude(i)) {
target.insert(i);
}
}
}
};
}<|fim▁end|> | |
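// A composition sketch (not from the original header); mockA/mockB stand for
// any ActualInvocationsSource implementations. The proxy takes ownership of
// the aggregate via shared_ptr:
//
//   std::vector<fakeit::ActualInvocationsSource *> sources{ &mockA, &mockB };
//   fakeit::InvocationsSourceProxy proxy(new fakeit::AggregateInvocationsSource(sources));
//   fakeit::UnverifiedInvocationsSource unverified(proxy);
//   std::unordered_set<fakeit::Invocation *> pending;
//   unverified.getActualInvocations(pending);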
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![recursion_limit = "1024"]
mod async_rt;
mod backend;
mod codec;
mod dealer;
mod endpoint;
mod error;
mod fair_queue;
mod message;
mod r#pub;
mod pull;
mod push;
mod rep;
mod req;
mod router;
mod sub;
mod task_handle;
mod transport;
pub mod util;
#[doc(hidden)]
pub mod __async_rt {
//! DO NOT USE! PRIVATE IMPLEMENTATION, EXPOSED ONLY FOR INTEGRATION TESTS.
pub use super::async_rt::*;
}
pub use crate::dealer::*;
pub use crate::endpoint::{Endpoint, Host, Transport, TryIntoEndpoint};
pub use crate::error::{ZmqError, ZmqResult};
pub use crate::pull::*;
pub use crate::push::*;
pub use crate::r#pub::*;
pub use crate::rep::*;
pub use crate::req::*;
pub use crate::router::*;
pub use crate::sub::*;<|fim▁hole|>pub use message::*;
use crate::codec::*;
use crate::transport::AcceptStopHandle;
use util::PeerIdentity;
#[macro_use]
extern crate enum_primitive_derive;
use async_trait::async_trait;
use asynchronous_codec::FramedWrite;
use futures::channel::mpsc;
use futures::FutureExt;
use num_traits::ToPrimitive;
use parking_lot::Mutex;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fmt::{Debug, Display};
use std::sync::Arc;
#[allow(clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, PartialEq, Primitive)]
pub enum SocketType {
PAIR = 0,
PUB = 1,
SUB = 2,
REQ = 3,
REP = 4,
DEALER = 5,
ROUTER = 6,
PULL = 7,
PUSH = 8,
XPUB = 9,
XSUB = 10,
STREAM = 11,
}
impl TryFrom<&str> for SocketType {
type Error = ZmqError;
fn try_from(s: &str) -> Result<Self, ZmqError> {
Ok(match s {
"PAIR" => SocketType::PAIR,
"PUB" => SocketType::PUB,
"SUB" => SocketType::SUB,
"REQ" => SocketType::REQ,
"REP" => SocketType::REP,
"DEALER" => SocketType::DEALER,
"ROUTER" => SocketType::ROUTER,
"PULL" => SocketType::PULL,
"PUSH" => SocketType::PUSH,
"XPUB" => SocketType::XPUB,
"XSUB" => SocketType::XSUB,
"STREAM" => SocketType::STREAM,
_ => return Err(ZmqError::Other("Unknown socket type")),
})
}
}
impl Display for SocketType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SocketType::PAIR => write!(f, "PAIR"),
SocketType::PUB => write!(f, "PUB"),
SocketType::SUB => write!(f, "SUB"),
SocketType::REQ => write!(f, "REQ"),
SocketType::REP => write!(f, "REP"),
SocketType::DEALER => write!(f, "DEALER"),
SocketType::ROUTER => write!(f, "ROUTER"),
SocketType::PULL => write!(f, "PULL"),
SocketType::PUSH => write!(f, "PUSH"),
SocketType::XPUB => write!(f, "XPUB"),
SocketType::XSUB => write!(f, "XSUB"),
SocketType::STREAM => write!(f, "STREAM"),
}
}
}
#[derive(Debug)]
pub enum SocketEvent {
Connected(Endpoint, PeerIdentity),
ConnectDelayed,
ConnectRetried,
Listening(Endpoint),
Accepted(Endpoint, PeerIdentity),
AcceptFailed(ZmqError),
Closed,
CloseFailed,
Disconnected(PeerIdentity),
}
#[derive(Default)]
pub struct SocketOptions {
pub(crate) peer_id: Option<PeerIdentity>,
}
impl SocketOptions {
pub fn peer_identity(&mut self, peer_id: PeerIdentity) -> &mut Self {
self.peer_id = Some(peer_id);
self
}
}
#[async_trait]
pub trait MultiPeerBackend: SocketBackend {
    /// TODO: this should not be public; find a better way of doing this.
async fn peer_connected(self: Arc<Self>, peer_id: &PeerIdentity, io: FramedIo);
fn peer_disconnected(&self, peer_id: &PeerIdentity);
}
pub trait SocketBackend: Send + Sync {
fn socket_type(&self) -> SocketType;
fn socket_options(&self) -> &SocketOptions;
fn shutdown(&self);
fn monitor(&self) -> &Mutex<Option<mpsc::Sender<SocketEvent>>>;
}
#[async_trait]
pub trait SocketRecv {
async fn recv(&mut self) -> ZmqResult<ZmqMessage>;
}
#[async_trait]
pub trait SocketSend {
async fn send(&mut self, message: ZmqMessage) -> ZmqResult<()>;
}
/// Marker trait that expresses the fact that only certain types of sockets might be used
/// in [proxy] function as a capture parameter
pub trait CaptureSocket: SocketSend {}
#[async_trait]
pub trait Socket: Sized + Send {
fn new() -> Self {
Self::with_options(SocketOptions::default())
}
fn with_options(options: SocketOptions) -> Self;
fn backend(&self) -> Arc<dyn MultiPeerBackend>;
/// Binds to the endpoint and starts a coroutine to accept new connections
/// on it.
///
/// Returns the endpoint resolved to the exact bound location if applicable
/// (port # resolved, for example).
async fn bind(&mut self, endpoint: &str) -> ZmqResult<Endpoint> {
let endpoint = endpoint.try_into()?;
let cloned_backend = self.backend();
let cback = move |result| {
let cloned_backend = cloned_backend.clone();
async move {
let result = match result {
Ok((socket, endpoint)) => {
match util::peer_connected(socket, cloned_backend.clone()).await {
Ok(peer_id) => Ok((endpoint, peer_id)),
Err(e) => Err(e),
}
}
Err(e) => Err(e),
};
match result {
Ok((endpoint, peer_id)) => {
if let Some(monitor) = cloned_backend.monitor().lock().as_mut() {
let _ = monitor.try_send(SocketEvent::Accepted(endpoint, peer_id));
}
}
Err(e) => {
if let Some(monitor) = cloned_backend.monitor().lock().as_mut() {
let _ = monitor.try_send(SocketEvent::AcceptFailed(e));
}
}
}
}
};
let (endpoint, stop_handle) = transport::begin_accept(endpoint, cback).await?;
if let Some(monitor) = self.backend().monitor().lock().as_mut() {
let _ = monitor.try_send(SocketEvent::Listening(endpoint.clone()));
}
self.binds().insert(endpoint.clone(), stop_handle);
Ok(endpoint)
}
fn binds(&mut self) -> &mut HashMap<Endpoint, AcceptStopHandle>;
/// Unbinds the endpoint, blocking until the associated endpoint is no
/// longer in use
///
/// # Errors
/// May give a `ZmqError::NoSuchBind` if `endpoint` isn't bound. May also
/// give any other zmq errors encountered when attempting to disconnect
async fn unbind(&mut self, endpoint: Endpoint) -> ZmqResult<()> {
let stop_handle = self.binds().remove(&endpoint);
let stop_handle = stop_handle.ok_or(ZmqError::NoSuchBind(endpoint))?;
stop_handle.0.shutdown().await
}
/// Unbinds all bound endpoints, blocking until finished.
async fn unbind_all(&mut self) -> Vec<ZmqError> {
let mut errs = Vec::new();
let endpoints: Vec<_> = self
.binds()
.iter()
.map(|(endpoint, _)| endpoint.clone())
.collect();
for endpoint in endpoints {
if let Err(err) = self.unbind(endpoint).await {
errs.push(err);
}
}
errs
}
/// Connects to the given endpoint.
async fn connect(&mut self, endpoint: &str) -> ZmqResult<()> {
let backend = self.backend();
let endpoint = endpoint.try_into()?;
let result = match util::connect_forever(endpoint).await {
Ok((socket, endpoint)) => match util::peer_connected(socket, backend).await {
Ok(peer_id) => Ok((endpoint, peer_id)),
Err(e) => Err(e),
},
Err(e) => Err(e),
};
match result {
Ok((endpoint, peer_id)) => {
if let Some(monitor) = self.backend().monitor().lock().as_mut() {
let _ = monitor.try_send(SocketEvent::Connected(endpoint, peer_id));
}
Ok(())
}
Err(e) => Err(e),
}
}
/// Creates and setups new socket monitor
///
/// Subsequent calls to this method each create a new monitor channel.
/// Sender side of previous one is dropped.
fn monitor(&mut self) -> mpsc::Receiver<SocketEvent>;
// TODO: async fn connections(&self) -> ?
/// Disconnects from the given endpoint, blocking until finished.
///
/// # Errors
/// May give a `ZmqError::NoSuchConnection` if `endpoint` isn't connected.
/// May also give any other zmq errors encountered when attempting to
/// disconnect.
// TODO: async fn disconnect(&mut self, endpoint: impl TryIntoEndpoint + 'async_trait) ->
// ZmqResult<()>;
/// Disconnects all connections, blocking until finished.
// TODO: async fn disconnect_all(&mut self) -> ZmqResult<()>;
/// Closes the socket, blocking until all associated binds are closed.
/// This is equivalent to `drop()`, but with the benefit of blocking until
/// resources are released, and getting any underlying errors.
///
/// Returns any encountered errors.
// TODO: Call disconnect_all() when added
async fn close(mut self) -> Vec<ZmqError> {
// self.disconnect_all().await?;
self.unbind_all().await
}
}
pub async fn proxy<Frontend: SocketSend + SocketRecv, Backend: SocketSend + SocketRecv>(
mut frontend: Frontend,
mut backend: Backend,
mut capture: Option<Box<dyn CaptureSocket>>,
) -> ZmqResult<()> {
loop {
futures::select! {
frontend_mess = frontend.recv().fuse() => {
match frontend_mess {
Ok(message) => {
if let Some(capture) = &mut capture {
capture.send(message.clone()).await?;
}
backend.send(message).await?;
}
Err(_) => {
todo!()
}
}
},
backend_mess = backend.recv().fuse() => {
match backend_mess {
Ok(message) => {
if let Some(capture) = &mut capture {
capture.send(message.clone()).await?;
}
frontend.send(message).await?;
}
Err(_) => {
todo!()
}
}
}
};
}
}
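// A minimal request/reply sketch (not from this file), inside an async fn;
// RepSocket/ReqSocket are the rep/req re-exports above, and ZmqMessage::from
// is assumed from the message module. Error handling elided via `?`.
//
//     let mut rep = RepSocket::new();
//     rep.bind("tcp://127.0.0.1:5555").await?;
//     let mut req = ReqSocket::new();
//     req.connect("tcp://127.0.0.1:5555").await?;
//     req.send(ZmqMessage::from("ping")).await?;
//     let msg = rep.recv().await?;
//     rep.send(msg).await?;      // echo back
//     let _reply = req.recv().await?;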
pub mod prelude {
//! Re-exports important traits. Consider glob-importing.
pub use crate::{Socket, SocketRecv, SocketSend, TryIntoEndpoint};
}<|fim▁end|> | |
<|file_name|>parse_response_file.py<|end_file_name|><|fim▁begin|>from ggrade import read_tab_file
import argparse
################################################################################
################################################################################
def main():
# Parse the input arguments
parser = argparse.ArgumentParser()
parser.add_argument('infile_name', type=str, default=None, help='Input file name',nargs='?')
parser.add_argument('--solutions-file', dest='outfile_name', type=str,\
default=None, help='Name of output file to write the solutions to.')
args = parser.parse_args()
# Open the file and pull out the information.
questions,solutions,student_answers = None,None,None
if args.infile_name is not None:
questions,solutions,student_answers = read_tab_file(args.infile_name)
solutions_string = "solutions = [ \n"
extra_feedback_string = "feedback_for_everyone = [ \n"
incorrect_feedback_string = "feedback_for_wrong_answers = [ \n"
points_per_question_string = "points_per_question = [ \n"
nsolutions = len(solutions)
# For now, assume the solutions are taken from the first response row.
for i,solution in enumerate(solutions):
solutions_string += "\t\"%s\"" % (solution)
extra_feedback_string += "\tNone"
incorrect_feedback_string += "\tNone"
points_per_question_string += "10"
if i != nsolutions-1:
solutions_string += ", # Question %d\n" % (i+1)
extra_feedback_string += ", # Question %d\n" % (i+1)
incorrect_feedback_string += ", # Question %d\n" % (i+1)
points_per_question_string += ", # Question %d\n" % (i+1)
else:
solutions_string += " # Question %d \n" % (i+1)
extra_feedback_string += " # Question %d \n" % (i+1)
incorrect_feedback_string += " # Question %d \n" % (i+1)
points_per_question_string += " # Question %d \n" % (i+1)
solutions_string += "] \n"
extra_feedback_string += "] \n"
incorrect_feedback_string += "] \n"
points_per_question_string += "] \n"
# Write the output to a file.
outfile_name = "solutions.py"
if args.outfile_name is not None:
outfile_name = args.outfile_name
else:
outfile_name = args.infile_name.split('.tsv')[0]
outfile_name = "SOLUTIONS_%s.py" % (outfile_name)
outfile = open(outfile_name,'w+')
outfile.write("# -*- coding: utf-8 -*-")<|fim▁hole|> outfile.write("\n")
outfile.write(incorrect_feedback_string)
outfile.write("\n")
outfile.write(points_per_question_string)
outfile.close()
################################################################################
################################################################################
if __name__=="__main__":
main()<|fim▁end|> | outfile.write("\n")
outfile.write(solutions_string)
outfile.write("\n")
outfile.write(extra_feedback_string) |
<|file_name|>WISCSpider.py<|end_file_name|><|fim▁begin|>import scrapy
import re
from research.items import ResearchItem
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class WISCSpider(scrapy.Spider):
name = "WISC"<|fim▁hole|>
def parse(self, response):
item = ResearchItem()
for sel in response.xpath('//table[@class="views-table cols-2"]'):
item['groupname'] = sel.xpath('caption/text()').extract()[0]
item['proflist'] = []
for selp in sel.xpath('.//div[@class="views-field views-field-name-1"]/span/a'):
tmpname = selp.xpath('text()').extract()
print str(tmpname)
item['proflist'].append(tmpname)
yield item<|fim▁end|> | allowed_domains = ["cs.wisc.edu"]
start_urls = ["https://www.cs.wisc.edu/research/groups"] |
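# Scrapy addresses spiders by `name`, not by class name, so this spider runs
# as (sketch):
#
#   scrapy crawl WISC -o groups.json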
<|file_name|>WorldWideWeb_suite.py<|end_file_name|><|fim▁begin|>"""Suite WorldWideWeb suite, as defined in Spyglass spec.:
Level 1, version 1
Generated from /Volumes/Sap/Applications (Mac OS 9)/Netscape Communicator™ Folder/Netscape Communicator™
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'WWW!'
class WorldWideWeb_suite_Events:
_argmap_OpenURL = {
'to' : 'INTO',
'toWindow' : 'WIND',
'flags' : 'FLGS',
'post_data' : 'POST',
'post_type' : 'MIME',
'progressApp' : 'PROG',
}
def OpenURL(self, _object, _attributes={}, **_arguments):
"""OpenURL: Opens a URL. Allows for more options than GetURL event
Required argument: URL
Keyword argument to: file destination
Keyword argument toWindow: window iD
Keyword argument flags: Binary: any combination of 1, 2 and 4 is allowed: 1 and 2 mean force reload the document. 4 is ignored
Keyword argument post_data: Form posting data
Keyword argument post_type: MIME type of the posting data. Defaults to application/x-www-form-urlencoded
Keyword argument progressApp: Application that will display progress
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: ID of the loading window
"""
_code = 'WWW!'
_subcode = 'OURL'
aetools.keysubst(_arguments, self._argmap_OpenURL)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_ShowFile = {
'MIME_type' : 'MIME',
'Window_ID' : 'WIND',
'URL' : 'URL ',
}
def ShowFile(self, _object, _attributes={}, **_arguments):
"""ShowFile: Similar to OpenDocuments, except that it specifies the parent URL, and MIME type of the file
Required argument: File to open
Keyword argument MIME_type: MIME type
Keyword argument Window_ID: Window to open the file in
Keyword argument URL: Use this as a base URL
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: Window ID of the loaded window. 0 means ShowFile failed, FFFFFFF means that data was not appropriate type to display in the browser.
"""
_code = 'WWW!'
_subcode = 'SHWF'
aetools.keysubst(_arguments, self._argmap_ShowFile)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_cancel_progress = {
'in_window' : 'WIND',
}
def cancel_progress(self, _object=None, _attributes={}, **_arguments):
"""cancel progress: Interrupts the download of the document in the given window
Required argument: progress ID, obtained from the progress app
Keyword argument in_window: window ID of the progress to cancel
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'CNCL'
aetools.keysubst(_arguments, self._argmap_cancel_progress)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def find_URL(self, _object, _attributes={}, **_arguments):
"""find URL: If the file was downloaded by Netscape, you can call FindURL to find out the URL used to download the file.
Required argument: File spec
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: The URL
"""
_code = 'WWW!'
_subcode = 'FURL'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def get_window_info(self, _object=None, _attributes={}, **_arguments):
"""get window info: Returns the information about the window as a list. Currently the list contains the window title and the URL. You can get the same information using standard Apple Event GetProperty.
Required argument: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: undocumented, typecode 'list'
"""
_code = 'WWW!'
_subcode = 'WNFO'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def list_windows(self, _no_object=None, _attributes={}, **_arguments):
"""list windows: Lists the IDs of all the hypertext windows
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: List of unique IDs of all the hypertext windows
"""
_code = 'WWW!'<|fim▁hole|> if _no_object != None: raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_parse_anchor = {
'relative_to' : 'RELA',
}
def parse_anchor(self, _object, _attributes={}, **_arguments):
"""parse anchor: Resolves the relative URL
Required argument: Main URL
Keyword argument relative_to: Relative URL
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: Parsed URL
"""
_code = 'WWW!'
_subcode = 'PRSA'
aetools.keysubst(_arguments, self._argmap_parse_anchor)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def register_URL_echo(self, _object=None, _attributes={}, **_arguments):
"""register URL echo: Registers the \xd2echo\xd3 application. Each download from now on will be echoed to this application.
Required argument: Application signature
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'RGUE'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_protocol = {
'for_protocol' : 'PROT',
}
def register_protocol(self, _object=None, _attributes={}, **_arguments):
"""register protocol: Registers application as a \xd2handler\xd3 for this protocol with a given prefix. The handler will receive \xd2OpenURL\xd3, or if that fails, \xd2GetURL\xd3 event.
Required argument: Application sig
Keyword argument for_protocol: protocol prefix: \xd2finger:\xd3, \xd2file\xd3,
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if registration has been successful
"""
_code = 'WWW!'
_subcode = 'RGPR'
aetools.keysubst(_arguments, self._argmap_register_protocol)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_viewer = {
'MIME_type' : 'MIME',
'with_file_type' : 'FTYP',
}
def register_viewer(self, _object, _attributes={}, **_arguments):
"""register viewer: Registers an application as a \xd4special\xd5 viewer for this MIME type. The application will be launched with ViewDoc events
Required argument: Application sig
Keyword argument MIME_type: MIME type viewer is registering for
Keyword argument with_file_type: Mac file type for the downloaded files
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if registration has been successful
"""
_code = 'WWW!'
_subcode = 'RGVW'
aetools.keysubst(_arguments, self._argmap_register_viewer)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_register_window_close = {
'for_window' : 'WIND',
}
def register_window_close(self, _object=None, _attributes={}, **_arguments):
"""register window close: Netscape will notify registered application when this window closes
Required argument: Application signature
Keyword argument for_window: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: true if successful
"""
_code = 'WWW!'
_subcode = 'RGWC'
aetools.keysubst(_arguments, self._argmap_register_window_close)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def unregister_URL_echo(self, _object, _attributes={}, **_arguments):
"""unregister URL echo: cancels URL echo
Required argument: application signature
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'UNRU'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_protocol = {
'for_protocol' : 'PROT',
}
def unregister_protocol(self, _object=None, _attributes={}, **_arguments):
"""unregister protocol: reverses the effects of \xd2register protocol\xd3
Required argument: Application sig.
Keyword argument for_protocol: protocol prefix. If none, unregister for all protocols
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if successful
"""
_code = 'WWW!'
_subcode = 'UNRP'
aetools.keysubst(_arguments, self._argmap_unregister_protocol)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_viewer = {
'MIME_type' : 'MIME',
}
def unregister_viewer(self, _object, _attributes={}, **_arguments):
"""unregister viewer: Revert to the old way of handling this MIME type
Required argument: Application sig
Keyword argument MIME_type: MIME type to be unregistered
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: TRUE if the event was successful
"""
_code = 'WWW!'
_subcode = 'UNRV'
aetools.keysubst(_arguments, self._argmap_unregister_viewer)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_unregister_window_close = {
'for_window' : 'WIND',
}
def unregister_window_close(self, _object=None, _attributes={}, **_arguments):
"""unregister window close: Undo for register window close
Required argument: Application signature
Keyword argument for_window: window ID
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: true if successful
"""
_code = 'WWW!'
_subcode = 'UNRC'
aetools.keysubst(_arguments, self._argmap_unregister_window_close)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def webActivate(self, _object=None, _attributes={}, **_arguments):
"""webActivate: Makes Netscape the frontmost application, and selects a given window. This event is here for suite completeness/ cross-platform compatibility only, you should use standard AppleEvents instead.
Required argument: window to bring to front
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'WWW!'
_subcode = 'ACTV'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
#
# Indices of types declared in this module
#
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
_enumdeclarations = {
}<|fim▁end|> | _subcode = 'LSTW'
if _arguments: raise TypeError, 'No optional args expected' |
<|file_name|>batchutils.py<|end_file_name|><|fim▁begin|>import plugins, datetime, time, os
class BatchVersionFilter:
def __init__(self, batchSession):
self.batchSession = batchSession
def verifyVersions(self, app):
badVersion = self.findUnacceptableVersion(app)
if badVersion is not None:
raise plugins.TextTestError, "unregistered version '" + badVersion + "' for " + self.batchSession + " session."
def findUnacceptableVersion(self, app):
if app.getCompositeConfigValue("batch_use_version_filtering", self.batchSession) != "true":
return
allowedVersions = app.getCompositeConfigValue("batch_version", self.batchSession)
for version in app.versions:
if len(version) > 0 and version not in allowedVersions and not version.startswith("copy_"):
return version
def calculateBatchDate():
# Batch mode uses a standardised date that gives a consistent answer for night-jobs.
# Hence midnight is a bad cutover point. The day therefore starts and ends at 8am :)
timeToUse = plugins.globalStartTime - datetime.timedelta(hours=8)
return timeToUse.strftime("%d%b%Y")
def parseFileName(fileName, diag):
versionStr = fileName[5:-5]
components = versionStr.split("_")
diag.info("Parsing file with components " + repr(components))
for index, component in enumerate(components[1:]):
try:
diag.info("Trying to parse " + component + " as date")
date = time.strptime(component, "%d%b%Y")
version = "_".join(components[:index + 1])
tag = "_".join(components[index + 2:]) or component
return version, date, tag
except ValueError:<|fim▁hole|>def convertToUrl(path, fileMapping):
for filePath, httpPath in fileMapping.items():
if path.startswith(filePath):
return path.replace(filePath, httpPath)
return "file://" + os.path.abspath(path)<|fim▁end|> | pass
return None, None, None
|
<|file_name|>TestNativeAtan2piRelaxed.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh.
#include "TestNativeAtan2pi.rs"
#pragma rs_fp_relaxed<|fim▁end|> | *
* http://www.apache.org/licenses/LICENSE-2.0
* |
<|file_name|>test_shelve_instance.py<|end_file_name|><|fim▁begin|># Copyright 2014 Scality
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import testtools
from tempest.common import waiters
from tempest import config
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = log.getLogger(__name__)
class TestShelveInstance(manager.ScenarioTest):
"""
This test shelves then unshelves a Nova instance
The following is the scenario outline:
* boot an instance and create a timestamp file in it
* shelve the instance
* unshelve the instance
* check the existence of the timestamp file in the unshelved instance
"""
def _write_timestamp(self, server_or_ip):
ssh_client = self.get_remote_client(server_or_ip)
ssh_client.exec_command('date > /tmp/timestamp; sync')
self.timestamp = ssh_client.exec_command('cat /tmp/timestamp')
def _check_timestamp(self, server_or_ip):
ssh_client = self.get_remote_client(server_or_ip)
got_timestamp = ssh_client.exec_command('cat /tmp/timestamp')<|fim▁hole|> self.servers_client.shelve_server(server['id'])
offload_time = CONF.compute.shelved_offload_time
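        # A non-negative shelved_offload_time means Nova offloads the shelved
        # instance on its own; otherwise we must request the offload explicitly.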
if offload_time >= 0:
waiters.wait_for_server_status(self.servers_client, server['id'],
'SHELVED_OFFLOADED',
extra_timeout=offload_time)
else:
waiters.wait_for_server_status(self.servers_client,
server['id'], 'SHELVED')
self.servers_client.shelve_offload_server(server['id'])
waiters.wait_for_server_status(self.servers_client, server['id'],
'SHELVED_OFFLOADED')
self.servers_client.unshelve_server(server['id'])
waiters.wait_for_server_status(self.servers_client, server['id'],
'ACTIVE')
@test.idempotent_id('1164e700-0af0-4a4c-8792-35909a88743c')
@testtools.skipUnless(CONF.compute_feature_enabled.shelve,
'Shelve is not available.')
@test.services('compute', 'network', 'image')
def test_shelve_instance(self):
self.keypair = self.create_keypair()
self.security_group = self._create_security_group()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'key_name': self.keypair['name'],
'security_groups': security_groups
}
server = self.create_server(image=CONF.compute.image_ref,
create_kwargs=create_kwargs)
if CONF.compute.use_floatingip_for_ssh:
floating_ip = self.floating_ips_client.create_floating_ip()
self.addCleanup(self.delete_wrapper,
self.floating_ips_client.delete_floating_ip,
floating_ip['id'])
self.floating_ips_client.associate_floating_ip_to_server(
floating_ip['ip'], server['id'])
self._write_timestamp(floating_ip['ip'])
else:
self._write_timestamp(server)
# Prevent bug #1257594 from coming back
# Unshelve used to boot the instance with the original image, not
# with the instance snapshot
self._shelve_then_unshelve_server(server)
if CONF.compute.use_floatingip_for_ssh:
self._check_timestamp(floating_ip['ip'])
else:
self._check_timestamp(server)<|fim▁end|> | self.assertEqual(self.timestamp, got_timestamp)
def _shelve_then_unshelve_server(self, server): |
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Gogs Authors. All rights reserved.
// Copyright 2016 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
// Package v1 Gitea API.
//
// This documentation describes the Gitea API.
//
// Schemes: http, https
// BasePath: /api/v1
// Version: {{AppVer | JSEscape | Safe}}
// License: MIT http://opensource.org/licenses/MIT
//
// Consumes:
// - application/json
// - text/plain
//
// Produces:
// - application/json
// - text/html
//
// Security:
// - BasicAuth :
// - Token :
// - AccessToken :
// - AuthorizationHeaderToken :
// - SudoParam :
// - SudoHeader :
// - TOTPHeader :
//
// SecurityDefinitions:
// BasicAuth:
// type: basic
// Token:
// type: apiKey
// name: token
// in: query
// AccessToken:
// type: apiKey
// name: access_token
// in: query
// AuthorizationHeaderToken:
// type: apiKey
// name: Authorization
// in: header
// description: API tokens must be prepended with "token" followed by a space.
// SudoParam:
// type: apiKey
// name: sudo
// in: query
// description: Sudo API request as the user provided as the key. Admin privileges are required.
// SudoHeader:
// type: apiKey
// name: Sudo
// in: header
// description: Sudo API request as the user provided as the key. Admin privileges are required.
// TOTPHeader:
// type: apiKey
// name: X-GITEA-OTP
// in: header
// description: Must be used in combination with BasicAuth if two-factor authentication is enabled.
//
// swagger:meta
package v1
import (
"net/http"
"reflect"
"strings"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/api/v1/admin"
"code.gitea.io/gitea/routers/api/v1/misc"
"code.gitea.io/gitea/routers/api/v1/notify"
"code.gitea.io/gitea/routers/api/v1/org"
"code.gitea.io/gitea/routers/api/v1/repo"
"code.gitea.io/gitea/routers/api/v1/settings"
_ "code.gitea.io/gitea/routers/api/v1/swagger" // for swagger generation
"code.gitea.io/gitea/routers/api/v1/user"
"code.gitea.io/gitea/services/forms"
"gitea.com/go-chi/binding"
"gitea.com/go-chi/session"
"github.com/go-chi/cors"
)
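// sudo checks the "sudo" query parameter (or "Sudo" header) and, when the
// requester is a site administrator, switches the request context over to
// the named user.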
func sudo() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
sudo := ctx.Query("sudo")
if len(sudo) == 0 {
sudo = ctx.Req.Header.Get("Sudo")
}
if len(sudo) > 0 {
if ctx.IsSigned && ctx.User.IsAdmin {
user, err := models.GetUserByName(sudo)
if err != nil {
if models.IsErrUserNotExist(err) {
ctx.NotFound()
} else {
ctx.Error(http.StatusInternalServerError, "GetUserByName", err)
}
return
}
log.Trace("Sudo from (%s) to: %s", ctx.User.Name, user.Name)
ctx.User = user
} else {
ctx.JSON(http.StatusForbidden, map[string]string{
"message": "Only administrators allowed to sudo.",
})
return
}
}
}
}
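// repoAssignment resolves the {username}/{reponame} URL parameters, loads
// the owner and repository (following any rename redirects), and stores the
// caller's permission on the context; it responds 404 when access is denied.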
func repoAssignment() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
userName := ctx.Params("username")
repoName := ctx.Params("reponame")
var (
owner *models.User
err error
)
// Check if the user is the same as the repository owner.
if ctx.IsSigned && ctx.User.LowerName == strings.ToLower(userName) {
owner = ctx.User
} else {
owner, err = models.GetUserByName(userName)
if err != nil {
if models.IsErrUserNotExist(err) {
if redirectUserID, err := models.LookupUserRedirect(userName); err == nil {
context.RedirectToUser(ctx.Context, userName, redirectUserID)
} else if models.IsErrUserRedirectNotExist(err) {
ctx.NotFound("GetUserByName", err)
} else {
ctx.Error(http.StatusInternalServerError, "LookupUserRedirect", err)
}
} else {
ctx.Error(http.StatusInternalServerError, "GetUserByName", err)
}
return
}
}
ctx.Repo.Owner = owner
// Get repository.
repo, err := models.GetRepositoryByName(owner.ID, repoName)
if err != nil {
if models.IsErrRepoNotExist(err) {
redirectRepoID, err := models.LookupRepoRedirect(owner.ID, repoName)
if err == nil {
context.RedirectToRepo(ctx.Context, redirectRepoID)
} else if models.IsErrRepoRedirectNotExist(err) {
ctx.NotFound()
} else {
ctx.Error(http.StatusInternalServerError, "LookupRepoRedirect", err)
}
} else {
ctx.Error(http.StatusInternalServerError, "GetRepositoryByName", err)
}
return
}
repo.Owner = owner
ctx.Repo.Repository = repo
ctx.Repo.Permission, err = models.GetUserRepoPermission(repo, ctx.User)
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err)
return
}
if !ctx.Repo.HasAccess() {
ctx.NotFound()
return
}
}
}
// Contexter middleware already checks token for user sign in process.
func reqToken() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if true == ctx.Data["IsApiToken"] {
return
}
if ctx.Context.IsBasicAuth {
ctx.CheckForOTP()
return
}
if ctx.IsSigned {
ctx.RequireCSRF()
return
}
ctx.Error(http.StatusUnauthorized, "reqToken", "token is required")
}
}
func reqExploreSignIn() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if setting.Service.Explore.RequireSigninView && !ctx.IsSigned {
ctx.Error(http.StatusUnauthorized, "reqExploreSignIn", "you must be signed in to search for users")
}
}
}
func reqBasicAuth() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.Context.IsBasicAuth {
ctx.Error(http.StatusUnauthorized, "reqBasicAuth", "basic auth required")
return
}
ctx.CheckForOTP()
}
}
// reqSiteAdmin user should be the site admin
func reqSiteAdmin() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqSiteAdmin", "user should be the site admin")
return
}
}
}
// reqOwner user should be the owner of the repo or site admin.
func reqOwner() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserRepoOwner() && !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqOwner", "user should be the owner of the repo")
return
}
}
}
// reqAdmin user should be an owner or a collaborator with admin write of a repository, or site admin
func reqAdmin() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqAdmin", "user should be an owner or a collaborator with admin write of a repository")
return
}
}
}
// reqRepoWriter user should have a permission to write to a repo, or be a site admin
func reqRepoWriter(unitTypes ...models.UnitType) func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserRepoWriter(unitTypes) && !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqRepoWriter", "user should have a permission to write to a repo")
return
}
}
}
// reqRepoReader user should have specific read permission or be a repo admin or a site admin
func reqRepoReader(unitType models.UnitType) func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserRepoReaderSpecific(unitType) && !ctx.IsUserRepoAdmin() && !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqRepoReader", "user should have specific read permission or be a repo admin or a site admin")
return
}
}
}
// reqAnyRepoReader user should have any permission to read repository or permissions of site admin
func reqAnyRepoReader() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.IsUserRepoReaderAny() && !ctx.IsUserSiteAdmin() {
ctx.Error(http.StatusForbidden, "reqAnyRepoReader", "user should have any permission to read repository or permissions of site admin")
return
}
}
}
// reqOrgOwnership user should be an organization owner, or a site admin
func reqOrgOwnership() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if ctx.Context.IsUserSiteAdmin() {
return
}
var orgID int64
if ctx.Org.Organization != nil {
orgID = ctx.Org.Organization.ID
} else if ctx.Org.Team != nil {
orgID = ctx.Org.Team.OrgID
} else {
ctx.Error(http.StatusInternalServerError, "", "reqOrgOwnership: unprepared context")
return
}
isOwner, err := models.IsOrganizationOwner(orgID, ctx.User.ID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "IsOrganizationOwner", err)
return
} else if !isOwner {
if ctx.Org.Organization != nil {
ctx.Error(http.StatusForbidden, "", "Must be an organization owner")
} else {
ctx.NotFound()
}
return
}
}
}
// reqTeamMembership user should be a team member, or a site admin
func reqTeamMembership() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if ctx.Context.IsUserSiteAdmin() {
return
}
if ctx.Org.Team == nil {
ctx.Error(http.StatusInternalServerError, "", "reqTeamMembership: unprepared context")
return
}
var orgID = ctx.Org.Team.OrgID
isOwner, err := models.IsOrganizationOwner(orgID, ctx.User.ID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "IsOrganizationOwner", err)
return
} else if isOwner {
return
}
if isTeamMember, err := models.IsTeamMember(orgID, ctx.Org.Team.ID, ctx.User.ID); err != nil {
ctx.Error(http.StatusInternalServerError, "IsTeamMember", err)
return
} else if !isTeamMember {
isOrgMember, err := models.IsOrganizationMember(orgID, ctx.User.ID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "IsOrganizationMember", err)
} else if isOrgMember {
ctx.Error(http.StatusForbidden, "", "Must be a team member")
} else {
ctx.NotFound()
}
return
}
}
}
// reqOrgMembership user should be an organization member, or a site admin
func reqOrgMembership() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if ctx.Context.IsUserSiteAdmin() {
return
}
var orgID int64
if ctx.Org.Organization != nil {
orgID = ctx.Org.Organization.ID
} else if ctx.Org.Team != nil {
orgID = ctx.Org.Team.OrgID
} else {
ctx.Error(http.StatusInternalServerError, "", "reqOrgMembership: unprepared context")
return
}
if isMember, err := models.IsOrganizationMember(orgID, ctx.User.ID); err != nil {
ctx.Error(http.StatusInternalServerError, "IsOrganizationMember", err)
return
} else if !isMember {
if ctx.Org.Organization != nil {
ctx.Error(http.StatusForbidden, "", "Must be an organization member")
} else {
ctx.NotFound()
}
return
}
}
}
func reqGitHook() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if !ctx.User.CanEditGitHook() {
ctx.Error(http.StatusForbidden, "", "must be allowed to edit Git hooks")
return
}
}
}
// reqWebhooksEnabled requires webhooks to be enabled by admin.
func reqWebhooksEnabled() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
if setting.DisableWebhooks {
ctx.Error(http.StatusForbidden, "", "webhooks disabled by administrator")
return
}
}
}
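// orgAssignment loads the organization (args[0]) and/or the team (args[1])
// referenced by the URL parameters into the API context.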
func orgAssignment(args ...bool) func(ctx *context.APIContext) {
var (
assignOrg bool
assignTeam bool
)
if len(args) > 0 {
assignOrg = args[0]
}
if len(args) > 1 {
assignTeam = args[1]
}
return func(ctx *context.APIContext) {
ctx.Org = new(context.APIOrganization)
var err error
if assignOrg {
ctx.Org.Organization, err = models.GetOrgByName(ctx.Params(":org"))
if err != nil {
if models.IsErrOrgNotExist(err) {
redirectUserID, err := models.LookupUserRedirect(ctx.Params(":org"))
if err == nil {
context.RedirectToUser(ctx.Context, ctx.Params(":org"), redirectUserID)
} else if models.IsErrUserRedirectNotExist(err) {
ctx.NotFound("GetOrgByName", err)
} else {
ctx.Error(http.StatusInternalServerError, "LookupUserRedirect", err)
}
} else {
ctx.Error(http.StatusInternalServerError, "GetOrgByName", err)
}
return
}
}
if assignTeam {
ctx.Org.Team, err = models.GetTeamByID(ctx.ParamsInt64(":teamid"))
if err != nil {
if models.IsErrTeamNotExist(err) {
ctx.NotFound()
} else {
ctx.Error(http.StatusInternalServerError, "GetTeamById", err)
}
return
}
}
}
}
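// mustEnableIssues responds 404 unless the caller can read the issues unit
// of the current repository, tracing the denied permission when enabled.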
func mustEnableIssues(ctx *context.APIContext) {
if !ctx.Repo.CanRead(models.UnitTypeIssues) {
if log.IsTrace() {
if ctx.IsSigned {
log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+
"User in Repo has Permissions: %-+v",
ctx.User,
models.UnitTypeIssues,
ctx.Repo.Repository,
ctx.Repo.Permission)
} else {
log.Trace("Permission Denied: Anonymous user cannot read %-v in Repo %-v\n"+
"Anonymous user in Repo has Permissions: %-+v",
models.UnitTypeIssues,
ctx.Repo.Repository,
ctx.Repo.Permission)
}
}
ctx.NotFound()
return
}
}
func mustAllowPulls(ctx *context.APIContext) {
if !(ctx.Repo.Repository.CanEnablePulls() && ctx.Repo.CanRead(models.UnitTypePullRequests)) {
if ctx.Repo.Repository.CanEnablePulls() && log.IsTrace() {
if ctx.IsSigned {
log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+
"User in Repo has Permissions: %-+v",
ctx.User,
models.UnitTypePullRequests,
ctx.Repo.Repository,
ctx.Repo.Permission)
} else {
log.Trace("Permission Denied: Anonymous user cannot read %-v in Repo %-v\n"+
"Anonymous user in Repo has Permissions: %-+v",
models.UnitTypePullRequests,
ctx.Repo.Repository,
ctx.Repo.Permission)
}
}
ctx.NotFound()
return
}
}
func mustEnableIssuesOrPulls(ctx *context.APIContext) {
if !ctx.Repo.CanRead(models.UnitTypeIssues) &&
!(ctx.Repo.Repository.CanEnablePulls() && ctx.Repo.CanRead(models.UnitTypePullRequests)) {
if ctx.Repo.Repository.CanEnablePulls() && log.IsTrace() {
if ctx.IsSigned {
log.Trace("Permission Denied: User %-v cannot read %-v and %-v in Repo %-v\n"+
"User in Repo has Permissions: %-+v",
ctx.User,
models.UnitTypeIssues,
models.UnitTypePullRequests,
ctx.Repo.Repository,
ctx.Repo.Permission)
} else {
log.Trace("Permission Denied: Anonymous user cannot read %-v and %-v in Repo %-v\n"+
"Anonymous user in Repo has Permissions: %-+v",
models.UnitTypeIssues,
models.UnitTypePullRequests,
ctx.Repo.Repository,
ctx.Repo.Permission)
}
}
ctx.NotFound()
return
}
}
func mustNotBeArchived(ctx *context.APIContext) {
if ctx.Repo.Repository.IsArchived {
ctx.NotFound()
return
}
}
// bind binds an obj to a func(ctx *context.APIContext)
func bind(obj interface{}) http.HandlerFunc {
var tp = reflect.TypeOf(obj)
for tp.Kind() == reflect.Ptr {
tp = tp.Elem()
}
return web.Wrap(func(ctx *context.APIContext) {
		var theObj = reflect.New(tp).Interface() // create a new form obj for every request instead of reusing obj directly
errs := binding.Bind(ctx.Req, theObj)
if len(errs) > 0 {
ctx.Error(http.StatusUnprocessableEntity, "validationError", errs[0].Error())
return
}
web.SetForm(ctx, theObj)
})
}
// Routes registers all v1 APIs routes to web application.
func Routes() *web.Route {
var m = web.NewRoute()
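	// Session, security-header, optional CORS and API-context middleware all
	// run before any of the route handlers registered below.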
m.Use(session.Sessioner(session.Options{
Provider: setting.SessionConfig.Provider,
ProviderConfig: setting.SessionConfig.ProviderConfig,
CookieName: setting.SessionConfig.CookieName,
CookiePath: setting.SessionConfig.CookiePath,
Gclifetime: setting.SessionConfig.Gclifetime,
Maxlifetime: setting.SessionConfig.Maxlifetime,
Secure: setting.SessionConfig.Secure,
SameSite: setting.SessionConfig.SameSite,
Domain: setting.SessionConfig.Domain,
}))
m.Use(securityHeaders())
if setting.CORSConfig.Enabled {
m.Use(cors.Handler(cors.Options{
//Scheme: setting.CORSConfig.Scheme, // FIXME: the cors middleware needs scheme option
AllowedOrigins: setting.CORSConfig.AllowDomain,
//setting.CORSConfig.AllowSubdomain // FIXME: the cors middleware needs allowSubdomain option
AllowedMethods: setting.CORSConfig.Methods,
AllowCredentials: setting.CORSConfig.AllowCredentials,
MaxAge: int(setting.CORSConfig.MaxAge.Seconds()),
}))
}
m.Use(context.APIContexter())
m.Use(context.ToggleAPI(&context.ToggleOptions{
SignInRequired: setting.Service.RequireSignInView,
}))
m.Group("", func() {
// Miscellaneous
if setting.API.EnableSwagger {
m.Get("/swagger", func(ctx *context.APIContext) {
ctx.Redirect("/api/swagger")
})
}
m.Get("/version", misc.Version)
m.Get("/signing-key.gpg", misc.SigningKey)
m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", misc.MarkdownRaw)
m.Group("/settings", func() {
m.Get("/ui", settings.GetGeneralUISettings)
m.Get("/api", settings.GetGeneralAPISettings)
m.Get("/attachment", settings.GetGeneralAttachmentSettings)
m.Get("/repository", settings.GetGeneralRepoSettings)
})
// Notifications
m.Group("/notifications", func() {
m.Combo("").
Get(notify.ListNotifications).
Put(notify.ReadNotifications)
m.Get("/new", notify.NewAvailable)
m.Combo("/threads/{id}").
Get(notify.GetThread).
Patch(notify.ReadThread)
}, reqToken())
// Users
m.Group("/users", func() {
m.Get("/search", reqExploreSignIn(), user.Search)
m.Group("/{username}", func() {
m.Get("", reqExploreSignIn(), user.GetInfo)
if setting.Service.EnableUserHeatmap {
m.Get("/heatmap", user.GetUserHeatmapData)
}
m.Get("/repos", reqExploreSignIn(), user.ListUserRepos)
m.Group("/tokens", func() {
m.Combo("").Get(user.ListAccessTokens).
Post(bind(api.CreateAccessTokenOption{}), user.CreateAccessToken)
m.Combo("/{id}").Delete(user.DeleteAccessToken)
}, reqBasicAuth())
})
})
m.Group("/users", func() {
m.Group("/{username}", func() {
m.Get("/keys", user.ListPublicKeys)
m.Get("/gpg_keys", user.ListGPGKeys)
m.Get("/followers", user.ListFollowers)
m.Group("/following", func() {
m.Get("", user.ListFollowing)
m.Get("/{target}", user.CheckFollowing)
})
m.Get("/starred", user.GetStarredRepos)
m.Get("/subscriptions", user.GetWatchedRepos)
})
}, reqToken())
m.Group("/user", func() {
m.Get("", user.GetAuthenticatedUser)
m.Combo("/emails").Get(user.ListEmails).
Post(bind(api.CreateEmailOption{}), user.AddEmail).
Delete(bind(api.DeleteEmailOption{}), user.DeleteEmail)
m.Get("/followers", user.ListMyFollowers)
m.Group("/following", func() {
m.Get("", user.ListMyFollowing)
m.Combo("/{username}").Get(user.CheckMyFollowing).Put(user.Follow).Delete(user.Unfollow)
})
m.Group("/keys", func() {
m.Combo("").Get(user.ListMyPublicKeys).
Post(bind(api.CreateKeyOption{}), user.CreatePublicKey)
m.Combo("/{id}").Get(user.GetPublicKey).
Delete(user.DeletePublicKey)
})
m.Group("/applications", func() {
m.Combo("/oauth2").
Get(user.ListOauth2Applications).<|fim▁hole|> Delete(user.DeleteOauth2Application).
Patch(bind(api.CreateOAuth2ApplicationOptions{}), user.UpdateOauth2Application).
Get(user.GetOauth2Application)
}, reqToken())
m.Group("/gpg_keys", func() {
m.Combo("").Get(user.ListMyGPGKeys).
Post(bind(api.CreateGPGKeyOption{}), user.CreateGPGKey)
m.Combo("/{id}").Get(user.GetGPGKey).
Delete(user.DeleteGPGKey)
})
m.Combo("/repos").Get(user.ListMyRepos).
Post(bind(api.CreateRepoOption{}), repo.Create)
m.Group("/starred", func() {
m.Get("", user.GetMyStarredRepos)
m.Group("/{username}/{reponame}", func() {
m.Get("", user.IsStarring)
m.Put("", user.Star)
m.Delete("", user.Unstar)
}, repoAssignment())
})
m.Get("/times", repo.ListMyTrackedTimes)
m.Get("/stopwatches", repo.GetStopwatches)
m.Get("/subscriptions", user.GetMyWatchedRepos)
m.Get("/teams", org.ListUserTeams)
}, reqToken())
// Repositories
m.Post("/org/{org}/repos", reqToken(), bind(api.CreateRepoOption{}), repo.CreateOrgRepoDeprecated)
m.Combo("/repositories/{id}", reqToken()).Get(repo.GetByID)
m.Group("/repos", func() {
m.Get("/search", repo.Search)
m.Get("/issues/search", repo.SearchIssues)
m.Post("/migrate", reqToken(), bind(api.MigrateRepoOptions{}), repo.Migrate)
m.Group("/{username}/{reponame}", func() {
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), repo.Edit)
m.Post("/transfer", reqOwner(), bind(api.TransferRepoOption{}), repo.Transfer)
m.Combo("/notifications").
Get(reqToken(), notify.ListRepoNotifications).
Put(reqToken(), notify.ReadRepoNotifications)
m.Group("/hooks/git", func() {
m.Combo("").Get(repo.ListGitHooks)
m.Group("/{id}", func() {
m.Combo("").Get(repo.GetGitHook).
Patch(bind(api.EditGitHookOption{}), repo.EditGitHook).
Delete(repo.DeleteGitHook)
})
}, reqToken(), reqAdmin(), reqGitHook(), context.ReferencesGitRepo(true))
m.Group("/hooks", func() {
m.Combo("").Get(repo.ListHooks).
Post(bind(api.CreateHookOption{}), repo.CreateHook)
m.Group("/{id}", func() {
m.Combo("").Get(repo.GetHook).
Patch(bind(api.EditHookOption{}), repo.EditHook).
Delete(repo.DeleteHook)
m.Post("/tests", context.RepoRefForAPI, repo.TestHook)
})
}, reqToken(), reqAdmin(), reqWebhooksEnabled())
m.Group("/collaborators", func() {
m.Get("", reqAnyRepoReader(), repo.ListCollaborators)
m.Combo("/{collaborator}").Get(reqAnyRepoReader(), repo.IsCollaborator).
Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator).
Delete(reqAdmin(), repo.DeleteCollaborator)
}, reqToken())
m.Group("/teams", func() {
m.Get("", reqAnyRepoReader(), repo.ListTeams)
m.Combo("/{team}").Get(reqAnyRepoReader(), repo.IsTeam).
Put(reqAdmin(), repo.AddTeam).
Delete(reqAdmin(), repo.DeleteTeam)
}, reqToken())
m.Get("/raw/*", context.RepoRefForAPI, reqRepoReader(models.UnitTypeCode), repo.GetRawFile)
m.Get("/archive/*", reqRepoReader(models.UnitTypeCode), repo.GetArchive)
m.Combo("/forks").Get(repo.ListForks).
Post(reqToken(), reqRepoReader(models.UnitTypeCode), bind(api.CreateForkOption{}), repo.CreateFork)
m.Group("/branches", func() {
m.Get("", repo.ListBranches)
m.Get("/*", repo.GetBranch)
m.Delete("/*", context.ReferencesGitRepo(false), reqRepoWriter(models.UnitTypeCode), repo.DeleteBranch)
m.Post("", reqRepoWriter(models.UnitTypeCode), bind(api.CreateBranchRepoOption{}), repo.CreateBranch)
}, reqRepoReader(models.UnitTypeCode))
m.Group("/branch_protections", func() {
m.Get("", repo.ListBranchProtections)
m.Post("", bind(api.CreateBranchProtectionOption{}), repo.CreateBranchProtection)
m.Group("/{name}", func() {
m.Get("", repo.GetBranchProtection)
m.Patch("", bind(api.EditBranchProtectionOption{}), repo.EditBranchProtection)
m.Delete("", repo.DeleteBranchProtection)
})
}, reqToken(), reqAdmin())
m.Group("/tags", func() {
m.Get("", repo.ListTags)
m.Delete("/{tag}", repo.DeleteTag)
}, reqRepoReader(models.UnitTypeCode), context.ReferencesGitRepo(true))
m.Group("/keys", func() {
m.Combo("").Get(repo.ListDeployKeys).
Post(bind(api.CreateKeyOption{}), repo.CreateDeployKey)
m.Combo("/{id}").Get(repo.GetDeployKey).
Delete(repo.DeleteDeploykey)
}, reqToken(), reqAdmin())
m.Group("/times", func() {
m.Combo("").Get(repo.ListTrackedTimesByRepository)
m.Combo("/{timetrackingusername}").Get(repo.ListTrackedTimesByUser)
}, mustEnableIssues, reqToken())
m.Group("/issues", func() {
m.Combo("").Get(repo.ListIssues).
Post(reqToken(), mustNotBeArchived, bind(api.CreateIssueOption{}), repo.CreateIssue)
m.Group("/comments", func() {
m.Get("", repo.ListRepoIssueComments)
m.Group("/{id}", func() {
m.Combo("").
Get(repo.GetIssueComment).
Patch(mustNotBeArchived, reqToken(), bind(api.EditIssueCommentOption{}), repo.EditIssueComment).
Delete(reqToken(), repo.DeleteIssueComment)
m.Combo("/reactions").
Get(repo.GetIssueCommentReactions).
Post(reqToken(), bind(api.EditReactionOption{}), repo.PostIssueCommentReaction).
Delete(reqToken(), bind(api.EditReactionOption{}), repo.DeleteIssueCommentReaction)
})
})
m.Group("/{index}", func() {
m.Combo("").Get(repo.GetIssue).
Patch(reqToken(), bind(api.EditIssueOption{}), repo.EditIssue)
m.Group("/comments", func() {
m.Combo("").Get(repo.ListIssueComments).
Post(reqToken(), mustNotBeArchived, bind(api.CreateIssueCommentOption{}), repo.CreateIssueComment)
m.Combo("/{id}", reqToken()).Patch(bind(api.EditIssueCommentOption{}), repo.EditIssueCommentDeprecated).
Delete(repo.DeleteIssueCommentDeprecated)
})
m.Group("/labels", func() {
m.Combo("").Get(repo.ListIssueLabels).
Post(reqToken(), bind(api.IssueLabelsOption{}), repo.AddIssueLabels).
Put(reqToken(), bind(api.IssueLabelsOption{}), repo.ReplaceIssueLabels).
Delete(reqToken(), repo.ClearIssueLabels)
m.Delete("/{id}", reqToken(), repo.DeleteIssueLabel)
})
m.Group("/times", func() {
m.Combo("").
Get(repo.ListTrackedTimes).
Post(bind(api.AddTimeOption{}), repo.AddTime).
Delete(repo.ResetIssueTime)
m.Delete("/{id}", repo.DeleteTime)
}, reqToken())
m.Combo("/deadline").Post(reqToken(), bind(api.EditDeadlineOption{}), repo.UpdateIssueDeadline)
m.Group("/stopwatch", func() {
m.Post("/start", reqToken(), repo.StartIssueStopwatch)
m.Post("/stop", reqToken(), repo.StopIssueStopwatch)
m.Delete("/delete", reqToken(), repo.DeleteIssueStopwatch)
})
m.Group("/subscriptions", func() {
m.Get("", repo.GetIssueSubscribers)
m.Get("/check", reqToken(), repo.CheckIssueSubscription)
m.Put("/{user}", reqToken(), repo.AddIssueSubscription)
m.Delete("/{user}", reqToken(), repo.DelIssueSubscription)
})
m.Combo("/reactions").
Get(repo.GetIssueReactions).
Post(reqToken(), bind(api.EditReactionOption{}), repo.PostIssueReaction).
Delete(reqToken(), bind(api.EditReactionOption{}), repo.DeleteIssueReaction)
})
}, mustEnableIssuesOrPulls)
m.Group("/labels", func() {
m.Combo("").Get(repo.ListLabels).
Post(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), bind(api.CreateLabelOption{}), repo.CreateLabel)
m.Combo("/{id}").Get(repo.GetLabel).
Patch(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), bind(api.EditLabelOption{}), repo.EditLabel).
Delete(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), repo.DeleteLabel)
})
m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", misc.MarkdownRaw)
m.Group("/milestones", func() {
m.Combo("").Get(repo.ListMilestones).
Post(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), bind(api.CreateMilestoneOption{}), repo.CreateMilestone)
m.Combo("/{id}").Get(repo.GetMilestone).
Patch(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), bind(api.EditMilestoneOption{}), repo.EditMilestone).
Delete(reqToken(), reqRepoWriter(models.UnitTypeIssues, models.UnitTypePullRequests), repo.DeleteMilestone)
})
m.Get("/stargazers", repo.ListStargazers)
m.Get("/subscribers", repo.ListSubscribers)
m.Group("/subscription", func() {
m.Get("", user.IsWatching)
m.Put("", reqToken(), user.Watch)
m.Delete("", reqToken(), user.Unwatch)
})
m.Group("/releases", func() {
m.Combo("").Get(repo.ListReleases).
Post(reqToken(), reqRepoWriter(models.UnitTypeReleases), context.ReferencesGitRepo(false), bind(api.CreateReleaseOption{}), repo.CreateRelease)
m.Group("/{id}", func() {
m.Combo("").Get(repo.GetRelease).
Patch(reqToken(), reqRepoWriter(models.UnitTypeReleases), context.ReferencesGitRepo(false), bind(api.EditReleaseOption{}), repo.EditRelease).
Delete(reqToken(), reqRepoWriter(models.UnitTypeReleases), repo.DeleteRelease)
m.Group("/assets", func() {
m.Combo("").Get(repo.ListReleaseAttachments).
Post(reqToken(), reqRepoWriter(models.UnitTypeReleases), repo.CreateReleaseAttachment)
m.Combo("/{asset}").Get(repo.GetReleaseAttachment).
Patch(reqToken(), reqRepoWriter(models.UnitTypeReleases), bind(api.EditAttachmentOptions{}), repo.EditReleaseAttachment).
Delete(reqToken(), reqRepoWriter(models.UnitTypeReleases), repo.DeleteReleaseAttachment)
})
})
m.Group("/tags", func() {
m.Combo("/{tag}").
Get(repo.GetReleaseByTag).
Delete(reqToken(), reqRepoWriter(models.UnitTypeReleases), repo.DeleteReleaseByTag)
})
}, reqRepoReader(models.UnitTypeReleases))
m.Post("/mirror-sync", reqToken(), reqRepoWriter(models.UnitTypeCode), repo.MirrorSync)
m.Get("/editorconfig/{filename}", context.RepoRefForAPI, reqRepoReader(models.UnitTypeCode), repo.GetEditorconfig)
m.Group("/pulls", func() {
m.Combo("").Get(repo.ListPullRequests).
Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest)
m.Group("/{index}", func() {
m.Combo("").Get(repo.GetPullRequest).
Patch(reqToken(), bind(api.EditPullRequestOption{}), repo.EditPullRequest)
m.Get(".diff", repo.DownloadPullDiff)
m.Get(".patch", repo.DownloadPullPatch)
m.Post("/update", reqToken(), repo.UpdatePullRequest)
m.Combo("/merge").Get(repo.IsPullRequestMerged).
Post(reqToken(), mustNotBeArchived, bind(forms.MergePullRequestForm{}), repo.MergePullRequest)
m.Group("/reviews", func() {
m.Combo("").
Get(repo.ListPullReviews).
Post(reqToken(), bind(api.CreatePullReviewOptions{}), repo.CreatePullReview)
m.Group("/{id}", func() {
m.Combo("").
Get(repo.GetPullReview).
Delete(reqToken(), repo.DeletePullReview).
Post(reqToken(), bind(api.SubmitPullReviewOptions{}), repo.SubmitPullReview)
m.Combo("/comments").
Get(repo.GetPullReviewComments)
m.Post("/dismissals", reqToken(), bind(api.DismissPullReviewOptions{}), repo.DismissPullReview)
m.Post("/undismissals", reqToken(), repo.UnDismissPullReview)
})
})
m.Combo("/requested_reviewers").
Delete(reqToken(), bind(api.PullReviewRequestOptions{}), repo.DeleteReviewRequests).
Post(reqToken(), bind(api.PullReviewRequestOptions{}), repo.CreateReviewRequests)
})
}, mustAllowPulls, reqRepoReader(models.UnitTypeCode), context.ReferencesGitRepo(false))
m.Group("/statuses", func() {
m.Combo("/{sha}").Get(repo.GetCommitStatuses).
Post(reqToken(), bind(api.CreateStatusOption{}), repo.NewCommitStatus)
}, reqRepoReader(models.UnitTypeCode))
m.Group("/commits", func() {
m.Get("", repo.GetAllCommits)
m.Group("/{ref}", func() {
m.Get("/status", repo.GetCombinedCommitStatusByRef)
m.Get("/statuses", repo.GetCommitStatusesByRef)
})
}, reqRepoReader(models.UnitTypeCode))
m.Group("/git", func() {
m.Group("/commits", func() {
m.Get("/{sha}", repo.GetSingleCommit)
})
m.Get("/refs", repo.GetGitAllRefs)
m.Get("/refs/*", repo.GetGitRefs)
m.Get("/trees/{sha}", context.RepoRefForAPI, repo.GetTree)
m.Get("/blobs/{sha}", context.RepoRefForAPI, repo.GetBlob)
m.Get("/tags/{sha}", context.RepoRefForAPI, repo.GetTag)
}, reqRepoReader(models.UnitTypeCode))
m.Group("/contents", func() {
m.Get("", repo.GetContentsList)
m.Get("/*", repo.GetContents)
m.Group("/*", func() {
m.Post("", bind(api.CreateFileOptions{}), repo.CreateFile)
m.Put("", bind(api.UpdateFileOptions{}), repo.UpdateFile)
m.Delete("", bind(api.DeleteFileOptions{}), repo.DeleteFile)
}, reqRepoWriter(models.UnitTypeCode), reqToken())
}, reqRepoReader(models.UnitTypeCode))
m.Get("/signing-key.gpg", misc.SigningKey)
m.Group("/topics", func() {
m.Combo("").Get(repo.ListTopics).
Put(reqToken(), reqAdmin(), bind(api.RepoTopicOptions{}), repo.UpdateTopics)
m.Group("/{topic}", func() {
m.Combo("").Put(reqToken(), repo.AddTopic).
Delete(reqToken(), repo.DeleteTopic)
}, reqAdmin())
}, reqAnyRepoReader())
m.Get("/issue_templates", context.ReferencesGitRepo(false), repo.GetIssueTemplates)
m.Get("/languages", reqRepoReader(models.UnitTypeCode), repo.GetLanguages)
}, repoAssignment())
})
// Organizations
m.Get("/user/orgs", reqToken(), org.ListMyOrgs)
m.Get("/users/{username}/orgs", org.ListUserOrgs)
m.Post("/orgs", reqToken(), bind(api.CreateOrgOption{}), org.Create)
m.Get("/orgs", org.GetAll)
m.Group("/orgs/{org}", func() {
m.Combo("").Get(org.Get).
Patch(reqToken(), reqOrgOwnership(), bind(api.EditOrgOption{}), org.Edit).
Delete(reqToken(), reqOrgOwnership(), org.Delete)
m.Combo("/repos").Get(user.ListOrgRepos).
Post(reqToken(), bind(api.CreateRepoOption{}), repo.CreateOrgRepo)
m.Group("/members", func() {
m.Get("", org.ListMembers)
m.Combo("/{username}").Get(org.IsMember).
Delete(reqToken(), reqOrgOwnership(), org.DeleteMember)
})
m.Group("/public_members", func() {
m.Get("", org.ListPublicMembers)
m.Combo("/{username}").Get(org.IsPublicMember).
Put(reqToken(), reqOrgMembership(), org.PublicizeMember).
Delete(reqToken(), reqOrgMembership(), org.ConcealMember)
})
m.Group("/teams", func() {
m.Combo("", reqToken()).Get(org.ListTeams).
Post(reqOrgOwnership(), bind(api.CreateTeamOption{}), org.CreateTeam)
m.Get("/search", org.SearchTeam)
}, reqOrgMembership())
m.Group("/labels", func() {
m.Get("", org.ListLabels)
m.Post("", reqToken(), reqOrgOwnership(), bind(api.CreateLabelOption{}), org.CreateLabel)
m.Combo("/{id}").Get(org.GetLabel).
Patch(reqToken(), reqOrgOwnership(), bind(api.EditLabelOption{}), org.EditLabel).
Delete(reqToken(), reqOrgOwnership(), org.DeleteLabel)
})
m.Group("/hooks", func() {
m.Combo("").Get(org.ListHooks).
Post(bind(api.CreateHookOption{}), org.CreateHook)
m.Combo("/{id}").Get(org.GetHook).
Patch(bind(api.EditHookOption{}), org.EditHook).
Delete(org.DeleteHook)
}, reqToken(), reqOrgOwnership(), reqWebhooksEnabled())
}, orgAssignment(true))
m.Group("/teams/{teamid}", func() {
m.Combo("").Get(org.GetTeam).
Patch(reqOrgOwnership(), bind(api.EditTeamOption{}), org.EditTeam).
Delete(reqOrgOwnership(), org.DeleteTeam)
m.Group("/members", func() {
m.Get("", org.GetTeamMembers)
m.Combo("/{username}").
Get(org.GetTeamMember).
Put(reqOrgOwnership(), org.AddTeamMember).
Delete(reqOrgOwnership(), org.RemoveTeamMember)
})
m.Group("/repos", func() {
m.Get("", org.GetTeamRepos)
m.Combo("/{org}/{reponame}").
Put(org.AddTeamRepository).
Delete(org.RemoveTeamRepository)
})
}, orgAssignment(false, true), reqToken(), reqTeamMembership())
m.Group("/admin", func() {
m.Group("/cron", func() {
m.Get("", admin.ListCronTasks)
m.Post("/{task}", admin.PostCronTask)
})
m.Get("/orgs", admin.GetAllOrgs)
m.Group("/users", func() {
m.Get("", admin.GetAllUsers)
m.Post("", bind(api.CreateUserOption{}), admin.CreateUser)
m.Group("/{username}", func() {
m.Combo("").Patch(bind(api.EditUserOption{}), admin.EditUser).
Delete(admin.DeleteUser)
m.Group("/keys", func() {
m.Post("", bind(api.CreateKeyOption{}), admin.CreatePublicKey)
m.Delete("/{id}", admin.DeleteUserPublicKey)
})
m.Get("/orgs", org.ListUserOrgs)
m.Post("/orgs", bind(api.CreateOrgOption{}), admin.CreateOrg)
m.Post("/repos", bind(api.CreateRepoOption{}), admin.CreateRepo)
})
})
m.Group("/unadopted", func() {
m.Get("", admin.ListUnadoptedRepositories)
m.Post("/{username}/{reponame}", admin.AdoptRepository)
m.Delete("/{username}/{reponame}", admin.DeleteUnadoptedRepository)
})
}, reqToken(), reqSiteAdmin())
m.Group("/topics", func() {
m.Get("/search", repo.TopicSearch)
})
}, sudo())
return m
}
func securityHeaders() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
// CORB: https://www.chromium.org/Home/chromium-security/corb-for-developers
// http://stackoverflow.com/a/3146618/244009
resp.Header().Set("x-content-type-options", "nosniff")
next.ServeHTTP(resp, req)
})
}
}<|fim▁end|> | Post(bind(api.CreateOAuth2ApplicationOptions{}), user.CreateOauth2Application)
m.Combo("/oauth2/{id}"). |
<|file_name|>tui.py<|end_file_name|><|fim▁begin|>class SpellPickerController:
def render(self):
pass
<|fim▁hole|>_controller_class = SpellPickerController<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|># This file contains the WSGI configuration required to serve up your
# web application at http://khasm08.pythonanywhere.com/
# It works by setting the variable 'application' to a WSGI handler of some
# description.
#
# +++++++++++ GENERAL DEBUGGING TIPS +++++++++++
# getting imports and sys.path right can be fiddly!
# We've tried to collect some general tips here:
# https://www.pythonanywhere.com/wiki/DebuggingImportError
# +++++++++++ HELLO WORLD +++++++++++
# A little pure-wsgi hello world we've cooked up, just
# to prove everything works. You should delete this
# code to get your own working.
#HELLO_WORLD = """<html>
#<head>
# <title>Python Anywhere hosted web application</title>
#</head>
#<body>
#<h1>Hello, World!</h1>
#<p>
# This is the default welcome page for a
# <a href="https://www.pythonanywhere.com/">PythonAnywhere</a>
# hosted web application.
#</p>
#<p>
# Find out more about how to configure your own web application
# by visiting the <a href="https://www.pythonanywhere.com/web_app_setup/">web app setup</a> page
#</p>
#</body>
#</html>"""
#def application(environ, start_response):
# if environ.get('PATH_INFO') == '/':
# status = '200 OK'
# content = HELLO_WORLD
# else:
# status = '404 NOT FOUND'
# content = 'Page not found.'
# response_headers = [('Content-Type', 'text/html'), ('Content-Length', str(len(content)))]
# start_response(status, response_headers)
# yield content.encode('utf8')
# Below are templates for Django and Flask. You should update the file
# appropriately for the web framework you're using, and then
# click the 'Reload /yourdomain.com/' button on the 'Web' tab to make your site
# live.
# +++++++++++ VIRTUALENV +++++++++++
# If you want to use a virtualenv, set its path on the web app setup tab.
# Then come back here and import your application object as per the
# instructions below
# +++++++++++ CUSTOM WSGI +++++++++++
# If you have a WSGI file that you want to serve using PythonAnywhere, perhaps
# in your home directory under version control, then use something like this:
#
import sys
path = '/home/khasm08/PythonSandbox/KFrame_0_01'
if path not in sys.path:
sys.path.append(path)
from index import application
# +++++++++++ DJANGO +++++++++++
# To use your own django app use code like this:
#import os
#import sys
#
## assuming your django settings file is at '/home/khasm08/mysite/mysite/settings.py'
## and your manage.py is is at '/home/khasm08/mysite/manage.py'
#path = '/home/khasm08/mysite'
#if path not in sys.path:
# sys.path.append(path)
#
#os.environ['DJANGO_SETTINGS_MODULE'] = 'mysite.settings'
#
## then, for django >=1.5:
#from django.core.wsgi import get_wsgi_application
#application = get_wsgi_application()
## or, for older django <=1.4
#import django.core.handlers.wsgi
#application = django.core.handlers.wsgi.WSGIHandler()
# +++++++++++ FLASK +++++++++++
# Flask works like any other WSGI-compatible framework, we just need
# to import the application. Often Flask apps are called "app" so we
# may need to rename it during the import:
#
#
#import sys
#
## The "/home/khasm08" below specifies your home
## directory -- the rest should be the directory you uploaded your Flask
## code to underneath the home directory. So if you just ran
## "git clone [email protected]/myusername/myproject.git"
## ...or uploaded files to the directory "myproject", then you should
## specify "/home/khasm08/myproject"
#path = '/home/khasm08/path/to/flask_app_directory'
#if path not in sys.path:
# sys.path.append(path)
#
## After you uncomment the line below, the yellow triangle on the left
## side in our in-browser editor shows a warning saying:
## 'application' imported but unused.
## You can ignore this error. The line is necessary, and the variable<|fim▁hole|>#
# NB -- many Flask guides suggest you use a file called run.py; that's
# not necessary on PythonAnywhere. And you should make sure your code
# does *not* invoke the flask development server with app.run(), as it
# will prevent your wsgi file from working.<|fim▁end|> | ## is used externally.
#from main_flask_app_file import app as application |
<|file_name|>userAccountBasicAuth.js<|end_file_name|><|fim▁begin|>import basicAuth from 'express-basic-auth'
import { Ability } from '@casl/ability'
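// Authenticate the basic-auth credentials against the local strategy, then
// allow access only if the resulting CASL ability permits reading "arena".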
const userAccountAuthorizer = (app) => async (email, password, cb) => {
try {
const auth = await app
.service('authentication')
.create({ email, password, strategy: 'local' }, { provider: 'rest' })
const ability = new Ability(auth.abilities)
return cb(null, ability.can('read', 'arena'))
} catch (e) {
app.info('basic auth failed', e)
return cb(null, false)
}
}
export default (app) =>
basicAuth({
authorizer: userAccountAuthorizer(app),
challenge: true,
authorizeAsync: true,<|fim▁hole|><|fim▁end|> | }) |
<|file_name|>fullsearch.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
"""
MoinMoin - fullsearch action
This is the backend of the search form. Search pages and print results.
@copyright: 2001 by Juergen Hermann <[email protected]>
@license: GNU GPL, see COPYING for details.
"""
import re, time
from MoinMoin.Page import Page
from MoinMoin import wikiutil
from parsedatetime.parsedatetime import Calendar
from MoinMoin.web.utils import check_surge_protect
def checkTitleSearch(request):
""" Return 1 for title search, 0 for full text search, -1 for idiot spammer
who tries to press all buttons at once.
When used in FullSearch macro, we have 'titlesearch' parameter with
'0' or '1'. In standard search, we have either 'titlesearch' or
'fullsearch' with localized string. If both missing, default to
True (might happen with Safari) if this isn't an advanced search.
"""
form = request.values
if 'titlesearch' in form and 'fullsearch' in form:
ret = -1 # spammer / bot
else:
try:
ret = int(form['titlesearch'])
except ValueError:
ret = 1
except KeyError:
ret = ('fullsearch' not in form and not isAdvancedSearch(request)) and 1 or 0
return ret
def isAdvancedSearch(request):
""" Return True if advanced search is requested """
try:
return int(request.values['advancedsearch'])
except KeyError:
return False
def searchHints(f, hints):
<|fim▁hole|>
@param f: the formatter to use
@param hints: list of hints (as strings) to show
"""
return ''.join([
f.paragraph(1, attr={'class': 'searchhint'}),
# this is illegal formatter usage anyway, so we can directly use a literal
"<br>".join(hints),
f.paragraph(0),
])
def execute(pagename, request, fieldname='value', titlesearch=0, statistic=0):
_ = request.getText
titlesearch = checkTitleSearch(request)
if titlesearch < 0:
check_surge_protect(request, kick=True) # get rid of spammer
return
advancedsearch = isAdvancedSearch(request)
form = request.values
# context is relevant only for full search
if titlesearch:
context = 0
elif advancedsearch:
context = 180 # XXX: hardcoded context count for advancedsearch
else:
context = int(form.get('context', 0))
# Get other form parameters
needle = form.get(fieldname, '')
case = int(form.get('case', 0))
regex = int(form.get('regex', 0)) # no interface currently
hitsFrom = int(form.get('from', 0))
highlight_titles = int(form.get('highlight_titles', 1))
highlight_pages = int(form.get('highlight_pages', 1))
mtime = None
msg = ''
historysearch = 0
# if advanced search is enabled we construct our own search query
if advancedsearch:
and_terms = form.get('and_terms', '').strip()
or_terms = form.get('or_terms', '').strip()
not_terms = form.get('not_terms', '').strip()
#xor_terms = form.get('xor_terms', '').strip()
categories = form.getlist('categories') or ['']
timeframe = form.get('time', '').strip()
language = form.getlist('language') or ['']
mimetype = form.getlist('mimetype') or [0]
excludeunderlay = form.get('excludeunderlay', 0)
nosystemitems = form.get('nosystemitems', 0)
historysearch = form.get('historysearch', 0)
mtime = form.get('mtime', '')
if mtime:
mtime_parsed = None
# get mtime from known date/time formats
for fmt in (request.user.datetime_fmt,
request.cfg.datetime_fmt, request.user.date_fmt,
request.cfg.date_fmt):
try:
mtime_parsed = time.strptime(mtime, fmt)
except ValueError:
continue
else:
break
if mtime_parsed:
mtime = time.mktime(mtime_parsed)
else:
# didn't work, let's try parsedatetime
cal = Calendar()
mtime_parsed, parsed_what = cal.parse(mtime)
# XXX it is unclear if usage of localtime here and in parsedatetime module is correct.
# time.localtime is the SERVER's local time and of no relevance to the user (being
# somewhere in the world)
# mktime is reverse function for localtime, so this maybe fixes it again!?
if parsed_what > 0 and mtime_parsed <= time.localtime():
mtime = time.mktime(mtime_parsed)
else:
mtime_parsed = None # we don't use invalid stuff
# show info
if mtime_parsed:
# XXX mtime_msg is not shown in some cases
mtime_msg = _("(!) Only pages changed since '''%s''' are being displayed!",
wiki=True) % request.user.getFormattedDateTime(mtime)
else:
mtime_msg = _('/!\\ The modification date you entered was not '
'recognized and is therefore not considered for the '
'search results!', wiki=True)
else:
mtime_msg = None
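        # Split the not/or term fields into quoted phrases or single words.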
word_re = re.compile(r'(\"[\w\s]+"|\w+)', re.UNICODE)
needle = ''
if categories[0]:
needle += 'category:%s ' % ','.join(categories)
if language[0]:
needle += 'language:%s ' % ','.join(language)
if mimetype[0]:
needle += 'mimetype:%s ' % ','.join(mimetype)
if excludeunderlay:
needle += '-domain:underlay '
if nosystemitems:
needle += '-domain:system '
if and_terms:
needle += '(%s) ' % and_terms
if not_terms:
needle += '(%s) ' % ' '.join(['-%s' % t for t in word_re.findall(not_terms)])
if or_terms:
needle += '(%s) ' % ' or '.join(word_re.findall(or_terms))
# check for sensible search term
stripped = needle.strip()
if len(stripped) == 0:
request.theme.add_msg(_('Please use a more selective search term instead '
'of {{{"%s"}}}', wiki=True) % wikiutil.escape(needle), "error")
Page(request, pagename).send_page()
return
needle = stripped
# Setup for type of search
if titlesearch:
title = _('Title Search: "%s"')
sort = 'page_name'
else:
if advancedsearch:
title = _('Advanced Search: "%s"')
else:
title = _('Full Text Search: "%s"')
sort = 'weight'
# search the pages
from MoinMoin.search import searchPages, QueryParser, QueryError
try:
query = QueryParser(case=case, regex=regex,
titlesearch=titlesearch).parse_query(needle)
except QueryError: # catch errors in the search query
request.theme.add_msg(_('Your search query {{{"%s"}}} is invalid. Please refer to '
'HelpOnSearching for more information.', wiki=True, percent=True) % wikiutil.escape(needle), "error")
Page(request, pagename).send_page()
return
results = searchPages(request, query, sort, mtime, historysearch)
# directly show a single hit for title searches
# this is the "quick jump" functionality if you don't remember
# the pagename exactly, but just some parts of it
if titlesearch and len(results.hits) == 1:
page = results.hits[0]
if not page.attachment: # we did not find an attachment
page = Page(request, page.page_name)
querydict = {}
if highlight_pages:
highlight = query.highlight_re()
if highlight:
querydict.update({'highlight': highlight})
url = page.url(request, querystr=querydict)
request.http_redirect(url)
return
if not results.hits: # no hits?
f = request.formatter
querydict = dict(wikiutil.parseQueryString(request.query_string))
querydict.update({'titlesearch': 0})
request.theme.add_msg(_('Your search query {{{"%s"}}} didn\'t return any results. '
'Please change some terms and refer to HelpOnSearching for '
'more information.%s', wiki=True, percent=True) % (wikiutil.escape(needle),
titlesearch and ''.join([
'<br>',
_('(!) Consider performing a', wiki=True), ' ',
f.url(1, href=request.page.url(request, querydict, escape=0)),
_('full-text search with your search terms'),
f.url(0), '.',
]) or ''), "error")
Page(request, pagename).send_page()
return
# This action generates data using the user language
request.setContentLanguage(request.lang)
request.theme.send_title(title % needle, pagename=pagename)
# Start content (important for RTL support)
request.write(request.formatter.startContent("content"))
# Hints
f = request.formatter
hints = []
if titlesearch:
querydict = dict(wikiutil.parseQueryString(request.query_string))
querydict.update({'titlesearch': 0})
hints.append(''.join([
_("(!) You're performing a title search that might not include"
' all related results of your search query in this wiki. <<BR>>', wiki=True),
' ',
f.url(1, href=request.page.url(request, querydict, escape=0)),
f.text(_('Click here to perform a full-text search with your '
'search terms!')),
f.url(0),
]))
if advancedsearch and mtime_msg:
hints.append(mtime_msg)
if hints:
request.write(searchHints(f, hints))
# Search stats
request.write(results.stats(request, request.formatter, hitsFrom))
# Then search results
info = not titlesearch
if context:
output = results.pageListWithContext(request, request.formatter,
info=info, context=context, hitsFrom=hitsFrom, hitsInfo=1,
highlight_titles=highlight_titles,
highlight_pages=highlight_pages)
else:
output = results.pageList(request, request.formatter, info=info,
hitsFrom=hitsFrom, hitsInfo=1,
highlight_titles=highlight_titles,
highlight_pages=highlight_pages)
request.write(output)
request.write(request.formatter.endContent())
request.theme.send_footer(pagename)
request.theme.send_closing_html()<|fim▁end|> | """ Return a paragraph showing hints for a search
|
<|file_name|>TrustBasedServiceRecommender.java<|end_file_name|><|fim▁begin|>package org.trrusst.software.recommender;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Created by lahiru.gallege
*/
public class TrustBasedServiceRecommender extends HttpServlet implements Servlet {
// Logger for the servlet
private static final Logger log = Logger.getLogger(TrustBasedServiceRecommender.class.getName());
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Get the form parameters
//String spList = (String)request.getParameter("spList");
String query = (String)request.getParameter("TrustBased");
String qos = (String)request.getParameter("QoS");
//System.out.println("Inside A : " + query);
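        // NOTE: concatenating request parameters into SQL is vulnerable to
        // injection; a PreparedStatement with bound parameters would be safer.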
String sqlQuery = "select id,title,score from App.AzSoftware where title like '%" + query + "%' and text like '%" + qos +"%'";
String[] resultSet = DerbyDBUtil.getResults(sqlQuery);
List<ServiceDAO> spList = new ArrayList<ServiceDAO>();
for (int i = 0 ; i < resultSet.length ; i++){
String[] resultArray = resultSet[i].split(",");
ServiceDAO serviceDAO = new ServiceDAO();
serviceDAO.setId(resultArray[0]);<|fim▁hole|> serviceDAO.setRating(resultArray[2]);
spList.add(serviceDAO);
}
// Set Request Attributes
request.setAttribute("recommendedTrustList", spList);
request.setAttribute("selection", "TrustBased");
// Log the request status
log.log(Level.INFO, "Tagging Automatic Recommendations : "
+ "| Description : " + "None");
// Forward the request back
request.getRequestDispatcher("/recommendServiceProviders.jsp").forward(request, response);
}
}<|fim▁end|> | serviceDAO.setName(resultArray[1]); |
<|file_name|>import_tests.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::unit_tests::testutils::{
compile_module_string_with_stdlib, compile_script_string_with_stdlib,
};
#[test]
fn compile_script_with_imports() {
let code = String::from(
"
import 0x1.DiemCoin;
<|fim▁hole|> main() {
let x: u64;
let y: u64;
x = 2;
y = copy(x) + copy(x);
return;
}
",
);
let compiled_script_res = compile_script_string_with_stdlib(&code);
let _compiled_script = compiled_script_res.unwrap();
}
#[test]
fn compile_module_with_imports() {
let code = String::from(
"
module Foobar {
import 0x1.DiemCoin;
struct FooCoin { value: u64 }
public value(this: &Self.FooCoin): u64 {
let value_ref: &u64;
value_ref = &move(this).value;
return *move(value_ref);
}
public deposit(this: &mut Self.FooCoin, check: Self.FooCoin) {
let value_ref: &mut u64;
let value: u64;
let check_ref: &Self.FooCoin;
let check_value: u64;
let new_value: u64;
let i: u64;
value_ref = &mut move(this).value;
value = *copy(value_ref);
check_ref = ✓
check_value = Self.value(move(check_ref));
new_value = copy(value) + copy(check_value);
*move(value_ref) = move(new_value);
FooCoin { value: i } = move(check);
return;
}
}
",
);
let compiled_module_res = compile_module_string_with_stdlib(&code);
let _compiled_module = compiled_module_res.unwrap();
}<|fim▁end|> | |
<|file_name|>sitemap-helpers.js<|end_file_name|><|fim▁begin|>const libxmljs2 = require('libxmljs2');
const fetch = require('node-fetch');
const E2eHelpers = require('../../../testing/e2e/helpers');
const SITEMAP_URL = `${E2eHelpers.baseUrl}/sitemap.xml`;
const SITEMAP_LOC_NS = 'http://www.sitemaps.org/schemas/sitemap/0.9';
const DOMAIN_REGEX = /http[s]?:\/\/(.*?)\//;
const pagesWithRedirects = ['/manage-va-debt/your-debt/'];
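// Returns true for URLs that should stay in the test list; known-problematic
// pages and redirecting paths are filtered out.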
const shouldInclude = url => {
const parsedUrl = new URL(url);
return (
!url.endsWith('auth/login/callback/') &&
!url.includes('playbook/') &&
!url.includes('pittsburgh-health-care/') &&
!/.*opt-out-information-sharing.*/.test(url) &&
!pagesWithRedirects.some(redirectUrl => parsedUrl.pathname === redirectUrl)
);
};
function sitemapURLs() {
return fetch(SITEMAP_URL)
.then(res => res.text())
.then(body => libxmljs2.parseXml(body))
.then(doc =>
doc
.find('//xmlns:loc', SITEMAP_LOC_NS)
.map(n => n.text().replace(DOMAIN_REGEX, `${E2eHelpers.baseUrl}/`))
      .filter(shouldInclude),
)
.then(urls => {
      // Whitelist of URLs to only test against the 'section508' rule set and not
      // the stricter 'wcag2a' rule set. For each URL added to this list, please
      // add a comment explaining why it cannot be tested against stricter rules.
      const onlyTest508Rules = [
// 404 page contains 2 search auto-suggest elements with the same element ID,
// which violates WCAG 2.0 standards. This element id is referenced by
// https://search.usa.gov/assets/sayt_loader_libs.js, so if we change the ID
// of one of the elements, search won't work.
'/404.html',
// This is here because aXe bug flags the custom select component on this page
'/find-locations/',
// This is here because an aXe bug flags the autosuggest component on this page
'/education/gi-bill-comparison-tool/',
/* Using the Microsoft Healthbot framework, the chatbot currently violates
two rules in the WCAG 2.0(A) ruleset: aria-valid-attr-value and aria-required-children.
There are open Github issues with Microsoft to address these.
The 508 ruleset is slightly less strict to test on chatbot for now. */
'/coronavirus-chatbot/',
];
return { urls, onlyTest508Rules };<|fim▁hole|> });
}
module.exports = { sitemapURLs };<|fim▁end|> | |
<|file_name|>BenchmarkTest11696.java<|end_file_name|><|fim▁begin|><|fim▁hole|>* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Dave Wichers <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest11696")
public class BenchmarkTest11696 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
org.owasp.benchmark.helpers.SeparateClassRequest scr = new org.owasp.benchmark.helpers.SeparateClassRequest( request );
String param = scr.getTheParameter("foo");
String bar = new Test().doSomething(param);
// FILE URIs are tricky because they are different between Mac and Windows because of lack of standardization.
// Mac requires an extra slash for some reason.
String startURIslashes = "";
		if (System.getProperty("os.name").indexOf("Windows") != -1)
			startURIslashes = "/";
		else startURIslashes = "//";
try {
java.net.URI fileURI = new java.net.URI("file:" + startURIslashes
+ org.owasp.benchmark.helpers.Utils.testfileDir.replace('\\', '/').replace(' ', '_') + bar);
new java.io.File(fileURI);
} catch (java.net.URISyntaxException e) {
throw new ServletException(e);
}
} // end doPost
private class Test {
public String doSomething(String param) throws ServletException, IOException {
StringBuilder sbxyz59007 = new StringBuilder(param);
String bar = sbxyz59007.append("_SafeStuff").toString();
return bar;
}
} // end innerclass Test
} // end DataflowThruInnerClass<|fim▁end|> | /**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP) |
<|file_name|>pwconst_user.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2012 Carsten Burstedde, Donna Calhoun
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "pwconst_user.h"
#include <fclaw2d_clawpatch.h>
#include <fc2d_clawpack46.h>
#include <clawpack46_user_fort.h> /* Headers for user defined fortran files */
#include <fc2d_clawpack5.h>
#include <clawpack5_user_fort.h>
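/* Wire the user-supplied Fortran kernels (initial condition qinit plus the
   normal and transverse Riemann solvers rpn2/rpt2) into whichever Clawpack
   version (4.6 or 5) was selected in the user options. */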
void pwconst_link_solvers(fclaw2d_global_t *glob)
{
const user_options_t* user = pwconst_get_options(glob);
if (user->claw_version == 4)
{
fc2d_clawpack46_vtable_t *claw46_vt = fc2d_clawpack46_vt();
claw46_vt->fort_qinit = &CLAWPACK46_QINIT;
claw46_vt->fort_rpn2 = &CLAWPACK46_RPN2;
claw46_vt->fort_rpt2 = &CLAWPACK46_RPT2;
}
else if (user->claw_version == 5)
{
fc2d_clawpack5_vtable_t *claw5_vt = fc2d_clawpack5_vt();
claw5_vt->fort_qinit = &CLAWPACK5_QINIT;
claw5_vt->fort_rpn2 = &CLAWPACK5_RPN2;
claw5_vt->fort_rpt2 = &CLAWPACK5_RPT2;
}
}
#if 0
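/* Legacy variant kept for reference only; it references fclaw2d_vt,
   classic_claw46 and classic_claw5, whose declarations are not shown here. */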
void pwconst_link_solvers(fclaw2d_domain_t *domain)
{
const user_options_t* user = pwconst_user_get_options(domain);
fclaw2d_init_vtable(&fclaw2d_vt);<|fim▁hole|> if (user->claw_version == 4)
{
fc2d_clawpack46_set_vtable_defaults(&fclaw2d_vt,&classic_claw46);
classic_claw46.qinit = &CLAWPACK46_QINIT;
classic_claw46.rpn2 = &CLAWPACK46_RPN2;
classic_claw46.rpt2 = &CLAWPACK46_RPT2;
fc2d_clawpack46_set_vtable(classic_claw46);
}
else if (user->claw_version == 5)
{
fc2d_clawpack5_set_vtable_defaults(&fclaw2d_vt,&classic_claw5);
classic_claw5.qinit = &CLAWPACK5_QINIT;
classic_claw5.rpn2 = &CLAWPACK5_RPN2;
classic_claw5.rpt2 = &CLAWPACK5_RPT2;
fc2d_clawpack5_set_vtable(classic_claw5);
}
fclaw2d_set_vtable(domain,&fclaw2d_vt);
#if 0
fclaw2d_init_vtable(&fclaw2d_vt);
fc2d_clawpack46_init_vtable(&classic_claw);
vt.patch_initialize = &fc2d_clawpack46_qinit;
classic_claw.qinit = &QINIT;
classic_claw.rpn2 = &RPN2;
classic_claw.rpt2 = &RPT2;
fclaw2d_set_vtable(domain,&vt);
fc2d_clawpack46_set_vtable(&classic_claw);
#endif
}
#endif<|fim▁end|> | |
<|file_name|>schema.py<|end_file_name|><|fim▁begin|>"""Voluptuous schemas for the KNX integration."""
import voluptuous as vol
from xknx.devices.climate import SetpointShiftMode
from homeassistant.const import (
CONF_ADDRESS,
CONF_DEVICE_CLASS,
CONF_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_TYPE,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
OPERATION_MODES,
PRESET_MODES,
ColorTempModes,
)
class ConnectionSchema:
"""Voluptuous schema for KNX connection."""
CONF_KNX_LOCAL_IP = "local_ip"
TUNNELING_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_KNX_LOCAL_IP): cv.string,
vol.Optional(CONF_PORT): cv.port,
}
)
ROUTING_SCHEMA = vol.Schema({vol.Optional(CONF_KNX_LOCAL_IP): cv.string})
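    # Example (hypothetical YAML; the host value is a placeholder):
    #   knx:
    #     tunneling:
    #       host: "192.168.0.10"
    #       port: 3671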
class CoverSchema:
"""Voluptuous schema for KNX covers."""
CONF_MOVE_LONG_ADDRESS = "move_long_address"
CONF_MOVE_SHORT_ADDRESS = "move_short_address"
CONF_STOP_ADDRESS = "stop_address"
CONF_POSITION_ADDRESS = "position_address"
CONF_POSITION_STATE_ADDRESS = "position_state_address"
CONF_ANGLE_ADDRESS = "angle_address"
CONF_ANGLE_STATE_ADDRESS = "angle_state_address"
CONF_TRAVELLING_TIME_DOWN = "travelling_time_down"
CONF_TRAVELLING_TIME_UP = "travelling_time_up"
CONF_INVERT_POSITION = "invert_position"
CONF_INVERT_ANGLE = "invert_angle"
DEFAULT_TRAVEL_TIME = 25
DEFAULT_NAME = "KNX Cover"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MOVE_LONG_ADDRESS): cv.string,
vol.Optional(CONF_MOVE_SHORT_ADDRESS): cv.string,
vol.Optional(CONF_STOP_ADDRESS): cv.string,
vol.Optional(CONF_POSITION_ADDRESS): cv.string,
vol.Optional(CONF_POSITION_STATE_ADDRESS): cv.string,
vol.Optional(CONF_ANGLE_ADDRESS): cv.string,
vol.Optional(CONF_ANGLE_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_TRAVELLING_TIME_DOWN, default=DEFAULT_TRAVEL_TIME
): cv.positive_int,
vol.Optional(
CONF_TRAVELLING_TIME_UP, default=DEFAULT_TRAVEL_TIME
): cv.positive_int,
vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
}
)
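    # Example (hypothetical YAML; group addresses are placeholders):
    #   cover:
    #     - platform: knx
    #       name: "Living Room Blind"
    #       move_long_address: "1/0/0"
    #       position_address: "1/0/2"
    #       position_state_address: "1/0/3"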
class BinarySensorSchema:
"""Voluptuous schema for KNX binary sensors."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
CONF_IGNORE_INTERNAL_STATE = "ignore_internal_state"
CONF_AUTOMATION = "automation"
CONF_HOOK = "hook"
CONF_DEFAULT_HOOK = "on"
CONF_COUNTER = "counter"
CONF_DEFAULT_COUNTER = 1
CONF_ACTION = "action"
CONF_RESET_AFTER = "reset_after"
DEFAULT_NAME = "KNX Binary Sensor"
AUTOMATION_SCHEMA = vol.Schema(
{
vol.Optional(CONF_HOOK, default=CONF_DEFAULT_HOOK): cv.string,
vol.Optional(CONF_COUNTER, default=CONF_DEFAULT_COUNTER): cv.port,
vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA,
}
)
AUTOMATIONS_SCHEMA = vol.All(cv.ensure_list, [AUTOMATION_SCHEMA])
SCHEMA = vol.All(
cv.deprecated("significant_bit"),
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean,
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_DEVICE_CLASS): cv.string,
vol.Optional(CONF_RESET_AFTER): cv.positive_int,
vol.Optional(CONF_AUTOMATION): AUTOMATIONS_SCHEMA,
}
),
)
class LightSchema:
"""Voluptuous schema for KNX lights."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_BRIGHTNESS_ADDRESS = "brightness_address"
CONF_BRIGHTNESS_STATE_ADDRESS = "brightness_state_address"
CONF_COLOR_ADDRESS = "color_address"
CONF_COLOR_STATE_ADDRESS = "color_state_address"
CONF_COLOR_TEMP_ADDRESS = "color_temperature_address"
CONF_COLOR_TEMP_STATE_ADDRESS = "color_temperature_state_address"
CONF_COLOR_TEMP_MODE = "color_temperature_mode"
CONF_RGBW_ADDRESS = "rgbw_address"
CONF_RGBW_STATE_ADDRESS = "rgbw_state_address"
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"
DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_COLOR_TEMP_MODE, default=DEFAULT_COLOR_TEMP_MODE
): cv.enum(ColorTempModes),
vol.Optional(CONF_RGBW_ADDRESS): cv.string,
vol.Optional(CONF_RGBW_STATE_ADDRESS): cv.string,
vol.Optional(CONF_MIN_KELVIN, default=DEFAULT_MIN_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
)
class ClimateSchema:
"""Voluptuous schema for KNX climate devices."""
CONF_SETPOINT_SHIFT_ADDRESS = "setpoint_shift_address"
CONF_SETPOINT_SHIFT_STATE_ADDRESS = "setpoint_shift_state_address"
CONF_SETPOINT_SHIFT_MODE = "setpoint_shift_mode"
CONF_SETPOINT_SHIFT_MAX = "setpoint_shift_max"
CONF_SETPOINT_SHIFT_MIN = "setpoint_shift_min"
CONF_TEMPERATURE_ADDRESS = "temperature_address"<|fim▁hole|> CONF_TEMPERATURE_STEP = "temperature_step"
CONF_TARGET_TEMPERATURE_ADDRESS = "target_temperature_address"
CONF_TARGET_TEMPERATURE_STATE_ADDRESS = "target_temperature_state_address"
CONF_OPERATION_MODE_ADDRESS = "operation_mode_address"
CONF_OPERATION_MODE_STATE_ADDRESS = "operation_mode_state_address"
CONF_CONTROLLER_STATUS_ADDRESS = "controller_status_address"
CONF_CONTROLLER_STATUS_STATE_ADDRESS = "controller_status_state_address"
CONF_CONTROLLER_MODE_ADDRESS = "controller_mode_address"
CONF_CONTROLLER_MODE_STATE_ADDRESS = "controller_mode_state_address"
CONF_HEAT_COOL_ADDRESS = "heat_cool_address"
CONF_HEAT_COOL_STATE_ADDRESS = "heat_cool_state_address"
CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS = (
"operation_mode_frost_protection_address"
)
CONF_OPERATION_MODE_NIGHT_ADDRESS = "operation_mode_night_address"
CONF_OPERATION_MODE_COMFORT_ADDRESS = "operation_mode_comfort_address"
CONF_OPERATION_MODE_STANDBY_ADDRESS = "operation_mode_standby_address"
CONF_OPERATION_MODES = "operation_modes"
CONF_ON_OFF_ADDRESS = "on_off_address"
CONF_ON_OFF_STATE_ADDRESS = "on_off_state_address"
CONF_ON_OFF_INVERT = "on_off_invert"
CONF_MIN_TEMP = "min_temp"
CONF_MAX_TEMP = "max_temp"
DEFAULT_NAME = "KNX Climate"
DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010"
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
DEFAULT_TEMPERATURE_STEP = 0.1
DEFAULT_ON_OFF_INVERT = False
SCHEMA = vol.All(
cv.deprecated("setpoint_shift_step", replacement_key=CONF_TEMPERATURE_STEP),
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_SETPOINT_SHIFT_MODE, default=DEFAULT_SETPOINT_SHIFT_MODE
): cv.enum(SetpointShiftMode),
vol.Optional(
CONF_SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX
): vol.All(int, vol.Range(min=0, max=32)),
vol.Optional(
CONF_SETPOINT_SHIFT_MIN, default=DEFAULT_SETPOINT_SHIFT_MIN
): vol.All(int, vol.Range(min=-32, max=0)),
vol.Optional(
CONF_TEMPERATURE_STEP, default=DEFAULT_TEMPERATURE_STEP
): vol.All(float, vol.Range(min=0, max=2)),
vol.Required(CONF_TEMPERATURE_ADDRESS): cv.string,
vol.Required(CONF_TARGET_TEMPERATURE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_TARGET_TEMPERATURE_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_HEAT_COOL_ADDRESS): cv.string,
vol.Optional(CONF_HEAT_COOL_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_NIGHT_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_COMFORT_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_STANDBY_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT
): cv.boolean,
vol.Optional(CONF_OPERATION_MODES): vol.All(
cv.ensure_list, [vol.In({**OPERATION_MODES, **PRESET_MODES})]
),
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
}
),
)
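    # Example (hypothetical YAML; group addresses are placeholders; both
    # required keys from the schema above are shown):
    #   climate:
    #     - platform: knx
    #       name: "Kitchen"
    #       temperature_address: "5/0/0"
    #       target_temperature_state_address: "5/0/1"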
class SwitchSchema:
"""Voluptuous schema for KNX switches."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
DEFAULT_NAME = "KNX Switch"
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
}
)
class ExposeSchema:
"""Voluptuous schema for KNX exposures."""
CONF_KNX_EXPOSE_TYPE = CONF_TYPE
CONF_KNX_EXPOSE_ATTRIBUTE = "attribute"
CONF_KNX_EXPOSE_DEFAULT = "default"
CONF_KNX_EXPOSE_ADDRESS = CONF_ADDRESS
SCHEMA = vol.Schema(
{
vol.Required(CONF_KNX_EXPOSE_TYPE): vol.Any(int, float, str),
vol.Optional(CONF_ENTITY_ID): cv.entity_id,
vol.Optional(CONF_KNX_EXPOSE_ATTRIBUTE): cv.string,
vol.Optional(CONF_KNX_EXPOSE_DEFAULT): cv.match_all,
vol.Required(CONF_KNX_EXPOSE_ADDRESS): cv.string,
}
)
class NotifySchema:
"""Voluptuous schema for KNX notifications."""
DEFAULT_NAME = "KNX Notify"
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
class SensorSchema:
"""Voluptuous schema for KNX sensors."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Required(CONF_TYPE): vol.Any(int, float, str),
}
)
class SceneSchema:
"""Voluptuous schema for KNX scenes."""
CONF_SCENE_NUMBER = "scene_number"
DEFAULT_NAME = "KNX SCENE"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_ADDRESS): cv.string,
vol.Required(CONF_SCENE_NUMBER): cv.positive_int,
}
)
class WeatherSchema:
"""Voluptuous schema for KNX weather station."""
CONF_SYNC_STATE = CONF_SYNC_STATE
CONF_KNX_TEMPERATURE_ADDRESS = "address_temperature"
CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS = "address_brightness_south"
CONF_KNX_BRIGHTNESS_EAST_ADDRESS = "address_brightness_east"
CONF_KNX_BRIGHTNESS_WEST_ADDRESS = "address_brightness_west"
CONF_KNX_WIND_SPEED_ADDRESS = "address_wind_speed"
CONF_KNX_RAIN_ALARM_ADDRESS = "address_rain_alarm"
CONF_KNX_FROST_ALARM_ADDRESS = "address_frost_alarm"
CONF_KNX_WIND_ALARM_ADDRESS = "address_wind_alarm"
CONF_KNX_DAY_NIGHT_ADDRESS = "address_day_night"
CONF_KNX_AIR_PRESSURE_ADDRESS = "address_air_pressure"
CONF_KNX_HUMIDITY_ADDRESS = "address_humidity"
CONF_KNX_EXPOSE_SENSORS = "expose_sensors"
DEFAULT_NAME = "KNX Weather Station"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Optional(CONF_KNX_EXPOSE_SENSORS, default=False): cv.boolean,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): cv.string,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): cv.string,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): cv.string,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): cv.string,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): cv.string,
}
)<|fim▁end|> | |
<|file_name|>container_manager_stub.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>limitations under the License.
*/
package cm
import (
"github.com/golang/glog"
"k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/resource"
internalapi "k8s.io/kubernetes/pkg/kubelet/apis/cri"
"k8s.io/kubernetes/pkg/kubelet/cm/cpumanager"
"k8s.io/kubernetes/pkg/kubelet/config"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
"k8s.io/kubernetes/pkg/kubelet/lifecycle"
"k8s.io/kubernetes/pkg/kubelet/status"
"k8s.io/kubernetes/pkg/kubelet/util/pluginwatcher"
schedulercache "k8s.io/kubernetes/pkg/scheduler/cache"
)
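// containerManagerStub is a no-op ContainerManager: every method returns an
// empty or zero value, making it useful for tests and for environments that
// do not need real node-level container management.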
type containerManagerStub struct{}
var _ ContainerManager = &containerManagerStub{}
func (cm *containerManagerStub) Start(_ *v1.Node, _ ActivePodsFunc, _ config.SourcesReady, _ status.PodStatusProvider, _ internalapi.RuntimeService) error {
glog.V(2).Infof("Starting stub container manager")
return nil
}
func (cm *containerManagerStub) SystemCgroupsLimit() v1.ResourceList {
return v1.ResourceList{}
}
func (cm *containerManagerStub) GetNodeConfig() NodeConfig {
return NodeConfig{}
}
func (cm *containerManagerStub) GetMountedSubsystems() *CgroupSubsystems {
return &CgroupSubsystems{}
}
func (cm *containerManagerStub) GetQOSContainersInfo() QOSContainersInfo {
return QOSContainersInfo{}
}
func (cm *containerManagerStub) UpdateQOSCgroups() error {
return nil
}
func (cm *containerManagerStub) Status() Status {
return Status{}
}
func (cm *containerManagerStub) GetNodeAllocatableReservation() v1.ResourceList {
return nil
}
func (cm *containerManagerStub) GetCapacity() v1.ResourceList {
c := v1.ResourceList{
v1.ResourceEphemeralStorage: *resource.NewQuantity(
int64(0),
resource.BinarySI),
}
return c
}
func (cm *containerManagerStub) GetPluginRegistrationHandler() pluginwatcher.PluginHandler {
return nil
}
func (cm *containerManagerStub) GetDevicePluginResourceCapacity() (v1.ResourceList, v1.ResourceList, []string) {
return nil, nil, []string{}
}
func (cm *containerManagerStub) NewPodContainerManager() PodContainerManager {
return &podContainerManagerStub{}
}
func (cm *containerManagerStub) GetResources(pod *v1.Pod, container *v1.Container) (*kubecontainer.RunContainerOptions, error) {
return &kubecontainer.RunContainerOptions{}, nil
}
func (cm *containerManagerStub) UpdatePluginResources(*schedulercache.NodeInfo, *lifecycle.PodAdmitAttributes) error {
return nil
}
func (cm *containerManagerStub) InternalContainerLifecycle() InternalContainerLifecycle {
return &internalContainerLifecycleImpl{cpumanager.NewFakeManager()}
}
func (cm *containerManagerStub) GetPodCgroupRoot() string {
return ""
}
func NewStubContainerManager() ContainerManager {
return &containerManagerStub{}
}<|fim▁end|> | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and |
<|file_name|>ninnhu_kalman_filtering_expt.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Thu Apr 28 16:52:18 2016 by emin
"""
import os
import sys
import theano
import theano.tensor as T
import numpy as np
from lasagne.layers import InputLayer, ReshapeLayer, DenseLayer
from generators import KalmanFilteringTaskFFWD
import lasagne.layers
import lasagne.nonlinearities
import lasagne.updates
import lasagne.objectives
import lasagne.init
import scipy.io as sio
os.chdir(os.path.dirname(sys.argv[0]))
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
job_idx = int(os.getenv('PBS_ARRAYID'))
np.random.seed(job_idx)
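# Sweep a 14 x 14 grid of (n_in, n_hid) network sizes, log-spaced between
# roughly 4 and 317 units; the 1-based PBS array index selects one pair per job.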
nnn = np.ceil(np.logspace(.5,2.5,14))
nhu_vec, nin_vec = np.meshgrid(nnn, nnn)
nhu_vec = nhu_vec.flatten()
nin_vec = nin_vec.flatten()
n_in = int(nin_vec[job_idx-1])
n_hid = int(nhu_vec[job_idx-1])
def model(input_var, batch_size=10, n_in=50, n_out=1, n_hid=200, ei_ratio=0.8):
# Input Layer
l_in = InputLayer((batch_size, None, n_in), input_var=input_var)
_, seqlen, _ = l_in.input_var.shape
# Recurrent EI Net
l_in_hid = DenseLayer(lasagne.layers.InputLayer((None, n_in)), n_hid, b=None, nonlinearity=lasagne.nonlinearities.linear)
l_hid_hid = DenseLayer(lasagne.layers.InputLayer((None, n_hid)), n_hid, nonlinearity=lasagne.nonlinearities.linear)
l_rec = lasagne.layers.CustomRecurrentLayer(l_in, l_in_hid, l_hid_hid, nonlinearity=lasagne.nonlinearities.rectify)
# Output Layer
l_shp = ReshapeLayer(l_rec, (-1, n_hid))
l_dense = DenseLayer(l_shp, num_units=n_out, nonlinearity=lasagne.nonlinearities.linear)<|fim▁hole|>
if __name__ == '__main__':
# Define the input and expected output variable
input_var, target_var = T.tensor3s('input', 'target')
# The generator to sample examples from
tr_cond = 'all_gains'
test_cond = 'all_gains'
generator = KalmanFilteringTaskFFWD(max_iter=50001, batch_size=10, n_in=n_in, n_out=1, stim_dur=25, sigtc_sq=4.0, signu_sq=1.0, gamma=0.1, tr_cond=tr_cond)
# The model
l_out, l_rec = model(input_var, batch_size=generator.batch_size, n_in=generator.n_in, n_out=generator.n_out, n_hid=n_hid)
# The generated output variable and the loss function
# all_layers = lasagne.layers.get_all_layers(l_out)
# l2_penalty = lasagne.regularization.regularize_layer_params(all_layers, lasagne.regularization.l2) * 1e-6
pred_var = lasagne.layers.get_output(l_out)
loss = T.mean(lasagne.objectives.squared_error(pred_var[:,:,-1], target_var[:,:,-1])) # + l2_penalty
# Create the update expressions
params = lasagne.layers.get_all_params(l_out, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=0.001)
# Compile the function for a training step, as well as the prediction function and a utility function to get the inner details of the RNN
train_fn = theano.function([input_var, target_var], loss, updates=updates, allow_input_downcast=True)
pred_fn = theano.function([input_var], pred_var, allow_input_downcast=True)
rec_layer_fn = theano.function([input_var], lasagne.layers.get_output(l_rec, get_details=True), allow_input_downcast=True)
# If want to continue training an old model, uncomment below
# npzfile_lout = np.load('kf_lout_trained_model.npz')
# npzfile_lrec = np.load('kf_lrec_trained_model.npz')
# lasagne.layers.set_all_param_values(l_out,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4'],npzfile_lout['arr_5']])
# lasagne.layers.set_all_param_values(l_rec,[npzfile_lout['arr_0'],npzfile_lout['arr_1'],npzfile_lout['arr_2'],npzfile_lout['arr_3'],npzfile_lout['arr_4']])
# TRAINING
success = 0.0
s_vec, opt_s_vec, ex_pred_vec, frac_rmse_vec = [], [], [], []
for i, (example_input, example_output, opt_s) in generator:
score = train_fn(example_input, example_output)
example_prediction = pred_fn(example_input)
s_vec.append(example_output[:,:,-1])
opt_s_vec.append(opt_s[:,:,-1])
ex_pred_vec.append(example_prediction[:,:,-1])
if i % 500 == 0:
rmse_opt = np.sqrt(np.nanmean((np.asarray(s_vec) - np.asarray(opt_s_vec))**2))
rmse_net = np.sqrt(np.nanmean((np.asarray(s_vec) - np.squeeze(np.asarray(ex_pred_vec)))**2))
frac_rmse = (rmse_net - rmse_opt) / rmse_opt
frac_rmse_vec.append(frac_rmse)
print 'Batch #%d; Frac. RMSE: %.6f; Opt. RMSE: %.6f; Net. RMSE: %.6f' % (i, frac_rmse, rmse_opt, rmse_net)
if frac_rmse < 0.1:
success = 1.0
break
s_vec = []
opt_s_vec = []
ex_pred_vec = []
# SAVE TRAINED MODEL
sio.savemat('kf_nin%i_nhu%i_jobidx%i.mat'%(n_in,n_hid,job_idx), {'frac_rmse':frac_rmse, 'frac_rmse_vec':np.asarray(frac_rmse_vec), 'success':success } )<|fim▁end|> | # To reshape back to our original shape, we can use the symbolic shape variables we retrieved above.
l_out = ReshapeLayer(l_dense, (batch_size, seqlen, n_out))
return l_out, l_rec |
<|file_name|>point-loc_test.go<|end_file_name|><|fim▁begin|>package test
import (
"fmt"
"math/rand"
"strconv"
"testing"
"time"
"github.com/200sc/go-compgeo/dcel"
"github.com/200sc/go-compgeo/dcel/off"
"github.com/200sc/go-compgeo/dcel/pointLoc"
"github.com/200sc/go-compgeo/dcel/pointLoc/bench/bruteForce"
"github.com/200sc/go-compgeo/dcel/pointLoc/bench/slab"
"github.com/200sc/go-compgeo/dcel/pointLoc/bench/trapezoid"
"github.com/200sc/go-compgeo/dcel/pointLoc/rtree"
"github.com/200sc/go-compgeo/geom"
"github.com/200sc/go-compgeo/search/tree"
"github.com/stretchr/testify/assert"
)
var (
inputSize = 25
inputRange = 10000.0
testCt = 10000
slabErrors = 0
trapErrors = 0
rtreeErrors = 0
plumbErrors = 0
seed int64
)
func randomPt() geom.D3 {
return geom.NewPoint(rand.Float64()*inputRange,
rand.Float64()*inputRange, 0)
}
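// testRandomPts point-locates limit random points with pl and asserts that
// the structure returned for each query actually contains the query point,
// incrementing *errs for every mismatch.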
func testRandomPts(t *testing.T, pl pointLoc.LocatesPoints, limit int, errs *int) {
for i := 0; i < limit; i++ {
pt := randomPt()
structIntersected, err := pl.PointLocate(pt.X(), pt.Y())
bruteForceContains := structIntersected.Contains(pt)
assert.Nil(t, err)
if !assert.True(t, bruteForceContains) {
t.Log("Error point:", pt)
t.Log("Error face:", structIntersected)
(*errs)++
}
}
}
func TestRandomDCELSlab(t *testing.T) {
dc := dcel.Random2DDCEL(inputRange, inputSize)
structure, err := slab.Decompose(dc, tree.RedBlack)
assert.Nil(t, err)
testRandomPts(t, structure, testCt, &slabErrors)
printErrors()
}
func TestDCELSlabErrors(t *testing.T) {
errCt := 0
subTestCt := 100
for i := 0; i < testCt; i++ {
inputSize = 2
dc := dcel.Random2DDCEL(inputRange, inputSize)
structure, err := slab.Decompose(dc, tree.RedBlack)
if err != nil {
errCt++
continue
}
queryErrors := 0
testRandomPts(t, structure, subTestCt, &queryErrors)
if queryErrors != 0 {
off.Save(dc).WriteFile("testFail" + strconv.Itoa(i) + ".off")
t.Log("Error index:", i)
s := structure.(*slab.PointLocator)
t.Log(s)
errCt++
continue
}
}
t.Log("Errors in Slab:", errCt, testCt)
}
func TestRandomDCELTrapErrors(t *testing.T) {
errCt := 0
subTestCt := 50
for i := 0; i < testCt; i++ {
inputSize = 3
dc := dcel.Random2DDCEL(inputRange, inputSize)
_, _, structure, err := trapezoid.TrapezoidalMap(dc)
if err != nil {
errCt++
continue
}
queryErrors := 0
testRandomPts(t, structure, subTestCt, &queryErrors)
if queryErrors != 0 {
errCt++
continue
}
}
t.Log("Errors in Trap:", errCt, testCt)
}
func TestRandomDCELTrapezoid(t *testing.T) {
dc := dcel.Random2DDCEL(inputRange, inputSize)
_, _, structure, err := trapezoid.TrapezoidalMap(dc)
assert.Nil(t, err)
testRandomPts(t, structure, testCt, &trapErrors)
printErrors()
}
func TestRandomDCELPlumbLine(t *testing.T) {
dc := dcel.Random2DDCEL(inputRange, inputSize)
it := bruteForce.PlumbLine(dc)
testRandomPts(t, it, testCt, &plumbErrors)
printErrors()
}
func TestRandomDCELRtree(t *testing.T) {
dc := dcel.Random2DDCEL(inputRange, inputSize)
pl := rtree.DCELtoRtree(dc)
testRandomPts(t, pl, testCt, &rtreeErrors)
printErrors()
}
func printErrors() {
fmt.Println("Total errors")
fmt.Println("Slab:", slabErrors)
fmt.Println("Trapezoid:", trapErrors)
fmt.Println("Rtree:", rtreeErrors)
fmt.Println("Plumb Line:", plumbErrors, "(Baseline)")
fmt.Println()
}
func BenchmarkRandomDCELSlab(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
dc := dcel.Random2DDCEL(inputRange, inputSize)
pl, _ := slab.Decompose(dc, tree.RedBlack)
rand.Seed(seed)
b.ResetTimer()
for i := 0; i < b.N; i++ {
pt := randomPt()
pl.PointLocate(pt.X(), pt.Y())
}
}
func BenchmarkRandomDCELTrapezoid(b *testing.B) {
// This seed pattern guarantees that
// each benchmark is run with the same
// input dcel and input points
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
dc := dcel.Random2DDCEL(inputRange, inputSize)
_, _, pl, _ := trapezoid.TrapezoidalMap(dc)
rand.Seed(seed)
b.ResetTimer()
for i := 0; i < b.N; i++ {<|fim▁hole|>
func BenchmarkRandomDCELRtree(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
dc := dcel.Random2DDCEL(inputRange, inputSize)
pl := rtree.DCELtoRtree(dc)
rand.Seed(seed)
b.ResetTimer()
for i := 0; i < b.N; i++ {
pt := randomPt()
pl.PointLocate(pt.X(), pt.Y())
}
}
func BenchmarkRandomDCELPlumbLine(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
dc := dcel.Random2DDCEL(inputRange, inputSize)
pl := bruteForce.PlumbLine(dc)
rand.Seed(seed)
b.ResetTimer()
for i := 0; i < b.N; i++ {
pt := randomPt()
pl.PointLocate(pt.X(), pt.Y())
}
}
func BenchmarkRandomSetupSlab(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
for i := 0; i < b.N; i++ {
dc := dcel.Random2DDCEL(inputRange, inputSize)
slab.Decompose(dc, tree.RedBlack)
}
}
func BenchmarkRandomSetupTrapezoid(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
for i := 0; i < b.N; i++ {
dc := dcel.Random2DDCEL(inputRange, inputSize)
trapezoid.TrapezoidalMap(dc)
}
}
func BenchmarkRandomSetupRtree(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
for i := 0; i < b.N; i++ {
dc := dcel.Random2DDCEL(inputRange, inputSize)
rtree.DCELtoRtree(dc)
}
}
func BenchmarkRandomSetupPlumbLine(b *testing.B) {
if seed == 0 {
fmt.Println("Setting seed")
seed = time.Now().UnixNano()
}
rand.Seed(seed)
for i := 0; i < b.N; i++ {
dc := dcel.Random2DDCEL(inputRange, inputSize)
bruteForce.PlumbLine(dc)
}
}
func BenchmarkAll(b *testing.B) {
//fmt.Println("Setting seed")
for i := 0; i < 1000; i += 2 {
inputSize = i
seed = time.Now().UnixNano()
fmt.Println("InputSize:", i)
b.Run("SlabSetup", BenchmarkRandomSetupSlab)
b.Run("Slab", BenchmarkRandomDCELSlab)
b.Run("TrapSetup", BenchmarkRandomSetupTrapezoid)
b.Run("Trapezoid", BenchmarkRandomDCELTrapezoid)
b.Run("RtreeSetup", BenchmarkRandomSetupRtree)
b.Run("Rtree", BenchmarkRandomDCELRtree)
b.Run("PlumbLineSetup", BenchmarkRandomSetupPlumbLine)
b.Run("PlumbLine", BenchmarkRandomDCELPlumbLine)
}
}
func BenchmarkAdditional(b *testing.B) {
for i := 0; i < 1000; i += 2 {
inputSize = i
seed = time.Now().UnixNano()
fmt.Println("InputSize:", i)
b.Run("TrapSetup", BenchmarkRandomSetupTrapezoid)
}
}
func BenchmarkSlab(b *testing.B) {
for i := 0; i < 1000; i += 16 {
inputSize = i
seed = time.Now().UnixNano()
fmt.Println("InputSize:", i)
b.Run("SlabSetup", BenchmarkRandomSetupSlab)
b.Run("Slab", BenchmarkRandomDCELSlab)
}
}<|fim▁end|> | pt := randomPt()
pl.PointLocate(pt.X(), pt.Y())
}
} |
<|file_name|>sup.server.protocol.ts<|end_file_name|><|fim▁begin|>import { Upload } from './../models/upload/upload.model';
import { SUPController } from './sup.server.controller';
const yellow = '\x1b[33m%s\x1b[0m: ';
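// SUP wires incoming Socket.IO connections to the SUPController: chunked
// uploads are driven by the 'NextChunk' and 'NextFile' events, while the
// handshake/pause/continue/abort control calls are exposed as static
// pass-throughs. (The expansion of "SUP" is not given in this file; it is
// presumably a socket-based upload protocol.)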
export class SUP {
constructor(private io: SocketIOClient.Manager) { }
registerIO() {
this.io.on('connection', (socket: SocketIOClient.Socket) => {
console.log(yellow, 'Socket connected!');
socket.on('NextChunk', (data) => {
console.log(yellow, 'Receiving data.');
SUPController.nextChunk(data, socket);
});
socket.on('NextFile', (data) => {
console.log(yellow, 'Receiving next File.');
SUPController.nextFile(data, socket);<|fim▁hole|> });
});
}
static handshake(data, cb): void {
SUPController.handshake(data, cb);
}
static pause(data, cb): void {
SUPController.pause(data, cb);
}
static continue(data, cb): void {
SUPController.continue(data, cb);
}
static abort(data, cb): void {
SUPController.abort(data, cb);
}
}<|fim▁end|> | |
<|file_name|>WaypointManager.java<|end_file_name|><|fim▁begin|>package org.droidplanner.core.MAVLink;
import com.MAVLink.Messages.MAVLinkMessage;
import com.MAVLink.common.msg_mission_ack;
import com.MAVLink.common.msg_mission_count;
import com.MAVLink.common.msg_mission_current;
import com.MAVLink.common.msg_mission_item;
import com.MAVLink.common.msg_mission_item_reached;
import com.MAVLink.common.msg_mission_request;
import org.droidplanner.core.drone.DroneInterfaces;
import org.droidplanner.core.drone.DroneInterfaces.OnWaypointManagerListener;
import org.droidplanner.core.drone.DroneVariable;
import org.droidplanner.core.model.Drone;
import java.util.ArrayList;
import java.util.List;
/**
* Class to manage the communication of waypoints to the MAV.
* <p/>
* Should be initialized with a MAVLink Object, so the manager can send messages
* via the MAV link. The function processMessage must be called with every new
* MAV Message.
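 * <p/>
 * Minimal usage sketch (hypothetical; assumes a {@code Drone} and a
 * {@code DroneInterfaces.Handler} are already available):
 * <pre>
 *   WaypointManager wpm = new WaypointManager(drone, handler);
 *   wpm.setWaypointManagerListener(listener); // optional progress callbacks
 *   wpm.getWaypoints(); // asynchronous; results arrive through processMessage()
 *   // inside the MAVLink receive loop:
 *   boolean handled = wpm.processMessage(msg);
 * </pre>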
*/
public class WaypointManager extends DroneVariable {
enum WaypointStates {
IDLE, READ_REQUEST, READING_WP, WRITING_WP_COUNT, WRITING_WP, WAITING_WRITE_ACK
}
public enum WaypointEvent_Type {
WP_UPLOAD, WP_DOWNLOAD, WP_RETRY, WP_CONTINUE, WP_TIMED_OUT
}
private static final long TIMEOUT = 15000; //ms
private static final int RETRY_LIMIT = 3;
private int retryTracker = 0;
private int readIndex;
private int writeIndex;
private int retryIndex;
private OnWaypointManagerListener wpEventListener;
WaypointStates state = WaypointStates.IDLE;
/**
 * waypoint which is currently being written
*/
private final DroneInterfaces.Handler watchdog;
private final Runnable watchdogCallback = new Runnable() {
@Override
public void run() {
if (processTimeOut(++retryTracker))
watchdog.postDelayed(this, TIMEOUT);
}
};
public WaypointManager(Drone drone, DroneInterfaces.Handler handler) {
super(drone);
this.watchdog = handler;
}
public void setWaypointManagerListener(OnWaypointManagerListener wpEventListener) {
this.wpEventListener = wpEventListener;
}
private void startWatchdog() {
stopWatchdog();
retryTracker = 0;
this.watchdog.postDelayed(watchdogCallback, TIMEOUT);
}
private void stopWatchdog() {
this.watchdog.removeCallbacks(watchdogCallback);
}
/**
* Try to receive all waypoints from the MAV.
* <p/>
 * If all goes well, the callback will return the list of waypoints.
*/
public void getWaypoints() {
// ensure that WPManager is not doing anything else
if (state != WaypointStates.IDLE)
return;
doBeginWaypointEvent(WaypointEvent_Type.WP_DOWNLOAD);
readIndex = -1;
state = WaypointStates.READ_REQUEST;
MavLinkWaypoint.requestWaypointsList(myDrone);
startWatchdog();
}
/**
* Write a list of waypoints to the MAV.
* <p/>
* The callback will return the status of this operation
*
* @param data waypoints to be written
*/
public void writeWaypoints(List<msg_mission_item> data) {
// ensure that WPManager is not doing anything else
if (state != WaypointStates.IDLE)
return;
		if (mission != null) {
doBeginWaypointEvent(WaypointEvent_Type.WP_UPLOAD);
updateMsgIndexes(data);
mission.clear();
mission.addAll(data);
writeIndex = 0;
state = WaypointStates.WRITING_WP_COUNT;
MavLinkWaypoint.sendWaypointCount(myDrone, mission.size());
startWatchdog();
}
}
private void updateMsgIndexes(List<msg_mission_item> data) {
short index = 0;
for (msg_mission_item msg : data) {
msg.seq = index++;
}
}
/**
* Sets the current waypoint in the MAV
* <p/>
* The callback will return the status of this operation
*/
public void setCurrentWaypoint(int i) {
		if (mission != null) {
MavLinkWaypoint.sendSetCurrentWaypoint(myDrone, (short) i);
}
}
/**
* Callback for when a waypoint has been reached
*
* @param wpNumber number of the completed waypoint
*/
public void onWaypointReached(int wpNumber) {
}
/**
* Callback for a change in the current waypoint the MAV is heading for
*
* @param seq number of the updated waypoint
*/
private void onCurrentWaypointUpdate(short seq) {
}
/**
* number of waypoints to be received, used when reading waypoints
*/
private short waypointCount;
/**
* list of waypoints used when writing or receiving
*/
private List<msg_mission_item> mission = new ArrayList<msg_mission_item>();
/**
* Try to process a Mavlink message if it is a mission related message
*
* @param msg Mavlink message to process
* @return Returns true if the message has been processed
*/
public boolean processMessage(MAVLinkMessage msg) {
switch (state) {
default:
case IDLE:
break;
case READ_REQUEST:
if (msg.msgid == msg_mission_count.MAVLINK_MSG_ID_MISSION_COUNT) {
waypointCount = ((msg_mission_count) msg).count;
mission.clear();
startWatchdog();
MavLinkWaypoint.requestWayPoint(myDrone, mission.size());
state = WaypointStates.READING_WP;
return true;
}
break;
case READING_WP:
if (msg.msgid == msg_mission_item.MAVLINK_MSG_ID_MISSION_ITEM) {
startWatchdog();
processReceivedWaypoint((msg_mission_item) msg);
doWaypointEvent(WaypointEvent_Type.WP_DOWNLOAD, readIndex + 1, waypointCount);
if (mission.size() < waypointCount) {
MavLinkWaypoint.requestWayPoint(myDrone, mission.size());
} else {
stopWatchdog();
state = WaypointStates.IDLE;
MavLinkWaypoint.sendAck(myDrone);
myDrone.getMission().onMissionReceived(mission);
doEndWaypointEvent(WaypointEvent_Type.WP_DOWNLOAD);
}
return true;
}
break;
case WRITING_WP_COUNT:
state = WaypointStates.WRITING_WP;
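			// intentional fall-through: the first MISSION_REQUEST may arrive
			// while we are still in WRITING_WP_COUNT, so let the WRITING_WP
			// case below handle it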
case WRITING_WP:
if (msg.msgid == msg_mission_request.MAVLINK_MSG_ID_MISSION_REQUEST) {
startWatchdog();
processWaypointToSend((msg_mission_request) msg);
doWaypointEvent(WaypointEvent_Type.WP_UPLOAD, writeIndex + 1, mission.size());
return true;
}
break;
case WAITING_WRITE_ACK:
if (msg.msgid == msg_mission_ack.MAVLINK_MSG_ID_MISSION_ACK) {
stopWatchdog();
myDrone.getMission().onWriteWaypoints((msg_mission_ack) msg);
state = WaypointStates.IDLE;
doEndWaypointEvent(WaypointEvent_Type.WP_UPLOAD);
return true;
}
break;
}
if (msg.msgid == msg_mission_item_reached.MAVLINK_MSG_ID_MISSION_ITEM_REACHED) {
onWaypointReached(((msg_mission_item_reached) msg).seq);
return true;
}
if (msg.msgid == msg_mission_current.MAVLINK_MSG_ID_MISSION_CURRENT) {
onCurrentWaypointUpdate(((msg_mission_current) msg).seq);
return true;
}
return false;
}
public boolean processTimeOut(int mTimeOutCount) {
// If max retry is reached, set state to IDLE. No more retry.
if (mTimeOutCount >= RETRY_LIMIT) {
state = WaypointStates.IDLE;
doWaypointEvent(WaypointEvent_Type.WP_TIMED_OUT, retryIndex, RETRY_LIMIT);
return false;
}
retryIndex++;
doWaypointEvent(WaypointEvent_Type.WP_RETRY, retryIndex, RETRY_LIMIT);
switch (state) {
default:
case IDLE:
break;
case READ_REQUEST:
MavLinkWaypoint.requestWaypointsList(myDrone);
break;
case READING_WP:
if (mission.size() < waypointCount) { // request last lost WP
MavLinkWaypoint.requestWayPoint(myDrone, mission.size());
}
break;
case WRITING_WP_COUNT:
MavLinkWaypoint.sendWaypointCount(myDrone, mission.size());
break;
case WRITING_WP:
// Log.d("TIMEOUT", "re Write Msg: " + String.valueOf(writeIndex));
if (writeIndex < mission.size()) {
myDrone.getMavClient().sendMavPacket(mission.get(writeIndex).pack());
}
break;
case WAITING_WRITE_ACK:
myDrone.getMavClient().sendMavPacket(mission.get(mission.size() - 1).pack());
break;
}
return true;
}
private void processWaypointToSend(msg_mission_request msg) {
/*
* Log.d("TIMEOUT", "Write Msg: " + String.valueOf(msg.seq));
*/
writeIndex = msg.seq;
msg_mission_item item = mission.get(writeIndex);
item.target_system = myDrone.getSysid();
item.target_component = myDrone.getCompid();
myDrone.getMavClient().sendMavPacket(item.pack());
if (writeIndex + 1 >= mission.size()) {
state = WaypointStates.WAITING_WRITE_ACK;
}
}
private void processReceivedWaypoint(msg_mission_item msg) {
/*
* Log.d("TIMEOUT", "Read Last/Curr: " + String.valueOf(readIndex) + "/"
* + String.valueOf(msg.seq));
*/
// in case of we receive the same WP again after retry
if (msg.seq <= readIndex)
return;
readIndex = msg.seq;
mission.add(msg);
}<|fim▁hole|>
private void doBeginWaypointEvent(WaypointEvent_Type wpEvent) {
retryIndex = 0;
if (wpEventListener == null)
return;
wpEventListener.onBeginWaypointEvent(wpEvent);
}
private void doEndWaypointEvent(WaypointEvent_Type wpEvent) {
if (retryIndex > 0)// if retry successful, notify that we now continue
doWaypointEvent(WaypointEvent_Type.WP_CONTINUE, retryIndex, RETRY_LIMIT);
retryIndex = 0;
if (wpEventListener == null)
return;
wpEventListener.onEndWaypointEvent(wpEvent);
}
private void doWaypointEvent(WaypointEvent_Type wpEvent, int index, int count) {
retryIndex = 0;
if (wpEventListener == null)
return;
wpEventListener.onWaypointEvent(wpEvent, index, count);
}
}<|fim▁end|> | |
<|file_name|>logsettings.py<|end_file_name|><|fim▁begin|>import os
import platform
import sys
from logging.handlers import SysLogHandler
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
def get_logger_config(log_dir,
logging_env="no_env",
tracking_filename="tracking.log",
edx_filename="edx.log",
dev_env=False,
syslog_addr=None,
debug=False,
local_loglevel='INFO',
console_loglevel=None,
service_variant=None):
"""
Return the appropriate logging config dictionary. You should assign the
result of this to the LOGGING var in your settings. The reason it's done
this way instead of registering directly is because I didn't want to worry
about resetting the logging state if this is called multiple times when
settings are extended.
If dev_env is set to true logging will not be done via local rsyslogd,
instead, tracking and application logs will be dropped in log_dir.
"tracking_filename" and "edx_filename" are ignored unless dev_env
is set to true since otherwise logging is handled by rsyslogd.
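
    Example (hypothetical settings snippet; the path is a placeholder):

        LOGGING = get_logger_config(
            log_dir="/edx/var/log",
            logging_env="dev",
            dev_env=True,
            debug=True,
        )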
"""
# Revert to INFO if an invalid string is passed in
if local_loglevel not in LOG_LEVELS:
local_loglevel = 'INFO'
if console_loglevel is None or console_loglevel not in LOG_LEVELS:
console_loglevel = 'DEBUG' if debug else 'INFO'
if service_variant is None:
# default to a blank string so that if SERVICE_VARIANT is not
# set we will not log to a sub directory
service_variant = ''
hostname = platform.node().split(".")[0]
syslog_format = ("[service_variant={service_variant}]"
"[%(name)s][env:{logging_env}] %(levelname)s "
"[{hostname} %(process)d] [%(filename)s:%(lineno)d] "
"- %(message)s").format(service_variant=service_variant,
logging_env=logging_env,
hostname=hostname)
handlers = ['console', 'local'] if debug else ['console',
'syslogger-remote', 'local']
logger_config = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)s %(process)d '<|fim▁hole|> },
'syslog_format': {'format': syslog_format},
'raw': {'format': '%(message)s'},
},
'handlers': {
'console': {
'level': console_loglevel,
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stderr,
},
'syslogger-remote': {
'level': 'INFO',
'class': 'logging.handlers.SysLogHandler',
'address': syslog_addr,
'formatter': 'syslog_format',
},
'newrelic': {
'level': 'ERROR',
'class': 'lms.lib.newrelic_logging.NewRelicHandler',
'formatter': 'raw',
}
},
'loggers': {
'tracking': {
'handlers': ['tracking'],
'level': 'DEBUG',
'propagate': False,
},
'': {
'handlers': handlers,
'level': 'DEBUG',
'propagate': False
},
}
}
if dev_env:
tracking_file_loc = os.path.join(log_dir, tracking_filename)
edx_file_loc = os.path.join(log_dir, edx_filename)
logger_config['handlers'].update({
'local': {
'class': 'logging.handlers.RotatingFileHandler',
'level': local_loglevel,
'formatter': 'standard',
'filename': edx_file_loc,
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': tracking_file_loc,
'formatter': 'raw',
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
})
else:
# for production environments we will only
# log INFO and up
logger_config['loggers']['']['level'] = 'INFO'
logger_config['handlers'].update({
'local': {
'level': local_loglevel,
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'formatter': 'syslog_format',
'facility': SysLogHandler.LOG_LOCAL0,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'facility': SysLogHandler.LOG_LOCAL1,
'formatter': 'raw',
},
})
return logger_config<|fim▁end|> | '[%(name)s] %(filename)s:%(lineno)d - %(message)s', |
<|file_name|>transfer.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package http
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http/internal"<|fim▁hole|> "sync"
)
// ErrLineTooLong is returned when reading request or response bodies
// with malformed chunked encoding.
var ErrLineTooLong = internal.ErrLineTooLong
type errorReader struct {
err error
}
func (r errorReader) Read(p []byte) (n int, err error) {
return 0, r.err
}
// transferWriter inspects the fields of a user-supplied Request or Response,
// sanitizes them without changing the user object and provides methods for
// writing the respective header, body and trailer in wire format.
type transferWriter struct {
Method string
Body io.Reader
BodyCloser io.Closer
ResponseToHEAD bool
ContentLength int64 // -1 means unknown, 0 means exactly none
Close bool
TransferEncoding []string
Trailer Header
IsResponse bool
}
func newTransferWriter(r interface{}) (t *transferWriter, err error) {
t = &transferWriter{}
// Extract relevant fields
atLeastHTTP11 := false
switch rr := r.(type) {
case *Request:
if rr.ContentLength != 0 && rr.Body == nil {
return nil, fmt.Errorf("http: Request.ContentLength=%d with nil Body", rr.ContentLength)
}
t.Method = valueOrDefault(rr.Method, "GET")
t.Body = rr.Body
t.BodyCloser = rr.Body
t.ContentLength = rr.ContentLength
t.Close = rr.Close
t.TransferEncoding = rr.TransferEncoding
t.Trailer = rr.Trailer
atLeastHTTP11 = rr.ProtoAtLeast(1, 1)
if t.Body != nil && len(t.TransferEncoding) == 0 && atLeastHTTP11 {
if t.ContentLength == 0 {
// Test to see if it's actually zero or just unset.
var buf [1]byte
n, rerr := io.ReadFull(t.Body, buf[:])
if rerr != nil && rerr != io.EOF {
t.ContentLength = -1
t.Body = errorReader{rerr}
} else if n == 1 {
// Oh, guess there is data in this Body Reader after all.
// The ContentLength field just wasn't set.
// Stich the Body back together again, re-attaching our
// consumed byte.
t.ContentLength = -1
t.Body = io.MultiReader(bytes.NewReader(buf[:]), t.Body)
} else {
// Body is actually empty.
t.Body = nil
t.BodyCloser = nil
}
}
if t.ContentLength < 0 {
t.TransferEncoding = []string{"chunked"}
}
}
case *Response:
t.IsResponse = true
if rr.Request != nil {
t.Method = rr.Request.Method
}
t.Body = rr.Body
t.BodyCloser = rr.Body
t.ContentLength = rr.ContentLength
t.Close = rr.Close
t.TransferEncoding = rr.TransferEncoding
t.Trailer = rr.Trailer
atLeastHTTP11 = rr.ProtoAtLeast(1, 1)
t.ResponseToHEAD = noBodyExpected(t.Method)
}
// Sanitize Body,ContentLength,TransferEncoding
if t.ResponseToHEAD {
t.Body = nil
if chunked(t.TransferEncoding) {
t.ContentLength = -1
}
} else {
if !atLeastHTTP11 || t.Body == nil {
t.TransferEncoding = nil
}
if chunked(t.TransferEncoding) {
t.ContentLength = -1
} else if t.Body == nil { // no chunking, no body
t.ContentLength = 0
}
}
// Sanitize Trailer
if !chunked(t.TransferEncoding) {
t.Trailer = nil
}
return t, nil
}
func noBodyExpected(requestMethod string) bool {
return requestMethod == "HEAD"
}
func (t *transferWriter) shouldSendContentLength() bool {
if chunked(t.TransferEncoding) {
return false
}
if t.ContentLength > 0 {
return true
}
if t.ContentLength < 0 {
return false
}
// Many servers expect a Content-Length for these methods
if t.Method == "POST" || t.Method == "PUT" {
return true
}
if t.ContentLength == 0 && isIdentity(t.TransferEncoding) {
if t.Method == "GET" || t.Method == "HEAD" {
return false
}
return true
}
return false
}
func (t *transferWriter) WriteHeader(w io.Writer) error {
if t.Close {
if _, err := io.WriteString(w, "Connection: close\r\n"); err != nil {
return err
}
}
// Write Content-Length and/or Transfer-Encoding whose values are a
// function of the sanitized field triple (Body, ContentLength,
// TransferEncoding)
if t.shouldSendContentLength() {
if _, err := io.WriteString(w, "Content-Length: "); err != nil {
return err
}
if _, err := io.WriteString(w, strconv.FormatInt(t.ContentLength, 10)+"\r\n"); err != nil {
return err
}
} else if chunked(t.TransferEncoding) {
if _, err := io.WriteString(w, "Transfer-Encoding: chunked\r\n"); err != nil {
return err
}
}
// Write Trailer header
if t.Trailer != nil {
keys := make([]string, 0, len(t.Trailer))
for k := range t.Trailer {
k = CanonicalHeaderKey(k)
switch k {
case "Transfer-Encoding", "Trailer", "Content-Length":
return &badStringError{"invalid Trailer key", k}
}
keys = append(keys, k)
}
if len(keys) > 0 {
sort.Strings(keys)
// TODO: could do better allocation-wise here, but trailers are rare,
// so being lazy for now.
if _, err := io.WriteString(w, "Trailer: "+strings.Join(keys, ",")+"\r\n"); err != nil {
return err
}
}
}
return nil
}
func (t *transferWriter) WriteBody(w io.Writer) error {
var err error
var ncopy int64
// Write body
if t.Body != nil {
if chunked(t.TransferEncoding) {
if bw, ok := w.(*bufio.Writer); ok && !t.IsResponse {
w = &internal.FlushAfterChunkWriter{Writer: bw}
}
cw := internal.NewChunkedWriter(w)
_, err = io.Copy(cw, t.Body)
if err == nil {
err = cw.Close()
}
} else if t.ContentLength == -1 {
ncopy, err = io.Copy(w, t.Body)
} else {
ncopy, err = io.Copy(w, io.LimitReader(t.Body, t.ContentLength))
if err != nil {
return err
}
var nextra int64
nextra, err = io.Copy(ioutil.Discard, t.Body)
ncopy += nextra
}
if err != nil {
return err
}
if err = t.BodyCloser.Close(); err != nil {
return err
}
}
if !t.ResponseToHEAD && t.ContentLength != -1 && t.ContentLength != ncopy {
return fmt.Errorf("http: ContentLength=%d with Body length %d",
t.ContentLength, ncopy)
}
if chunked(t.TransferEncoding) {
// Write Trailer header
if t.Trailer != nil {
if err := t.Trailer.Write(w); err != nil {
return err
}
}
// Last chunk, empty trailer
_, err = io.WriteString(w, "\r\n")
}
return err
}
type transferReader struct {
// Input
Header Header
StatusCode int
RequestMethod string
ProtoMajor int
ProtoMinor int
// Output
Body io.ReadCloser
ContentLength int64
TransferEncoding []string
Close bool
Trailer Header
}
func (t *transferReader) protoAtLeast(m, n int) bool {
return t.ProtoMajor > m || (t.ProtoMajor == m && t.ProtoMinor >= n)
}
// bodyAllowedForStatus reports whether a given response status code
// permits a body. See RFC2616, section 4.4.
func bodyAllowedForStatus(status int) bool {
switch {
case status >= 100 && status <= 199:
return false
case status == 204:
return false
case status == 304:
return false
}
return true
}
var (
suppressedHeaders304 = []string{"Content-Type", "Content-Length", "Transfer-Encoding"}
suppressedHeadersNoBody = []string{"Content-Length", "Transfer-Encoding"}
)
func suppressedHeaders(status int) []string {
switch {
case status == 304:
// RFC 2616 section 10.3.5: "the response MUST NOT include other entity-headers"
return suppressedHeaders304
case !bodyAllowedForStatus(status):
return suppressedHeadersNoBody
}
return nil
}
// msg is *Request or *Response.
func readTransfer(msg interface{}, r *bufio.Reader) (err error) {
t := &transferReader{RequestMethod: "GET"}
// Unify input
isResponse := false
switch rr := msg.(type) {
case *Response:
t.Header = rr.Header
t.StatusCode = rr.StatusCode
t.ProtoMajor = rr.ProtoMajor
t.ProtoMinor = rr.ProtoMinor
t.Close = shouldClose(t.ProtoMajor, t.ProtoMinor, t.Header, true)
isResponse = true
if rr.Request != nil {
t.RequestMethod = rr.Request.Method
}
case *Request:
t.Header = rr.Header
t.RequestMethod = rr.Method
t.ProtoMajor = rr.ProtoMajor
t.ProtoMinor = rr.ProtoMinor
// Transfer semantics for Requests are exactly like those for
// Responses with status code 200, responding to a GET method
t.StatusCode = 200
t.Close = rr.Close
default:
panic("unexpected type")
}
// Default to HTTP/1.1
if t.ProtoMajor == 0 && t.ProtoMinor == 0 {
t.ProtoMajor, t.ProtoMinor = 1, 1
}
// Transfer encoding, content length
err = t.fixTransferEncoding()
if err != nil {
return err
}
realLength, err := fixLength(isResponse, t.StatusCode, t.RequestMethod, t.Header, t.TransferEncoding)
if err != nil {
return err
}
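	// For a response to a HEAD request, the body is empty on the wire, but we
	// still report the declared Content-Length so callers can see the size of
	// the entity a GET would have returned.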
if isResponse && t.RequestMethod == "HEAD" {
if n, err := parseContentLength(t.Header.get("Content-Length")); err != nil {
return err
} else {
t.ContentLength = n
}
} else {
t.ContentLength = realLength
}
// Trailer
t.Trailer, err = fixTrailer(t.Header, t.TransferEncoding)
if err != nil {
return err
}
// If there is no Content-Length or chunked Transfer-Encoding on a *Response
// and the status is not 1xx, 204 or 304, then the body is unbounded.
// See RFC2616, section 4.4.
switch msg.(type) {
case *Response:
if realLength == -1 &&
!chunked(t.TransferEncoding) &&
bodyAllowedForStatus(t.StatusCode) {
// Unbounded body.
t.Close = true
}
}
// Prepare body reader. ContentLength < 0 means chunked encoding
// or close connection when finished, since multipart is not supported yet
switch {
case chunked(t.TransferEncoding):
if noBodyExpected(t.RequestMethod) {
t.Body = eofReader
} else {
t.Body = &body{src: internal.NewChunkedReader(r), hdr: msg, r: r, closing: t.Close}
}
case realLength == 0:
t.Body = eofReader
case realLength > 0:
t.Body = &body{src: io.LimitReader(r, realLength), closing: t.Close}
default:
// realLength < 0, i.e. "Content-Length" not mentioned in header
if t.Close {
// Close semantics (i.e. HTTP/1.0)
t.Body = &body{src: r, closing: t.Close}
} else {
// Persistent connection (i.e. HTTP/1.1)
t.Body = eofReader
}
}
// Unify output
switch rr := msg.(type) {
case *Request:
rr.Body = t.Body
rr.ContentLength = t.ContentLength
rr.TransferEncoding = t.TransferEncoding
rr.Close = t.Close
rr.Trailer = t.Trailer
case *Response:
rr.Body = t.Body
rr.ContentLength = t.ContentLength
rr.TransferEncoding = t.TransferEncoding
rr.Close = t.Close
rr.Trailer = t.Trailer
}
return nil
}
// Checks whether chunked is part of the encodings stack
func chunked(te []string) bool { return len(te) > 0 && te[0] == "chunked" }
// Checks whether the encoding is explicitly "identity".
func isIdentity(te []string) bool { return len(te) == 1 && te[0] == "identity" }
// fixTransferEncoding sanitizes t.TransferEncoding, if needed.
func (t *transferReader) fixTransferEncoding() error {
raw, present := t.Header["Transfer-Encoding"]
if !present {
return nil
}
delete(t.Header, "Transfer-Encoding")
// Issue 12785; ignore Transfer-Encoding on HTTP/1.0 requests.
if !t.protoAtLeast(1, 1) {
return nil
}
encodings := strings.Split(raw[0], ",")
te := make([]string, 0, len(encodings))
// TODO: Even though we only support "identity" and "chunked"
// encodings, the loop below is designed with foresight. One
// invariant that must be maintained is that, if present,
// chunked encoding must always come first.
for _, encoding := range encodings {
encoding = strings.ToLower(strings.TrimSpace(encoding))
// "identity" encoding is not recorded
if encoding == "identity" {
break
}
if encoding != "chunked" {
return &badStringError{"unsupported transfer encoding", encoding}
}
		te = append(te, encoding)
}
if len(te) > 1 {
return &badStringError{"too many transfer encodings", strings.Join(te, ",")}
}
if len(te) > 0 {
// RFC 7230 3.3.2 says "A sender MUST NOT send a
// Content-Length header field in any message that
// contains a Transfer-Encoding header field."
//
// but also:
// "If a message is received with both a
// Transfer-Encoding and a Content-Length header
// field, the Transfer-Encoding overrides the
// Content-Length. Such a message might indicate an
// attempt to perform request smuggling (Section 9.5)
// or response splitting (Section 9.4) and ought to be
// handled as an error. A sender MUST remove the
// received Content-Length field prior to forwarding
// such a message downstream."
//
// Reportedly, these appear in the wild.
delete(t.Header, "Content-Length")
t.TransferEncoding = te
return nil
}
return nil
}
// Determine the expected body length, using RFC 2616 Section 4.4. This
// function is not a method, because ultimately it should be shared by
// ReadResponse and ReadRequest.
func fixLength(isResponse bool, status int, requestMethod string, header Header, te []string) (int64, error) {
contentLens := header["Content-Length"]
isRequest := !isResponse
// Logic based on response type or status
if noBodyExpected(requestMethod) {
// For HTTP requests, as part of hardening against request
// smuggling (RFC 7230), don't allow a Content-Length header for
// methods which don't permit bodies. As an exception, allow
// exactly one Content-Length header if its value is "0".
if isRequest && len(contentLens) > 0 && !(len(contentLens) == 1 && contentLens[0] == "0") {
return 0, fmt.Errorf("http: method cannot contain a Content-Length; got %q", contentLens)
}
return 0, nil
}
if status/100 == 1 {
return 0, nil
}
switch status {
case 204, 304:
return 0, nil
}
if len(contentLens) > 1 {
// harden against HTTP request smuggling. See RFC 7230.
return 0, errors.New("http: message cannot contain multiple Content-Length headers")
}
// Logic based on Transfer-Encoding
if chunked(te) {
return -1, nil
}
// Logic based on Content-Length
var cl string
if len(contentLens) == 1 {
cl = strings.TrimSpace(contentLens[0])
}
if cl != "" {
n, err := parseContentLength(cl)
if err != nil {
return -1, err
}
return n, nil
} else {
header.Del("Content-Length")
}
if !isResponse {
// RFC 2616 neither explicitly permits nor forbids an
// entity-body on a GET request so we permit one if
// declared, but we default to 0 here (not -1 below)
// if there's no mention of a body.
// Likewise, all other request methods are assumed to have
// no body if neither Transfer-Encoding chunked nor a
// Content-Length are set.
return 0, nil
}
// Body-EOF logic based on other methods (like closing, or chunked coding)
return -1, nil
}
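// Illustrative summary of the precedence implemented above:
//
//	no-body method, 1xx, 204 or 304 response -> 0
//	Transfer-Encoding: chunked               -> -1 (framed by the chunked coding)
//	valid Content-Length                     -> that value
//	request without framing headers          -> 0
//	response without framing headers         -> -1 (read until connection close)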
// Determine whether to hang up after sending a request and body, or
// receiving a response and body.
// 'header' is the request headers.
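// For example, under the rules below an HTTP/1.0 message stays open only with
// an explicit "Connection: keep-alive" (and no "close" token), while an
// HTTP/1.1 message stays open unless it carries "Connection: close".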
func shouldClose(major, minor int, header Header, removeCloseHeader bool) bool {
if major < 1 {
return true
} else if major == 1 && minor == 0 {
vv := header["Connection"]
if headerValuesContainsToken(vv, "close") || !headerValuesContainsToken(vv, "keep-alive") {
return true
}
return false
} else {
if headerValuesContainsToken(header["Connection"], "close") {
if removeCloseHeader {
header.Del("Connection")
}
return true
}
}
return false
}
// Parse the trailer header
func fixTrailer(header Header, te []string) (Header, error) {
vv, ok := header["Trailer"]
if !ok {
return nil, nil
}
header.Del("Trailer")
trailer := make(Header)
var err error
for _, v := range vv {
foreachHeaderElement(v, func(key string) {
key = CanonicalHeaderKey(key)
switch key {
case "Transfer-Encoding", "Trailer", "Content-Length":
if err == nil {
err = &badStringError{"bad trailer key", key}
return
}
}
trailer[key] = nil
})
}
if err != nil {
return nil, err
}
if len(trailer) == 0 {
return nil, nil
}
if !chunked(te) {
// Trailer and no chunking
return nil, ErrUnexpectedTrailer
}
return trailer, nil
}
// body turns a Reader into a ReadCloser.
// Close ensures that the body has been fully read
// and then reads the trailer if necessary.
type body struct {
src io.Reader
hdr interface{} // non-nil (Response or Request) value means read trailer
r *bufio.Reader // underlying wire-format reader for the trailer
closing bool // is the connection to be closed after reading body?
doEarlyClose bool // whether Close should stop early
mu sync.Mutex // guards closed, and calls to Read and Close
sawEOF bool
closed bool
earlyClose bool // Close called and we didn't read to the end of src
}
// ErrBodyReadAfterClose is returned when reading a Request or Response
// Body after the body has been closed. This typically happens when the body is
// read after an HTTP Handler calls WriteHeader or Write on its
// ResponseWriter.
var ErrBodyReadAfterClose = errors.New("http: invalid Read on closed Body")
func (b *body) Read(p []byte) (n int, err error) {
b.mu.Lock()
defer b.mu.Unlock()
if b.closed {
return 0, ErrBodyReadAfterClose
}
return b.readLocked(p)
}
// Must hold b.mu.
func (b *body) readLocked(p []byte) (n int, err error) {
if b.sawEOF {
return 0, io.EOF
}
n, err = b.src.Read(p)
if err == io.EOF {
b.sawEOF = true
// Chunked case. Read the trailer.
if b.hdr != nil {
if e := b.readTrailer(); e != nil {
err = e
// Something went wrong in the trailer, we must not allow any
// further reads of any kind to succeed from body, nor any
// subsequent requests on the server connection. See
// golang.org/issue/12027
b.sawEOF = false
b.closed = true
}
b.hdr = nil
} else {
// If the server declared the Content-Length, our body is a LimitedReader
// and we need to check whether this EOF arrived early.
if lr, ok := b.src.(*io.LimitedReader); ok && lr.N > 0 {
err = io.ErrUnexpectedEOF
}
}
}
// If we can return an EOF here along with the read data, do
// so. This is optional per the io.Reader contract, but doing
// so helps the HTTP transport code recycle its connection
// earlier (since it will see this EOF itself), even if the
// client doesn't do future reads or Close.
if err == nil && n > 0 {
if lr, ok := b.src.(*io.LimitedReader); ok && lr.N == 0 {
err = io.EOF
b.sawEOF = true
}
}
return n, err
}
var (
singleCRLF = []byte("\r\n")
doubleCRLF = []byte("\r\n\r\n")
)
func seeUpcomingDoubleCRLF(r *bufio.Reader) bool {
for peekSize := 4; ; peekSize++ {
// This loop stops when Peek returns an error,
// which it does when r's buffer has been filled.
buf, err := r.Peek(peekSize)
if bytes.HasSuffix(buf, doubleCRLF) {
return true
}
if err != nil {
break
}
}
return false
}
var errTrailerEOF = errors.New("http: unexpected EOF reading trailer")
func (b *body) readTrailer() error {
// The common case, since nobody uses trailers.
buf, err := b.r.Peek(2)
if bytes.Equal(buf, singleCRLF) {
b.r.Discard(2)
return nil
}
if len(buf) < 2 {
return errTrailerEOF
}
if err != nil {
return err
}
// Make sure there's a header terminator coming up, to prevent
// a DoS with an unbounded size Trailer. It's not easy to
// slip in a LimitReader here, as textproto.NewReader requires
// a concrete *bufio.Reader. Also, we can't get all the way
// back up to our conn's LimitedReader that *might* be backing
// this bufio.Reader. Instead, a hack: we iteratively Peek up
// to the bufio.Reader's max size, looking for a double CRLF.
// This limits the trailer to the underlying buffer size, typically 4kB.
if !seeUpcomingDoubleCRLF(b.r) {
return errors.New("http: suspiciously long trailer after chunked body")
}
hdr, err := textproto.NewReader(b.r).ReadMIMEHeader()
if err != nil {
if err == io.EOF {
return errTrailerEOF
}
return err
}
switch rr := b.hdr.(type) {
case *Request:
mergeSetHeader(&rr.Trailer, Header(hdr))
case *Response:
mergeSetHeader(&rr.Trailer, Header(hdr))
}
return nil
}
func mergeSetHeader(dst *Header, src Header) {
if *dst == nil {
*dst = src
return
}
for k, vv := range src {
(*dst)[k] = vv
}
}
// unreadDataSizeLocked returns the number of bytes of unread input.
// It returns -1 if unknown.
// b.mu must be held.
func (b *body) unreadDataSizeLocked() int64 {
if lr, ok := b.src.(*io.LimitedReader); ok {
return lr.N
}
return -1
}
func (b *body) Close() error {
b.mu.Lock()
defer b.mu.Unlock()
if b.closed {
return nil
}
var err error
switch {
case b.sawEOF:
// Already saw EOF, so no need to go looking for it.
case b.hdr == nil && b.closing:
// no trailer and closing the connection next.
// no point in reading to EOF.
case b.doEarlyClose:
// Read up to maxPostHandlerReadBytes bytes of the body, looking
// for EOF (and trailers), so we can re-use this connection.
if lr, ok := b.src.(*io.LimitedReader); ok && lr.N > maxPostHandlerReadBytes {
// There was a declared Content-Length, and we have more bytes remaining
// than our maxPostHandlerReadBytes tolerance. So, give up.
b.earlyClose = true
} else {
var n int64
// Consume the body, which will also lead to us reading
// the trailer headers after the body, if present.
n, err = io.CopyN(ioutil.Discard, bodyLocked{b}, maxPostHandlerReadBytes)
if err == io.EOF {
err = nil
}
if n == maxPostHandlerReadBytes {
b.earlyClose = true
}
}
default:
// Fully consume the body, which will also lead to us reading
// the trailer headers after the body, if present.
_, err = io.Copy(ioutil.Discard, bodyLocked{b})
}
b.closed = true
return err
}
func (b *body) didEarlyClose() bool {
b.mu.Lock()
defer b.mu.Unlock()
return b.earlyClose
}
// bodyLocked is an io.Reader reading from a *body when its mutex is
// already held.
type bodyLocked struct {
b *body
}
func (bl bodyLocked) Read(p []byte) (n int, err error) {
if bl.b.closed {
return 0, ErrBodyReadAfterClose
}
return bl.b.readLocked(p)
}
// parseContentLength trims whitespace from cl and returns -1 if no value
// is set, otherwise the parsed value (which must be >= 0).
func parseContentLength(cl string) (int64, error) {
cl = strings.TrimSpace(cl)
if cl == "" {
return -1, nil
}
n, err := strconv.ParseInt(cl, 10, 64)
if err != nil || n < 0 {
return 0, &badStringError{"bad Content-Length", cl}
}
return n, nil
}<|fim▁end|> | "net/textproto"
"sort"
"strconv"
"strings" |
<|file_name|>ping_pong_paddle.js<|end_file_name|><|fim▁begin|>function PingPongPaddle(x, y, z, radius) {
this.x = x;
this.y = y;
this.z = z;
this.radius = radius;
}
PingPongPaddle.prototype = {
rotation: new THREE.Vector3(0, 0, 0),
init: function(scene) {
var paddleTexture = THREE.ImageUtils.loadTexture('images/paddle_texture.jpg');
//var geometry = new THREE.CubeGeometry(this.radius, this.radius, .1);
var geometry = new THREE.CylinderGeometry(this.radius, this.radius, .1, 16, 1);
this.paddle = new THREE.Mesh(geometry, new THREE.MeshBasicMaterial({
map: paddleTexture,
transparent: true,
opacity: .5
}));
//this.paddle.doubleSided = true;
this.rotate(Math.PI / 2, 0, 0);<|fim▁hole|> },
update: function() {
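// Bounce the paddle between y = -20 and y = 20; `direction` is the
// module-level flag declared after this prototype (see bottom of file).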
if (this.paddle.position.y > 20) {
direction = -1;
} else if (this.paddle.position.y < -20) {
direction = 1;
}
this.paddle.position.y += direction * .1;
},
hitPaddle: function(x, y, z, radius) {
if (Math.abs(this.paddle.position.x - x) < radius) {
return isBetween(y, this.paddle.position.y + this.radius / 2, this.paddle.position.y - this.radius / 2) && isBetween(z, this.paddle.position.z + this.radius / 2, this.paddle.position.z - this.radius / 2);
}
// No collision is possible when the ball is outside the paddle's x-range.
return false;
},
rotate: function(x, y, z) {
this.paddle.rotateOnAxis(new THREE.Vector3(1, 0, 0), x);
this.rotation.x += x;
this.paddle.rotateOnAxis(new THREE.Vector3(0, 1, 0), y);
this.rotation.y += y;
this.paddle.rotateOnAxis(new THREE.Vector3(0, 0, 1), z);
this.rotation.z += z;
}
}
var direction = 1;
function isBetween(x, x1, x2) {
return (x <= x1 && x >= x2) || (x >= x1 && x <= x2);
}<|fim▁end|> | this.paddle.position.x = this.x;
this.paddle.position.y = this.y;
this.paddle.position.z = this.z;
scene.add(this.paddle); |
<|file_name|>test_scale.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestScale(TestCase):
def testRegression(self):
inputSize = 1024
input = range(inputSize)
factor = 0.5
expected = [factor * n for n in input]
output = Scale(factor=factor, clipping=False)(input)
self.assertEqualVector(output, expected)
def testZero(self):
inputSize = 1024
input = [0] * inputSize
<|fim▁hole|> def testEmpty(self):
input = []
expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
def testClipping(self):
inputSize = 1024
maxAbsValue = 10
factor = 1
input = [n + maxAbsValue for n in range(inputSize)]
expected = [maxAbsValue] * inputSize
output = Scale(factor=factor, clipping=True, maxAbsValue=maxAbsValue)(input)
self.assertEqualVector(output, expected)
def testInvalidParam(self):
self.assertConfigureFails(Scale(), { 'maxAbsValue': -1 })
suite = allTests(TestScale)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | expected = input[:]
output = Scale()(input)
self.assertEqualVector(output, input)
|
<|file_name|>_width.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class WidthValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="width", parent_name="violin", **kwargs):
super(WidthValidator, self).__init__(<|fim▁hole|> parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 0),
role=kwargs.pop("role", "info"),
**kwargs
)<|fim▁end|> | plotly_name=plotly_name, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This module contains external, potentially separately licensed,
packages that are included in spack.
So far:
argparse: We include our own version to be Python 2.6 compatible.<|fim▁hole|>
functools: Used for implementation of total_ordering.
jinja2: A modern and designer-friendly templating language for Python
jsonschema: An implementation of JSON Schema for Python.
ordereddict: We include our own version to be Python 2.6 compatible.
py: Needed by pytest. Library with cross-python path,
ini-parsing, io, code, and log facilities.
pyqver2: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
pytest: Testing framework used by Spack.
yaml: Used for config files.
"""<|fim▁end|> |
distro: Provides a more stable linux distribution detection. |
<|file_name|>gate.rs<|end_file_name|><|fim▁begin|>// ignore-arm
// ignore-aarch64
// ignore-wasm
// ignore-emscripten
// ignore-mips
// ignore-mips64
// ignore-powerpc<|fim▁hole|>// ignore-powerpc64le
// ignore-riscv64
// ignore-sparc
// ignore-sparc64
// ignore-s390x
// gate-test-sse4a_target_feature
// gate-test-powerpc_target_feature
// gate-test-avx512_target_feature
// gate-test-tbm_target_feature
// gate-test-arm_target_feature
// gate-test-aarch64_target_feature
// gate-test-hexagon_target_feature
// gate-test-mips_target_feature
// gate-test-wasm_target_feature
// gate-test-adx_target_feature
// gate-test-cmpxchg16b_target_feature
// gate-test-movbe_target_feature
// gate-test-rtm_target_feature
// gate-test-f16c_target_feature
// gate-test-riscv_target_feature
// gate-test-ermsb_target_feature
// gate-test-bpf_target_feature
#[target_feature(enable = "avx512bw")]
//~^ ERROR: currently unstable
unsafe fn foo() {}
fn main() {}<|fim▁end|> | // ignore-powerpc64 |
<|file_name|>decodable.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>The compiler code necessary for #[deriving(Decodable)]. See
encodable.rs for more.
*/
use ast::{MetaItem, Item, Expr, MutMutable, Ident};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
pub fn expand_deriving_decodable(cx: &ExtCtxt,
span: Span,
mitem: @MetaItem,
in_items: ~[@Item]) -> ~[@Item] {
let trait_def = TraitDef {
cx: cx, span: span,
path: Path::new_(~["extra", "serialize", "Decodable"], None,
~[~Literal(Path::new_local("__D"))], true),
additional_bounds: ~[],
generics: LifetimeBounds {
lifetimes: ~[],
bounds: ~[("__D", ~[Path::new(~["extra", "serialize", "Decoder"])])],
},
methods: ~[
MethodDef {
name: "decode",
generics: LifetimeBounds::empty(),
explicit_self: None,
args: ~[Ptr(~Literal(Path::new_local("__D")),
Borrowed(None, MutMutable))],
ret_ty: Self,
inline: false,
const_nonmatching: true,
combine_substructure: decodable_substructure,
},
]
};
trait_def.expand(mitem, in_items)
}
fn decodable_substructure(cx: &ExtCtxt, span: Span,
substr: &Substructure) -> @Expr {
let decoder = substr.nonself_args[0];
let recurse = ~[cx.ident_of("extra"),
cx.ident_of("serialize"),
cx.ident_of("Decodable"),
cx.ident_of("decode")];
// throw an underscore in front to suppress unused variable warnings
let blkarg = cx.ident_of("_d");
let blkdecoder = cx.expr_ident(span, blkarg);
let calldecode = cx.expr_call_global(span, recurse, ~[blkdecoder]);
let lambdadecode = cx.lambda_expr_1(span, calldecode, blkarg);
return match *substr.fields {
StaticStruct(_, ref summary) => {
let nfields = match *summary {
Unnamed(ref fields) => fields.len(),
Named(ref fields) => fields.len()
};
let read_struct_field = cx.ident_of("read_struct_field");
let result = decode_static_fields(cx,
span,
substr.type_ident,
summary,
|span, name, field| {
cx.expr_method_call(span, blkdecoder, read_struct_field,
~[cx.expr_str(span, name),
cx.expr_uint(span, field),
lambdadecode])
});
cx.expr_method_call(span, decoder, cx.ident_of("read_struct"),
~[cx.expr_str(span, cx.str_of(substr.type_ident)),
cx.expr_uint(span, nfields),
cx.lambda_expr_1(span, result, blkarg)])
}
StaticEnum(_, ref fields) => {
let variant = cx.ident_of("i");
let mut arms = ~[];
let mut variants = ~[];
let rvariant_arg = cx.ident_of("read_enum_variant_arg");
for (i, f) in fields.iter().enumerate() {
let (name, parts) = match *f { (i, ref p) => (i, p) };
variants.push(cx.expr_str(span, cx.str_of(name)));
let decoded = decode_static_fields(cx,
span,
name,
parts,
|span, _, field| {
cx.expr_method_call(span, blkdecoder, rvariant_arg,
~[cx.expr_uint(span, field),
lambdadecode])
});
arms.push(cx.arm(span,
~[cx.pat_lit(span, cx.expr_uint(span, i))],
decoded));
}
arms.push(cx.arm_unreachable(span));
let result = cx.expr_match(span, cx.expr_ident(span, variant), arms);
let lambda = cx.lambda_expr(span, ~[blkarg, variant], result);
let variant_vec = cx.expr_vec(span, variants);
let result = cx.expr_method_call(span, blkdecoder,
cx.ident_of("read_enum_variant"),
~[variant_vec, lambda]);
cx.expr_method_call(span, decoder, cx.ident_of("read_enum"),
~[cx.expr_str(span, cx.str_of(substr.type_ident)),
cx.lambda_expr_1(span, result, blkarg)])
}
_ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)")
};
}
/// Create a decoder for a single enum variant/struct:
/// - `outer_pat_ident` is the name of this enum variant/struct
/// - `getarg` should retrieve the `uint`-th field with name `@str`.
fn decode_static_fields(cx: &ExtCtxt,
outer_span: Span,
outer_pat_ident: Ident,
fields: &StaticFields,
getarg: |Span, @str, uint| -> @Expr)
-> @Expr {
match *fields {
Unnamed(ref fields) => {
if fields.is_empty() {
cx.expr_ident(outer_span, outer_pat_ident)
} else {
let fields = fields.iter().enumerate().map(|(i, &span)| {
getarg(span, format!("_field{}", i).to_managed(), i)
}).collect();
cx.expr_call_ident(outer_span, outer_pat_ident, fields)
}
}
Named(ref fields) => {
// use the field's span to get nicer error messages.
let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
cx.field_imm(span, name, getarg(span, cx.str_of(name), i))
}).collect();
cx.expr_struct_ident(outer_span, outer_pat_ident, fields)
}
}
}<|fim▁end|> | // except according to those terms.
/*! |
<|file_name|>compile.js<|end_file_name|><|fim▁begin|>var typecheck = require('./typeinference').typecheck,
macroexpand = require('./macroexpand').macroexpand,
loadModule = require('./modules').loadModule,
exportType = require('./modules').exportType,
types = require('./types'),
nodeToType = require('./typeinference').nodeToType,
nodes = require('./nodes').nodes,
lexer = require('./lexer'),
parser = require('../lib/parser').parser,
typeparser = require('../lib/typeparser').parser,
_ = require('underscore');
// Assigning the nodes to `parser.yy` allows the grammar to access the nodes from
// the `yy` namespace.
parser.yy = typeparser.yy = nodes;
parser.lexer = typeparser.lexer = {
"lex": function() {
var token = this.tokens[this.pos] ? this.tokens[this.pos++] : ['EOF'];
this.yytext = token[1];
this.yylineno = token[2];
return token[0];
},
"setInput": function(tokens) {
this.tokens = tokens;
this.pos = 0;
},
"upcomingInput": function() {
return "";
}
};
// Separate end comments from other expressions
var splitComments = function(body) {
return _.reduceRight(body, function(accum, node) {
if(!accum.body.length && node instanceof nodes.Comment) {
accum.comments.unshift(node);
return accum;
}
accum.body.unshift(node);
return accum;
}, {
body: [],
comments: []
});
};
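// For example (illustrative): given [expr1, comment1, expr2, comment2, comment3],
// splitComments returns {body: [expr1, comment1, expr2], comments: [comment2, comment3]};
// only the trailing run of comments is split off.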
// Compile an abstract syntax tree (AST) node to JavaScript.
var indent = 0;
var getIndent = function(extra) {
if(!extra) {
extra = 0;
}
var spacing = "";
var i;
for(i = 0; i < indent + extra; i++) {
spacing += " ";
}
return spacing;
};
var joinIndent = function(args, extra) {
var lineIndent = "\n" + getIndent(extra);
var argIndent = args.join("\n" + getIndent(extra));
if(argIndent) {
return argIndent + lineIndent;
}
return "";
};
var pushIndent = function() {
indent++;
return getIndent();
};
var popIndent = function() {
indent--;
return getIndent();
};
var compileNodeWithEnv = function(n, env, opts) {
if(!opts) opts = {};
var compileNode = function(n) {
return compileNodeWithEnv(n, env);
};
return n.accept({
// Function definition to JavaScript function.
visitFunction: function() {
var getArgs = function(a) {
return _.map(a, function(v) {
return v.name;
}).join(", ");
};
pushIndent();
var split = splitComments(n.body);
var compiledWhereDecls = _.map(n.whereDecls, compileNode);
var compiledNodeBody = _.map(split.body, compileNode);
var init = [];
if(compiledWhereDecls.length > 0) {
init.push(compiledWhereDecls.join(';\n' + getIndent()) + ';');
}
if(compiledNodeBody.length > 1) {
init.push(compiledNodeBody.slice(0, compiledNodeBody.length - 1).join(';\n' + getIndent()) + ';');
}
var lastString = compiledNodeBody[compiledNodeBody.length - 1];
var varEquals = "";
if(n.name) {
varEquals = "var " + n.name + " = ";
}
var compiledEndComments = "";
if(split.comments.length) {
compiledEndComments = getIndent() + _.map(split.comments, compileNode).join("\n" + getIndent()) + "\n";
}
return varEquals + "function(" + getArgs(n.args) + ") {\n" +
getIndent() + joinIndent(init) + "return " + lastString +
";\n" + compiledEndComments + popIndent() + "}";
},
visitIfThenElse: function() {
var compiledCondition = compileNode(n.condition);
var compileAppendSemicolon = function(n) {
return compileNode(n) + ';';
};
var ifTrue = splitComments(n.ifTrue);
var ifFalse = splitComments(n.ifFalse);
pushIndent();
pushIndent();
var compiledIfTrueInit = joinIndent(_.map(ifTrue.body.slice(0, ifTrue.body.length - 1), compileAppendSemicolon));
var compiledIfTrueLast = compileNode(ifTrue.body[ifTrue.body.length - 1]);
var compiledIfTrueEndComments = "";
if(ifTrue.comments.length) {
compiledIfTrueEndComments = getIndent() + _.map(ifTrue.comments, compileNode).join("\n" + getIndent()) + "\n";
}
var compiledIfFalseInit = joinIndent(_.map(ifFalse.body.slice(0, ifFalse.body.length - 1), compileAppendSemicolon));
var compiledIfFalseLast = compileNode(ifFalse.body[ifFalse.body.length - 1]);
var compiledIfFalseEndComments = "";
if(ifFalse.comments.length) {
compiledIfFalseEndComments = getIndent() + _.map(ifFalse.comments, compileNode).join("\n" + getIndent()) + "\n";
}
popIndent();
popIndent();
return "(function() {\n" +
getIndent(1) + "if(" + compiledCondition + ") {\n" +
getIndent(2) + compiledIfTrueInit + "return " + compiledIfTrueLast + ";\n" + compiledIfTrueEndComments +
getIndent(1) + "} else {\n" +
getIndent(2) + compiledIfFalseInit + "return " + compiledIfFalseLast + ";\n" + compiledIfFalseEndComments +
getIndent(1) + "}\n" +
getIndent() + "})()";
},
// Let binding to JavaScript variable.
visitLet: function() {
return "var " + n.name + " = " + compileNode(n.value);
},
visitInstance: function() {
return "var " + n.name + " = " + compileNode(n.object);
},
visitAssignment: function() {
return compileNode(n.name) + " = " + compileNode(n.value) + ";";
},
visitData: function() {
var defs = _.map(n.tags, compileNode);
return defs.join(";\n");
},
visitExpression: function() {
// No need to retain parentheses for operations of higher
// precedence in JS
if(n.value instanceof nodes.Function || n.value instanceof nodes.Call) {
return compileNode(n.value);
}
return '(' + compileNode(n.value) + ')';
},
visitReplacement: function() {
return n.value;
},
visitQuoted: function() {
var serializeNode = {
visitReplacement: function(v) {
return "new nodes.Replacement(" + compileNode(v.value) + ")";
},
visitIdentifier: function(v) {
return "new nodes.Identifier(" + JSON.stringify(v.value) + ")";
},
visitAccess: function(v) {
return "new nodes.Access(" + serialize(v.value) + ", " + JSON.stringify(v.property) + ")";
},
visitPropertyAccess: function(v) {
return "new nodes.PropertyAccess(" + serialize(v.value) + ", " + JSON.stringify(v.property) + ")";
},
visitCall: function(v) {
return "new nodes.Call(" + serialize(v.func) + ", [" + _.map(v.args, serialize).join(', ') + "])";
}
};
var serialize = function(v) {
return v.accept(serializeNode);
};
return serialize(n.value);
},
visitReturn: function() {
return "__monad__.return(" + compileNode(n.value) + ");";
},
visitBind: function() {
var init = n.rest.slice(0, n.rest.length - 1);
var last = n.rest[n.rest.length - 1];
return "__monad__.bind(" + compileNode(n.value) +
", function(" + n.name + ") {\n" + pushIndent() +
_.map(init, compileNode).join(";\n" + getIndent()) + "\n" +
getIndent() + "return " + compileNode(last) + "\n" +
popIndent() + "});";
},
visitDo: function() {
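// Statements before the first bind run eagerly; each bind's `rest`
// captures the statements up to and including the next bind, so the
// chain below nests as __monad__.bind continuations.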
var compiledInit = [];
var firstBind;
var lastBind;
var lastBindIndex = 0;
_.each(n.body, function(node, i) {
if(node instanceof nodes.Bind) {
if(!lastBind) {
firstBind = node;
} else {
lastBind.rest = n.body.slice(lastBindIndex + 1, i + 1);
}
lastBindIndex = i;
lastBind = node;
} else {
if(!lastBind) {
compiledInit.push(compileNode(node));
}
}
});
if(lastBind) {
lastBind.rest = n.body.slice(lastBindIndex + 1);
}
return "(function(){\n" + pushIndent() + "var __monad__ = " +
compileNode(n.value) + ";\n" + getIndent() +
(!firstBind ? 'return ' : '') + compiledInit.join(';\n' + getIndent()) + '\n' + getIndent() +
(firstBind ? 'return ' + compileNode(firstBind) : '') + "\n" +
popIndent() + "})()";
},
visitTag: function() {
var args = _.map(n.vars, function(v, i) {
return v.value + "_" + i;
});
var setters = _.map(args, function(v, i) {
return "this._" + i + " = " + v;
});
pushIndent();
var constructorString = "if(!(this instanceof " + n.name + ")) {\n" + getIndent(1) + "return new " + n.name + "(" + args.join(", ") + ");\n" + getIndent() + "}";
var settersString = (setters.length === 0 ? "" : "\n" + getIndent() + setters.join(";\n" + getIndent()) + ";");
popIndent();
return "var " + n.name + " = function(" + args.join(", ") + ") {\n" + getIndent(1) + constructorString + settersString + getIndent() + "\n}";
},
visitMatch: function() {
var flatMap = function(a, f) {
return _.flatten(_.map(a, f));
};
var pathConditions = _.map(n.cases, function(c) {
var getVars = function(pattern, varPath) {
return flatMap(pattern.vars, function(a, i) {
var nextVarPath = varPath.slice();
nextVarPath.push(i);
return a.accept({
visitIdentifier: function() {
if(a.value == '_') return [];
var accessors = _.map(nextVarPath, function(x) {
return "._" + x;
}).join('');
return ["var " + a.value + " = " + compileNode(n.value) + accessors + ";"];
},
visitPattern: function() {
return getVars(a, nextVarPath);
}
});
});
};
var vars = getVars(c.pattern, []);
var getTagPaths = function(pattern, patternPath) {
return flatMap(pattern.vars, function(a, i) {
var nextPatternPath = patternPath.slice();
nextPatternPath.push(i);
return a.accept({
visitIdentifier: function() {
return [];
},
visitPattern: function() {
var inner = getTagPaths(a, nextPatternPath);
inner.unshift({path: nextPatternPath, tag: a.tag});
return inner;
}
});
});
};
var tagPaths = getTagPaths(c.pattern, []);
var compiledValue = compileNode(n.value);
var extraConditions = _.map(tagPaths, function(e) {
return ' && ' + compiledValue + '._' + e.path.join('._') + ' instanceof ' + e.tag.value;
}).join('');
// More specific patterns need to appear first
// Need to sort by the length of the path
var maxTagPath = _.max(tagPaths, function(t) {
return t.path.length;
});
var maxPath = maxTagPath ? maxTagPath.path : [];
return {
path: maxPath,
condition: "if(" + compiledValue + " instanceof " + c.pattern.tag.value +
extraConditions + ") {\n" + getIndent(2) +
joinIndent(vars, 2) + "return " + compileNode(c.value) +
";\n" + getIndent(1) + "}"
};
});
var cases = _.map(_.sortBy(pathConditions, function(t) {
return -t.path.length;
}), function(e) {
return e.condition;
});
return "(function() {\n" + getIndent(1) + cases.join(" else ") + "\n" + getIndent() + "})()";
},
// Call to JavaScript call.
visitCall: function() {
var typeClasses = '';
if(n.typeClassInstance) {
typeClasses = n.typeClassInstance + ', ';
}
if(n.func.value == 'import') {
return importModule(JSON.parse(n.args[0].value), env, opts);
}
return compileNode(n.func) + "(" + typeClasses + _.map(n.args, compileNode).join(", ") + ")";
},
visitPropertyAccess: function() {
return compileNode(n.value) + "." + n.property;
},
visitAccess: function() {
return compileNode(n.value) + "[" + compileNode(n.property) + "]";
},
visitUnaryBooleanOperator: function() {
return [n.name, compileNode(n.value)].join(" ");
},
visitBinaryGenericOperator: function() {
return [compileNode(n.left), n.name, compileNode(n.right)].join(" ");
},
visitBinaryNumberOperator: function() {
return [compileNode(n.left), n.name, compileNode(n.right)].join(" ");
},
visitBinaryBooleanOperator: function() {
return [compileNode(n.left), n.name, compileNode(n.right)].join(" ");
},
visitBinaryStringOperator: function() {
return [compileNode(n.left), n.name, compileNode(n.right)].join(" ");
},
visitWith: function() {
var args = compileNode(n.left) + ', ' + compileNode(n.right);
var inner = _.map(['__l__', '__r__'], function(name) {
return 'for(__n__ in ' + name + ') {\n' + getIndent(2) + '__o__[__n__] = ' + name + '[__n__];\n' + getIndent(1) + '}';
});
return joinIndent(['(function(__l__, __r__) {', 'var __o__ = {}, __n__;'], 1) + joinIndent(inner, 1) + 'return __o__;\n' + getIndent() + '})(' + args + ')';
},
// Print all other nodes directly.
visitComment: function() {
return n.value;
},
visitIdentifier: function() {
var typeClassAccessor = '';
if(n.typeClassInstance) {
typeClassAccessor = n.typeClassInstance + '.';
}
return typeClassAccessor + n.value;
},
visitNumber: function() {
return n.value;
},
visitString: function() {
return n.value;
},
visitBoolean: function() {
return n.value;
},
visitUnit: function() {
return "null";
},
visitArray: function() {
return '[' + _.map(n.values, compileNode).join(', ') + ']';
},
visitTuple: function() {
return '[' + _.map(n.values, compileNode).join(', ') + ']';
},
visitObject: function() {
var key;
var pairs = [];
pushIndent();
for(key in n.values) {
pairs.push("\"" + key + "\": " + compileNode(n.values[key]));
}
return "{\n" + getIndent() + pairs.join(",\n" + getIndent()) + "\n" + popIndent() + "}";
}
});
};
exports.compileNodeWithEnv = compileNodeWithEnv;
var compile = function(source, env, aliases, opts) {
if(!env) env = {};
if(!aliases) aliases = {};
if(!opts) opts = {};
if(!opts.exported) opts.exported = {};
// Parse the file to an AST.
var tokens = lexer.tokenise(source);
var ast = parser.parse(tokens);
ast = macroexpand(ast, env, opts);
// Typecheck the AST. Any type errors will throw an exception.
var resultType = typecheck(ast, env, aliases);
// Export types
ast = _.map(ast, function(n) {
if(n instanceof nodes.Call && n.func.value == 'export') {
return exportType(n.args[0], env, opts.exported, opts.nodejs);
}
return n;
});
var output = [];<|fim▁hole|>
if(opts.strict) {
output.push('"use strict";');
}
var outputLine = output.length + 1;
_.each(ast, function(v) {
var compiled = compileNodeWithEnv(v, env, opts),
j, lineCount;
if(compiled) {
lineCount = compiled.split('\n').length;
if(opts.sourceMap && v.lineno > 1) {
opts.sourceMap.addMapping({
source: opts.filename,
original: {
line: v.lineno,
column: 0
},
generated: {
line: outputLine,
column: 0
}
});
}
outputLine += lineCount;
output.push(compiled + (v instanceof nodes.Comment ? '' : ';'));
}
});
if(!opts.nodejs) {
output.push("})();");
}
// Add a newline at the end
output.push("");
return {type: resultType, output: output.join('\n')};
};
exports.compile = compile;
var getSandbox = function() {
var sandbox = {require: require, exports: exports};
var name;
for(name in global) {
sandbox[name] = global[name];
}
return sandbox;
};
var getFileContents = function(filename) {
var fs = require('fs'),
exts = ["", ".roy", ".lroy"],
filenames = _.map(exts, function(ext){
return filename + ext;
}),
foundFilename,
source,
err,
i;
// Check to see if an extension is specified; if so, don't bother
// checking the others
if (/\..+$/.test(filename)) {
source = fs.readFileSync(filename, 'utf8');
filenames = [filename];
} else {
foundFilename = _.find(filenames, function(filename) {
return fs.existsSync(filename);
});
if(foundFilename) {
source = fs.readFileSync(foundFilename, 'utf8');
}
}
if(!source) {
throw new Error("File(s) not found: " + filenames.join(", "));
}
return source;
};
var nodeRepl = function(opts) {
var readline = require('readline'),
path = require('path'),
vm = require('vm'),
prettyPrint = require('./prettyprint').prettyPrint;
var stdout = process.stdout;
var stdin = process.openStdin();
var repl = readline.createInterface(stdin, stdout);
var env = {};
var sources = {};
var aliases = {};
var sandbox = getSandbox();
var colorLog = function(color) {
var args = [].slice.call(arguments, 1);
if(opts.colorConsole) {
args[0] = '\u001b[' + color + 'm' + args[0];
args[args.length - 1] = args[args.length - 1] + '\u001b[0m';
}
console.log.apply(console, args);
};
// Include the standard library
var fs = require('fs');
var prelude = fs.readFileSync(path.dirname(__dirname) + '/lib/prelude.roy', 'utf8');
vm.runInNewContext(compile(prelude, env, {}, {nodejs: true}).output, sandbox, 'eval');
repl.setPrompt('roy> ');
repl.on('close', function() {
stdin.destroy();
});
repl.on('line', function(line) {
var compiled;
var output;
var filename;
var source;
var tokens;
var ast;
// Check for a "metacommand"
// e.g. ":q" or ":l test.roy"
var metacommand = line.replace(/^\s+/, '').split(' ');
try {
switch(metacommand[0]) {
case ":q":
// Exit
process.exit();
break;
case ":l":
// Load
filename = metacommand[1];
source = getFileContents(filename);
compiled = compile(source, env, aliases, {nodejs: true, filename: ".", run: true});
break;
case ":t":
if(metacommand[1] in env) {
console.log(env[metacommand[1]].toString());
} else {
colorLog(33, metacommand[1], "is not defined.");
}
break;
case ":s":
// Source
if(sources[metacommand[1]]) {
colorLog(33, metacommand[1], "=", prettyPrint(sources[metacommand[1]]));
} else {
if(metacommand[1]) {
colorLog(33, metacommand[1], "is not defined.");
} else {
console.log("Usage: :s [identifier]");
console.log(":s [identifier] :: show the original code for an identifier.");
}
}
break;
case ":?":
// Help
colorLog(32, "Commands available from the prompt");
console.log(":l -- load and run an external file");
console.log(":q -- exit REPL");
console.log(":s -- show original code about identifier");
console.log(":t -- show the type of the identifier");
console.log(":? -- show help");
break;
default:
// The line isn't a metacommand
// Remember the source if it's a binding
tokens = lexer.tokenise(line);
ast = parser.parse(tokens);
ast[0].accept({
visitLet: function(n) {
sources[n.name] = n.value;
}
});
// Just eval it
compiled = compile(line, env, aliases, {nodejs: true, filename: ".", run: true});
break;
}
if(compiled) {
output = vm.runInNewContext(compiled.output, sandbox, 'eval');
if(typeof output != 'undefined') {
colorLog(32, (typeof output == 'object' ? JSON.stringify(output) : output) + " : " + compiled.type);
}
}
} catch(e) {
colorLog(31, (e.stack || e.toString()));
}
repl.prompt();
});
repl.prompt();
};
var writeModule = function(env, exported, filename) {
var fs = require('fs');
var moduleOutput = _.map(exported, function(v, k) {
if(v instanceof types.TagType) {
return 'type ' + v.toString().replace(/#/g, '');
}
return k + ': ' + v.toString();
}).join('\n') + '\n';
fs.writeFile(filename, moduleOutput, 'utf8');
};
var importModule = function(name, env, opts) {
var addTypesToEnv = function(moduleTypes) {
_.each(moduleTypes, function(v, k) {
var dataType = [new types.TagNameType(k)];
_.each(function() {
dataType.push(new types.Variable());
});
env[k] = new types.TagType(dataType);
});
};
var moduleTypes;
if(opts.nodejs) {
// Need to convert to absolute paths for the CLI
if(opts.run) {
var path = require("path");
name = path.resolve(path.dirname(opts.filename), name);
}
moduleTypes = loadModule(name, opts);
addTypesToEnv(moduleTypes.types);
var variable = name.substr(name.lastIndexOf("/") + 1);
env[variable] = new types.Variable();
var props = {};
_.each(moduleTypes.env, function(v, k) {
props[k] = nodeToType(v, env, {});
});
env[variable] = new types.ObjectType(props);
console.log("Using sync CommonJS module:", name);
return variable + " = require(" + JSON.stringify(name) + ")";
} else {
moduleTypes = loadModule(name, opts);
addTypesToEnv(moduleTypes.types);
_.each(moduleTypes.env, function(v, k) {
env[k] = nodeToType(v, env, {});
});
if(console) console.log("Using browser module:", name);
return "";
}
};
var main = function() {
var argv = process.argv.slice(2);
// Meta-commands configuration
var opts = {
colorConsole: false
};
// Roy package information
var fs = require('fs'),
path = require('path');
var info = JSON.parse(fs.readFileSync(path.dirname(__dirname) + '/package.json', 'utf8'));
if(process.argv.length < 3) {
console.log("Roy: " + info.description);
console.log(info.author);
console.log(":? for help");
nodeRepl(opts);
return;
}
var source;
var vm;
var browserModules = false;
var run = false;
var includePrelude = true;
switch(argv[0]) {
case "-v":
case "--version":
console.log("Roy: " + info.description);
console.log(info.version);
process.exit();
break;
case "--help":
case "-h":
console.log("Roy: " + info.description + "\n");
console.log("-v : show current version");
console.log("-r [file] : run Roy-code without JavaScript output");
console.log("-p : run without prelude (standard library)");
console.log("-c : colorful REPL mode");
console.log("-h : show this help");
return;
case "--stdio":
source = '';
process.stdin.resume();
process.stdin.setEncoding('utf8');
process.stdin.on('data', function(data) {
source += data;
});
process.stdin.on('end', function() {
console.log(compile(source).output);
});
return;
case "-p":
includePrelude = false;
/* falls through */
case "-r":
vm = require('vm');
run = true;
argv.shift();
break;
case "-b":
case "--browser":
browserModules = true;
argv.shift();
break;
case "-c":
case "--color":
opts.colorConsole = true;
nodeRepl(opts);
return;
}
var extensions = /\.l?roy$/;
var literateExtension = /\.lroy$/;
var exported;
var env = {};
var aliases = {};
var sandbox = getSandbox();
if(run) {
// Include the standard library
if(includePrelude) {
argv.unshift(path.dirname(__dirname) + '/lib/prelude.roy');
}
} else {
var modules = [];
if(!argv.length || argv[0] != 'lib/prelude.roy') {
modules.push(path.dirname(__dirname) + '/lib/prelude');
}
_.each(modules, function(module) {
var moduleTypes = loadModule(module, {filename: '.'});
_.each(moduleTypes.env, function(v, k) {
env[k] = new types.Variable();
env[k] = nodeToType(v, env, aliases);
});
});
}
_.each(argv, function(filename) {
// Read the file content.
var source = getFileContents(filename);
if(filename.match(literateExtension)) {
// Strip out the Markdown.
source = source.match(/^ {4,}.+$/mg).join('\n').replace(/^ {4}/gm, '');
} else {
console.assert(filename.match(extensions), 'Filename must end with ".roy" or ".lroy"');
}
exported = {};
var outputPath = filename.replace(extensions, '.js');
var SourceMapGenerator = require('source-map').SourceMapGenerator;
var sourceMap = new SourceMapGenerator({file: path.basename(outputPath)});
var compiled = compile(source, env, aliases, {
nodejs: !browserModules,
filename: filename,
run: run,
exported: exported,
sourceMap: sourceMap
});
if(run) {
// Execute the JavaScript output.
vm.runInNewContext(compiled.output, sandbox, 'eval');
} else {
// Write the JavaScript output.
fs.writeFile(outputPath, compiled.output + '//@ sourceMappingURL=' + path.basename(outputPath) + '.map\n', 'utf8');
fs.writeFile(outputPath + '.map', sourceMap.toString(), 'utf8');
writeModule(env, exported, filename.replace(extensions, '.roym'));
}
});
};
exports.main = main;
if(exports && !module.parent) {
main();
}<|fim▁end|> |
if(!opts.nodejs) {
output.push("(function() {");
} |
<|file_name|>items.py<|end_file_name|><|fim▁begin|># Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html<|fim▁hole|>class PopularArticlesItem(Item):
# define the fields for your item here like:
# name = Field()
pass<|fim▁end|> |
from scrapy.item import Item, Field
|
<|file_name|>assembly.py<|end_file_name|><|fim▁begin|>'''
Opcode d(11:8)   Operand d(7:0)                                         Operation
0                8 bits representing a constant                         Load constant to Reg0
1                8 bits representing a constant                         Load constant to Reg1
2                d7 selects register Reg0 or Reg1                       Load value of selected register to the ALU accumulator
3                d7 selects register Reg0 or Reg1                       Add value of selected register to ALU accumulator and store result in accumulator
4                d7 selects register Reg0 or Reg1                       Subtract value of selected register from ALU accumulator and store result in accumulator
5                Not used                                               Accumulator shift right
6                Not used                                               Accumulator shift left
7                d7 selects register Reg0 or Reg1                       AND accumulator with selected register and store result in accumulator
8                d7 selects register Reg0 or Reg1                       OR accumulator with selected register and store result in accumulator
9                Not used                                               Invert Accumulator bits
A                8 bits representing an address in instruction memory   Jump to address
B                8 bits representing an address in instruction memory   Jump to address if Accumulator is all zeros
C                8 bits representing an address in instruction memory   Jump subroutine (program counter value is stored for the subroutine return)
D                Not used                                               Return from subroutine (restore value of program counter)
E                d(3:0) selects Reg0 '000', Reg1 '001', output port P1 '010', output port P2 '011', or UART transmit register '100'   Write value in accumulator to selected destination
F                d7 selects register Reg0 or Reg1                       Store UART received byte into selected register
'''
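# Worked example (illustrative): "load 25 Reg0" selects opcode 0000 with
# constant 00011001, assembling to 000000011001; the default hex output
# below formats this as "0X019".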
# Imports
import argparse;
import os;
# Constants and Globals
FILE_EXTENSION = str('ass');
DOT_FILE_EXTENSION = str('.{}'.format(FILE_EXTENSION));
CONSTANT_MAX = 255; # Largest number in 8 unsigned bits
CONSTANT_MIN = 0; # Only using unsigned bits
ADDRESS_MAX = 255; # Largest address
ADDRESS_MIN = 0; # Can't go lower than zero
INSTRUCTION_LENGTH_DICT = {
'load' : 3,
'move' : 2,
'add' : 2,
'sub' : 2,
'sr' : 1,
'sl' : 1,
'and' : 2,
'or' : 2,
'inv' : 1,
'j' : 2,
'jaz' : 2,
'jal' : 2,
'jr' : 1,
'wri' : 2,
'str' : 2
};
# Helper functions
def doExit(error):
print "ERROR: {}".format(error);
print "Exiting..."
os.sys.exit(1);
def printInstructions():
print """Instruction Set:
load [Constant] [Reg0, Reg1] {load constant to register}
move [Reg0, Reg1] {To Accum}
add [Reg0, Reg1] {To Accum}
sub [Reg0, Reg1] {From Accum}
sl {Shift accum left}
sr {Shift accum right}
and [Reg0, Reg1] {With Accum}
or [Reg0, Reg1] {With Accum}
inv {Invert Accum}
j [Address] {Jump to address}
jaz [Address] {Jump to address if accum zero}
jal [Address] {Jump and link (sub routine)}
jr {Jump return (From sub routine)}
wri [Reg0, Reg1, P1, P2, Tx] {Write accum to register}
str [Reg0, Reg1] {Store Rx from UART into register}
Examples:
load 25 Reg0 #Comment, Comment, Comment
load 1 Reg1
move Reg0
add Reg1
sub Reg0
wri P1 # p+++++++
List of Register:
Reg0: General Purpose Register
Reg1: General Purpose Register
P1: Register reading into first digit of seven segment display
P2: Register reading into second digit of seven segment display
UART: UART send register
Rx: UART receive register"""
return;
def getInstructionCode(lineList, LineCount):
'''Return the instruction as a binary value'''
instruction = '{0:012b}'.format(0);
if(len(lineList) != INSTRUCTION_LENGTH_DICT[lineList[0]]):
doExit("Invalid number of arguments to {0} instruciton on line {1}".format(lineList[0], LineCount));
code = lineList[0];
try:
if(code == 'load'):
# Test bounds of constant
if(int(lineList[1]) > CONSTANT_MAX or int(lineList[1]) < CONSTANT_MIN):
doExit("Invalid constant range for instruction on line {}".format(LineCount));
else:
#Determine which register we are storing it in
if(lineList[2] == 'reg0'):
instruction = '0000' + '{:08b}'.format(int(lineList[1]));
elif(lineList[2] == 'reg1'):
instruction = '0001' + '{:08b}'.format(int(lineList[1]));
else:
doExit("Unkown register {0} for load instruciton on line {1}".format(lineList[2], LineCount));
elif(code == 'move'):
#Determine which register we are storing it in
if(lineList[1] == 'reg0'):
instruction = '0010' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '0010' + '10000000';
else:
doExit("Unkown register {0} for move instruciton on line {1}".format(lineList[1], LineCount));
elif(code == 'add'):
if(lineList[1] == 'reg0'):
instruction = '0011' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '0011' + '10000000';
else:
doExit("Unkown register {0} for add instruciton on line {1}".format(lineList[1], LineCount));
elif(code == 'sub'):
if(lineList[1] == 'reg0'):
instruction = '0100' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '0100' + '10000000';
else:
doExit("Unkown register {0} for sub instruciton on line {1}".format(lineList[1], LineCount));
elif(code == 'sr'):
instruction = '0101' + '00000000';
elif(code == 'sl'):
instruction = '0110' + '00000000';
elif(code == 'and'):
if(lineList[1] == 'reg0'):
instruction = '0111' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '0111' + '10000000';
else:
doExit("Unkown register {0} for and instruciton on line {1}".format(lineList[1], LineCount));
elif(code == 'or'):
if(lineList[1] == 'reg0'):
instruction = '1000' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '1000' + '10000000';
else:
doExit("Unkown register {0} for or instruciton on line {1}".format(lineList[1], LineCount));
elif(code == 'inv'):
instruction = '1001' + '00000000';
elif(code == 'j'):
if(int(lineList[1]) > ADDRESS_MAX or int(lineList[1]) < ADDRESS_MIN):
doExit("Invalid address range for instruction on line {}".format(LineCount));
else:
instruction = '1010' + '{:08b}'.format(int(lineList[1]));
elif(code == 'jaz'):
if(int(lineList[1]) > ADDRESS_MAX or int(lineList[1]) < ADDRESS_MIN):
doExit("Invalid address range for instruction on line {}".format(LineCount));
else:
instruction = '1011' + '{:08b}'.format(int(lineList[1]));
elif(code == 'jal'):
if(int(lineList[1]) > ADDRESS_MAX or int(lineList[1]) < ADDRESS_MIN):
doExit("Invalid address range for instruction on line {}".format(LineCount));
else:
instruction = '1100' + '{:08b}'.format(int(lineList[1]));
elif(code == 'jr'):
instruction = '1101' + '00000000';
elif(code == 'wri'):
if(lineList[1] == 'reg0'):
instruction = '1110' + '00000' + '000';
elif(lineList[1] == 'reg1'):
instruction = '1110' + '00000' + '001';
elif(lineList[1] == 'p1'):
instruction = '1110' + '00000' + '010';
elif(lineList[1] == 'p2'):
instruction = '1110' + '00000' + '011';
elif(lineList[1] == 'tx'):
instruction = '1110' + '00000' + '100';
else:
doExit("Unkown register {0} for wri instruciton on line {1}".format(lineList[2], LineCount));
elif(code == 'str'):
if(lineList[1] == 'reg0'):
instruction = '1111' + '00000000';
elif(lineList[1] == 'reg1'):
instruction = '1111' + '10000000';
else:
doExit("Unkown register {0} for str instruciton on line {1}".format(lineList[1], LineCount));
except Exception: # a bare except would also swallow the SystemExit raised by doExit above
doExit("Unknown code {0} occurred on line {1}".format(lineList, LineCount));
return instruction;
def fixString(line):
'''Fix the string we receive for any not wanted characters'''
# Remove comments, deal with empty lines # Example ' LoAd 123 Reg0 #Mean #Comment'
if('#' in line): # ' LoAd 123 Reg0 '
line = line[0:line.find('#')];
line = line.strip(); #Remove leading and trailing whitespace # 'LoAd 123 Reg0'
# If empty, continue
if(line == ''):
return [];
# Remove cases where there are extra spaces between characters
lineList = [c for c in line.lower().split(' ')];
while '' in lineList:
lineList.remove('') # ['load','123','reg0']
return lineList;
# Parse Arguments
parser = argparse.ArgumentParser( description='An assembler for a small processor build in ENEL453',
version=0.1
);
parser.add_argument( 'filename',
help='File name of the file you would like to convert to assembly',
nargs='*',
);
parser.add_argument( '-i', '--instructions',
action='store_true',
help='Print Instructions and exit'
);
parser.add_argument( '-b', '--binary',
help='Output in binary, default is hex',
action='store_true',
default=False
);
args = parser.parse_args();
if args.instructions:<|fim▁hole|>
if len(args.filename) == 0:
parser.print_help();
doExit("Did not receive a file to assemble");
for name in args.filename:
# Attempt to open output file
try:
out = open(os.path.splitext(name)[0] + DOT_FILE_EXTENSION, 'w');
except IOError as e:
doExit("I/O error({0}): {1}".format(e.errno, e.strerror));
except ValueError:
doExit("Unable to convert file names to strings");
# Read line by line to parse
with open(name) as f:
# Keep a line counter for error handling
LineCount = int(0);
# Loop though lines handling them all
for line in f.readlines():
LineCount += 1;
lineList = fixString(line);
if(lineList == []):
continue;
instructionCode = getInstructionCode(lineList, LineCount);
if(args.binary == False):
instructionCode = "{0:#05X}".format(int(instructionCode,2));
out.write(instructionCode);
out.write('\n');
out.close();<|fim▁end|> | printInstructions(); |
<|file_name|>test_mxne_inverse.py<|end_file_name|><|fim▁begin|># Author: Alexandre Gramfort <[email protected]>
# Daniel Strohmeier <[email protected]>
#
# License: Simplified BSD
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_array_less, assert_array_equal)
import pytest
<|fim▁hole|>from mne import (read_cov, read_forward_solution, read_evokeds,
convert_forward_solution)
from mne.inverse_sparse import mixed_norm, tf_mixed_norm
from mne.inverse_sparse.mxne_inverse import make_stc_from_dipoles, _split_gof
from mne.inverse_sparse.mxne_inverse import _compute_mxne_sure
from mne.inverse_sparse.mxne_optim import norm_l2inf
from mne.minimum_norm import apply_inverse, make_inverse_operator
from mne.minimum_norm.tests.test_inverse import \
assert_var_exp_log, assert_stc_res
from mne.utils import assert_stcs_equal, catch_logging, _record_warnings
from mne.dipole import Dipole
from mne.source_estimate import VolSourceEstimate
from mne.simulation import simulate_sparse_stc, simulate_evoked
data_path = testing.data_path(download=False)
# NOTE: These use the ave and cov from sample dataset (no _trunc)
fname_data = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif')
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
label = 'Aud-rh'
fname_label = op.join(data_path, 'MEG', 'sample', 'labels', '%s.label' % label)
@pytest.fixture(scope='module', params=[testing._pytest_param])
def forward():
"""Get a forward solution."""
# module scope it for speed (but don't overwrite in use!)
return read_forward_solution(fname_fwd)
@testing.requires_testing_data
@pytest.mark.timeout(150) # ~30 sec on Travis Linux
@pytest.mark.slowtest
def test_mxne_inverse_standard(forward):
"""Test (TF-)MxNE inverse computation."""
# Read noise covariance matrix
cov = read_cov(fname_cov)
# Handling average file
loose = 0.0
depth = 0.9
evoked = read_evokeds(fname_data, condition=0, baseline=(None, 0))
evoked.crop(tmin=-0.05, tmax=0.2)
evoked_l21 = evoked.copy()
evoked_l21.crop(tmin=0.081, tmax=0.1)
label = read_label(fname_label)
assert label.hemi == 'rh'
forward = convert_forward_solution(forward, surf_ori=True)
# Reduce source space to make test computation faster
inverse_operator = make_inverse_operator(evoked_l21.info, forward, cov,
loose=loose, depth=depth,
fixed=True, use_cps=True)
stc_dspm = apply_inverse(evoked_l21, inverse_operator, lambda2=1. / 9.,
method='dSPM')
stc_dspm.data[np.abs(stc_dspm.data) < 12] = 0.0
stc_dspm.data[np.abs(stc_dspm.data) >= 12] = 1.
weights_min = 0.5
# MxNE tests
alpha = 70 # spatial regularization parameter
with _record_warnings(): # CD
stc_cd = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8,
active_set_size=10, weights=stc_dspm,
weights_min=weights_min, solver='cd')
stc_bcd = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='bcd')
assert_array_almost_equal(stc_cd.times, evoked_l21.times, 5)
assert_array_almost_equal(stc_bcd.times, evoked_l21.times, 5)
assert_allclose(stc_cd.data, stc_bcd.data, rtol=1e-3, atol=0.0)
assert stc_cd.vertices[1][0] in label.vertices
assert stc_bcd.vertices[1][0] in label.vertices
# vector
with _record_warnings(): # no convergence
stc = mixed_norm(evoked_l21, forward, cov, alpha, loose=1, maxit=2)
with _record_warnings(): # no convergence
stc_vec = mixed_norm(evoked_l21, forward, cov, alpha, loose=1, maxit=2,
pick_ori='vector')
assert_stcs_equal(stc_vec.magnitude(), stc)
with _record_warnings(), \
pytest.raises(ValueError, match='pick_ori='):
mixed_norm(evoked_l21, forward, cov, alpha, loose=0, maxit=2,
pick_ori='vector')
with _record_warnings(), catch_logging() as log: # CD
dips = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='cd', return_as_dipoles=True, verbose=True)
stc_dip = make_stc_from_dipoles(dips, forward['src'])
assert isinstance(dips[0], Dipole)
assert stc_dip.subject == "sample"
assert_stcs_equal(stc_cd, stc_dip)
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
# Single time point things should match
with _record_warnings(), catch_logging() as log:
dips = mixed_norm(evoked_l21.copy().crop(0.081, 0.081),
forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='cd', return_as_dipoles=True, verbose=True)
assert_var_exp_log(log.getvalue(), 37.8, 38.0) # 37.9
gof = sum(dip.gof[0] for dip in dips) # these are now partial exp vars
assert_allclose(gof, 37.9, atol=0.1)
with _record_warnings(), catch_logging() as log:
stc, res = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8,
weights=stc_dspm, # gh-6382
active_set_size=10, return_residual=True,
solver='cd', verbose=True)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert stc.vertices[1][0] in label.vertices
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
assert stc.data.min() < -1e-9 # signed
assert_stc_res(evoked_l21, stc, forward, res)
# irMxNE tests
with _record_warnings(), catch_logging() as log: # CD
stc, residual = mixed_norm(
evoked_l21, forward, cov, alpha, n_mxne_iter=5, loose=0.0001,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
solver='cd', return_residual=True, pick_ori='vector', verbose=True)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert stc.vertices[1][0] in label.vertices
assert stc.vertices == [[63152], [79017]]
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
assert_stc_res(evoked_l21, stc, forward, residual)
# Also run TF-MxNE, to test memory savings
alpha = 60. # overall regularization parameter
l1_ratio = 0.01 # temporal regularization proportion
stc, _ = tf_mixed_norm(evoked, forward, cov,
loose=loose, depth=depth, maxit=100, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, return_residual=True,
alpha=alpha, l1_ratio=l1_ratio)
assert_array_almost_equal(stc.times, evoked.times, 5)
assert stc.vertices[1][0] in label.vertices
# vector
stc_nrm = tf_mixed_norm(
evoked, forward, cov, loose=1, depth=depth, maxit=2, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, alpha=alpha, l1_ratio=l1_ratio)
stc_vec, residual = tf_mixed_norm(
evoked, forward, cov, loose=1, depth=depth, maxit=2, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, alpha=alpha, l1_ratio=l1_ratio,
pick_ori='vector', return_residual=True)
assert_stcs_equal(stc_vec.magnitude(), stc_nrm)
pytest.raises(ValueError, tf_mixed_norm, evoked, forward, cov,
alpha=101, l1_ratio=0.03)
pytest.raises(ValueError, tf_mixed_norm, evoked, forward, cov,
alpha=50., l1_ratio=1.01)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_mxne_vol_sphere():
"""Test (TF-)MxNE with a sphere forward and volumic source space."""
evoked = read_evokeds(fname_data, condition=0, baseline=(None, 0))
evoked.crop(tmin=-0.05, tmax=0.2)
cov = read_cov(fname_cov)
evoked_l21 = evoked.copy()
evoked_l21.crop(tmin=0.081, tmax=0.1)
info = evoked.info
sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=0.080)
src = mne.setup_volume_source_space(subject=None, pos=15., mri=None,
sphere=(0.0, 0.0, 0.0, 0.08),
bem=None, mindist=5.0,
exclude=2.0, sphere_units='m')
fwd = mne.make_forward_solution(info, trans=None, src=src,
bem=sphere, eeg=False, meg=True)
alpha = 80.
pytest.raises(ValueError, mixed_norm, evoked, fwd, cov, alpha,
loose=0.0, return_residual=False,
maxit=3, tol=1e-8, active_set_size=10)
pytest.raises(ValueError, mixed_norm, evoked, fwd, cov, alpha,
loose=0.2, return_residual=False,
maxit=3, tol=1e-8, active_set_size=10)
# irMxNE tests
with catch_logging() as log:
stc = mixed_norm(evoked_l21, fwd, cov, alpha,
n_mxne_iter=1, maxit=30, tol=1e-8,
active_set_size=10, verbose=True)
assert isinstance(stc, VolSourceEstimate)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert_var_exp_log(log.getvalue(), 9, 11) # 10.2
# Compare orientation obtained using fit_dipole and mixed_norm
# for a simulated evoked containing a single dipole
stc = mne.VolSourceEstimate(50e-9 * np.random.RandomState(42).randn(1, 4),
vertices=[stc.vertices[0][:1]],
tmin=stc.tmin,
tstep=stc.tstep)
evoked_dip = mne.simulation.simulate_evoked(fwd, stc, info, cov, nave=1e9,
use_cps=True)
dip_mxne = mixed_norm(evoked_dip, fwd, cov, alpha=80,
n_mxne_iter=1, maxit=30, tol=1e-8,
active_set_size=10, return_as_dipoles=True)
amp_max = [np.max(d.amplitude) for d in dip_mxne]
dip_mxne = dip_mxne[np.argmax(amp_max)]
assert dip_mxne.pos[0] in src[0]['rr'][stc.vertices[0]]
dip_fit = mne.fit_dipole(evoked_dip, cov, sphere)[0]
assert np.abs(np.dot(dip_fit.ori[0], dip_mxne.ori[0])) > 0.99
dist = 1000 * np.linalg.norm(dip_fit.pos[0] - dip_mxne.pos[0])
assert dist < 4. # within 4 mm
# Also run TF-MxNE, to test memory savings
alpha = 60. # overall regularization parameter
l1_ratio = 0.01 # temporal regularization proportion
stc, _ = tf_mixed_norm(evoked, fwd, cov, maxit=3, tol=1e-4,
tstep=16, wsize=32, window=0.1, alpha=alpha,
l1_ratio=l1_ratio, return_residual=True)
assert isinstance(stc, VolSourceEstimate)
assert_array_almost_equal(stc.times, evoked.times, 5)
@pytest.mark.parametrize('mod', (
None, 'mult', 'augment', 'sign', 'zero', 'less'))
def test_split_gof_basic(mod):
"""Test splitting the goodness of fit."""
# first a trivial case
gain = np.array([[0., 1., 1.], [1., 1., 0.]]).T
M = np.ones((3, 1))
X = np.ones((2, 1))
M_est = gain @ X
assert_allclose(M_est, np.array([[1., 2., 1.]]).T) # a reasonable estimate
if mod == 'mult':
gain *= [1., -0.5]
X[1] *= -2
elif mod == 'augment':
gain = np.concatenate((gain, np.zeros((3, 1))), axis=1)
X = np.concatenate((X, [[1.]]))
elif mod == 'sign':
gain[1] *= -1
M[1] *= -1
M_est[1] *= -1
elif mod in ('zero', 'less'):
gain = np.array([[1, 1., 1.], [1., 1., 1.]]).T
if mod == 'zero':
X[:, 0] = [1., 0.]
else:
X[:, 0] = [1., 0.5]
M_est = gain @ X
else:
assert mod is None
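    # Reference GOF: percentage of data power explained by the estimate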
res = M - M_est
gof = 100 * (1. - (res * res).sum() / (M * M).sum())
gof_split = _split_gof(M, X, gain)
assert_allclose(gof_split.sum(), gof)
want = gof_split[[0, 0]]
if mod == 'augment':
want = np.concatenate((want, [[0]]))
if mod in ('mult', 'less'):
assert_array_less(gof_split[1], gof_split[0])
elif mod == 'zero':
assert_allclose(gof_split[0], gof_split.sum(0))
assert_allclose(gof_split[1], 0., atol=1e-6)
else:
assert_allclose(gof_split, want, atol=1e-12)
@testing.requires_testing_data
@pytest.mark.parametrize('idx, weights', [
# empirically determined approximately orthogonal columns: 0, 15157, 19448
([0], [1]),
([0, 15157], [1, 1]),
([0, 15157], [1, 3]),
([0, 15157], [5, -1]),
([0, 15157, 19448], [1, 1, 1]),
([0, 15157, 19448], [1e-2, 1, 5]),
])
def test_split_gof_meg(forward, idx, weights):
"""Test GOF splitting on MEG data."""
gain = forward['sol']['data'][:, idx]
# close to orthogonal
norms = np.linalg.norm(gain, axis=0)
triu = np.triu_indices(len(idx), 1)
prods = np.abs(np.dot(gain.T, gain) / np.outer(norms, norms))[triu]
assert_array_less(prods, 5e-3) # approximately orthogonal
# first, split across time (one dipole per time point)
M = gain * weights
gof_split = _split_gof(M, np.diag(weights), gain)
assert_allclose(gof_split.sum(0), 100., atol=1e-5) # all sum to 100
assert_allclose(gof_split, 100 * np.eye(len(weights)), atol=1) # loc
# next, summed to a single time point (all dipoles active at one time pt)
weights = np.array(weights)[:, np.newaxis]
x = gain @ weights
assert x.shape == (gain.shape[0], 1)
gof_split = _split_gof(x, weights, gain)
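    # each column should claim a share proportional to the energy it captures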
want = (norms * weights.T).T ** 2
want = 100 * want / want.sum()
assert_allclose(gof_split, want, atol=1e-3, rtol=1e-2)
assert_allclose(gof_split.sum(), 100, rtol=1e-5)
@pytest.mark.parametrize('n_sensors, n_dipoles, n_times', [
(10, 15, 7),
(20, 60, 20),
])
@pytest.mark.parametrize('nnz', [2, 4])
@pytest.mark.parametrize('corr', [0.75])
@pytest.mark.parametrize('n_orient', [1, 3])
def test_mxne_inverse_sure_synthetic(n_sensors, n_dipoles, n_times, nnz, corr,
n_orient, snr=4):
"""Tests SURE criterion for automatic alpha selection on synthetic data."""
rng = np.random.RandomState(0)
sigma = np.sqrt(1 - corr ** 2)
U = rng.randn(n_sensors)
# generate gain matrix
G = np.empty([n_sensors, n_dipoles], order='F')
G[:, :n_orient] = np.expand_dims(U, axis=-1)
n_dip_per_pos = n_dipoles // n_orient
for j in range(1, n_dip_per_pos):
U *= corr
U += sigma * rng.randn(n_sensors)
G[:, j * n_orient:(j + 1) * n_orient] = np.expand_dims(U, axis=-1)
# generate coefficient matrix
support = rng.choice(n_dip_per_pos, nnz, replace=False)
X = np.zeros((n_dipoles, n_times))
for k in support:
X[k * n_orient:(k + 1) * n_orient, :] = rng.normal(
size=(n_orient, n_times))
# generate measurement matrix
M = G @ X
noise = rng.randn(n_sensors, n_times)
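    # scale the additive noise so that M ends up with the requested SNR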
sigma = 1 / np.linalg.norm(noise) * np.linalg.norm(M) / snr
M += sigma * noise
# inverse modeling with sure
alpha_max = norm_l2inf(np.dot(G.T, M), n_orient, copy=False)
alpha_grid = np.geomspace(alpha_max, alpha_max / 10, num=15)
_, active_set, _ = _compute_mxne_sure(M, G, alpha_grid, sigma=sigma,
n_mxne_iter=5, maxit=3000, tol=1e-4,
n_orient=n_orient,
active_set_size=10, debias=True,
solver="auto", dgap_freq=10,
random_state=0, verbose=False)
assert np.count_nonzero(active_set, axis=-1) == n_orient * nnz
@pytest.mark.slowtest # slow on Azure
@testing.requires_testing_data
def test_mxne_inverse_sure():
"""Tests SURE criterion for automatic alpha selection on MEG data."""
def data_fun(times):
data = np.zeros(times.shape)
data[times >= 0] = 50e-9
return data
n_dipoles = 2
raw = mne.io.read_raw_fif(fname_raw)
info = mne.io.read_info(fname_data)
with info._unlock():
info['projs'] = []
noise_cov = mne.make_ad_hoc_cov(info)
label_names = ['Aud-lh', 'Aud-rh']
labels = [
mne.read_label(data_path / 'MEG' / 'sample' / 'labels' / f'{ln}.label')
for ln in label_names]
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
forward = mne.read_forward_solution(fname_fwd)
forward = mne.pick_types_forward(forward, meg="grad", eeg=False,
exclude=raw.info['bads'])
times = np.arange(100, dtype=np.float64) / raw.info['sfreq'] - 0.1
stc = simulate_sparse_stc(forward['src'], n_dipoles=n_dipoles, times=times,
random_state=1, labels=labels, data_fun=data_fun)
nave = 30
evoked = simulate_evoked(forward, stc, info, noise_cov, nave=nave,
use_cps=False, iir_filter=None)
evoked = evoked.crop(tmin=0, tmax=10e-3)
stc_ = mixed_norm(evoked, forward, noise_cov, loose=0.9, n_mxne_iter=5,
depth=0.9)
assert_array_equal(stc_.vertices, stc.vertices)
@pytest.mark.slowtest # slow on Azure
@testing.requires_testing_data
def test_mxne_inverse_empty():
"""Tests solver with too high alpha."""
evoked = read_evokeds(fname_data, condition=0, baseline=(None, 0))
evoked.pick("grad", exclude="bads")
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
forward = mne.read_forward_solution(fname_fwd)
forward = mne.pick_types_forward(forward, meg="grad", eeg=False,
exclude=evoked.info['bads'])
cov = read_cov(fname_cov)
with pytest.warns(RuntimeWarning, match='too big'):
stc, residual = mixed_norm(
evoked, forward, cov, n_mxne_iter=3, alpha=99,
return_residual=True)
assert stc.data.size == 0
assert stc.vertices[0].size == 0
assert stc.vertices[1].size == 0
assert_allclose(evoked.data, residual.data)<|fim▁end|> | import mne
from mne.datasets import testing
from mne.label import read_label |
<|file_name|>parse_test.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package net
import (
"bufio";
"os";
"testing";
)
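// TestReadLine cross-checks the package-private readLine against
// bufio.Reader, line by line, on a large text file.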
func TestReadLine(t *testing.T) {
filename := "/etc/services"; // a nice big file
fd, err := os.Open(filename, os.O_RDONLY, 0);
if err != nil {
t.Fatalf("open %s: %v", filename, err)
}
br := bufio.NewReader(fd);
var file *file;
file, err = open(filename);
if file == nil {
t.Fatalf("net.open(%s) = nil", filename)
}
<|fim▁hole|> bline, berr := br.ReadString('\n');
if n := len(bline); n > 0 {
bline = bline[0 : n-1]
}
line, ok := file.readLine();
if (berr != nil) != !ok || bline != line {
t.Fatalf("%s:%d (#%d)\nbufio => %q, %v\nnet => %q, %v",
filename, lineno, byteno, bline, berr, line, ok)
}
if !ok {
break
}
lineno++;
byteno += len(line) + 1;
}
}<|fim▁end|> | lineno := 1;
byteno := 0;
for { |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#
# stage.py -- Classes for pipeline stages
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from ginga.misc import Bunch
<|fim▁hole|>
class StageError(Exception):
pass
class Stage(object):
"""Class to handle a pipeline stage."""
_stagename = 'generic'
def __init__(self):
super(Stage, self).__init__()
# default name, until user changes it
self.name = str(self)
# for holding widgets
self.w = Bunch.Bunch()
self._bypass = False
# these get assigned by the owning pipeline
self.pipeline = None
self.logger = None
self.result = None
self.gui_up = False
def build_gui(self, container):
"""subclass can override this to build some kind of GUI."""
pass
def start(self):
"""subclass can override this to do any necessary setup."""
pass
def stop(self):
"""subclass can override this to do any necessary teardown."""
pass
def pause(self):
"""subclass can override this to do any necessary teardown."""
pass
def resume(self):
"""subclass can override this to do any necessary teardown."""
pass
def invalidate(self):
"""subclass can override this to do any necessary invalidation."""
pass
def bypass(self, tf):
self._bypass = tf
def verify_2d(self, data):
if data is not None and len(data.shape) < 2:
raise StageError("Expecting a 2D or greater array in final stage")
def export_as_dict(self):
d = dict(name=self.name, type=self._stagename, bypass=self._bypass)
return d
def import_from_dict(self, d):
self.name = d['name']
self._bypass = d['bypass']
def __str__(self):
return self._stagename<|fim▁end|> | #__all__ = ['Pipeline']
|
<|file_name|>progressbar-labels.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core';
@Component({
selector: 'ng2vd-progressbar-labels',<|fim▁hole|> ng2v-progressbar {
margin-top: 5rem;
}
`]
})
export class Ng2vdProgressbarLabels {
}<|fim▁end|> | templateUrl: './progressbar-labels.html',
styles: [` |
<|file_name|>0002_auto__add_field_score_created_at__add_field_score_updated_at__add_fiel.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Score.created_at'
db.add_column(u'core_score', 'created_at',
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default=datetime.datetime(2015, 1, 22, 0, 0), blank=True),
keep_default=False)
# Adding field 'Score.updated_at'
db.add_column(u'core_score', 'updated_at',
self.gf('django.db.models.fields.DateTimeField')(auto_now=True, default=datetime.datetime(2015, 1, 22, 0, 0), blank=True),
keep_default=False)
# Adding field 'Score.changed_by'
db.add_column(u'core_score', 'changed_by',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name=u'core_score_related', null=True, to=orm['auth.User']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Score.created_at'
db.delete_column(u'core_score', 'created_at')
# Deleting field 'Score.updated_at'
db.delete_column(u'core_score', 'updated_at')
# Deleting field 'Score.changed_by'
db.delete_column(u'core_score', 'changed_by_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.image': {
'Meta': {'object_name': 'Image'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'core_image_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'original_file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'core.indicator': {
'Meta': {'object_name': 'Indicator'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'core_indicator_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['forms.Form']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),<|fim▁hole|> 'title': ('django.db.models.fields.TextField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'core.location': {
'Meta': {'object_name': 'Location'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'core_location_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['core.Image']", 'null': 'True', 'blank': 'True'}),
'indicators': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['core.Indicator']", 'null': 'True', 'blank': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'position': ('geoposition.fields.GeopositionField', [], {'max_length': '42'}),
'title': ('django.db.models.fields.TextField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'core.score': {
'Meta': {'object_name': 'Score'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'core_score_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'entry_count': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Indicator']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Location']"}),
'month': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'passing': ('django.db.models.fields.BooleanField', [], {}),
'passing_entry_count': ('django.db.models.fields.IntegerField', [], {}),
'score': ('django.db.models.fields.FloatField', [], {'default': '85'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'core.userprofile': {
'Meta': {'object_name': 'UserProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'forms.form': {
'Meta': {'object_name': 'Form'},
'button_text': ('django.db.models.fields.CharField', [], {'default': "u'Submit'", 'max_length': '50'}),
'email_copies': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'email_from': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_subject': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'default': '[1]', 'to': u"orm['sites.Site']", 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['core']<|fim▁end|> | 'form_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maximum_monthly_records': ('django.db.models.fields.IntegerField', [], {'default': '20'}),
'passing_percentage': ('django.db.models.fields.FloatField', [], {'default': '85'}), |
<|file_name|>test_satellitesync.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Test class for InterSatellite Sync feature
:Requirement: Satellitesync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from robottelo.decorators import (
run_only_on,
stubbed,
tier1,
tier3,
upgrade
)
from robottelo.test import UITestCase
class InterSatelliteSyncTestCase(UITestCase):
"""Implements InterSatellite Sync tests in UI"""
@run_only_on('sat')
@stubbed()
@tier3
@upgrade
def test_positive_show_repo_export_history(self):
"""Product history shows repo export history on export.
:id: 01d82253-081b-4d11-9a5b-e6052173fe47
:steps: Export a repo to a specified location in settings.
:expectedresults: Repo/Product history should reflect the export
history with user and time.
:caseautomation: notautomated
:CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
@upgrade
def test_positive_show_cv_export_history(self):
"""CV history shows CV version export history on export.
:id: 06e26cca-e262-4eff-b8d7-fbca504a8acb
:steps: Export a CV to a specified location in settings.
:expectedresults: CV history should reflect the export history with<|fim▁hole|> user, version, action and time.
:caseautomation: notautomated
:CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_positive_update_cdn_url(self):
"""Update CDN URL to import from upstream.
:id: 5ff30764-a1b1-48df-a6a1-0f1d23f883b9
:steps:
1. In upstream, Export Redhat repo/CV to a directory.
2. Copy exported contents to /var/www/html.
3. In downstream, Update CDN URL with step 2 location to import the
Redhat contents.
4. Enable and sync the imported repo from Redhat Repositories page.
:expectedresults:
            1. The CDN URL is updated successfully.
            2. The imported repo is enabled and synced.
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier1
def test_negative_update_cdn_url(self):
"""Update non existing CDN URL to import from upstream.
:id: 4bf74712-dac8-447b-9c9f-227a41cdec4d
:steps:
            1. In downstream, update the CDN URL with a non-existing URL.
2. Attempt to Enable and sync some repo from Redhat Repositories
page.
:expectedresults:
            1. The CDN URL cannot be updated to a non-existing URL.
            2. None of the repos can be enabled or synced.
:caseautomation: notautomated
:CaseImportance: Critical
"""
@run_only_on('sat')
@stubbed()
@tier3
@upgrade
def test_positive_restrict_other_redhat_repo_import(self):
"""Restrict the import/sync of non exported repos.
:id: 7091ca13-7f58-4733-87d5-1fa3670bfcee
:steps:
            1. Export a Red Hat YUM repo to a path that will be accessible over
HTTP.
2. Define the CDN URL the same as the exported HTTP URL.
            3. Attempt to import/enable non-exported repos from the Redhat
Repositories page.
        :expectedresults: The import of non-exported repos is restricted.
:caseautomation: notautomated
:CaseLevel: System
"""<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import render_template
from app import app, db, models
import json
@app.route('/')<|fim▁hole|>def index():
# obtain today's words
# words = models.Words.query.all()
# words = list((str(word[0]), word[1]) for word in db.session.query(models.Words, db.func.count(models.Words.id).label("total")).group_by(models.Words.word).order_by("total DESC"))
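    # top 50 words by frequency, most frequent first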
data = db.session.query(models.Words, db.func.count(models.Words.id).label("total")).group_by(models.Words.word).order_by("total DESC").all()[:50]
words = [_[0].word for _ in data]
count = [_[1] for _ in data]
return render_template('index.html', words=words, count = count)<|fim▁end|> | @app.route('/index') |
<|file_name|>fetch-zero-knowledge.js<|end_file_name|><|fim▁begin|>/**
* Fetch configuration data using an API Key and the Application Secret Key.
* By doing so, the sconfig servers will just apply firewall rules (if any) and return
* the encrypted configuration data. The client will then decrypt the configuration
* data using the App Secret Key
 * Note: If no API Key or App Secret is provided, they will be looked up in process.env.SCONFIG_KEY and process.env.SCONFIG_SECRET
* */
var sconfig = require('sconfig');
sconfig({<|fim▁hole|> // json: true // expect the result data to be JSON. This is true by default.
sync: true // persist the configuration data locally in the event of an sconfig server outage
}, function(err, config) {
if (err) {
console.log(err);
return;
}
console.log("OK", config);
});<|fim▁end|> | key: '{YOUR_API_KEY}', // the 32-char version of an API Key
secret: '{YOUR_APP_SECRET}', // the 32 char secret key found under the App Details tab
//version: '{YOUR_VERSION}', // version name to fetch, defaults to latest version created |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>__author__ = 'tom'
from django.contrib import admin
from core.models import Post, Project
admin.site.register(Post)<|fim▁hole|><|fim▁end|> | admin.site.register(Project) |
<|file_name|>htmltextareaelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::element::RawLayoutElementHelpers;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::globalscope::GlobalScope;
use dom::htmlelement::HTMLElement;
use dom::htmlfieldsetelement::HTMLFieldSetElement;
use dom::htmlformelement::{FormControl, HTMLFormElement};
use dom::keyboardevent::KeyboardEvent;
use dom::node::{ChildrenMutation, Node, NodeDamage, UnbindContext};
use dom::node::{document_from_node, window_from_node};
use dom::nodelist::NodeList;
use dom::validation::Validatable;
use dom::virtualmethods::VirtualMethods;
use html5ever_atoms::LocalName;
use ipc_channel::ipc::IpcSender;
use script_traits::ScriptMsg as ConstellationMsg;
use std::cell::Cell;
use std::ops::Range;
use style::attr::AttrValue;
use style::element_state::*;
use textinput::{KeyReaction, Lines, SelectionDirection, TextInput};
#[dom_struct]
pub struct HTMLTextAreaElement {
htmlelement: HTMLElement,
#[ignore_heap_size_of = "#7193"]
textinput: DOMRefCell<TextInput<IpcSender<ConstellationMsg>>>,
placeholder: DOMRefCell<DOMString>,
// https://html.spec.whatwg.org/multipage/#concept-textarea-dirty
value_changed: Cell<bool>,
}
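// Helpers for reading textarea state from the layout thread; the unsafe
// methods rely on layout only running while script is at a safe point.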
pub trait LayoutHTMLTextAreaElementHelpers {
#[allow(unsafe_code)]
unsafe fn get_value_for_layout(self) -> String;
#[allow(unsafe_code)]
unsafe fn selection_for_layout(self) -> Option<Range<usize>>;
#[allow(unsafe_code)]
fn get_cols(self) -> u32;
#[allow(unsafe_code)]
fn get_rows(self) -> u32;
}
impl LayoutHTMLTextAreaElementHelpers for LayoutJS<HTMLTextAreaElement> {
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn get_value_for_layout(self) -> String {
let text = (*self.unsafe_get()).textinput.borrow_for_layout().get_content();
String::from(if text.is_empty() {
(*self.unsafe_get()).placeholder.borrow_for_layout().clone()
} else {
text
})
}
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn selection_for_layout(self) -> Option<Range<usize>> {
if !(*self.unsafe_get()).upcast::<Element>().focus_state() {
return None;
}
let textinput = (*self.unsafe_get()).textinput.borrow_for_layout();
Some(textinput.get_absolute_selection_range())
}
#[allow(unsafe_code)]
fn get_cols(self) -> u32 {
unsafe {
(*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("cols"))
.map_or(DEFAULT_COLS, AttrValue::as_uint)
}
}
#[allow(unsafe_code)]
fn get_rows(self) -> u32 {
unsafe {
(*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("rows"))
.map_or(DEFAULT_ROWS, AttrValue::as_uint)
}
}
}
// https://html.spec.whatwg.org/multipage/#attr-textarea-cols-value
static DEFAULT_COLS: u32 = 20;
// https://html.spec.whatwg.org/multipage/#attr-textarea-rows-value
static DEFAULT_ROWS: u32 = 2;
impl HTMLTextAreaElement {
fn new_inherited(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document) -> HTMLTextAreaElement {
let chan = document.window().upcast::<GlobalScope>().constellation_chan().clone();
HTMLTextAreaElement {
htmlelement:
HTMLElement::new_inherited_with_state(IN_ENABLED_STATE | IN_READ_WRITE_STATE,
local_name, prefix, document),
placeholder: DOMRefCell::new(DOMString::new()),
textinput: DOMRefCell::new(TextInput::new(
Lines::Multiple, DOMString::new(), chan, None, None, SelectionDirection::None)),
value_changed: Cell::new(false),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTextAreaElement> {
Node::reflect_node(box HTMLTextAreaElement::new_inherited(local_name, prefix, document),
document,
HTMLTextAreaElementBinding::Wrap)
}
fn update_placeholder_shown_state(&self) {
let has_placeholder = !self.placeholder.borrow().is_empty();
let has_value = !self.textinput.borrow().is_empty();
let el = self.upcast::<Element>();
        // :placeholder-shown should match only while a placeholder is
        // present and the control is empty.
        el.set_placeholder_shown_state(has_placeholder && !has_value);
}
}
impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
// TODO A few of these attributes have default values and additional
// constraints
// https://html.spec.whatwg.org/multipage/#dom-textarea-cols
make_uint_getter!(Cols, "cols", DEFAULT_COLS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-cols
make_limited_uint_setter!(SetCols, "cols", DEFAULT_COLS);
// https://html.spec.whatwg.org/multipage/#dom-fe-disabled
make_bool_getter!(Disabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fae-form
fn GetForm(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner()
}
// https://html.spec.whatwg.org/multipage/#attr-fe-name
make_getter!(Name, "name");
// https://html.spec.whatwg.org/multipage/#attr-fe-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-textarea-placeholder
make_getter!(Placeholder, "placeholder");
// https://html.spec.whatwg.org/multipage/#dom-textarea-placeholder
make_setter!(SetPlaceholder, "placeholder");
// https://html.spec.whatwg.org/multipage/#attr-textarea-readonly
make_bool_getter!(ReadOnly, "readonly");
// https://html.spec.whatwg.org/multipage/#attr-textarea-readonly
make_bool_setter!(SetReadOnly, "readonly");
// https://html.spec.whatwg.org/multipage/#dom-textarea-required
make_bool_getter!(Required, "required");
// https://html.spec.whatwg.org/multipage/#dom-textarea-required
make_bool_setter!(SetRequired, "required");
// https://html.spec.whatwg.org/multipage/#dom-textarea-rows
make_uint_getter!(Rows, "rows", DEFAULT_ROWS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-rows
make_limited_uint_setter!(SetRows, "rows", DEFAULT_ROWS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-wrap
make_getter!(Wrap, "wrap");
// https://html.spec.whatwg.org/multipage/#dom-textarea-wrap
make_setter!(SetWrap, "wrap");
// https://html.spec.whatwg.org/multipage/#dom-textarea-type
fn Type(&self) -> DOMString {
DOMString::from("textarea")
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-defaultvalue
fn DefaultValue(&self) -> DOMString {
self.upcast::<Node>().GetTextContent().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-defaultvalue
fn SetDefaultValue(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value));
// if the element's dirty value flag is false, then the element's
// raw value must be set to the value of the element's textContent IDL attribute
if !self.value_changed.get() {
self.reset();
}
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-value
fn Value(&self) -> DOMString {
self.textinput.borrow().get_content()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-value
fn SetValue(&self, value: DOMString) {
// TODO move the cursor to the end of the field
self.textinput.borrow_mut().set_content(value);
self.value_changed.set(true);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
// https://html.spec.whatwg.org/multipage/#dom-lfe-labels
fn Labels(&self) -> Root<NodeList> {
self.upcast::<HTMLElement>().labels()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectiondirection
fn SetSelectionDirection(&self, direction: DOMString) {
self.textinput.borrow_mut().selection_direction = SelectionDirection::from(direction);
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectiondirection
fn SelectionDirection(&self) -> DOMString {
DOMString::from(self.textinput.borrow().selection_direction)
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionend
fn SetSelectionEnd(&self, end: u32) {
let selection_start = self.SelectionStart();
self.textinput.borrow_mut().set_selection_range(selection_start, end);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionend
fn SelectionEnd(&self) -> u32 {
self.textinput.borrow().get_absolute_insertion_point() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionstart
fn SetSelectionStart(&self, start: u32) {
let selection_end = self.SelectionEnd();
self.textinput.borrow_mut().set_selection_range(start, selection_end);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-selectionstart
fn SelectionStart(&self) -> u32 {
self.textinput.borrow().get_selection_start()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea/input-setselectionrange
fn SetSelectionRange(&self, start: u32, end: u32, direction: Option<DOMString>) {
let direction = direction.map_or(SelectionDirection::None, |d| SelectionDirection::from(d));
self.textinput.borrow_mut().selection_direction = direction;
self.textinput.borrow_mut().set_selection_range(start, end);
let window = window_from_node(self);
let _ = window.user_interaction_task_source().queue_event(
&self.upcast(),
atom!("select"),
EventBubbles::Bubbles,
EventCancelable::NotCancelable,
&window);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
}
impl HTMLTextAreaElement {
pub fn reset(&self) {
// https://html.spec.whatwg.org/multipage/#the-textarea-element:concept-form-reset-control
self.SetValue(self.DefaultValue());
self.value_changed.set(false);
}
}
impl VirtualMethods for HTMLTextAreaElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match *attr.local_name() {
local_name!("disabled") => {
let el = self.upcast::<Element>();
match mutation {
AttributeMutation::Set(_) => {
el.set_disabled_state(true);
el.set_enabled_state(false);
el.set_read_write_state(false);
},
AttributeMutation::Removed => {
el.set_disabled_state(false);
el.set_enabled_state(true);
el.check_ancestors_disabled_state_for_form_control();
if !el.disabled_state() && !el.read_write_state() {
el.set_read_write_state(true);
}
}
}
},
local_name!("placeholder") => {
{
let mut placeholder = self.placeholder.borrow_mut();
placeholder.clear();
if let AttributeMutation::Set(_) = mutation {
placeholder.push_str(&attr.value());
}
}
self.update_placeholder_shown_state();
},
local_name!("readonly") => {
let el = self.upcast::<Element>();
match mutation {
AttributeMutation::Set(_) => {
el.set_read_write_state(false);
},
AttributeMutation::Removed => {
el.set_read_write_state(!el.disabled_state());
}
}
}
_ => {},
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
self.upcast::<Element>().check_ancestors_disabled_state_for_form_control();
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match *name {
local_name!("cols") => AttrValue::from_limited_u32(value.into(), DEFAULT_COLS),
local_name!("rows") => AttrValue::from_limited_u32(value.into(), DEFAULT_ROWS),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
fn unbind_from_tree(&self, context: &UnbindContext) {
self.super_type().unwrap().unbind_from_tree(context);
let node = self.upcast::<Node>();
let el = self.upcast::<Element>();
if node.ancestors().any(|ancestor| ancestor.is::<HTMLFieldSetElement>()) {
el.check_ancestors_disabled_state_for_form_control();
} else {
el.check_disabled_attribute();
}
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if !self.value_changed.get() {
self.reset();
}
}
// copied and modified from htmlinputelement.rs
fn handle_event(&self, event: &Event) {
if let Some(s) = self.super_type() {
s.handle_event(event);
}
if event.type_() == atom!("click") && !event.DefaultPrevented() {
//TODO: set the editing position for text inputs
document_from_node(self).request_focus(self.upcast());
} else if event.type_() == atom!("keydown") && !event.DefaultPrevented() {
if let Some(kevent) = event.downcast::<KeyboardEvent>() {
// This can't be inlined, as holding on to textinput.borrow_mut()
// during self.implicit_submission will cause a panic.
let action = self.textinput.borrow_mut().handle_keydown(kevent);
match action {
KeyReaction::TriggerDefaultAction => (),
KeyReaction::DispatchInput => {
self.value_changed.set(true);
self.update_placeholder_shown_state();
if event.IsTrusted() {
let window = window_from_node(self);
let _ = window.user_interaction_task_source().queue_event(
&self.upcast(),
atom!("input"),
EventBubbles::Bubbles,
EventCancelable::NotCancelable,
&window);
}
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
event.PreventDefault();
}<|fim▁hole|> KeyReaction::RedrawSelection => {
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
event.PreventDefault();
}
KeyReaction::Nothing => (),
}
}
}
}
}
impl FormControl for HTMLTextAreaElement {}
impl Validatable for HTMLTextAreaElement {}<|fim▁end|> | |
<|file_name|>request_test.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package elastigo
import (
"fmt"
"testing"
<|fim▁hole|> "github.com/vimeo/carbon-tagger/_third_party/github.com/bmizerany/assert"
)
func TestQueryString(t *testing.T) {
// Test nil argument
s, err := Escape(nil)
assert.T(t, s == "" && err == nil, fmt.Sprintf("Nil should not fail and yield empty string"))
// Test single string argument
s, err = Escape(map[string]interface{}{"foo": "bar"})
exp := "foo=bar"
assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))
// Test single int argument
s, err = Escape(map[string]interface{}{"foo": 1})
exp = "foo=1"
assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))
// Test single float argument
s, err = Escape(map[string]interface{}{"foo": 3.141592})
exp = "foo=3.141592"
assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))
// Test single []string argument
s, err = Escape(map[string]interface{}{"foo": []string{"bar", "baz"}})
exp = "foo=bar%2Cbaz"
assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))
// Test combination of all arguments
s, err = Escape(map[string]interface{}{
"foo": "bar",
"bar": 1,
"baz": 3.141592,
"test": []string{"a", "b"},
})
// url.Values also orders arguments alphabetically.
exp = "bar=1&baz=3.141592&foo=bar&test=a%2Cb"
assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))
// Test invalid datatype
s, err = Escape(map[string]interface{}{"foo": []int{}})
assert.T(t, err != nil, fmt.Sprintf("Expected err to not be nil"))
}<|fim▁end|> | |
<|file_name|>ll.rs<|end_file_name|><|fim▁begin|>#![allow(improper_ctypes, non_camel_case_types)]
use super::errors;
use super::{Connection, UserStatus, MessageType, FileControl, };
use libc::{c_int, c_uint, c_void};
pub enum Struct_Tox { }
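// Opaque toxcore handle; instances are only ever used behind raw pointers.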
pub type Tox = Struct_Tox;
#[repr(C)]
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Tox_Options {
pub ipv6_enabled: u8,
pub udp_enabled: u8,
pub proxy_type: super::ProxyType,
pub proxy_host: *const ::libc::c_char,
pub proxy_port: u16,
pub start_port: u16,
pub end_port: u16,
pub savedata_type: super::SavedataType,
pub savedata_data: *const u8,
pub savedata_length: usize,
}
impl ::std::default::Default for Tox_Options {
fn default() -> Tox_Options {
unsafe {
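            // tox_options_default() is expected to initialize every field,
            // so the uninitialized value is never read.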
let mut opts = ::std::mem::uninitialized();
tox_options_default(&mut opts);
opts
}
}
}
pub type Enum_TOX_ERR_OPTIONS_NEW = ::libc::c_uint;
pub const TOX_ERR_OPTIONS_NEW_OK: ::libc::c_uint = 0;
pub const TOX_ERR_OPTIONS_NEW_MALLOC: ::libc::c_uint = 1;
pub type TOX_ERR_OPTIONS_NEW = Enum_TOX_ERR_OPTIONS_NEW;
pub type tox_self_connection_status_cb =
extern "C" fn(tox: *mut Tox, connection_status: Connection,
user_data: *mut ::libc::c_void) -> ();
pub type Enum_TOX_ERR_FRIEND_DELETE = ::libc::c_uint;
pub const TOX_ERR_FRIEND_DELETE_OK: ::libc::c_uint = 0;
pub const TOX_ERR_FRIEND_DELETE_FRIEND_NOT_FOUND: ::libc::c_uint = 1;
pub type TOX_ERR_FRIEND_DELETE = Enum_TOX_ERR_FRIEND_DELETE;
pub type Enum_TOX_ERR_FRIEND_BY_PUBLIC_KEY = ::libc::c_uint;
pub const TOX_ERR_FRIEND_BY_PUBLIC_KEY_OK: ::libc::c_uint = 0;
pub const TOX_ERR_FRIEND_BY_PUBLIC_KEY_NULL: ::libc::c_uint = 1;
pub const TOX_ERR_FRIEND_BY_PUBLIC_KEY_NOT_FOUND: ::libc::c_uint = 2;
pub type TOX_ERR_FRIEND_BY_PUBLIC_KEY = Enum_TOX_ERR_FRIEND_BY_PUBLIC_KEY;
pub type Enum_TOX_ERR_FRIEND_GET_PUBLIC_KEY = ::libc::c_uint;
pub const TOX_ERR_FRIEND_GET_PUBLIC_KEY_OK: ::libc::c_uint = 0;
pub const TOX_ERR_FRIEND_GET_PUBLIC_KEY_FRIEND_NOT_FOUND: ::libc::c_uint = 1;
pub type TOX_ERR_FRIEND_GET_PUBLIC_KEY = Enum_TOX_ERR_FRIEND_GET_PUBLIC_KEY;
pub type Enum_TOX_ERR_FRIEND_GET_LAST_ONLINE = ::libc::c_uint;
pub const TOX_ERR_FRIEND_GET_LAST_ONLINE_OK: ::libc::c_uint = 0;
pub const TOX_ERR_FRIEND_GET_LAST_ONLINE_FRIEND_NOT_FOUND: ::libc::c_uint = 1;
pub type TOX_ERR_FRIEND_GET_LAST_ONLINE = Enum_TOX_ERR_FRIEND_GET_LAST_ONLINE;
pub type Enum_TOX_ERR_FRIEND_QUERY = ::libc::c_uint;
pub const TOX_ERR_FRIEND_QUERY_OK: ::libc::c_uint = 0;
pub const TOX_ERR_FRIEND_QUERY_NULL: ::libc::c_uint = 1;
pub const TOX_ERR_FRIEND_QUERY_FRIEND_NOT_FOUND: ::libc::c_uint = 2;
pub type TOX_ERR_FRIEND_QUERY = Enum_TOX_ERR_FRIEND_QUERY;
pub type tox_friend_name_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
name: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_status_message_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
message: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_status_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
status: UserStatus, user_data: *mut ::libc::c_void)
-> ();
pub type tox_friend_connection_status_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
connection_status: Connection,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_typing_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32, is_typing: u8,
user_data: *mut ::libc::c_void) -> ();
pub type Enum_TOX_ERR_SET_TYPING = ::libc::c_uint;
pub const TOX_ERR_SET_TYPING_OK: ::libc::c_uint = 0;
pub const TOX_ERR_SET_TYPING_FRIEND_NOT_FOUND: ::libc::c_uint = 1;
pub type TOX_ERR_SET_TYPING = Enum_TOX_ERR_SET_TYPING;
pub type tox_friend_read_receipt_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
message_id: u32, user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_request_cb =
extern "C" fn(tox: *mut Tox, public_key: *const u8,
message: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_message_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
_type: MessageType, message: *const u8,
length: usize, user_data: *mut ::libc::c_void) -> ();
pub type tox_file_recv_control_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
file_number: u32, control: FileControl,
user_data: *mut ::libc::c_void) -> ();
pub type tox_file_chunk_request_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
file_number: u32, position: u64, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_file_recv_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
file_number: u32, kind: u32, file_size: u64,
filename: *const u8, filename_length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_file_recv_chunk_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
file_number: u32, position: u64,
data: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_lossy_packet_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
data: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type tox_friend_lossless_packet_cb =
extern "C" fn(tox: *mut Tox, friend_number: u32,
data: *const u8, length: usize,
user_data: *mut ::libc::c_void) -> ();
pub type Enum_TOX_ERR_GET_PORT = ::libc::c_uint;
pub const TOX_ERR_GET_PORT_OK: ::libc::c_uint = 0;
pub const TOX_ERR_GET_PORT_NOT_BOUND: ::libc::c_uint = 1;
pub type TOX_ERR_GET_PORT = Enum_TOX_ERR_GET_PORT;
pub type Enum_Unnamed1 = ::libc::c_uint;
pub const TOX_GROUPCHAT_TYPE_TEXT: ::libc::c_uint = 0;
pub const TOX_GROUPCHAT_TYPE_AV: ::libc::c_uint = 1;
pub type Enum_Unnamed2 = ::libc::c_uint;
pub const TOX_CHAT_CHANGE_PEER_ADD: ::libc::c_uint = 0;
pub const TOX_CHAT_CHANGE_PEER_DEL: ::libc::c_uint = 1;
pub const TOX_CHAT_CHANGE_PEER_NAME: ::libc::c_uint = 2;
pub type TOX_CHAT_CHANGE = Enum_Unnamed2;
#[link(name = "toxcore")]
extern "C" {
pub fn tox_version_major() -> u32;
pub fn tox_version_minor() -> u32;
pub fn tox_version_patch() -> u32;
pub fn tox_version_is_compatible(major: u32, minor: u32,
patch: u32) -> u8;
pub fn tox_options_default(options: *mut Tox_Options) -> ();
pub fn tox_options_new(error: *mut TOX_ERR_OPTIONS_NEW)
-> *mut Tox_Options;
pub fn tox_options_free(options: *mut Tox_Options) -> ();
pub fn tox_new(options: *const Tox_Options, error: *mut errors::InitError) -> *mut Tox;
pub fn tox_kill(tox: *mut Tox) -> ();
pub fn tox_get_savedata_size(tox: *const Tox) -> usize;
pub fn tox_get_savedata(tox: *const Tox, data: *mut u8) -> ();
pub fn tox_bootstrap(tox: *mut Tox, host: *const ::libc::c_char,
port: u16, public_key: *const u8,
error: *mut errors::BootstrapError) -> u8;
pub fn tox_add_tcp_relay(tox: *mut Tox, host: *const ::libc::c_char,
port: u16, public_key: *const u8,
error: *mut errors::BootstrapError) -> u8;
pub fn tox_self_get_connection_status(tox: *const Tox) -> Connection;
pub fn tox_callback_self_connection_status(tox: *mut Tox,
function: tox_self_connection_status_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_iteration_interval(tox: *const Tox) -> u32;
pub fn tox_iterate(tox: *mut Tox) -> ();
pub fn tox_self_get_address(tox: *const Tox, address: *mut u8) -> ();
pub fn tox_self_set_nospam(tox: *mut Tox, nospam: u32) -> ();
pub fn tox_self_get_nospam(tox: *const Tox) -> u32;
pub fn tox_self_get_public_key(tox: *const Tox, public_key: *mut u8)
-> ();
pub fn tox_self_get_secret_key(tox: *const Tox, secret_key: *mut u8)
-> ();
pub fn tox_self_set_name(tox: *mut Tox, name: *const u8,
length: usize, error: *mut errors::SetInfoError)
-> u8;
pub fn tox_self_get_name_size(tox: *const Tox) -> usize;
pub fn tox_self_get_name(tox: *const Tox, name: *mut u8) -> ();
pub fn tox_self_set_status_message(tox: *mut Tox, status: *const u8,
length: usize,
error: *mut errors::SetInfoError) -> u8;
pub fn tox_self_get_status_message_size(tox: *const Tox) -> usize;
pub fn tox_self_get_status_message(tox: *const Tox, status: *mut u8)
-> ();
pub fn tox_self_set_status(tox: *mut Tox, user_status: UserStatus)
-> ();
pub fn tox_self_get_status(tox: *const Tox) -> UserStatus;
pub fn tox_friend_add(tox: *mut Tox, address: *const u8,
message: *const u8, length: usize,
error: *mut errors::FriendAddError) -> u32;
pub fn tox_friend_add_norequest(tox: *mut Tox, public_key: *const u8,
error: *mut errors::FriendAddError)
-> u32;
pub fn tox_friend_delete(tox: *mut Tox, friend_number: u32,
error: *mut TOX_ERR_FRIEND_DELETE) -> u8;
pub fn tox_friend_by_public_key(tox: *const Tox,
public_key: *const u8,
error: *mut TOX_ERR_FRIEND_BY_PUBLIC_KEY)
-> u32;
pub fn tox_friend_get_public_key(tox: *const Tox, friend_number: u32,
public_key: *mut u8,
error:
*mut TOX_ERR_FRIEND_GET_PUBLIC_KEY)
-> u8;
pub fn tox_friend_exists(tox: *const Tox, friend_number: u32) -> u8;
pub fn tox_friend_get_last_online(tox: *const Tox,
friend_number: u32,
error:
*mut TOX_ERR_FRIEND_GET_LAST_ONLINE)
-> u64;
pub fn tox_self_get_friend_list_size(tox: *const Tox) -> usize;
pub fn tox_self_get_friend_list(tox: *const Tox, list: *mut u32)
-> ();
pub fn tox_friend_get_name_size(tox: *const Tox, friend_number: u32,
error: *mut TOX_ERR_FRIEND_QUERY)
-> usize;
pub fn tox_friend_get_name(tox: *const Tox, friend_number: u32,
name: *mut u8,
error: *mut TOX_ERR_FRIEND_QUERY) -> u8;
pub fn tox_callback_friend_name(tox: *mut Tox,
function:
*mut tox_friend_name_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_friend_get_status_message_size(tox: *const Tox,
friend_number: u32,
error:
*mut TOX_ERR_FRIEND_QUERY)
-> usize;
pub fn tox_friend_get_status_message(tox: *const Tox,
friend_number: u32,
message: *mut u8,
error: *mut TOX_ERR_FRIEND_QUERY)
-> u8;
pub fn tox_callback_friend_status_message(tox: *mut Tox,
function: *mut tox_friend_status_message_cb,
user_data: *mut ::libc::c_void)
-> ();<|fim▁hole|> function: *mut tox_friend_status_message_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_friend_get_connection_status(tox: *const Tox,
friend_number: u32,
error: *mut TOX_ERR_FRIEND_QUERY)
-> Connection;
pub fn tox_callback_friend_connection_status(tox: *mut Tox,
function: *mut tox_friend_connection_status_cb,
user_data:
*mut ::libc::c_void)
-> ();
pub fn tox_friend_get_typing(tox: *const Tox, friend_number: u32,
error: *mut TOX_ERR_FRIEND_QUERY) -> u8;
pub fn tox_callback_friend_typing(tox: *mut Tox,
function: *mut tox_friend_typing_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_self_set_typing(tox: *mut Tox, friend_number: u32,
is_typing: u8, error: *mut TOX_ERR_SET_TYPING)
-> u8;
pub fn tox_friend_send_message(tox: *mut Tox, friend_number: u32,
_type: MessageType,
message: *const u8, length: usize,
error: *mut errors::FriendSendMessageError)
-> u32;
pub fn tox_callback_friend_read_receipt(tox: *mut Tox,
function: tox_friend_read_receipt_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_callback_friend_request(tox: *mut Tox,
function: tox_friend_request_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_callback_friend_message(tox: *mut Tox,
function: tox_friend_message_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_hash(hash: *mut u8, data: *const u8, length: usize)
-> u8;
pub fn tox_file_control(tox: *mut Tox, friend_number: u32,
file_number: u32, control: FileControl,
error: *mut errors::FileControlError) -> u8;
pub fn tox_callback_file_recv_control(tox: *mut Tox,
                                          function: tox_file_recv_control_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_file_seek(tox: *mut Tox, friend_number: u32,
file_number: u32, position: u64,
error: *mut errors::FileSeekError) -> u8;
pub fn tox_file_get_file_id(tox: *const Tox, friend_number: u32,
file_number: u32, file_id: *mut u8,
error: *mut errors::FileGetError) -> u8;
pub fn tox_file_send(tox: *mut Tox, friend_number: u32,
kind: u32, file_size: u64,
file_id: *const u8, filename: *const u8,
filename_length: usize,
error: *mut errors::FileSendError) -> u32;
pub fn tox_file_send_chunk(tox: *mut Tox, friend_number: u32,
file_number: u32, position: u64,
data: *const u8, length: usize,
error: *mut errors::FileSendChunkError) -> u8;
pub fn tox_callback_file_chunk_request(tox: *mut Tox,
function: tox_file_chunk_request_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_callback_file_recv(tox: *mut Tox,
function: tox_file_recv_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_callback_file_recv_chunk(tox: *mut Tox,
function: tox_file_recv_chunk_cb,
user_data: *mut ::libc::c_void) -> ();
pub fn tox_friend_send_lossy_packet(tox: *mut Tox,
friend_number: u32,
data: *const u8, length: usize,
error:
*mut errors::FriendCustomPacketError)
-> u8;
pub fn tox_callback_friend_lossy_packet(tox: *mut Tox,
function: tox_friend_lossy_packet_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_friend_send_lossless_packet(tox: *mut Tox,
friend_number: u32,
data: *const u8,
length: usize,
error:
*mut errors::FriendCustomPacketError)
-> u8;
pub fn tox_callback_friend_lossless_packet(tox: *mut Tox,
function: tox_friend_lossless_packet_cb,
user_data: *mut ::libc::c_void)
-> ();
pub fn tox_self_get_dht_id(tox: *const Tox, dht_id: *mut u8) -> ();
pub fn tox_self_get_udp_port(tox: *const Tox,
error: *mut TOX_ERR_GET_PORT) -> u16;
pub fn tox_self_get_tcp_port(tox: *const Tox,
error: *mut TOX_ERR_GET_PORT) -> u16;
// +-----------------------------------------------------+
// | KLUDGE ALERT!!! This will be removed in the future. |
// +-----------------------------------------------------+
pub fn tox_callback_group_invite(tox: *mut Tox,
function:
/*Option<*/extern fn
(arg1: *mut Tox,
arg2: i32,
arg3: u8,
arg4: *const u8,
arg5: u16,
arg6: *mut c_void)/*>*/,
userdata: *mut c_void);
pub fn tox_callback_group_message(tox: *mut Tox,
function:
/*Option<*/extern fn
(arg1: *mut Tox,
arg2: c_int,
arg3: c_int,
arg4: *const u8,
arg5: u16,
arg6: *mut c_void)/*>*/,
userdata: *mut c_void);
pub fn tox_callback_group_action(tox: *mut Tox,
function:
/*Option<*/extern fn
(arg1: *mut Tox,
arg2: c_int,
arg3: c_int,
arg4: *const u8,
arg5: u16,
arg6: *mut c_void)/*>*/,
userdata: *mut c_void);
pub fn tox_callback_group_title(tox: *mut Tox,
function:
/*Option<*/extern fn
(arg1: *mut Tox,
arg2: c_int,
arg3: c_int,
arg4: *const u8,
arg5: u8,
arg6: *mut c_void)/*>*/,
userdata: *mut c_void);
pub fn tox_callback_group_namelist_change(tox: *mut Tox,
function:
/*Option<*/extern fn
(arg1: *mut Tox,
arg2: c_int,
arg3: c_int,
arg4: u8,
arg5: *mut c_void)/*>*/,
userdata: *mut c_void);
pub fn tox_add_groupchat(tox: *mut Tox) -> c_int;
pub fn tox_del_groupchat(tox: *mut Tox, groupnumber: c_int) -> c_int;
pub fn tox_group_peername(tox: *const Tox, groupnumber: c_int, peernumber: c_int,
name: *mut u8) -> c_int;
pub fn tox_group_peer_pubkey(tox: *const Tox, groupnumber: c_int, peernumber: c_int,
pk: *mut u8) -> c_int;
pub fn tox_invite_friend(tox: *mut Tox, friendnumber: i32,
groupnumber: c_int) -> c_int;
pub fn tox_join_groupchat(tox: *mut Tox, friendnumber: i32, data: *const u8,
length: u16) -> c_int;
pub fn tox_group_message_send(tox: *mut Tox, groupnumber: c_int, message: *const u8,
length: u16) -> c_int;
pub fn tox_group_get_title(tox: *const Tox, groupnumber: c_int,
title: *mut u8, max_length: u32) -> c_int;
pub fn tox_group_set_title(tox: *mut Tox, groupnumber: c_int, title: *const u8,
length: u8) -> c_int;
pub fn tox_group_action_send(tox: *mut Tox, groupnumber: c_int, action: *const u8,
length: u16) -> c_int;
pub fn tox_group_peernumber_is_ours(tox: *const Tox, groupnumber: c_int,
peernumber: c_int) -> c_uint;
pub fn tox_group_number_peers(tox: *const Tox, groupnumber: c_int) -> c_int;
pub fn tox_group_get_names(tox: *const Tox, groupnumber: c_int,
names: *mut [u8; 128], lengths: *mut u16,
length: u16) -> c_int;
pub fn tox_count_chatlist(tox: *const Tox) -> u32;
pub fn tox_get_chatlist(tox: *const Tox, out_list: *mut i32, list_size: u32) -> u32;
pub fn tox_group_get_type(tox: *const Tox, groupnumber: c_int) -> c_int;
// ================================
    // END OF NECESSARY DEPRECATED CODE
// ================================
}<|fim▁end|> | pub fn tox_friend_get_status(tox: *const Tox, friend_number: u32,
error: *mut TOX_ERR_FRIEND_QUERY)
-> UserStatus;
pub fn tox_callback_friend_status(tox: *mut Tox, |
<|file_name|>4eba2f05c2f4_correct_vxlan_endpoint_primary_key.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Thales Services SAS
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""correct Vxlan Endpoint primary key
Revision ID: 4eba2f05c2f4
Revises: 884573acbf1c
Create Date: 2014-07-07 22:48:38.544323
"""
# revision identifiers, used by Alembic.
revision = '4eba2f05c2f4'
down_revision = '884573acbf1c'
from alembic import op
TABLE_NAME = 'ml2_vxlan_endpoints'
PK_NAME = 'ml2_vxlan_endpoints_pkey'
def upgrade():
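    # The corrected primary key covers ip_address alone; the previous
    # composite (ip_address, udp_port) key is restored by downgrade() below.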
op.drop_constraint(PK_NAME, TABLE_NAME, type_='primary')
op.create_primary_key(PK_NAME, TABLE_NAME, cols=['ip_address'])
<|fim▁hole|>def downgrade():
op.drop_constraint(PK_NAME, TABLE_NAME, type_='primary')
op.create_primary_key(PK_NAME, TABLE_NAME, cols=['ip_address', 'udp_port'])<|fim▁end|> | |
<|file_name|>light.py<|end_file_name|><|fim▁begin|>"""Support for FutureNow Ethernet unit outputs as Lights."""
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_NAME, CONF_HOST, CONF_PORT, CONF_DEVICES)
from homeassistant.components.light import (
ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, Light,
PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DRIVER = 'driver'
CONF_DRIVER_FNIP6X10AD = 'FNIP6x10ad'
CONF_DRIVER_FNIP8X10A = 'FNIP8x10a'
CONF_DRIVER_TYPES = [CONF_DRIVER_FNIP6X10AD, CONF_DRIVER_FNIP8X10A]
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Optional('dimmable', default=False): cv.boolean,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_DRIVER): vol.In(CONF_DRIVER_TYPES),
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_DEVICES): {cv.string: DEVICE_SCHEMA},
})
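# Example configuration.yaml entry (illustrative values; the keys under
# "devices" are the unit's output channels):
#   light:
#     - platform: futurenow
#       driver: FNIP6x10ad
#       host: 192.168.1.100
#       port: 7078
#       devices:
#         "1":
#           name: Hallway
#           dimmable: true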
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the light platform for each FutureNow unit."""
lights = []
for channel, device_config in config[CONF_DEVICES].items():
device = {}
device['name'] = device_config[CONF_NAME]
device['dimmable'] = device_config['dimmable']
device['channel'] = channel
device['driver'] = config[CONF_DRIVER]
device['host'] = config[CONF_HOST]
device['port'] = config[CONF_PORT]
lights.append(FutureNowLight(device))
add_entities(lights, True)
def to_futurenow_level(level):
"""Convert the given HASS light level (0-255) to FutureNow (0-100)."""
return int((level * 100) / 255)
def to_hass_level(level):
"""Convert the given FutureNow (0-100) light level to HASS (0-255)."""
return int((level * 255) / 100)
class FutureNowLight(Light):
"""Representation of an FutureNow light."""
def __init__(self, device):
"""Initialize the light."""
import pyfnip
self._name = device['name']
self._dimmable = device['dimmable']
self._channel = device['channel']
self._brightness = None
self._last_brightness = 255
self._state = None
if device['driver'] == CONF_DRIVER_FNIP6X10AD:
self._light = pyfnip.FNIP6x2adOutput(device['host'],<|fim▁hole|> device['port'],
self._channel)
if device['driver'] == CONF_DRIVER_FNIP8X10A:
self._light = pyfnip.FNIP8x10aOutput(device['host'],
device['port'],
self._channel)
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
if self._dimmable:
return SUPPORT_BRIGHTNESS
return 0
def turn_on(self, **kwargs):
"""Turn the light on."""
if self._dimmable:
level = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness)
else:
level = 255
self._light.turn_on(to_futurenow_level(level))
def turn_off(self, **kwargs):
"""Turn the light off."""
self._light.turn_off()
if self._brightness:
self._last_brightness = self._brightness
def update(self):
"""Fetch new state data for this light."""
state = int(self._light.is_on())
self._state = bool(state)
self._brightness = to_hass_level(state)<|fim▁end|> | |
<|file_name|>afip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api, _
from openerp.exceptions import Warning
import logging
_logger = logging.getLogger(__name__)
class afip_incoterm(models.Model):
_name = 'afip.incoterm'
_description = 'Afip Incoterm'
afip_code = fields.Char(
'Code', required=True)
name = fields.Char(
'Name', required=True)
class afip_point_of_sale(models.Model):
_name = 'afip.point_of_sale'
_description = 'Afip Point Of Sale'
prefix = fields.Char(
'Prefix'
)
sufix = fields.Char(
'Sufix'
)
type = fields.Selection([
('manual', 'Manual'),
('preprinted', 'Preprinted'),
('online', 'Online'),
        # Added by another module
# ('electronic', 'Electronic'),
# ('fiscal_printer', 'Fiscal Printer'),
],
'Type',
default='manual',
required=True,
)
name = fields.Char(
compute='get_name',
)
number = fields.Integer(
'Number', required=True
)
company_id = fields.Many2one(
'res.company', 'Company', required=True,
default=lambda self: self.env['res.company']._company_default_get(
'afip.point_of_sale')
)
journal_ids = fields.One2many(
'account.journal',
'point_of_sale_id',
'Journals',
)
document_sequence_type = fields.Selection(
[('own_sequence', 'Own Sequence'),
('same_sequence', 'Same Invoice Sequence')],
string='Document Sequence Type',
default='own_sequence',
required=True,
help="Use own sequence or invoice sequence on Debit and Credit Notes?"
)
journal_document_class_ids = fields.One2many(
'account.journal.afip_document_class',
compute='get_journal_document_class_ids',
string='Documents Classes',
)
@api.one
@api.depends('type', 'sufix', 'prefix', 'number')
def get_name(self):
        # TODO: improve this so that it uses the translated label from the selection<|fim▁hole|>            name = 'Preimpresa'
elif self.type == 'online':
name = 'Online'
elif self.type == 'electronic':
name = 'Electronica'
if self.prefix:
name = '%s %s' % (self.prefix, name)
if self.sufix:
name = '%s %s' % (name, self.sufix)
name = '%04d - %s' % (self.number, name)
self.name = name
@api.one
@api.depends('journal_ids.journal_document_class_ids')
def get_journal_document_class_ids(self):
journal_document_class_ids = self.env[
'account.journal.afip_document_class'].search([
('journal_id.point_of_sale_id', '=', self.id)])
self.journal_document_class_ids = journal_document_class_ids
_sql_constraints = [('number_unique', 'unique(number, company_id)',
'Number Must be Unique per Company!'), ]
class afip_document_class(models.Model):
_name = 'afip.document_class'
_description = 'Afip Document Class'
name = fields.Char(
'Name', size=120)
doc_code_prefix = fields.Char(
'Document Code Prefix', help="Prefix for Documents Codes on Invoices \
and Account Moves. For eg. 'FA ' will build 'FA 0001-0000001' Document Number")
afip_code = fields.Integer(
'AFIP Code', required=True)
document_letter_id = fields.Many2one(
'afip.document_letter', 'Document Letter')
report_name = fields.Char(
'Name on Reports',
help='Name that will be printed in reports, for example "CREDIT NOTE"')
document_type = fields.Selection([
('invoice', 'Invoices'),
('credit_note', 'Credit Notes'),
('debit_note', 'Debit Notes'),
('receipt', 'Receipt'),
('ticket', 'Ticket'),
('in_document', 'In Document'),
('other_document', 'Other Documents')
],
string='Document Type',
        help='It defines some behaviours for automatic journal selection and \
in the menus where it is shown.')
active = fields.Boolean(
'Active', default=True)
class afip_document_letter(models.Model):
_name = 'afip.document_letter'
_description = 'Afip Document letter'
name = fields.Char(
'Name', size=64, required=True)
afip_document_class_ids = fields.One2many(
'afip.document_class', 'document_letter_id', 'Afip Document Classes')
issuer_ids = fields.Many2many(
'afip.responsability', 'afip_doc_letter_issuer_rel',
'letter_id', 'responsability_id', 'Issuers',)
receptor_ids = fields.Many2many(
'afip.responsability', 'afip_doc_letter_receptor_rel',
'letter_id', 'responsability_id', 'Receptors',)
active = fields.Boolean(
'Active', default=True)
vat_discriminated = fields.Boolean(
'Vat Discriminated on Invoices?',
help="If True, the vat will be discriminated on invoice report.")
_sql_constraints = [('name', 'unique(name)', 'Name must be unique!'), ]
class afip_responsability(models.Model):
_name = 'afip.responsability'
_description = 'AFIP VAT Responsability'
name = fields.Char(
'Name', size=64, required=True)
code = fields.Char(
'Code', size=8, required=True)
active = fields.Boolean(
'Active', default=True)
issued_letter_ids = fields.Many2many(
'afip.document_letter', 'afip_doc_letter_issuer_rel',
'responsability_id', 'letter_id', 'Issued Document Letters')
received_letter_ids = fields.Many2many(
'afip.document_letter', 'afip_doc_letter_receptor_rel',
'responsability_id', 'letter_id', 'Received Document Letters')
vat_tax_required_on_sales_invoices = fields.Boolean(
'VAT Tax Required on Sales Invoices?',
        help='If True, then a VAT tax is mandatory on each sale invoice for companies of this responsibility',
)
_sql_constraints = [('name', 'unique(name)', 'Name must be unique!'),
('code', 'unique(code)', 'Code must be unique!')]
class afip_document_type(models.Model):
_name = 'afip.document_type'
_description = 'AFIP document types'
name = fields.Char(
'Name', size=120, required=True)
code = fields.Char(
'Code', size=16, required=True)
afip_code = fields.Integer(
'AFIP Code', required=True)
active = fields.Boolean(
'Active', default=True)<|fim▁end|> | if self.type == 'manual':
name = 'Manual'
elif self.type == 'preprinted': |
<|file_name|>KeyRelationImpl.cpp<|end_file_name|><|fim▁begin|>// -*- mode: c++; c-basic-style: "bsd"; c-basic-offset: 4; -*-
/*
* kdm/data/KeyRelationImpl.cpp
* Copyright (C) Cátedra SAES-UMU 2010 <[email protected]>
* Copyright (C) INCHRON GmbH 2016 <[email protected]>
*
* EMF4CPP is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EMF4CPP is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.<|fim▁hole|> * You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "KeyRelation.hpp"
#include <kdm/data/DataPackage.hpp>
#include <kdm/data/AbstractDataRelationship.hpp>
#include <kdm/kdm/Attribute.hpp>
#include <kdm/kdm/Annotation.hpp>
#include <kdm/kdm/Stereotype.hpp>
#include <kdm/kdm/ExtendedValue.hpp>
#include <kdm/data/UniqueKey.hpp>
#include <kdm/data/ReferenceKey.hpp>
#include <kdm/core/KDMEntity.hpp>
#include <ecore/EObject.hpp>
#include <ecore/EClass.hpp>
#include <ecore/EStructuralFeature.hpp>
#include <ecore/EReference.hpp>
#include <ecore/EObject.hpp>
#include <ecorecpp/mapping.hpp>
/*PROTECTED REGION ID(KeyRelationImpl.cpp) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
using namespace ::kdm::data;
void KeyRelation::_initialize()
{
// Supertypes
::kdm::data::AbstractDataRelationship::_initialize();
// References
/*PROTECTED REGION ID(KeyRelationImpl__initialize) START*/
// Please, enable the protected region if you add manually written code.
// To do this, add the keyword ENABLED before START.
/*PROTECTED REGION END*/
}
// Operations
// EObject
::ecore::EJavaObject KeyRelation::eGet(::ecore::EInt _featureID,
::ecore::EBoolean _resolve)
{
::ecore::EJavaObject _any;
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
_any = m_attribute->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
_any = m_annotation->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
_any = m_stereotype->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
_any = m_taggedValue->asEListOf< ::ecore::EObject_ptr >();
}
return _any;
case ::kdm::data::DataPackage::KEYRELATION__TO:
{
if (m_to)
_any = ::ecore::as < ::ecore::EObject > (m_to);
}
return _any;
case ::kdm::data::DataPackage::KEYRELATION__FROM:
{
if (m_from)
_any = ::ecore::as < ::ecore::EObject > (m_from);
}
return _any;
}
throw "Error";
}
void KeyRelation::eSet(::ecore::EInt _featureID,
::ecore::EJavaObject const& _newValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::Element::getAttribute().clear();
::kdm::core::Element::getAttribute().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::Element::getAnnotation().clear();
::kdm::core::Element::getAnnotation().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::ModelElement::getStereotype().clear();
::kdm::core::ModelElement::getStereotype().insert_all(*_t0);
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
::ecorecpp::mapping::EList< ::ecore::EObject_ptr >::ptr_type _t0 =
::ecorecpp::mapping::any::any_cast < ::ecorecpp::mapping::EList
< ::ecore::EObject_ptr > ::ptr_type > (_newValue);
::kdm::core::ModelElement::getTaggedValue().clear();
::kdm::core::ModelElement::getTaggedValue().insert_all(*_t0);
}
return;
case ::kdm::data::DataPackage::KEYRELATION__TO:
{
::ecore::EObject_ptr _t0 = ::ecorecpp::mapping::any::any_cast
< ::ecore::EObject_ptr > (_newValue);
::kdm::data::UniqueKey_ptr _t1 =
dynamic_cast< ::kdm::data::UniqueKey* >(_t0.get()); /*/// std::dynamic_pointer_cast< ::kdm::data::UniqueKey >(_t0);*/
::kdm::data::KeyRelation::setTo(_t1);
}
return;
case ::kdm::data::DataPackage::KEYRELATION__FROM:
{
::ecore::EObject_ptr _t0 = ::ecorecpp::mapping::any::any_cast
< ::ecore::EObject_ptr > (_newValue);
::kdm::data::ReferenceKey_ptr _t1 =
dynamic_cast< ::kdm::data::ReferenceKey* >(_t0.get()); /*/// std::dynamic_pointer_cast< ::kdm::data::ReferenceKey >(_t0);*/
::kdm::data::KeyRelation::setFrom(_t1);
}
return;
}
throw "Error";
}
::ecore::EBoolean KeyRelation::eIsSet(::ecore::EInt _featureID)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
return m_attribute && m_attribute->size();
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
return m_annotation && m_annotation->size();
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
return m_stereotype && m_stereotype->size();
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
return m_taggedValue && m_taggedValue->size();
case ::kdm::data::DataPackage::KEYRELATION__TO:
return (bool) m_to;
case ::kdm::data::DataPackage::KEYRELATION__FROM:
return (bool) m_from;
}
throw "Error";
}
void KeyRelation::eUnset(::ecore::EInt _featureID)
{
switch (_featureID)
{
}
throw "Error";
}
::ecore::EClass_ptr KeyRelation::_eClass()
{
static ::ecore::EClass_ptr _eclass =
dynamic_cast< ::kdm::data::DataPackage* >(::kdm::data::DataPackage::_instance().get())->getKeyRelation();
return _eclass;
}
/** Set the local end of a reference with an EOpposite property.
*/
void KeyRelation::_inverseAdd(::ecore::EInt _featureID,
::ecore::EJavaObject const& _newValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
}
return;
case ::kdm::data::DataPackage::KEYRELATION__TO:
{
}
return;
case ::kdm::data::DataPackage::KEYRELATION__FROM:
{
}
return;
}
throw "Error: _inverseAdd() does not handle this featureID";
}
/** Unset the local end of a reference with an EOpposite property.
*/
void KeyRelation::_inverseRemove(::ecore::EInt _featureID,
::ecore::EJavaObject const& _oldValue)
{
switch (_featureID)
{
case ::kdm::core::CorePackage::ELEMENT__ATTRIBUTE:
{
}
return;
case ::kdm::core::CorePackage::ELEMENT__ANNOTATION:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__STEREOTYPE:
{
}
return;
case ::kdm::core::CorePackage::MODELELEMENT__TAGGEDVALUE:
{
}
return;
case ::kdm::data::DataPackage::KEYRELATION__TO:
{
}
return;
case ::kdm::data::DataPackage::KEYRELATION__FROM:
{
}
return;
}
throw "Error: _inverseRemove() does not handle this featureID";
}<|fim▁end|> | * See the GNU Lesser General Public License for more details.
* |
<|file_name|>test_2595_Spinner.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010 Resolver Systems Ltd.
<|fim▁hole|># All Rights Reserved
#
from functionaltest import FunctionalTest
class Test_2595_Throbber(FunctionalTest):
def test_spinner_appears_during_recalcs(self):
        # * Harold likes to know when Dirigible is working hard on his calculations
# * He logs in and creates a new sheet
self.login_and_create_new_sheet()
# * When the grid has appeared, the spinner might be visible, but it disappears
# rapidly as the initial empty recalc completes.
self.wait_for_spinner_to_stop()
# * and enters some hard-working user-code
self.append_usercode('import time\ntime.sleep(20)\nworksheet[1,1].value="ready"')
# * He spots the spinner on the page
self.wait_for(self.is_spinner_visible,
lambda : 'spinner not present',
timeout_seconds = 5)
# * When the recalc is done, he sees the spinner go away
self.wait_for_cell_value(1, 1, 'ready', timeout_seconds=25)
self.assertTrue(self.is_element_present('css=#id_spinner_image.hidden'))<|fim▁end|> | |
<|file_name|>optimizer.py<|end_file_name|><|fim▁begin|>import logging
from math import isclose
try: # pragma: no cover
import torch
optim = torch.optim
except ImportError: # pragma: no cover
optim = None
def pinverse(t):
"""
Computes the pseudo-inverse of a matrix using SVD.
Parameters
----------
t: torch.tensor
The matrix whose inverse is to be calculated.
Returns
-------
    torch.tensor: Pseudo-inverse of the matrix `t`.
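    Examples
    --------
    A small sanity check (illustrative); for any matrix `t`,
    `t @ pinverse(t) @ t` reproduces `t` up to numerical error.
    >>> import torch
    >>> t = torch.tensor([[1.0, 2.0], [3.0, 4.0]])
    >>> torch.allclose(t @ pinverse(t) @ t, t, atol=1e-5)  # doctest: +SKIP
    True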
"""
u, s, v = t.svd()
t_inv = v @ torch.diag(torch.where(s != 0, 1 / s, s)) @ u.t()
return t_inv
def optimize(
loss_fn, params={}, loss_args={}, opt="adam", max_iter=10000, exit_delta=1e-4
):
"""
Generic function to optimize loss functions.
Parameters
----------
loss_fn: Function
        The function to optimize. It must return a scalar torch.Tensor.
params: dict {str: torch.Tensor}
The parameters which need to be optimized along with their initial values. The
dictionary should be of the form: {variable name: initial value}
loss_args: dict {str: torch.Tensor}
        Extra parameters which the loss function needs to compute the loss.
opt: str | Instance of torch.optim.Optimizer
        The optimizer to use. Should be either an instance of torch.optim.Optimizer or a str.
        When a str is given, the optimizer is initialized with default parameters.
        If a str, the options are:
1. Adadelta: Adadelta algorithm (Ref: https://arxiv.org/abs/1212.5701)
2. Adagrad: Adagrad algorithm (Ref: http://jmlr.org/papers/v12/duchi11a.html)
3. Adam: Adam algorithm (Ref: https://arxiv.org/abs/1412.6980)
4. SparseAdam: Lazy version of Adam. Suitable for sparse tensors.
5. Adamax: Adamax algorithm (variant of Adam based on infinity norm)
6. ASGD: Averaged Stochastic Gradient Descent (Ref: https://dl.acm.org/citation.cfm?id=131098)
7. LBFGS: L-BFGS Algorithm
8. RMSprop: RMSprop Algorithm (Ref: https://arxiv.org/abs/1308.0850v5)<|fim▁hole|> max_iter: int (default: 10000)
The maximum number of iterations to run the optimization for.
exit_delta: float
        The optimization exit criterion. When the change in loss between iterations is
        less than `exit_delta`, the optimizer returns the current values.
Returns
-------
dict: The values that were given in params in the same format.
Examples
--------
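    A minimal sketch (the target value and iteration budget are illustrative):
    >>> import torch
    >>> x = torch.tensor([1.0], requires_grad=True)
    >>> def loss_fn(params, loss_args):
    ...     return (params["x"] - 5.0).pow(2).sum()
    >>> result = optimize(loss_fn, params={"x": x}, opt="adam", max_iter=10000)
    >>> result["x"]  # doctest: +SKIP
    tensor([...], requires_grad=True)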
"""
# TODO: Add option to modify the optimizers.
init_loss = float("inf")
if isinstance(opt, str):
opt_dict = {
"adadelta": optim.Adadelta,
"adagrad": optim.Adagrad,
"adam": optim.Adam,
"sparseadam": optim.SparseAdam,
"adamax": optim.Adamax,
"asgd": optim.ASGD,
"lbfgs": optim.LBFGS,
"rmsprop": optim.RMSprop,
"rprop": optim.Rprop,
"sgd": optim.SGD,
}
opt = opt_dict[opt.lower()](params.values())
for t in range(max_iter):
def closure():
opt.zero_grad()
loss = loss_fn(params, loss_args)
loss.backward()
return loss
opt.step(closure=closure)
        # Evaluate the loss once and reuse it for both the convergence check
        # and the running loss value.
        loss_value = closure().item()
        if isclose(init_loss, loss_value, abs_tol=exit_delta):
            logging.info(f"Converged after {t} iterations.")
            return params
        init_loss = loss_value
logging.info(
f"Couldn't converge after {max_iter} iterations. Try increasing max_iter or change optimizer parameters"
)
return params<|fim▁end|> | 9. Rprop: Resilient Backpropagation Algorithm
10. SGD: Stochastic Gradient Descent.
|
<|file_name|>InsertStream_test.js<|end_file_name|><|fim▁begin|><|fim▁hole|>import assert from 'assert';
import sinon from 'sinon';
import fs from 'fs';
import stream from 'stream';
import { InsertStream } from '../';
/**
* Transform class to turn our string JSON data into objects.
*/
class TransformToObject extends stream.Transform {
constructor() {
super({objectMode:true});
}
_transform(data, encoding, cb) {
data.split('\n')
.filter(line => line.length)
.forEach(line => this.push(JSON.parse(line)));
cb();
}
}
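// The fixture at `${__dirname}/data.json` is assumed to contain newline-delimited
// JSON, one object per line; three records match the assertions below, e.g.:
//   {"id": 1}
//   {"id": 2}
//   {"id": 3}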
function getSpies() {
const insertSpy = sinon.spy();
const insertManySpy = sinon.spy();
const collection = {
insert(data, cb) {
insertSpy(data);
cb();
},
insertMany(data, cb) {
insertManySpy(data);
cb();
}
};
return [insertSpy, insertManySpy, collection];
}
function getOption(opts) {
return {
highWaterMark: opts.highWaterMark,
collectionName: 'test-collection',
db: {
collection: function(name, cb) {
cb(opts.err, opts.collection);
}
}
};
}
function createTestStream(highWaterMark, err, cb) {
const [insertSpy, insertManySpy, collection] = getSpies();
const option = getOption({highWaterMark, collection, err});
const stream = fs.createReadStream(`${__dirname}/data.json`, {encoding: 'utf8'})
.pipe(new TransformToObject())
.pipe(new InsertStream(option));
cb(stream, {insertSpy, insertManySpy});
}
export function testSingleWrite(done) {
createTestStream(1, null, (stream, spies) => {
stream.on('finish', () => {
assert.ok(spies.insertSpy.calledThrice);
assert.ok(spies.insertManySpy.notCalled);
done();
});
});
}
export function testBatchWrite(done) {
createTestStream(2, null, (stream, spies) => {
stream.on('finish', () => {
assert.ok(spies.insertSpy.calledOnce);
assert.ok(spies.insertManySpy.calledOnce);
assert.equal(spies.insertManySpy.getCall(0).args[0].length, 2);
done();
});
});
}
export function testError(done) {
createTestStream(1, {}, stream => {
stream.on('error', () => {
assert.ok(true);
done();
});
});
}<|fim▁end|> | |
<|file_name|>formdata.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::FormDataBinding::FormDataMethods;
use crate::dom::bindings::codegen::Bindings::FormDataBinding::FormDataWrap;
use crate::dom::bindings::codegen::UnionTypes::FileOrUSVString;
use crate::dom::bindings::error::{Error, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::iterable::Iterable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::DomRoot;<|fim▁hole|>use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::blob::Blob;
use crate::dom::file::File;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlformelement::{FormDatum, FormDatumValue, HTMLFormElement};
use dom_struct::dom_struct;
use html5ever::LocalName;
use script_traits::serializable::BlobImpl;
#[dom_struct]
pub struct FormData {
reflector_: Reflector,
data: DomRefCell<Vec<(LocalName, FormDatum)>>,
}
impl FormData {
fn new_inherited(form_datums: Option<Vec<FormDatum>>) -> FormData {
let data = match form_datums {
Some(data) => data
.iter()
.map(|datum| (LocalName::from(datum.name.as_ref()), datum.clone()))
.collect::<Vec<(LocalName, FormDatum)>>(),
None => Vec::new(),
};
FormData {
reflector_: Reflector::new(),
data: DomRefCell::new(data),
}
}
pub fn new(form_datums: Option<Vec<FormDatum>>, global: &GlobalScope) -> DomRoot<FormData> {
reflect_dom_object(
Box::new(FormData::new_inherited(form_datums)),
global,
FormDataWrap,
)
}
// https://xhr.spec.whatwg.org/#dom-formdata
pub fn Constructor(
global: &GlobalScope,
form: Option<&HTMLFormElement>,
) -> Fallible<DomRoot<FormData>> {
if let Some(opt_form) = form {
return match opt_form.get_form_dataset(None, None) {
Some(form_datums) => Ok(FormData::new(Some(form_datums), global)),
None => Err(Error::InvalidState),
};
}
Ok(FormData::new(None, global))
}
}
impl FormDataMethods for FormData {
// https://xhr.spec.whatwg.org/#dom-formdata-append
fn Append(&self, name: USVString, str_value: USVString) {
let datum = FormDatum {
ty: DOMString::from("string"),
name: DOMString::from(name.0.clone()),
value: FormDatumValue::String(DOMString::from(str_value.0)),
};
self.data
.borrow_mut()
.push((LocalName::from(name.0), datum));
}
#[allow(unrooted_must_root)]
// https://xhr.spec.whatwg.org/#dom-formdata-append
fn Append_(&self, name: USVString, blob: &Blob, filename: Option<USVString>) {
let datum = FormDatum {
ty: DOMString::from("file"),
name: DOMString::from(name.0.clone()),
value: FormDatumValue::File(DomRoot::from_ref(&*self.create_an_entry(blob, filename))),
};
self.data
.borrow_mut()
.push((LocalName::from(name.0), datum));
}
// https://xhr.spec.whatwg.org/#dom-formdata-delete
fn Delete(&self, name: USVString) {
self.data
.borrow_mut()
.retain(|(datum_name, _)| datum_name != &LocalName::from(name.0.clone()));
}
// https://xhr.spec.whatwg.org/#dom-formdata-get
fn Get(&self, name: USVString) -> Option<FileOrUSVString> {
self.data
.borrow()
.iter()
.filter(|(datum_name, _)| datum_name == &LocalName::from(name.0.clone()))
.next()
.map(|(_, datum)| match &datum.value {
FormDatumValue::String(ref s) => {
FileOrUSVString::USVString(USVString(s.to_string()))
},
FormDatumValue::File(ref b) => FileOrUSVString::File(DomRoot::from_ref(&*b)),
})
}
// https://xhr.spec.whatwg.org/#dom-formdata-getall
fn GetAll(&self, name: USVString) -> Vec<FileOrUSVString> {
self.data
.borrow()
.iter()
.filter_map(|datum| {
if datum.0 != LocalName::from(name.0.clone()) {
return None;
}
Some(match &datum.1.value {
FormDatumValue::String(ref s) => {
FileOrUSVString::USVString(USVString(s.to_string()))
},
FormDatumValue::File(ref b) => FileOrUSVString::File(DomRoot::from_ref(&*b)),
})
})
.collect()
}
// https://xhr.spec.whatwg.org/#dom-formdata-has
fn Has(&self, name: USVString) -> bool {
self.data
.borrow()
.iter()
            .any(|(datum_name, _)| datum_name == &LocalName::from(name.0.clone()))
}
// https://xhr.spec.whatwg.org/#dom-formdata-set
fn Set(&self, name: USVString, str_value: USVString) {
let mut data = self.data.borrow_mut();
let local_name = LocalName::from(name.0.clone());
data.retain(|(datum_name, _)| datum_name != &local_name);
data.push((
local_name,
FormDatum {
ty: DOMString::from("string"),
name: DOMString::from(name.0),
value: FormDatumValue::String(DOMString::from(str_value.0)),
},
));
}
#[allow(unrooted_must_root)]
// https://xhr.spec.whatwg.org/#dom-formdata-set
fn Set_(&self, name: USVString, blob: &Blob, filename: Option<USVString>) {
let mut data = self.data.borrow_mut();
let local_name = LocalName::from(name.0.clone());
data.retain(|(datum_name, _)| datum_name != &local_name);
data.push((
LocalName::from(name.0.clone()),
FormDatum {
ty: DOMString::from("file"),
name: DOMString::from(name.0),
value: FormDatumValue::File(DomRoot::from_ref(
&*self.create_an_entry(blob, filename),
)),
},
));
}
}
impl FormData {
// https://xhr.spec.whatwg.org/#create-an-entry
// Steps 3-4.
fn create_an_entry(&self, blob: &Blob, opt_filename: Option<USVString>) -> DomRoot<File> {
let name = match opt_filename {
Some(filename) => DOMString::from(filename.0),
None if blob.downcast::<File>().is_none() => DOMString::from("blob"),
None => DOMString::from(""),
};
let bytes = blob.get_bytes().unwrap_or(vec![]);
File::new(
&self.global(),
BlobImpl::new_from_bytes(bytes, blob.type_string()),
name,
None,
)
}
pub fn datums(&self) -> Vec<FormDatum> {
self.data
.borrow()
.iter()
.map(|(_, datum)| datum.clone())
.collect()
}
}
impl Iterable for FormData {
type Key = USVString;
type Value = FileOrUSVString;
fn get_iterable_length(&self) -> u32 {
self.data.borrow().len() as u32
}
fn get_value_at_index(&self, n: u32) -> FileOrUSVString {
let data = self.data.borrow();
let datum = &data.get(n as usize).unwrap().1;
match &datum.value {
FormDatumValue::String(ref s) => FileOrUSVString::USVString(USVString(s.to_string())),
FormDatumValue::File(ref b) => FileOrUSVString::File(DomRoot::from_ref(b)),
}
}
fn get_key_at_index(&self, n: u32) -> USVString {
let data = self.data.borrow();
let key = &data.get(n as usize).unwrap().0;
USVString(key.to_string())
}
}<|fim▁end|> | |
<|file_name|>install.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# called from wnf.py
# lib/wnf.py --install [rootpassword] [dbname] [source]
from __future__ import unicode_literals
import os, sys, json
import webnotes
import webnotes.db
import getpass
from webnotes.model.db_schema import DbManager
from webnotes.model.sync import sync_for
from webnotes.utils import cstr
class Installer:
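	"""Creates the MySQL user and database, then installs the framework schema, core documents and fixtures."""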
def __init__(self, root_login, root_password=None, db_name=None, site=None, site_config=None):
make_conf(db_name, site=site, site_config=site_config)
self.site = site
if isinstance(root_password, list):
root_password = root_password[0]
self.make_connection(root_login, root_password)
webnotes.local.conn = self.conn
webnotes.local.session = webnotes._dict({'user':'Administrator'})
self.dbman = DbManager(self.conn)
def make_connection(self, root_login, root_password):
if root_login:
if not root_password:
root_password = webnotes.conf.get("root_password") or None
if not root_password:
root_password = getpass.getpass("MySQL root password: ")
self.root_password = root_password
self.conn = webnotes.db.Database(user=root_login, password=root_password)
	def install(self, db_name, source_sql=None, admin_password='admin', verbose=0,
force=0):
if force or (db_name not in self.dbman.get_database_list()):
# delete user (if exists)
self.dbman.delete_user(db_name)
else:
raise Exception("Database %s already exists" % (db_name,))
# create user and db
self.dbman.create_user(db_name, webnotes.conf.db_password)
if verbose: print "Created user %s" % db_name
# create a database
self.dbman.create_database(db_name)
if verbose: print "Created database %s" % db_name
# grant privileges to user
self.dbman.grant_all_privileges(db_name, db_name)
if verbose: print "Granted privileges to user %s and database %s" % (db_name, db_name)
# flush user privileges
self.dbman.flush_privileges()
# close root connection
self.conn.close()
webnotes.connect(db_name=db_name, site=self.site)
self.dbman = DbManager(webnotes.conn)
# import in db_name
if verbose: print "Starting database import..."
# get the path of the sql file to import
if not source_sql:
source_sql = os.path.join(os.path.dirname(webnotes.__file__), "..",
'conf', 'Framework.sql')
self.dbman.restore_database(db_name, source_sql, db_name, webnotes.conf.db_password)
if verbose: print "Imported from database %s" % source_sql
self.create_auth_table()
# fresh app
if 'Framework.sql' in source_sql:
if verbose: print "Installing app..."
self.install_app(verbose=verbose)
# update admin password
self.update_admin_password(admin_password)
# create public folder
from webnotes.install_lib import setup_public_folder
setup_public_folder.make(site=self.site)
if not self.site:
from webnotes.build import bundle
bundle(False)
return db_name
def install_app(self, verbose=False):
sync_for("lib", force=True, sync_everything=True, verbose=verbose)
self.import_core_docs()
try:
from startup import install
except ImportError, e:
install = None
if os.path.exists("app"):
sync_for("app", force=True, sync_everything=True, verbose=verbose)
if os.path.exists(os.path.join("app", "startup", "install_fixtures")):
install_fixtures()
# build website sitemap
from website.doctype.website_sitemap_config.website_sitemap_config import build_website_sitemap_config
build_website_sitemap_config()
if verbose: print "Completing App Import..."
install and install.post_import()
if verbose: print "Updating patches..."
self.set_all_patches_as_completed()
self.assign_all_role_to_administrator()
def update_admin_password(self, password):
from webnotes.auth import _update_password
webnotes.conn.begin()
_update_password("Administrator", webnotes.conf.get("admin_password") or password)<|fim▁hole|>
def import_core_docs(self):
install_docs = [
# profiles
{'doctype':'Profile', 'name':'Administrator', 'first_name':'Administrator',
'email':'admin@localhost', 'enabled':1},
{'doctype':'Profile', 'name':'Guest', 'first_name':'Guest',
'email':'guest@localhost', 'enabled':1},
# userroles
{'doctype':'UserRole', 'parent': 'Administrator', 'role': 'Administrator',
'parenttype':'Profile', 'parentfield':'user_roles'},
{'doctype':'UserRole', 'parent': 'Guest', 'role': 'Guest',
'parenttype':'Profile', 'parentfield':'user_roles'},
{'doctype': "Role", "role_name": "Report Manager"}
]
webnotes.conn.begin()
for d in install_docs:
bean = webnotes.bean(d)
bean.insert()
webnotes.conn.commit()
def set_all_patches_as_completed(self):
try:
from patches.patch_list import patch_list
except ImportError, e:
print "No patches to update."
return
for patch in patch_list:
webnotes.doc({
"doctype": "Patch Log",
"patch": patch
}).insert()
webnotes.conn.commit()
def assign_all_role_to_administrator(self):
webnotes.bean("Profile", "Administrator").get_controller().add_roles(*webnotes.conn.sql_list("""
select name from tabRole"""))
webnotes.conn.commit()
def create_auth_table(self):
webnotes.conn.sql_ddl("""create table if not exists __Auth (
`user` VARCHAR(180) NOT NULL PRIMARY KEY,
`password` VARCHAR(180) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8""")
def make_conf(db_name=None, db_password=None, site=None, site_config=None):
try:
from werkzeug.exceptions import NotFound
import conf
try:
webnotes.init(site=site)
except NotFound:
pass
if not site and webnotes.conf.site:
site = webnotes.conf.site
if site:
# conf exists and site is specified, create site_config.json
make_site_config(site, db_name, db_password, site_config)
elif os.path.exists("conf.py"):
print "conf.py exists"
else:
# pyc file exists but py doesn't
raise ImportError
except ImportError:
if site:
raise Exception("conf.py does not exist")
else:
# create conf.py
with open(os.path.join("lib", "conf", "conf.py"), "r") as confsrc:
with open("conf.py", "w") as conftar:
conftar.write(confsrc.read() % get_conf_params(db_name, db_password))
webnotes.destroy()
webnotes.init(site=site)
def make_site_config(site, db_name=None, db_password=None, site_config=None):
import conf
if not getattr(conf, "sites_dir", None):
raise Exception("sites_dir missing in conf.py")
site_path = os.path.join(conf.sites_dir, site)
if not os.path.exists(site_path):
os.mkdir(site_path)
site_file = os.path.join(site_path, "site_config.json")
if not os.path.exists(site_file):
if not (site_config and isinstance(site_config, dict)):
site_config = get_conf_params(db_name, db_password)
with open(site_file, "w") as f:
f.write(json.dumps(site_config, indent=1, sort_keys=True))
def get_conf_params(db_name=None, db_password=None):
if not db_name:
db_name = raw_input("Database Name: ")
if not db_name:
raise Exception("Database Name Required")
if not db_password:
from webnotes.utils import random_string
db_password = random_string(16)
return {"db_name": db_name, "db_password": db_password}
def install_fixtures():
print "Importing install fixtures..."
for basepath, folders, files in os.walk(os.path.join("app", "startup", "install_fixtures")):
for f in files:
f = cstr(f)
if f.endswith(".json"):
print "Importing " + f
with open(os.path.join(basepath, f), "r") as infile:
webnotes.bean(json.loads(infile.read())).insert_or_update()
webnotes.conn.commit()
if f.endswith(".csv"):
from core.page.data_import_tool.data_import_tool import import_file_by_path
import_file_by_path(os.path.join(basepath, f), ignore_links = True, overwrite=True)
webnotes.conn.commit()
if os.path.exists(os.path.join("app", "startup", "install_fixtures", "files")):
if not os.path.exists(os.path.join("public", "files")):
os.makedirs(os.path.join("public", "files"))
os.system("cp -r %s %s" % (os.path.join("app", "startup", "install_fixtures", "files", "*"),
os.path.join("public", "files")))<|fim▁end|> | webnotes.conn.commit() |
<|file_name|>focus_tab.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# License: GPLv3 Copyright: 2020, Kovid Goyal <kovid at kovidgoyal.net>
from typing import TYPE_CHECKING, Optional
from .base import (
MATCH_TAB_OPTION, ArgsType, Boss, PayloadGetType, PayloadType, RCOptions,
RemoteCommand, ResponseType, Window
)
if TYPE_CHECKING:
from kitty.cli_stub import FocusTabRCOptions as CLIOptions
class FocusTab(RemoteCommand):
'''
match: The tab to focus
'''
short_desc = 'Focus the specified tab'
desc = 'The active window in the specified tab will be focused.'
options_spec = MATCH_TAB_OPTION + '''
--no-response
type=bool-set
default=false
Don't wait for a response indicating the success of the action. Note that
using this option means that you will not be notified of failures.
'''
argspec = ''
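    # Example invocation (sketch): kitty @ focus-tab --match title:logs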
def message_to_kitty(self, global_opts: RCOptions, opts: 'CLIOptions', args: ArgsType) -> PayloadType:
return {'match': opts.match}<|fim▁hole|> boss.set_active_tab(tab)
break
return None
focus_tab = FocusTab()<|fim▁end|> |
def response_from_kitty(self, boss: Boss, window: Optional[Window], payload_get: PayloadGetType) -> ResponseType:
for tab in self.tabs_for_match_payload(boss, window, payload_get):
if tab: |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>// See the License for the specific language governing permissions and
// limitations under the License.
extern crate lib;
fn main() {
lib::greet("World!");
}<|fim▁end|> | |
<|file_name|>fans.js<|end_file_name|><|fim▁begin|>var fans=require('../../modules/blog/fans');
var User=require('../../modules/resume/user');
var async = require('asyncawait/async');
var await = require('asyncawait/await');
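// Follow ("fans") handler: POST creates a follow relation from the logged-in
// user to targetId, DELETE removes it; GET is currently a no-op.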
module.exports=(async(function(method,req,res){
var result;
if(method==='get'){
}
else if(method==='post'){
var userId=req.session.uid;
var targetId=req.body.targetId;
if(userId){
if(userId==targetId){
result={
status:-1,
msg:"你咋可以自己关注自己呢?自恋!"
}
}else{
            // Only logged-in users can follow; check whether the target is already followed
var isFansDate=await(fans.findOne({
where:{
userId:userId,
targetId:targetId
}
}))
if(isFansDate){
result={
status:-1,
msg:"已关注"
}
}
else{
var fansDate=await(fans.create({
userId:userId,
targetId:targetId
}))
if(fansDate){
result={
status:0,
msg:"关注成功"
}
}else{
result={
status:-1,
msg:"关注失败"
}
}
}
}
}else{
result={
status:-1,
msg:"未登录"
}<|fim▁hole|> }
}
else if(method==='delete'){
var targetId=req.query.targetId;
var userId=req.session.uid;
if(userId){
            // Only logged-in users can unfollow; check whether a follow relation exists
var isFansDate=await(fans.findOne({
where:{
userId:userId,
targetId:targetId
}
}))
if(isFansDate){
var fansDate=await(fans.destroy({
where:{
userId:userId,
targetId:targetId
}
}))
if(fansDate){
result={
status:0,
msg:"取消关注成功"
}
}else{
result={
status:-1,
msg:"取消关注失败"
}
}
}
else{
result={
status:-1,
msg:"未关注"
}
}
}else{
result={
status:-1,
msg:"未登录"
}
}
}
res.writeHead(200,{"Content-Type":"text/html;charset=utf-8"});
res.end(JSON.stringify(result))
}))<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""<|fim▁hole|>import os
import re
import subprocess
import sys
from setuptools.command import sdist
def parse_mailmap(mailmap='.mailmap'):
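    # Each mapping line in .mailmap is assumed to be of the form (illustrative):
    #   canonical@example.org alias@example.org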
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias] = canonical_email
return mapping
def canonicalize_emails(changelog, mapping):
"""Takes in a string and an email alias mapping and replaces all
instances of the aliases in the string with their real email.
"""
for alias, email in mapping.iteritems():
changelog = changelog.replace(alias, email)
return changelog
# Get requirements from the first file that exists
def get_reqs_from_files(requirements_files):
reqs_in = []
for requirements_file in requirements_files:
if os.path.exists(requirements_file):
return open(requirements_file, 'r').read().split('\n')
return []
def parse_requirements(requirements_files=['requirements.txt',
'tools/pip-requires']):
requirements = []
for line in get_reqs_from_files(requirements_files):
# For the requirements list, we need to inject only the portion
# after egg= so that distutils knows the package it's looking for
# such as:
# -e git://github.com/openstack/nova/master#egg=nova
if re.match(r'\s*-e\s+', line):
requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
line))
# such as:
# http://github.com/openstack/nova/zipball/master#egg=nova
elif re.match(r'\s*https?:', line):
requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
line))
# -f lines are for index locations, and don't get used here
elif re.match(r'\s*-f\s+', line):
pass
# argparse is part of the standard library starting with 2.7
# adding it to the requirements list screws distro installs
elif line == 'argparse' and sys.version_info >= (2, 7):
pass
else:
requirements.append(line)
return requirements
def parse_dependency_links(requirements_files=['requirements.txt',
'tools/pip-requires']):
dependency_links = []
# dependency_links inject alternate locations to find packages listed
# in requirements
for line in get_reqs_from_files(requirements_files):
# skip comments and blank lines
if re.match(r'(\s*#)|(\s*$)', line):
continue
# lines with -e or -f need the whole line, minus the flag
if re.match(r'\s*-[ef]\s+', line):
dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line))
# lines that are only urls can go in unmolested
elif re.match(r'\s*https?:', line):
dependency_links.append(line)
return dependency_links
def write_requirements():
venv = os.environ.get('VIRTUAL_ENV', None)
if venv is not None:
with open("requirements.txt", "w") as req_file:
output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
stdout=subprocess.PIPE)
requirements = output.communicate()[0].strip()
req_file.write(requirements)
def _run_shell_command(cmd):
output = subprocess.Popen(["/bin/sh", "-c", cmd],
stdout=subprocess.PIPE)
out = output.communicate()
if len(out) == 0:
return None
if len(out[0].strip()) == 0:
return None
return out[0].strip()
def _get_git_next_version_suffix(branch_name):
datestamp = datetime.datetime.now().strftime('%Y%m%d')
if branch_name == 'milestone-proposed':
revno_prefix = "r"
else:
revno_prefix = ""
_run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
milestone_cmd = "git show meta/openstack/release:%s" % branch_name
milestonever = _run_shell_command(milestone_cmd)
if not milestonever:
milestonever = ""
post_version = _get_git_post_version()
revno = post_version.split(".")[-1]
return "%s~%s.%s%s" % (milestonever, datestamp, revno_prefix, revno)
def _get_git_current_tag():
return _run_shell_command("git tag --contains HEAD")
def _get_git_tag_info():
return _run_shell_command("git describe --tags")
def _get_git_post_version():
current_tag = _get_git_current_tag()
if current_tag is not None:
return current_tag
else:
tag_info = _get_git_tag_info()
if tag_info is None:
base_version = "0.0"
cmd = "git --no-pager log --oneline"
out = _run_shell_command(cmd)
revno = len(out.split("\n"))
else:
tag_infos = tag_info.split("-")
base_version = "-".join(tag_infos[:-2])
revno = tag_infos[-2]
return "%s.%s" % (base_version, revno)
def write_git_changelog():
"""Write a changelog based on the git changelog."""
if os.path.isdir('.git'):
git_log_cmd = 'git log --stat'
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open("ChangeLog", "w") as changelog_file:
changelog_file.write(canonicalize_emails(changelog, mailmap))
def generate_authors():
"""Create AUTHORS file using git commits."""
jenkins_email = '[email protected]'
old_authors = 'AUTHORS.in'
new_authors = 'AUTHORS'
if os.path.isdir('.git'):
# don't include jenkins email address in AUTHORS file
git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
"grep -v " + jenkins_email)
changelog = _run_shell_command(git_log_cmd)
mailmap = parse_mailmap()
with open(new_authors, 'w') as new_authors_fh:
new_authors_fh.write(canonicalize_emails(changelog, mailmap))
if os.path.exists(old_authors):
with open(old_authors, "r") as old_authors_fh:
new_authors_fh.write('\n' + old_authors_fh.read())
_rst_template = """%(heading)s
%(underline)s
.. automodule:: %(module)s
:members:
:undoc-members:
:show-inheritance:
"""
def read_versioninfo(project):
"""Read the versioninfo file. If it doesn't exist, we're in a github
zipball, and there's really know way to know what version we really
are, but that should be ok, because the utility of that should be
just about nil if this code path is in use in the first place."""
versioninfo_path = os.path.join(project, 'versioninfo')
if os.path.exists(versioninfo_path):
with open(versioninfo_path, 'r') as vinfo:
version = vinfo.read().strip()
else:
version = "0.0.0"
return version
def write_versioninfo(project, version):
"""Write a simple file containing the version of the package."""
open(os.path.join(project, 'versioninfo'), 'w').write("%s\n" % version)
def get_cmdclass():
"""Return dict of commands to run from setup.py."""
cmdclass = dict()
def _find_modules(arg, dirname, files):
for filename in files:
if filename.endswith('.py') and filename != '__init__.py':
arg["%s.%s" % (dirname.replace('/', '.'),
filename[:-3])] = True
class LocalSDist(sdist.sdist):
"""Builds the ChangeLog and Authors files from VC first."""
def run(self):
write_git_changelog()
generate_authors()
# sdist.sdist is an old style class, can't use super()
sdist.sdist.run(self)
cmdclass['sdist'] = LocalSDist
# If Sphinx is installed on the box running setup.py,
# enable setup.py to build the documentation, otherwise,
# just ignore it
try:
from sphinx.setup_command import BuildDoc
class LocalBuildDoc(BuildDoc):
def generate_autoindex(self):
print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
modules = {}
option_dict = self.distribution.get_option_dict('build_sphinx')
source_dir = os.path.join(option_dict['source_dir'][1], 'api')
if not os.path.exists(source_dir):
os.makedirs(source_dir)
for pkg in self.distribution.packages:
if '.' not in pkg:
os.path.walk(pkg, _find_modules, modules)
module_list = modules.keys()
module_list.sort()
autoindex_filename = os.path.join(source_dir, 'autoindex.rst')
with open(autoindex_filename, 'w') as autoindex:
autoindex.write(""".. toctree::
:maxdepth: 1
""")
for module in module_list:
output_filename = os.path.join(source_dir,
"%s.rst" % module)
heading = "The :mod:`%s` Module" % module
underline = "=" * len(heading)
values = dict(module=module, heading=heading,
underline=underline)
print "Generating %s" % output_filename
with open(output_filename, 'w') as output_file:
output_file.write(_rst_template % values)
autoindex.write(" %s.rst\n" % module)
def run(self):
if not os.getenv('SPHINX_DEBUG'):
self.generate_autoindex()
for builder in ['html', 'man']:
self.builder = builder
self.finalize_options()
self.project = self.distribution.get_name()
self.version = self.distribution.get_version()
self.release = self.distribution.get_version()
BuildDoc.run(self)
cmdclass['build_sphinx'] = LocalBuildDoc
except ImportError:
pass
return cmdclass
def get_git_branchname():
for branch in _run_shell_command("git branch --color=never").split("\n"):
if branch.startswith('*'):
_branch_name = branch.split()[1].strip()
if _branch_name == "(no":
_branch_name = "no-branch"
return _branch_name
def get_pre_version(projectname, base_version):
"""Return a version which is based"""
if os.path.isdir('.git'):
current_tag = _get_git_current_tag()
if current_tag is not None:
version = current_tag
else:
branch_name = os.getenv('BRANCHNAME',
os.getenv('GERRIT_REFNAME',
get_git_branchname()))
version_suffix = _get_git_next_version_suffix(branch_name)
version = "%s~%s" % (base_version, version_suffix)
write_versioninfo(projectname, version)
return version.split('~')[0]
else:
version = read_versioninfo(projectname)
return version.split('~')[0]
def get_post_version(projectname):
"""Return a version which is equal to the tag that's on the current
revision if there is one, or tag plus number of additional revisions
if the current revision has no tag."""
if os.path.isdir('.git'):
version = _get_git_post_version()
write_versioninfo(projectname, version)
return version
return read_versioninfo(projectname)<|fim▁end|> | Utilities with minimum-depends for use in setup.py
"""
import datetime |
<|file_name|>enu.py<|end_file_name|><|fim▁begin|>"""
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import numpy as np
import latlon
import ecef
class Enu(object):
def __init__(self, e, n, u):
self.e = e
self.n = n
self.u = u
def __eq__(self, other):
return self.e == other.e and self.n == other.n and self.u == other.u
def __hash__(self):
return hash((self.e, self.n, self.u))
def to_ecef(self, origin):
# this doesn't work at the poles because longitude is not uniquely defined there
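        # The columns of the rotation matrix built below are the east, north
        # and up unit vectors at the origin, expressed in ECEF coordinates.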
sin_lon = origin._sin_lon()
sin_lat = origin._sin_lat()<|fim▁hole|> [cos_lon, - sin_lon * sin_lat, sin_lon * cos_lat],
[0, cos_lat, sin_lat]])
enu_vector = np.array([[self.e], [self.n], [self.u]])
ecef_vector = np.dot(global_to_ecef_matrix, enu_vector)
return ecef.Ecef(ecef_vector[0][0], ecef_vector[1][0], ecef_vector[2][0])<|fim▁end|> | cos_lon = origin._cos_lon()
cos_lat = origin._cos_lat()
global_to_ecef_matrix = np.array([[-sin_lon, -cos_lon * sin_lat, cos_lon * cos_lat], |
<|file_name|>cmdboardpolygonremove.cpp<|end_file_name|><|fim▁begin|>/*
* LibrePCB - Professional EDA for everyone!
* Copyright (C) 2013 LibrePCB Developers, see AUTHORS.md for contributors.
* https://librepcb.org/
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*******************************************************************************
* Includes
******************************************************************************/
#include "cmdboardpolygonremove.h"
#include "../board.h"
#include "../items/bi_polygon.h"
#include <QtCore>
/*******************************************************************************
* Namespace
******************************************************************************/
namespace librepcb {
namespace project {
/*******************************************************************************
* Constructors / Destructor
******************************************************************************/
CmdBoardPolygonRemove::CmdBoardPolygonRemove(BI_Polygon& polygon) noexcept
: UndoCommand(tr("Remove polygon from board")),
mBoard(polygon.getBoard()),
mPolygon(polygon) {
}
CmdBoardPolygonRemove::~CmdBoardPolygonRemove() noexcept {<|fim▁hole|> ******************************************************************************/
bool CmdBoardPolygonRemove::performExecute() {
performRedo(); // can throw
return true;
}
void CmdBoardPolygonRemove::performUndo() {
mBoard.addPolygon(mPolygon); // can throw
}
void CmdBoardPolygonRemove::performRedo() {
mBoard.removePolygon(mPolygon); // can throw
}
/*******************************************************************************
* End of File
******************************************************************************/
} // namespace project
} // namespace librepcb<|fim▁end|> | }
/*******************************************************************************
* Inherited from UndoCommand |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | default_app_config = 'providers.com.dailyssrn.apps.AppConfig' |
<|file_name|>tektronixMDO3012.py<|end_file_name|><|fim▁begin|>"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2016 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .tektronixMDO3000 import *
class tektronixMDO3012(tektronixMDO3000):
"Tektronix MDO3012 IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'MDO3012')
super(tektronixMDO3012, self).__init__(*args, **kwargs)
<|fim▁hole|> self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 100e6
# AFG option
self._output_count = 1
self._init_channels()
self._init_outputs()<|fim▁end|> | self._analog_channel_count = 2
self._digital_channel_count = 16 |
<|file_name|>stylesheet.js<|end_file_name|><|fim▁begin|>import { red } from "./colors.js";
export default `body { background: url("${
new URL("./file.png" + __resourceQuery, import.meta.url).href<|fim▁hole|>}"); color: ${red}; }`;<|fim▁end|> | |
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Author: Nic Wolfe <[email protected]>
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
# pylint:disable=too-many-lines
from __future__ import unicode_literals
import ast
import base64
import ctypes
import datetime
import hashlib
import io
import operator
import os
import platform
import random
import re
import shutil
import socket
import ssl
import stat
import time
import traceback
import urllib
import uuid
import xml.etree.ElementTree as ET
import zipfile
from contextlib import closing
from itertools import cycle, izip
import adba
import certifi
import cfscrape
import requests
from cachecontrol import CacheControl
from requests.utils import urlparse
import sickbeard
from sickbeard import classes, db, logger
from sickbeard.common import USER_AGENT
from sickrage.helper import MEDIA_EXTENSIONS, SUBTITLE_EXTENSIONS, episode_num, pretty_file_size
from sickrage.helper.encoding import ek
from sickrage.show.Show import Show
# pylint: disable=protected-access
# Access to a protected member of a client class
urllib._urlopener = classes.SickBeardURLopener()
def indentXML(elem, level=0):
"""
Does our pretty printing, makes Matt very happy
"""
i = "\n" + level * " "
if elem:
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indentXML(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def remove_non_release_groups(name):
"""
Remove non release groups from name
"""
if not name:
return name
# Do not remove all [....] suffixes, or it will break anime releases ## Need to verify this is true now
# Check your database for funky release_names and add them here, to improve failed handling, archiving, and history.
# select release_name from tv_episodes WHERE LENGTH(release_name);
# [eSc], [SSG], [GWC] are valid release groups for non-anime
removeWordsList = {
r'\[rartv\]$': 'searchre',
r'\[rarbg\]$': 'searchre',
r'\.\[eztv\]$': 'searchre',
r'\[eztv\]$': 'searchre',
r'\[ettv\]$': 'searchre',
r'\[cttv\]$': 'searchre',
r'\.\[vtv\]$': 'searchre',
r'\[vtv\]$': 'searchre',
r'\[EtHD\]$': 'searchre',
r'\[GloDLS\]$': 'searchre',
r'\[silv4\]$': 'searchre',
r'\[Seedbox\]$': 'searchre',
r'\[PublicHD\]$': 'searchre',
r'\.\[PublicHD\]$': 'searchre',
r'\.\[NO.RAR\]$': 'searchre',
r'\[NO.RAR\]$': 'searchre',
r'-\=\{SPARROW\}\=-$': 'searchre',
r'\=\{SPARR$': 'searchre',
r'\.\[720P\]\[HEVC\]$': 'searchre',
r'\[AndroidTwoU\]$': 'searchre',
r'\[brassetv\]$': 'searchre',
r'\[Talamasca32\]$': 'searchre',
r'\(musicbolt\.com\)$': 'searchre',
r'\.\(NLsub\)$': 'searchre',
r'\(NLsub\)$': 'searchre',
r'\.\[BT\]$': 'searchre',
r' \[1044\]$': 'searchre',
r'\.RiPSaLoT$': 'searchre',
r'\.GiuseppeTnT$': 'searchre',
r'\.Renc$': 'searchre',
r'\.gz$': 'searchre',
r'\.English$': 'searchre',
r'\.German$': 'searchre',
r'\.\.Italian$': 'searchre',
r'\.Italian$': 'searchre',
r'(?<![57])\.1$': 'searchre',
r'-NZBGEEK$': 'searchre',
r'-Siklopentan$': 'searchre',
r'-Chamele0n$': 'searchre',
r'-Obfuscated$': 'searchre',
r'-BUYMORE$': 'searchre',
r'-\[SpastikusTV\]$': 'searchre',
r'-RP$': 'searchre',
r'-20-40$': 'searchre',
r'\.\[www\.usabit\.com\]$': 'searchre',
r'^\[www\.Cpasbien\.pe\] ': 'searchre',
r'^\[www\.Cpasbien\.com\] ': 'searchre',
r'^\[ www\.Cpasbien\.pw \] ': 'searchre',
r'^\.www\.Cpasbien\.pw': 'searchre',
r'^\[www\.newpct1\.com\]': 'searchre',
r'^\[ www\.Cpasbien\.com \] ': 'searchre',
r'- \{ www\.SceneTime\.com \}$': 'searchre',
r'^\{ www\.SceneTime\.com \} - ': 'searchre',
r'^\]\.\[www\.tensiontorrent.com\] - ': 'searchre',
r'^\]\.\[ www\.tensiontorrent.com \] - ': 'searchre',
r'- \[ www\.torrentday\.com \]$': 'searchre',
r'^\[ www\.TorrentDay\.com \] - ': 'searchre',
r'\[NO-RAR\] - \[ www\.torrentday\.com \]$': 'searchre',
}
_name = name
for remove_string, remove_type in removeWordsList.iteritems():
if remove_type == 'search':
_name = _name.replace(remove_string, '')
elif remove_type == 'searchre':
_name = re.sub(r'(?i)' + remove_string, '', _name)
return _name
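# Illustrative usage sketch (not part of the original module): the suffix
# tags listed above are stripped case-insensitively from release names.
def _demo_remove_non_release_groups():
    assert remove_non_release_groups('Show.S01E01.720p-GROUP[rartv]') == 'Show.S01E01.720p-GROUP'
    assert remove_non_release_groups('Show.S01E01-NZBGEEK') == 'Show.S01E01'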
def isMediaFile(filename):
"""
Check if named file may contain media
:param filename: Filename to check
:return: True if this is a known media file, False if not
"""
# ignore samples
try:
if re.search(r'(^|[\W_])(?<!shomin.)(sample\d*)[\W_]', filename, re.I):
return False
# ignore RARBG release intro
if re.search(r'^RARBG\.(\w+\.)?(mp4|avi|txt)$', filename, re.I):
return False
        # ignore macOS "resource fork" files
        if filename.startswith('._'):
            return False
        filename_parts = filename.rpartition(".")
        if re.search('extras?$', filename_parts[0], re.I):
            return False
        return filename_parts[-1].lower() in MEDIA_EXTENSIONS
except TypeError as error: # Not a string
logger.log('Invalid filename. Filename must be a string. {0}'.format(error), logger.DEBUG) # pylint: disable=no-member
return False
def isRarFile(filename):
"""
Check if file is a RAR file, or part of a RAR set
:param filename: Filename to check
:return: True if this is RAR/Part file, False if not
"""
archive_regex = r'(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)'
if re.search(archive_regex, filename):
return True
return False
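# Illustrative usage sketch (not part of the original module): only the
# first volume of a split RAR set matches, so later parts are not
# double-processed.
def _demo_is_rar_file():
    assert isRarFile('show.rar')
    assert isRarFile('show.part01.rar')
    assert not isRarFile('show.part02.rar')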
def isBeingWritten(filepath):
"""
Check if file has been written in last 60 seconds
:param filepath: Filename to check
:return: True if file has been written recently, False if none
"""
# Return True if file was modified within 60 seconds. it might still be being written to.
ctime = max(ek(os.path.getctime, filepath), ek(os.path.getmtime, filepath))
if ctime > time.time() - 60:
return True
return False
def remove_file_failed(failed_file):
"""
Remove file from filesystem
    :param failed_file: File to remove
"""
try:
ek(os.remove, failed_file)
except Exception:
pass
def makeDir(path):
"""
Make a directory on the filesystem
:param path: directory to make
:return: True if success, False if failure
"""
if not ek(os.path.isdir, path):
try:
ek(os.makedirs, path)
# do the library update for synoindex
sickbeard.notifiers.synoindex_notifier.addFolder(path)
except OSError:
return False
return True
def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
"""
Contacts indexer to check for information on shows by showid
:param regShowName: Name of show
:param indexer: Which indexer to use
:param indexer_id: Which indexer ID to look for
:param ui: Custom UI for indexer use
:return:
"""
showNames = [re.sub('[. -]', ' ', regShowName)]
# Query Indexers for each search term and build the list of results
    for i in sickbeard.indexerApi().indexers if not indexer else [int(indexer)]:
# Query Indexers for each search term and build the list of results
lINDEXER_API_PARMS = sickbeard.indexerApi(i).api_params.copy()
if ui is not None:
lINDEXER_API_PARMS['custom_ui'] = ui
t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)
for name in showNames:
logger.log("Trying to find " + name + " on " + sickbeard.indexerApi(i).name, logger.DEBUG)
try:
search = t[indexer_id] if indexer_id else t[name]
except Exception:
continue
try:
seriesname = search[0][b'seriesname']
except Exception:
seriesname = None
try:
series_id = search[0][b'id']
except Exception:
series_id = None
if not (seriesname and series_id):
continue
ShowObj = Show.find(sickbeard.showList, int(series_id))
# Check if we can find the show in our list (if not, it's not the right show)
if (indexer_id is None) and (ShowObj is not None) and (ShowObj.indexerid == int(series_id)):
return seriesname, i, int(series_id)
elif (indexer_id is not None) and (int(indexer_id) == int(series_id)):
return seriesname, i, int(indexer_id)
if indexer:
break
return None, None, None
def listMediaFiles(path):
"""
Get a list of files possibly containing media in a path
:param path: Path to check for files
:return: list of files
"""
    if not path or not ek(os.path.isdir, path):
return []
files = []
for curFile in ek(os.listdir, path):
fullCurFile = ek(os.path.join, path, curFile)
# if it's a folder do it recursively
if ek(os.path.isdir, fullCurFile) and not curFile.startswith('.') and not curFile == 'Extras':
files += listMediaFiles(fullCurFile)
elif isMediaFile(curFile):
files.append(fullCurFile)
return files
def copyFile(srcFile, destFile):
"""
Copy a file from source to destination
:param srcFile: Path of source file
:param destFile: Path of destination file
"""
try:
from shutil import SpecialFileError, Error
except ImportError:
from shutil import Error
SpecialFileError = Error
try:
ek(shutil.copyfile, srcFile, destFile)
except (SpecialFileError, Error) as error:
logger.log('{0}'.format(error), logger.WARNING)
except Exception as error:
logger.log('{0}'.format(error), logger.ERROR)
else:
try:
ek(shutil.copymode, srcFile, destFile)
except OSError:
pass
def moveFile(srcFile, destFile):
"""
Move a file from source to destination
:param srcFile: Path of source file
:param destFile: Path of destination file
"""
try:
ek(shutil.move, srcFile, destFile)
fixSetGroupID(destFile)
except OSError:
copyFile(srcFile, destFile)
ek(os.unlink, srcFile)
def link(src, dst):
"""
Create a file link from source to destination.
TODO: Make this unicode proof
:param src: Source file
:param dst: Destination file
"""
if platform.system() == 'Windows':
if ctypes.windll.kernel32.CreateHardLinkW(ctypes.c_wchar_p(unicode(dst)), ctypes.c_wchar_p(unicode(src)), None) == 0:
raise ctypes.WinError()
else:
ek(os.link, src, dst)
def hardlinkFile(srcFile, destFile):
"""
Create a hard-link (inside filesystem link) between source and destination
:param srcFile: Source file
:param destFile: Destination file
"""
try:
ek(link, srcFile, destFile)
fixSetGroupID(destFile)
except Exception as error:
logger.log("Failed to create hardlink of {0} at {1}. Error: {2}. Copying instead".format
(srcFile, destFile, error), logger.WARNING)
copyFile(srcFile, destFile)
def symlink(src, dst):
"""
Create a soft/symlink between source and destination
:param src: Source file
:param dst: Destination file
"""
if platform.system() == 'Windows':
if ctypes.windll.kernel32.CreateSymbolicLinkW(ctypes.c_wchar_p(unicode(dst)), ctypes.c_wchar_p(unicode(src)), 1 if ek(os.path.isdir, src) else 0) in [0, 1280]:
raise ctypes.WinError()
else:
ek(os.symlink, src, dst)
def moveAndSymlinkFile(srcFile, destFile):
"""
Move a file from source to destination, then create a symlink back from destination from source. If this fails, copy
the file from source to destination
:param srcFile: Source file
:param destFile: Destination file
"""
try:
moveFile(srcFile, destFile)
symlink(destFile, srcFile)
except Exception as error:
logger.log("Failed to create symlink of {0} at {1}. Error: {2}. Copying instead".format
(srcFile, destFile, error), logger.WARNING)
copyFile(srcFile, destFile)
def make_dirs(path):
"""
Creates any folders that are missing and assigns them the permissions of their
parents
"""
logger.log("Checking if the path {0} already exists".format(path), logger.DEBUG)
if not ek(os.path.isdir, path):
# Windows, create all missing folders
if platform.system() == 'Windows':
try:
logger.log("Folder {0} didn't exist, creating it".format(path), logger.DEBUG)
ek(os.makedirs, path)
except (OSError, IOError) as error:
logger.log("Failed creating {0} : {1}".format(path, error), logger.ERROR)
return False
# not Windows, create all missing folders and set permissions
else:
sofar = ''
folder_list = path.split(os.path.sep)
# look through each subfolder and make sure they all exist
for cur_folder in folder_list:
sofar += cur_folder + os.path.sep
# if it exists then just keep walking down the line
if ek(os.path.isdir, sofar):
continue
try:
logger.log("Folder {0} didn't exist, creating it".format(sofar), logger.DEBUG)
ek(os.mkdir, sofar)
# use normpath to remove end separator, otherwise checks permissions against itself
chmodAsParent(ek(os.path.normpath, sofar))
# do the library update for synoindex
sickbeard.notifiers.synoindex_notifier.addFolder(sofar)
except (OSError, IOError) as error:
logger.log("Failed creating {0} : {1}".format(sofar, error), logger.ERROR)
return False
return True
def rename_ep_file(cur_path, new_path, old_path_length=0):
"""
Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
left that are now empty.
:param cur_path: The absolute path to the file you want to move/rename
:param new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
:param old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
"""
# new_dest_dir, new_dest_name = ek(os.path.split, new_path) # @UnusedVariable
if old_path_length == 0 or old_path_length > len(cur_path):
# approach from the right
cur_file_name, cur_file_ext = ek(os.path.splitext, cur_path) # @UnusedVariable
else:
# approach from the left
cur_file_ext = cur_path[old_path_length:]
cur_file_name = cur_path[:old_path_length]
if cur_file_ext[1:] in SUBTITLE_EXTENSIONS:
# Extract subtitle language from filename
sublang = ek(os.path.splitext, cur_file_name)[1][1:]
# Check if the language extracted from filename is a valid language
if sublang in sickbeard.subtitles.subtitle_code_filter():
cur_file_ext = '.' + sublang + cur_file_ext
# put the extension on the incoming file
new_path += cur_file_ext
make_dirs(ek(os.path.dirname, new_path))
# move the file
try:
logger.log("Renaming file from {0} to {1}".format(cur_path, new_path))
ek(shutil.move, cur_path, new_path)
except (OSError, IOError) as error:
logger.log("Failed renaming {0} to {1} : {2}".format(cur_path, new_path, error), logger.ERROR)
return False
# clean up any old folders that are empty
delete_empty_folders(ek(os.path.dirname, cur_path))
return True
def delete_empty_folders(check_empty_dir, keep_dir=None):
"""
Walks backwards up the path and deletes any empty folders found.
:param check_empty_dir: The path to clean (absolute path to a folder)
:param keep_dir: Clean until this path is reached
"""
# treat check_empty_dir as empty when it only contains these items
ignore_items = []
logger.log("Trying to clean any empty folders under " + check_empty_dir)
# as long as the folder exists and doesn't contain any files, delete it
while ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
check_files = ek(os.listdir, check_empty_dir)
if not check_files or (len(check_files) <= len(ignore_items) and all(
check_file in ignore_items for check_file in check_files)):
# directory is empty or contains only ignore_items
try:<|fim▁hole|> sickbeard.notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
except OSError as error:
logger.log("Unable to delete {0}. Error: {1}".format(check_empty_dir, error), logger.WARNING)
break
check_empty_dir = ek(os.path.dirname, check_empty_dir)
else:
break
def fileBitFilter(mode):
"""
Strip special filesystem bits from file
:param mode: mode to check and strip
:return: required mode for media file
"""
for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
if mode & bit:
mode -= bit
return mode
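# Illustrative usage sketch (not part of the original module): execute,
# setuid and setgid bits are stripped, leaving plain read/write modes.
def _demo_file_bit_filter():
    assert fileBitFilter(0o755) == 0o644   # rwxr-xr-x -> rw-r--r--
    assert fileBitFilter(0o4755) == 0o644  # setuid is dropped too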
def chmodAsParent(childPath):
"""
    Retain permissions of parent for children
(Does not work for Windows hosts)
:param childPath: Child Path to change permissions to sync from parent
"""
if platform.system() == 'Windows':
return
parentPath = ek(os.path.dirname, childPath)
if not parentPath:
logger.log("No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
return
childPath = ek(os.path.join, parentPath, ek(os.path.basename, childPath))
parentPathStat = ek(os.stat, parentPath)
parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])
childPathStat = ek(os.stat, childPath.encode(sickbeard.SYS_ENCODING))
childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])
if ek(os.path.isfile, childPath):
childMode = fileBitFilter(parentMode)
else:
childMode = parentMode
if childPath_mode == childMode:
return
childPath_owner = childPathStat.st_uid # pylint: disable=no-member
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
if user_id not in (childPath_owner, 0):
logger.log("Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
return
try:
ek(os.chmod, childPath, childMode)
except OSError:
logger.log("Failed to set permission for {0} to {1:o}, parent directory has {2:o}".format(childPath, childMode, parentMode), logger.DEBUG)
def fixSetGroupID(childPath):
"""
    Inherit SGID from parent
(does not work on Windows hosts)
:param childPath: Path to inherit SGID permissions from parent
"""
if platform.system() == 'Windows':
return
parentPath = ek(os.path.dirname, childPath)
parentStat = ek(os.stat, parentPath)
parentMode = stat.S_IMODE(parentStat[stat.ST_MODE])
childPath = ek(os.path.join, parentPath, ek(os.path.basename, childPath))
if parentMode & stat.S_ISGID:
parentGID = parentStat[stat.ST_GID]
childStat = ek(os.stat, childPath.encode(sickbeard.SYS_ENCODING))
childGID = childStat[stat.ST_GID]
if childGID == parentGID:
return
childPath_owner = childStat.st_uid # pylint: disable=no-member
user_id = os.geteuid() # @UndefinedVariable - only available on UNIX
if user_id != 0 and user_id != childPath_owner:
logger.log("Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
logger.DEBUG)
return
try:
ek(os.chown, childPath, -1, parentGID) # @UndefinedVariable - only available on UNIX
logger.log("Respecting the set-group-ID bit on the parent directory for {0}".format(childPath), logger.DEBUG)
except OSError:
logger.log(
"Failed to respect the set-group-ID bit on the parent directory for {0} (setting group ID {1})".format(
childPath, parentGID), logger.ERROR)
def is_anime_in_show_list():
"""
Check if any shows in list contain anime
:return: True if global showlist contains Anime, False if not
"""
for show in sickbeard.showList:
if show.is_anime:
return True
return False
def update_anime_support():
"""Check if we need to support anime, and if we do, enable the feature"""
sickbeard.ANIMESUPPORT = is_anime_in_show_list()
def get_absolute_number_from_season_and_episode(show, season, episode):
"""
Find the absolute number for a show episode
:param show: Show object
:param season: Season number
:param episode: Episode number
:return: The absolute number
"""
absolute_number = None
if season and episode:
main_db_con = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
sql_results = main_db_con.select(sql, [show.indexerid, season, episode])
if len(sql_results) == 1:
absolute_number = int(sql_results[0][b"absolute_number"])
logger.log("Found absolute number {absolute} for show {show} {ep}".format
(absolute=absolute_number, show=show.name,
ep=episode_num(season, episode)), logger.DEBUG)
else:
logger.log("No entries for absolute number for show {show} {ep}".format
(show=show.name, ep=episode_num(season, episode)), logger.DEBUG)
return absolute_number
def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
episodes = []
season = None
if len(absolute_numbers):
if not show and indexer_id:
show = Show.find(sickbeard.showList, indexer_id)
for absolute_number in absolute_numbers if show else []:
ep = show.getEpisode(None, None, absolute_number=absolute_number)
if ep:
episodes.append(ep.episode)
            season = ep.season  # this will always take the last found season, so eps that cross the season border are not handled well
return season, episodes
def sanitizeSceneName(name, anime=False):
"""
Takes a show name and returns the "scenified" version of it.
:param anime: Some show have a ' in their name(Kuroko's Basketball) and is needed for search.
:return: A string containing the scene version of the show name given.
"""
# assert isinstance(name, unicode), name + ' is not unicode'
if not name:
return ''
bad_chars = ',:()!?\u2019'
if not anime:
bad_chars += "'"
# strip out any bad chars
for x in bad_chars:
name = name.replace(x, "")
# tidy up stuff that doesn't belong in scene names
name = name.replace("&", "and")
name = re.sub(r"[- /]+", ".", name)
name = re.sub(r"[.]+", ".", name)
if name.endswith('.'):
name = name[:-1]
return name
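# Illustrative usage sketch (not part of the original module), showing the
# rules above: bad characters dropped, '&' spelled out, separators dotted.
def _demo_sanitize_scene_name():
    assert sanitizeSceneName("Mr. Robot & Friends!") == "Mr.Robot.and.Friends"
    assert sanitizeSceneName("Kuroko's Basketball", anime=True) == "Kuroko's.Basketball"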
_binOps = {
ast.Add: operator.add,
ast.Sub: operator.sub,
ast.Mult: operator.mul,
ast.Div: operator.div,
ast.Mod: operator.mod
}
def arithmeticEval(s):
"""
A safe eval supporting basic arithmetic operations.
:param s: expression to evaluate
:return: value
"""
node = ast.parse(s, mode='eval')
def _eval(node):
if isinstance(node, ast.Expression):
return _eval(node.body)
elif isinstance(node, ast.Str):
return node.s
elif isinstance(node, ast.Num):
return node.n
elif isinstance(node, ast.BinOp):
return _binOps[type(node.op)](_eval(node.left), _eval(node.right))
else:
raise Exception('Unsupported type {0}'.format(node))
return _eval(node.body)
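# Illustrative usage sketch (not part of the original module): only the
# five operators registered in _binOps are evaluated; anything else raises.
def _demo_arithmetic_eval():
    assert arithmeticEval('2 + 3 * 4') == 14
    assert arithmeticEval('10 / 4') == 2  # Python 2 integer division via operator.div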
def create_https_certificates(ssl_cert, ssl_key):
"""
    Create self-signed HTTPS certificates and store them in paths 'ssl_cert' and 'ssl_key'
:param ssl_cert: Path of SSL certificate file to write
:param ssl_key: Path of SSL keyfile to write
:return: True on success, False on failure
"""
# assert isinstance(ssl_key, unicode)
# assert isinstance(ssl_cert, unicode)
try:
from OpenSSL import crypto # @UnresolvedImport
from certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
serial # @UnresolvedImport
except Exception:
logger.log("pyopenssl module missing, please install for https access", logger.WARNING)
return False
# Create the CA Certificate
cakey = createKeyPair(TYPE_RSA, 1024)
careq = createCertRequest(cakey, CN='Certificate Authority')
cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
cname = 'SickRage'
pkey = createKeyPair(TYPE_RSA, 1024)
req = createCertRequest(pkey, CN=cname)
cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years
# Save the key and certificate to disk
try:
# pylint: disable=no-member
# Module has no member
io.open(ssl_key, 'wb').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
io.open(ssl_cert, 'wb').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
except Exception:
logger.log("Error creating SSL key and certificate", logger.ERROR)
return False
return True
def backupVersionedFile(old_file, version):
"""
Back up an old version of a file
:param old_file: Original file, to take a backup from
:param version: Version of file to store in backup
:return: True if success, False if failure
"""
numTries = 0
new_file = old_file + '.' + 'v' + str(version)
while not ek(os.path.isfile, new_file):
if not ek(os.path.isfile, old_file):
logger.log("Not creating backup, {0} doesn't exist".format(old_file), logger.DEBUG)
break
try:
logger.log("Trying to back up {0} to {1}".format(old_file, new_file), logger.DEBUG)
shutil.copy(old_file, new_file)
logger.log("Backup done", logger.DEBUG)
break
except Exception as error:
logger.log("Error while trying to back up {0} to {1} : {2}".format(old_file, new_file, error), logger.WARNING)
numTries += 1
time.sleep(1)
logger.log("Trying again.", logger.DEBUG)
if numTries >= 10:
logger.log("Unable to back up {0} to {1} please do it manually.".format(old_file, new_file), logger.ERROR)
return False
return True
def restoreVersionedFile(backup_file, version):
"""
Restore a file version to original state
:param backup_file: File to restore
:param version: Version of file to restore
:return: True on success, False on failure
"""
numTries = 0
new_file, ext_ = ek(os.path.splitext, backup_file)
restore_file = new_file + '.' + 'v' + str(version)
if not ek(os.path.isfile, new_file):
logger.log("Not restoring, {0} doesn't exist".format(new_file), logger.DEBUG)
return False
try:
logger.log("Trying to backup {0} to {1}.r{2} before restoring backup".format
(new_file, new_file, version), logger.DEBUG)
shutil.move(new_file, new_file + '.' + 'r' + str(version))
except Exception as error:
logger.log("Error while trying to backup DB file {0} before proceeding with restore: {1}".format
(restore_file, error), logger.WARNING)
return False
while not ek(os.path.isfile, new_file):
if not ek(os.path.isfile, restore_file):
logger.log("Not restoring, {0} doesn't exist".format(restore_file), logger.DEBUG)
break
try:
logger.log("Trying to restore file {0} to {1}".format(restore_file, new_file), logger.DEBUG)
shutil.copy(restore_file, new_file)
logger.log("Restore done", logger.DEBUG)
break
except Exception as error:
logger.log("Error while trying to restore file {0}. Error: {1}".format(restore_file, error), logger.WARNING)
numTries += 1
time.sleep(1)
logger.log("Trying again. Attempt #: {0}".format(numTries), logger.DEBUG)
if numTries >= 10:
logger.log("Unable to restore file {0} to {1}".format(restore_file, new_file), logger.WARNING)
return False
return True
def get_lan_ip():
"""Returns IP of system"""
try:
return [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][0]
except Exception:
return socket.gethostname()
def check_url(url):
"""
Check if a URL exists without downloading the whole file.
We only check the URL header.
"""
try:
requests.head(url, verify=False).raise_for_status()
except Exception as error:
handle_requests_exception(error)
return False
return True
def anon_url(*url):
"""
Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
"""
return '' if None in url else '{0}{1}'.format(sickbeard.ANON_REDIRECT, ''.join(str(s) for s in url))
"""
Encryption
==========
By Pedro Jose Pereira Vieito <[email protected]> (@pvieito)
* If encryption_version==0 then return data without encryption
* The keys should be unique for each device
To add a new encryption_version:
1) Code your new encryption_version
2) Update the last encryption_version available in webserve.py
3) Remember to maintain old encryption versions and key generators for retrocompatibility
"""
# Key Generators
unique_key1 = hex(uuid.getnode() ** 2) # Used in encryption v1
# Encryption Functions
def encrypt(data, encryption_version=0, _decrypt=False):
# Version 1: Simple XOR encryption (this is not very secure, but works)
if encryption_version == 1:
if _decrypt:
return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
else:
return base64.encodestring(
''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
# Version 2: Simple XOR encryption (this is not very secure, but works)
elif encryption_version == 2:
if _decrypt:
return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(sickbeard.ENCRYPTION_SECRET)))
else:
return base64.encodestring(
''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(sickbeard.ENCRYPTION_SECRET)))).strip()
# Version 0: Plain text
else:
return data
def decrypt(data, encryption_version=0):
return encrypt(data, encryption_version, _decrypt=True)
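# Illustrative round-trip sketch (not part of the original module) of the
# version-2 XOR scheme above, using a hypothetical stand-in secret instead
# of sickbeard.ENCRYPTION_SECRET.
def _demo_xor_roundtrip():
    secret = 'example-secret'
    data = 'hunter2'
    token = base64.encodestring(
        ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(secret)))).strip()
    plain = ''.join(chr(ord(x) ^ ord(y))
                    for (x, y) in izip(base64.decodestring(token), cycle(secret)))
    assert plain == data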
def full_sanitizeSceneName(name):
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().strip()
def _check_against_names(nameInQuestion, show, season=-1):
showNames = []
if season in [-1, 1]:
showNames = [show.name]
showNames.extend(sickbeard.scene_exceptions.get_scene_exceptions(show.indexerid, season=season))
for showName in showNames:
nameFromList = full_sanitizeSceneName(showName)
if nameFromList == nameInQuestion:
return True
return False
def get_show(name, tryIndexers=False):
if not sickbeard.showList:
return
showObj = None
fromCache = False
if not name:
return showObj
try:
# check cache for show
cache = sickbeard.name_cache.retrieveNameFromCache(name)
if cache:
fromCache = True
showObj = Show.find(sickbeard.showList, int(cache))
# try indexers
if not showObj and tryIndexers:
showObj = Show.find(
sickbeard.showList, searchIndexerForShowID(full_sanitizeSceneName(name), ui=classes.ShowListUI)[2])
# try scene exceptions
if not showObj:
ShowID = sickbeard.scene_exceptions.get_scene_exception_by_name(name)[0]
if ShowID:
showObj = Show.find(sickbeard.showList, int(ShowID))
# add show to cache
if showObj and not fromCache:
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
except Exception as error:
logger.log("Error when attempting to find show: {0} in SickRage. Error: {1} ".format(name, error), logger.DEBUG)
return showObj
def is_hidden_folder(folder):
"""
Returns True if folder is hidden.
On Linux based systems hidden folders start with . (dot)
:param folder: Full path of folder to check
"""
def is_hidden(filepath):
name = ek(os.path.basename, ek(os.path.abspath, filepath))
return name.startswith('.') or has_hidden_attribute(filepath)
def has_hidden_attribute(filepath):
try:
attrs = ctypes.windll.kernel32.GetFileAttributesW(ctypes.c_wchar_p(unicode(filepath)))
assert attrs != -1
result = bool(attrs & 2)
except (AttributeError, AssertionError):
result = False
return result
if ek(os.path.isdir, folder):
if is_hidden(folder):
return True
return False
def real_path(path):
"""
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
"""
return ek(os.path.normpath, ek(os.path.normcase, ek(os.path.realpath, path)))
def validateShow(show, season=None, episode=None):
indexer_lang = show.lang
try:
lINDEXER_API_PARMS = sickbeard.indexerApi(show.indexer).api_params.copy()
lINDEXER_API_PARMS['language'] = indexer_lang or sickbeard.INDEXER_DEFAULT_LANGUAGE
if show.dvdorder:
lINDEXER_API_PARMS['dvdorder'] = True
t = sickbeard.indexerApi(show.indexer).indexer(**lINDEXER_API_PARMS)
if season is None and episode is None:
return t
return t[show.indexerid][season][episode]
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
pass
def set_up_anidb_connection():
"""Connect to anidb"""
if not sickbeard.USE_ANIDB:
logger.log("Usage of anidb disabled. Skiping", logger.DEBUG)
return False
if not sickbeard.ANIDB_USERNAME and not sickbeard.ANIDB_PASSWORD:
logger.log("anidb username and/or password are not set. Aborting anidb lookup.", logger.DEBUG)
return False
if not sickbeard.ADBA_CONNECTION:
def anidb_logger(msg):
return logger.log("anidb: {0} ".format(msg), logger.DEBUG)
try:
sickbeard.ADBA_CONNECTION = adba.Connection(keepAlive=True, log=anidb_logger)
except Exception as error:
logger.log("anidb exception msg: {0} ".format(error), logger.WARNING)
return False
try:
if not sickbeard.ADBA_CONNECTION.authed():
sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
else:
return True
except Exception as error:
logger.log("anidb exception msg: {0} ".format(error), logger.WARNING)
return False
return sickbeard.ADBA_CONNECTION.authed()
def makeZip(fileList, archive):
"""
Create a ZIP of files
:param fileList: A list of file names - full path each name
:param archive: File name for the archive with a full path
"""
try:
a = zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
for f in fileList:
a.write(f)
a.close()
return True
except Exception as error:
logger.log("Zip creation error: {0} ".format(error), logger.ERROR)
return False
def extractZip(archive, targetDir):
"""
Unzip a file to a directory
    :param archive: The file name for the archive with a full path
    :param targetDir: The directory to extract the archive into
"""
try:
if not ek(os.path.exists, targetDir):
ek(os.mkdir, targetDir)
zip_file = zipfile.ZipFile(archive, 'r', allowZip64=True)
for member in zip_file.namelist():
filename = ek(os.path.basename, member)
# skip directories
if not filename:
continue
# copy file (taken from zipfile's extract)
source = zip_file.open(member)
target = file(ek(os.path.join, targetDir, filename), "wb")
shutil.copyfileobj(source, target)
source.close()
target.close()
zip_file.close()
return True
except Exception as error:
logger.log("Zip extraction error: {0} ".format(error), logger.ERROR)
return False
def backupConfigZip(fileList, archive, arcname=None):
"""
Store the config file as a ZIP
:param fileList: List of files to store
:param archive: ZIP file name
:param arcname: Archive path
:return: True on success, False on failure
"""
try:
a = zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
for f in fileList:
a.write(f, ek(os.path.relpath, f, arcname))
a.close()
return True
except Exception as error:
logger.log("Zip creation error: {0} ".format(error), logger.ERROR)
return False
def restoreConfigZip(archive, targetDir):
"""
Restores a Config ZIP file back in place
:param archive: ZIP filename
:param targetDir: Directory to restore to
:return: True on success, False on failure
"""
try:
if not ek(os.path.exists, targetDir):
ek(os.mkdir, targetDir)
else:
def path_leaf(path):
head, tail = ek(os.path.split, path)
return tail or ek(os.path.basename, head)
bakFilename = '{0}-{1}'.format(path_leaf(targetDir), datetime.datetime.now().strftime('%Y%m%d_%H%M%S'))
shutil.move(targetDir, ek(os.path.join, ek(os.path.dirname, targetDir), bakFilename))
zip_file = zipfile.ZipFile(archive, 'r', allowZip64=True)
for member in zip_file.namelist():
zip_file.extract(member, targetDir)
zip_file.close()
return True
except Exception as error:
logger.log("Zip extraction error: {0}".format(error), logger.ERROR)
shutil.rmtree(targetDir)
return False
def mapIndexersToShow(showObj):
mapped = {}
# init mapped indexers object
for indexer in sickbeard.indexerApi().indexers:
mapped[indexer] = showObj.indexerid if int(indexer) == int(showObj.indexer) else 0
main_db_con = db.DBConnection()
sql_results = main_db_con.select(
"SELECT * FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?",
[showObj.indexerid, showObj.indexer])
# for each mapped entry
for curResult in sql_results:
nlist = [i for i in curResult if i is not None]
        # Check if it's mapped with both tvdb and tvrage.
if len(nlist) >= 4:
logger.log("Found indexer mapping in cache for show: " + showObj.name, logger.DEBUG)
mapped[int(curResult[b'mindexer'])] = int(curResult[b'mindexer_id'])
break
else:
sql_l = []
for indexer in sickbeard.indexerApi().indexers:
if indexer == showObj.indexer:
mapped[indexer] = showObj.indexerid
continue
lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
try:
mapped_show = t[showObj.name]
except Exception:
logger.log("Unable to map " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
indexer).name + " for show: " + showObj.name + ", skipping it", logger.DEBUG)
continue
if mapped_show and len(mapped_show) == 1:
logger.log("Mapping " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
indexer).name + " for show: " + showObj.name, logger.DEBUG)
mapped[indexer] = int(mapped_show[0][b'id'])
logger.log("Adding indexer mapping to DB for show: " + showObj.name, logger.DEBUG)
sql_l.append([
"INSERT OR IGNORE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) VALUES (?,?,?,?)",
[showObj.indexerid, showObj.indexer, int(mapped_show[0][b'id']), indexer]])
if sql_l:
main_db_con = db.DBConnection()
main_db_con.mass_action(sql_l)
return mapped
def touchFile(fname, atime=None):
"""
Touch a file (change modification date)
:param fname: Filename to touch
:param atime: Specific access time (defaults to None)
:return: True on success, False on failure
"""
if atime and fname and ek(os.path.isfile, fname):
ek(os.utime, fname, (atime, atime))
return True
return False
def make_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
session = cfscrape.create_scraper(sess=session)
return CacheControl(sess=session, cache_etags=True)
def request_defaults(kwargs):
hooks = kwargs.pop('hooks', None)
cookies = kwargs.pop('cookies', None)
allow_proxy = kwargs.pop('allow_proxy', True)
verify = certifi.old_where() if all([sickbeard.SSL_VERIFY, kwargs.pop('verify', True)]) else False
# request session proxies
if allow_proxy and sickbeard.PROXY_SETTING:
logger.log("Using global proxy: " + sickbeard.PROXY_SETTING, logger.DEBUG)
parsed_url = urlparse(sickbeard.PROXY_SETTING)
address = sickbeard.PROXY_SETTING if parsed_url.scheme else 'http://' + sickbeard.PROXY_SETTING
proxies = {
"http": address,
"https": address,
}
else:
proxies = None
return hooks, cookies, verify, proxies
def getURL(url, post_data=None, params=None, headers=None, # pylint:disable=too-many-arguments, too-many-return-statements, too-many-branches, too-many-locals
timeout=30, session=None, **kwargs):
"""
Returns data retrieved from the url provider.
"""
try:
response_type = kwargs.pop('returns', 'text')
stream = kwargs.pop('stream', False)
hooks, cookies, verify, proxies = request_defaults(kwargs)
if params and isinstance(params, (list, dict)):
for param in params:
if isinstance(params[param], unicode):
params[param] = params[param].encode('utf-8')
if post_data and isinstance(post_data, (list, dict)):
for param in post_data:
if isinstance(post_data[param], unicode):
post_data[param] = post_data[param].encode('utf-8')
resp = session.request(
'POST' if post_data else 'GET', url, data=post_data or {}, params=params or {},
timeout=timeout, allow_redirects=True, hooks=hooks, stream=stream,
headers=headers, cookies=cookies, proxies=proxies, verify=verify
)
resp.raise_for_status()
except Exception as error:
handle_requests_exception(error)
return None
try:
return resp if response_type == 'response' or response_type is None else resp.json() if response_type == 'json' else getattr(resp, response_type, resp)
except ValueError:
logger.log('Requested a json response but response was not json, check the url: {0}'.format(url), logger.DEBUG)
return None
def download_file(url, filename, session=None, headers=None, **kwargs): # pylint:disable=too-many-return-statements
"""
Downloads a file specified
:param url: Source URL
:param filename: Target file on filesystem
:param session: request session to use
:param headers: override existing headers in request session
:return: True on success, False on failure
"""
try:
hooks, cookies, verify, proxies = request_defaults(kwargs)
with closing(session.get(url, allow_redirects=True, stream=True,
verify=verify, headers=headers, cookies=cookies,
hooks=hooks, proxies=proxies)) as resp:
resp.raise_for_status()
try:
with io.open(filename, 'wb') as fp:
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
fp.write(chunk)
fp.flush()
chmodAsParent(filename)
except Exception as error:
logger.log("Problem downloading file, setting permissions or writing file to \"{0}\" - ERROR: {1}".format(filename, error), logger.WARNING)
except Exception as error:
handle_requests_exception(error)
return False
return True
def handle_requests_exception(requests_exception): # pylint: disable=too-many-branches, too-many-statements
default = "Request failed: {0}"
try:
raise requests_exception
except requests.exceptions.SSLError as error:
if ssl.OPENSSL_VERSION_INFO < (1, 0, 1, 5):
logger.log("SSL Error requesting url: '{0}' You have {1}, try upgrading OpenSSL to 1.0.1e+".format(error.request.url, ssl.OPENSSL_VERSION))
if sickbeard.SSL_VERIFY:
logger.log("SSL Error requesting url: '{0}' Try disabling Cert Verification on the advanced tab of /config/general")
logger.log(default.format(error), logger.DEBUG)
logger.log(traceback.format_exc(), logger.DEBUG)
except requests.exceptions.HTTPError as error:
if not (hasattr(error, 'response') and error.response and \
hasattr(error.response, 'status_code') and error.response.status_code == 404 and \
hasattr(error.response, 'headers') and error.response.headers.get('X-Content-Type-Options') == 'nosniff'):
logger.log(default.format(error))
except requests.exceptions.TooManyRedirects as error:
logger.log(default.format(error))
except requests.exceptions.ConnectTimeout as error:
logger.log(default.format(error))
except requests.exceptions.ReadTimeout as error:
logger.log(default.format(error))
except requests.exceptions.ProxyError as error:
logger.log(default.format(error))
except requests.exceptions.ConnectionError as error:
logger.log(default.format(error))
except requests.exceptions.ContentDecodingError as error:
logger.log(default.format(error))
logger.log(traceback.format_exc(), logger.DEBUG)
except requests.exceptions.ChunkedEncodingError as error:
logger.log(default.format(error))
except requests.exceptions.InvalidURL as error:
logger.log(default.format(error))
except requests.exceptions.InvalidSchema as error:
logger.log(default.format(error))
except requests.exceptions.MissingSchema as error:
logger.log(default.format(error))
except requests.exceptions.RetryError as error:
logger.log(default.format(error))
except requests.exceptions.StreamConsumedError as error:
logger.log(default.format(error))
except requests.exceptions.URLRequired as error:
logger.log(default.format(error))
except Exception as error:
logger.log(default.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
def get_size(start_path='.'):
"""
Find the total dir and filesize of a path
:param start_path: Path to recursively count size
:return: total filesize
"""
if not ek(os.path.isdir, start_path):
return -1
total_size = 0
for dirpath, dirnames_, filenames in ek(os.walk, start_path):
for f in filenames:
fp = ek(os.path.join, dirpath, f)
try:
total_size += ek(os.path.getsize, fp)
except OSError as error:
logger.log("Unable to get size for file {0} Error: {1}".format(fp, error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
return total_size
def generateApiKey():
""" Return a new randomized API_KEY"""
logger.log("Generating New API key")
secure_hash = hashlib.sha512(str(time.time()))
secure_hash.update(str(random.SystemRandom().getrandbits(4096)))
return secure_hash.hexdigest()[:32]
def remove_article(text=''):
"""Remove the english articles from a text string"""
return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)
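# Illustrative usage sketch (not part of the original module): the
# (?!\s+to) lookahead keeps titles such as "A to Z" intact.
def _demo_remove_article():
    assert remove_article('The Flash') == 'Flash'
    assert remove_article('A to Z') == 'A to Z'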
def generateCookieSecret():
"""Generate a new cookie secret"""
return base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes)
def disk_usage(path):
if platform.system() == 'Windows':
free = ctypes.c_ulonglong(0)
if ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(unicode(path)), None, None, ctypes.pointer(free)) == 0:
raise ctypes.WinError()
return free.value
elif hasattr(os, 'statvfs'): # POSIX
if platform.system() == 'Darwin':
try:
import subprocess
call = subprocess.Popen(["df", "-k", path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = call.communicate()[0]
return int(output.split("\n")[1].split()[3]) * 1024
except Exception:
pass
st = ek(os.statvfs, path)
return st.f_bavail * st.f_frsize # pylint: disable=no-member
else:
raise Exception("Unable to determine free space on your OS")
def verify_freespace(src, dest, oldfile=None, method="copy"):
"""
Checks if the target system has enough free space to copy or move a file.
:param src: Source filename
:param dest: Destination path
:param oldfile: File to be replaced (defaults to None)
:return: True if there is enough space for the file, False if there isn't. Also returns True if the OS doesn't support this option
"""
if not isinstance(oldfile, list):
oldfile = [oldfile] if oldfile else []
logger.log("Trying to determine free space on destination drive", logger.DEBUG)
if not ek(os.path.isfile, src):
logger.log("A path to a file is required for the source. {0} is not a file.".format(src), logger.WARNING)
return True
# shortcut: if we are moving the file and the destination == src dir,
# then by definition there is enough space
if method == "move" and ek(os.stat, src).st_dev == ek(os.stat, dest if ek(os.path.exists, dest) else ek(os.path.dirname, dest)).st_dev: # pylint: disable=no-member
logger.log("Process method is 'move' and src and destination are on the same device, skipping free space check", logger.INFO)
return True
try:
diskfree = disk_usage(dest if ek(os.path.exists, dest) else ek(os.path.dirname, dest))
except Exception as error:
logger.log("Unable to determine free space, so I will assume there is enough.", logger.WARNING)
logger.log("Error: {error}".format(error=error), logger.DEBUG)
logger.log(traceback.format_exc(), logger.DEBUG)
return True
# Lets also do this for symlink and hardlink
if 'link' in method and diskfree > 1024**2:
return True
neededspace = ek(os.path.getsize, src)
if oldfile:
for f in oldfile:
if ek(os.path.isfile, f.location):
diskfree += ek(os.path.getsize, f.location)
if diskfree > neededspace:
return True
else:
logger.log("Not enough free space: Needed: {0} bytes ( {1} ), found: {2} bytes ( {3} )".format
(neededspace, pretty_file_size(neededspace), diskfree, pretty_file_size(diskfree)), logger.WARNING)
return False
def getDiskSpaceUsage(diskPath=None):
"""
    Returns the free space, in human-readable form, for the given path, or False if no path is given
:param diskPath: the filesystem path being checked
"""
if diskPath and ek(os.path.exists, diskPath):
try:
free = disk_usage(diskPath)
except Exception as error:
logger.log("Unable to determine free space", logger.WARNING)
logger.log("Error: {error}".format(error=error), logger.DEBUG)
logger.log(traceback.format_exc(), logger.DEBUG)
else:
return pretty_file_size(free)
return False
# https://gist.github.com/thatalextaylor/7408395
def pretty_time_delta(seconds):
sign_string = '-' if seconds < 0 else ''
seconds = abs(int(seconds))
days, seconds = divmod(seconds, 86400)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
time_delta = sign_string
if days > 0:
time_delta += '{0}d'.format(days)
if hours > 0:
time_delta += '{0}h'.format(hours)
if minutes > 0:
time_delta += '{0}m'.format(minutes)
if seconds > 0:
time_delta += '{0}s'.format(seconds)
return time_delta
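# Illustrative usage sketch (not part of the original module).
def _demo_pretty_time_delta():
    assert pretty_time_delta(90061) == '1d1h1m1s'  # 1 day + 1 hour + 1 minute + 1 second
    assert pretty_time_delta(-45) == '-45s'        # negative input keeps the sign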
def isFileLocked(checkfile, writeLockCheck=False):
"""
Checks to see if a file is locked. Performs three checks
1. Checks if the file even exists
2. Attempts to open the file for reading. This will determine if the file has a write lock.
Write locks occur when the file is being edited or copied to, e.g. a file copy destination
    3. If the writeLockCheck parameter is True, attempts to rename the file. If this fails the
file is open by some other process for reading. The file can be read, but not written to
or deleted.
    :param checkfile: the file being checked
:param writeLockCheck: when true will check if the file is locked for writing (prevents move operations)
"""
checkfile = ek(os.path.abspath, checkfile)
if not ek(os.path.exists, checkfile):
return True
try:
f = ek(io.open, checkfile, 'rb')
f.close() # pylint: disable=no-member
except IOError:
return True
if writeLockCheck:
lockFile = checkfile + ".lckchk"
if ek(os.path.exists, lockFile):
ek(os.remove, lockFile)
try:
ek(os.rename, checkfile, lockFile)
time.sleep(1)
ek(os.rename, lockFile, checkfile)
except (OSError, IOError):
return True
return False
def getTVDBFromID(indexer_id, indexer): # pylint:disable=too-many-return-statements
session = make_session()
tvdb_id = ''
if indexer == 'IMDB':
url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?imdbid={0}".format(indexer_id)
data = getURL(url, session=session, returns='content')
if data is None:
return tvdb_id
try:
tree = ET.fromstring(data)
for show in tree.getiterator("Series"):
tvdb_id = show.findtext("seriesid")
except SyntaxError:
pass
return tvdb_id
elif indexer == 'ZAP2IT':
url = "http://www.thetvdb.com/api/GetSeriesByRemoteID.php?zap2it={0}".format(indexer_id)
data = getURL(url, session=session, returns='content')
if data is None:
return tvdb_id
try:
tree = ET.fromstring(data)
for show in tree.getiterator("Series"):
tvdb_id = show.findtext("seriesid")
except SyntaxError:
pass
return tvdb_id
elif indexer == 'TVMAZE':
url = "http://api.tvmaze.com/shows/{0}".format(indexer_id)
data = getURL(url, session=session, returns='json')
if data is None:
return tvdb_id
tvdb_id = data[b'externals'][b'thetvdb']
return tvdb_id
else:
return tvdb_id
def get_showname_from_indexer(indexer, indexer_id, lang='en'):
lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
lINDEXER_API_PARMS['language'] = lang or sickbeard.INDEXER_DEFAULT_LANGUAGE
logger.log('{0}: {1!r}'.format(sickbeard.indexerApi(indexer).name, lINDEXER_API_PARMS))
t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
s = t[int(indexer_id)]
if hasattr(s, 'data'):
return s.data.get('seriesname')
return None
def is_ip_private(ip):
priv_lo = re.compile(r"^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
priv_24 = re.compile(r"^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
    priv_20 = re.compile(r"^192\.168\.\d{1,3}\.\d{1,3}$")
    priv_16 = re.compile(r"^172\.(1[6-9]|2[0-9]|3[0-1])\.\d{1,3}\.\d{1,3}$")
return priv_lo.match(ip) or priv_24.match(ip) or priv_20.match(ip) or priv_16.match(ip)<|fim▁end|> | logger.log("Deleting empty folder: " + check_empty_dir)
# need shutil.rmtree when ignore_items is really implemented
ek(os.rmdir, check_empty_dir)
# do the library update for synoindex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.