prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>plugin.js<|end_file_name|><|fim▁begin|>(function () {
var print = (function () {
'use strict';
var global = tinymce.util.Tools.resolve('tinymce.PluginManager');
var register = function (editor) {
editor.addCommand('mcePrint', function () {
editor.getWin().print();
});
};
var $_brqngvjljjgwecz3 = { register: register };
var register$1 = function (editor) {
editor.addButton('print', {
title: 'Print',
cmd: 'mcePrint'
});
editor.addMenuItem('print', {
text: 'Print',
cmd: 'mcePrint',
icon: 'print'
});
};
var $_3xztukjmjjgwecz4 = { register: register$1 };
global.add('print', function (editor) {
$_brqngvjljjgwecz3.register(editor);
$_3xztukjmjjgwecz4.register(editor);
editor.addShortcut('Meta+P', '', 'mcePrint');
});<|fim▁hole|> function Plugin () {
}
return Plugin;
}());
})();<|fim▁end|>
| |
<|file_name|>lists.py<|end_file_name|><|fim▁begin|>##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Uwe Hermann <[email protected]>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
#
# Chip specific properties:
#
# - vendor: chip manufacturer
# - model: chip model
# - size: total EEPROM size (in number of bytes)
# - page_size: page size (in number of bytes)
# - page_wraparound: whether writes wrap around at page boundaries
# - addr_bytes: number of EEPROM address bytes used
# - addr_pins: number of address pins (A0/A1/A2) on this chip
# - max_speed: max. supported I²C speed (in kHz)
#
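# Illustrative sketch (an addition, not part of the original decoder; the
# helper name is hypothetical): derived values that client code can compute
# from one entry of the 'chips' dict below.
def _chip_summary(chip):
    pages = chip['size'] // chip['page_size']  # e.g. 128 // 8 = 16 pages for 'generic'
    addresses = 2 ** chip['addr_pins']         # I2C addresses selectable via A0/A1/A2
    return pages, addresses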
chips = {
# Generic chip (128 bytes, 8 bytes page size)
'generic': {
'vendor': '',
'model': 'Generic',
'size': 128,
'page_size': 8,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 3,
'max_speed': 400,
},
# Microchip
'microchip_24aa65': {
'vendor': 'Microchip',
'model': '24AA65',
'size': 8 * 1024,
'page_size': 64, # Actually 8, but there are 8 pages of "input cache"
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 400,
},
'microchip_24lc65': {
'vendor': 'Microchip',
'model': '24LC65',
'size': 8 * 1024,
'page_size': 64, # Actually 8, but there are 8 pages of "input cache"
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 400,
},
'microchip_24c65': {
'vendor': 'Microchip',
'model': '24C65',
'size': 8 * 1024,
'page_size': 64, # Actually 8, but there are 8 pages of "input cache"
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 400,
},
'microchip_24aa64': {
'vendor': 'Microchip',
'model': '24AA64',
'size': 8 * 1024,
'page_size': 32,
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 400, # 100 for VCC < 2.5V
},
'microchip_24lc64': {
'vendor': 'Microchip',
'model': '24LC64',
'size': 8 * 1024,
'page_size': 32,
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 400,
},
'microchip_24aa02uid': {
'vendor': 'Microchip',
'model': '24AA02UID',
'size': 256,
'page_size': 8,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 0, # Pins A0, A1, A2 not used
'max_speed': 400,
},
'microchip_24aa025uid': {
'vendor': 'Microchip',
'model': '24AA025UID',
'size': 256,
'page_size': 16,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 3,
'max_speed': 400,
},
'microchip_24aa025uid_sot23': {
'vendor': 'Microchip',
'model': '24AA025UID (SOT-23)',
'size': 256,
'page_size': 16,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 2, # SOT-23 package: A2 not available
'max_speed': 400,
},
# ON Semiconductor
'onsemi_cat24c256': {
'vendor': 'ON Semiconductor',
'model': 'CAT24C256',
'size': 32 * 1024,
'page_size': 64,
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3,
'max_speed': 1000,
},
'onsemi_cat24m01': {
'vendor': 'ON Semiconductor',
'model': 'CAT24M01',
'size': 128 * 1024,
'page_size': 256,
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 2, # Pin A0 not connected
'max_speed': 1000,
},
# Siemens
'siemens_slx_24c01': {
'vendor': 'Siemens',
'model': 'SLx 24C01',
'size': 128,
'page_size': 8,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 0, # Pins A0, A1, A2 are not connected (NC)
'max_speed': 400,
},
'siemens_slx_24c02': {
'vendor': 'Siemens',
'model': 'SLx 24C02',
'size': 256,
'page_size': 8,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 0, # Pins A0, A1, A2 are not connected (NC)
'max_speed': 400,
},
# ST
'st_m24c01': {
'vendor': 'ST',
'model': 'M24C01',
'size': 128,
'page_size': 16,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 3, # Called E0, E1, E2 on this chip.
'max_speed': 400,
},
'st_m24c02': {
'vendor': 'ST',
'model': 'M24C02',
'size': 256,
'page_size': 16,<|fim▁hole|> },
'st_m24c32': {
'vendor': 'ST',
'model': 'M24C32',
'size': 4 * 1024,
'page_size': 32,
'page_wraparound': True,
'addr_bytes': 2,
'addr_pins': 3, # Called E0, E1, E2 on this chip.
'max_speed': 1000,
},
# Xicor
'xicor_x24c02': {
'vendor': 'Xicor',
'model': 'X24C02',
'size': 256,
'page_size': 4,
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 3,
'max_speed': 100,
},
}<|fim▁end|>
|
'page_wraparound': True,
'addr_bytes': 1,
'addr_pins': 3, # Called E0, E1, E2 on this chip.
'max_speed': 400,
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() {
println!("Hello, world!");
}
// Description:
// Consider an array of sheep where some sheep may be missing from their place. We need a function that counts the number of sheep present in the array (true means present).
// For example,
// &[true, true, true, false,
// true, true, true, true ,
// true, false, true, false,
// true, false, false, true ,
// true, true, true, true ,
// false, false, true, true]
// The correct answer would be 17.
<|fim▁hole|>// Hint: Don't forget to check for bad values like null/undefined
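// Minimal usage sketch (an illustrative addition, not part of the kata):
// exercises count_sheep (defined just below) with the example array from the
// description above.
#[cfg(test)]
mod count_sheep_example {
    use super::count_sheep;

    #[test]
    fn counts_the_example_flock() {
        let flock = [true, true, true, false,
                     true, true, true, true,
                     true, false, true, false,
                     true, false, false, true,
                     true, true, true, true,
                     false, false, true, true];
        assert_eq!(count_sheep(&flock), 17);
    }
}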
fn count_sheep(sheep: &[bool]) -> u8 {
    sheep.iter().filter(|&&present| present).count() as u8
}<|fim▁end|>
| |
<|file_name|>address.rs<|end_file_name|><|fim▁begin|>//! Ethereum address
use std::str::FromStr;
use std::fmt;
use utils::bigint::M256;
use utils::{read_hex, ParseHexError};
use rlp::{Encodable, RlpStream};
#[derive(Eq, PartialEq, Debug, Copy, Clone, Hash)]
/// Represents an Ethereum address. This address is 20 bytes long.
pub struct Address([u8; 20]);
impl Address {
/// Bits needed to represent this value.
pub fn bits(&self) -> usize {
let u: M256 = self.clone().into();
u.bits()
}
}
impl Default for Address {
fn default() -> Address {
Address([0u8; 20])
}
}
impl Encodable for Address {
fn rlp_append(&self, s: &mut RlpStream) {
let buffer: [u8; 20] = self.clone().into();<|fim▁hole|>impl Into<M256> for Address {
fn into(self) -> M256 {
M256::from(self.0.as_ref())
}
}
impl From<M256> for Address {
fn from(mut val: M256) -> Address {
let mut i = 20;
let mut a = [0u8; 20];
while i != 0 {
let u: u64 = (val & 0xFF.into()).into();
a[i-1] = u as u8;
i -= 1;
val = val >> 8;
}
Address(a)
}
}
impl Into<[u8; 20]> for Address {
fn into(self) -> [u8; 20] {
self.0
}
}
impl FromStr for Address {
type Err = ParseHexError;
fn from_str(s: &str) -> Result<Address, ParseHexError> {
read_hex(s).and_then(|v| {
if v.len() > 20 {
Err(ParseHexError::TooLong)
} else if v.len() < 20 {
Err(ParseHexError::TooShort)
} else {
let mut a = [0u8; 20];
for i in 0..20 {
a[i] = v[i];
}
Ok(Address(a))
}
})
}
}
impl fmt::LowerHex for Address {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..20 {
write!(f, "{:02x}", self.0[i])?;
}
Ok(())
}
}
impl fmt::UpperHex for Address {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in 0..20 {
write!(f, "{:02X}", self.0[i])?;
}
Ok(())
}
}<|fim▁end|>
|
s.encoder().encode_value(&buffer);
}
}
|
<|file_name|>narglatch_sick.py<|end_file_name|><|fim▁begin|>import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('narglatch_sick')
mobileTemplate.setLevel(21)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Carnivore Meat")
mobileTemplate.setMeatAmount(60)
mobileTemplate.setHideType("Bristley Hide")
mobileTemplate.setHideAmount(45)
mobileTemplate.setBoneType("Animal Bones")
mobileTemplate.setBoneAmount(40)
mobileTemplate.setSocialGroup("narglatch")
mobileTemplate.setAssistRange(2)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
mobileTemplate.setStalker(False)
templates = Vector()
templates.add('object/mobile/shared_narglatch_hue.iff')<|fim▁hole|> weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_claw_2')
attacks.add('bm_slash_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('narglatch_sick', mobileTemplate)
return<|fim▁end|>
|
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
|
<|file_name|>chardetect.rs<|end_file_name|><|fim▁begin|>use chardet::detect;
use rayon::prelude::*;
use super::consts::space_fix;
use std::fs::File;
use std::io::{self, BufReader, Read};
use std::path::Path;
#[derive(Debug, Default)]
pub struct CharDet {
pub files: Vec<String>,
}
impl CharDet {
pub fn call(self) {
debug!("{:?}", self);
let max_len = self.files.as_slice().iter().max_by_key(|p| p.len()).unwrap().len();
// println!("{}{:3}CharSet{:13}Rate{:8}Info", space_fix("File",max_len), "", "", "");
self.files.par_iter().for_each(|file| match chardet(file) {
Ok(o) => {
let (mut charset, rate, info) = o;
// "WINDOWS_1258".len() = 12 -> 12+6 = 18
if charset.is_empty() {
charset = "Binary".to_owned();
}
println!(
"{}: {} {:.4}{:6}{}",
space_fix(file, max_len),
space_fix(&charset, 18),
rate,
"",
info
);
}
Err(e) => eprintln!("{}: {:?}", space_fix(file, max_len), e),
})
}
pub fn check(&self) -> Result<(), String> {
for path in &self.files {
let path = Path::new(path);
if !path.exists() {
return Err(format!("Args(File): {:?} is not exists", path));
}
if !path.is_file() {
return Err(format!("Args(File): {:?} is not a file", path));
}<|fim▁hole|> }
}
fn chardet(f: &str) -> io::Result<(String, f32, String)> {
let mut file = BufReader::new(File::open(f)?);
let mut bytes = Vec::default();
file.read_to_end(&mut bytes)?;
Ok(detect(bytes.as_slice()))
}<|fim▁end|>
|
}
Ok(())
|
<|file_name|>many-array-test.js<|end_file_name|><|fim▁begin|>import setupStore from 'dummy/tests/helpers/store';
import Ember from 'ember';
import {module, test} from 'qunit';
import DS from 'ember-data';
var env, store;
var attr = DS.attr;
var hasMany = DS.hasMany;
var belongsTo = DS.belongsTo;
var run = Ember.run;
var Post, Tag;
module("unit/many_array - DS.ManyArray", {
beforeEach() {
Post = DS.Model.extend({
title: attr('string'),
tags: hasMany('tag', { async: false })
});
Tag = DS.Model.extend({
name: attr('string'),
post: belongsTo('post', { async: false })
});
env = setupStore({
post: Post,
tag: Tag
});
store = env.store;
},
afterEach() {
run(function() {
store.destroy();
});
}
});
test("manyArray.save() calls save() on all records", function(assert) {
assert.expect(3);
run(function() {
Tag.reopen({
save() {
assert.ok(true, 'record.save() was called');
return Ember.RSVP.resolve();
}
});
store.push({
data: [{
type: 'tag',
id: '1',
attributes: {
name: 'Ember.js'
}
}, {
type: 'tag',
id: '2',
attributes: {
name: 'Tomster'
}
}, {
type: 'post',
id: '3',
attributes: {
title: 'A framework for creating ambitious web applications'
},
relationships: {
tags: {
data: [
{ type: 'tag', id: '1' },
{ type: 'tag', id: '2' }
]
}
}
}]
});
var post = store.peekRecord('post', 3);
post.get('tags').save().then(function() {
assert.ok(true, 'manyArray.save() promise resolved');
});
});
});
test("manyArray trigger arrayContentChange functions with the correct values", function(assert) {
assert.expect(12);
var willChangeStartIdx;
var willChangeRemoveAmt;
var willChangeAddAmt;
var originalArrayContentWillChange = DS.ManyArray.prototype.arrayContentWillChange;
var originalArrayContentDidChange = DS.ManyArray.prototype.arrayContentDidChange;
DS.ManyArray.reopen({
arrayContentWillChange(startIdx, removeAmt, addAmt) {
willChangeStartIdx = startIdx;
willChangeRemoveAmt = removeAmt;
willChangeAddAmt = addAmt;
return this._super.apply(this, arguments);
},
arrayContentDidChange(startIdx, removeAmt, addAmt) {
assert.equal(startIdx, willChangeStartIdx, 'WillChange and DidChange startIdx should match');
assert.equal(removeAmt, willChangeRemoveAmt, 'WillChange and DidChange removeAmt should match');
assert.equal(addAmt, willChangeAddAmt, 'WillChange and DidChange addAmt should match');
return this._super.apply(this, arguments);
}
});
run(function() {
store.push({
data: [{
type: 'tag',
id: '1',
attributes: {
name: 'Ember.js'
}
}, {
type: 'tag',
id: '2',
attributes: {
name: 'Tomster'
}
}, {
type: 'post',
id: '3',
attributes: {
title: 'A framework for creating ambitious web applications'
},
relationships: {
tags: {
data: [
{ type: 'tag', id: '1' }
]
}
}
}]
});
<|fim▁hole|> type: 'post',
id: '3',
attributes: {
title: 'A framework for creating ambitious web applications'
},
relationships: {
tags: {
data: [
{ type: 'tag', id: '1' },
{ type: 'tag', id: '2' }
]
}
}
}
});
store.peekRecord('post', 3);
});
DS.ManyArray.reopen({
arrayContentWillChange: originalArrayContentWillChange,
arrayContentDidChange: originalArrayContentDidChange
});
});<|fim▁end|>
|
store.peekRecord('post', 3);
store.push({
data: {
|
<|file_name|>BaseDbContext.js<|end_file_name|><|fim▁begin|>/**
* Imports
*/
var path = require('path');
var fs = require('fs');
var _ = require('lodash');
/**
* BaseDbContext class
* @param {Object} options
*/
var BaseDbContext = module.exports = function(options) {
options || (options = {});
this.entities = {};
this._loadModels();
this.initialize.apply(this, arguments);
}
_.extend(BaseDbContext.prototype, {
initialize: function () {},
modelsFolder: [],
_loadModels: function () {
if(!this.db) { return; }
var self = this;
this.modelsFolder.forEach(function (folderpath) {
fs.readdirSync(folderpath).forEach(function(file) {
var modelName = file.split('.')[0];
var model = self.db.import(path.join(folderpath, file));
self.entities[modelName] = model;
});
Object.keys(self.entities).forEach(function(modelName) {
if ('associate' in self.entities[modelName]) {
self.entities[modelName].associate(self.entities);
}
});
});
},
sync: function () {
return this.db.sync();
},
drop: function () {
return this.db.drop();
}
});
/**
* JavaScript extend function
*/
function extend(protoProps, staticProps) {
var parent = this;
var child;
if (protoProps && _.has(protoProps, 'constructor')) {
child = protoProps.constructor;
} else {
child = function() {
return parent.apply(this, arguments);
};
}
_.extend(child, parent, staticProps);
child.prototype = Object.create(parent.prototype, {
constructor: {
value: child,
enumerable: false,
writable: true,
configurable: true
}
});
if (protoProps) _.extend(child.prototype, protoProps);
child.__super__ = parent.prototype;
return child;<|fim▁hole|>
BaseDbContext.extend = extend;
module.exports = BaseDbContext;<|fim▁end|>
|
};
|
<|file_name|>perl_stack.hpp<|end_file_name|><|fim▁begin|>#ifndef YOBAPERL_PERL_STACK_HPP
#define YOBAPERL_PERL_STACK_HPP
#include "yobaperl/common.hpp"
namespace yoba {
class Perl;
class Scalar;<|fim▁hole|>
}
namespace yoba { namespace priv {
class PerlStack
{
public:
PerlStack(Perl & perl);
~PerlStack();
void extend(SSize_t size);
void pushSV(SV * sv, bool extend);
SV * popSV();
void pushScalar(Scalar arg);
void pushArray(Array args);
Scalar popScalar();
Array popArray();
void call(Code code, I32 flags);
void callMethod(Code code, I32 flags);
void eval(const std::string & code, I32 flags);
private:
Perl & _perl;
PerlInterpreter * _interpreter = nullptr;
SV ** sp = nullptr;
I32 _returns_count = -1;
};
}} // namespace yoba::priv
#endif // YOBAPERL_PERL_STACK_HPP<|fim▁end|>
|
class Array;
class Code;
|
<|file_name|>turns.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>
#include "geometry/point2d.hpp"
#include "std/initializer_list.hpp"
#include "std/limits.hpp"
#include "std/string.hpp"
#include "std/vector.hpp"
#include "3party/osrm/osrm-backend/typedefs.h"
namespace routing
{
using TNodeId = uint32_t;
using TEdgeWeight = double;
/// \brief Unique identification for a road edge between two junctions (joints).
/// In case of OSRM it's NodeID and in case of RoadGraph (IndexGraph)
/// it's mwm id, feature id, segment id and direction.
struct UniNodeId
{
enum class Type
{
Osrm,
Mwm,
};
UniNodeId(Type type) : m_type(type) {}
UniNodeId(FeatureID const & featureId, uint32_t segId, bool forward)
: m_type(Type::Mwm), m_featureId(featureId), m_segId(segId), m_forward(forward)
{
}
UniNodeId(uint32_t nodeId) : m_type(Type::Osrm), m_nodeId(nodeId) {}
bool operator==(UniNodeId const & rh) const;
bool operator<(UniNodeId const & rh) const;
void Clear();
uint32_t GetNodeId() const;
FeatureID const & GetFeature() const;
uint32_t GetSegId() const;
bool IsForward() const;
private:
Type m_type;
/// \note In case of OSRM unique id is kept in |m_featureId.m_index|.
/// So |m_featureId.m_mwmId|, |m_segId| and |m_forward| have default values.
FeatureID m_featureId; // |m_featureId.m_index| is NodeID for OSRM.
uint32_t m_segId = 0; // Not valid for OSRM.
bool m_forward = true; // Segment direction in |m_featureId|.
NodeID m_nodeId = SPECIAL_NODEID;
};
string DebugPrint(UniNodeId::Type type);
namespace turns
{
/// @todo(vbykoianko) It's a good idea to gather all the turns information into one entity.
/// For the time being, several separate entities reflect the turn information, e.g. Route::TTurns.
double constexpr kFeaturesNearTurnMeters = 3.0;
/*!
* \warning The order of values below shall not be changed.
* TurnRight(TurnLeft) must have a minimal value and
* TurnSlightRight(TurnSlightLeft) must have a maximum value
* \warning The values of TurnDirection shall be synchronized with values of TurnDirection enum in
* java.
*/
enum class TurnDirection
{
NoTurn = 0,
GoStraight,
TurnRight,
TurnSharpRight,
TurnSlightRight,
TurnLeft,
TurnSharpLeft,
TurnSlightLeft,
UTurnLeft,
UTurnRight,
TakeTheExit,
EnterRoundAbout,
LeaveRoundAbout,
StayOnRoundAbout,
StartAtEndOfStreet,
ReachedYourDestination,
Count /**< This value is used for internals only. */
};
string DebugPrint(TurnDirection const l);
/*!
* \warning The values of PedestrianDirectionType shall be synchronized with values in java
*/
enum class PedestrianDirection
{
None = 0,
Upstairs,
Downstairs,
LiftGate,
Gate,
ReachedYourDestination,
Count /**< This value is used for internals only. */
};
string DebugPrint(PedestrianDirection const l);
/*!
* \warning The values of LaneWay shall be synchronized with values of LaneWay enum in java.
*/
enum class LaneWay
{
None = 0,
Reverse,
SharpLeft,
Left,
SlightLeft,
MergeToRight,
Through,
MergeToLeft,
SlightRight,
Right,
SharpRight,
Count /**< This value is used for internals only. */
};
string DebugPrint(LaneWay const l);
typedef vector<LaneWay> TSingleLane;
struct SingleLaneInfo
{
TSingleLane m_lane;
bool m_isRecommended = false;
SingleLaneInfo() = default;
SingleLaneInfo(initializer_list<LaneWay> const & l) : m_lane(l) {}
bool operator==(SingleLaneInfo const & other) const;
};
string DebugPrint(SingleLaneInfo const & singleLaneInfo);
struct TurnItem
{
TurnItem()
: m_index(numeric_limits<uint32_t>::max()),
m_turn(TurnDirection::NoTurn),
m_exitNum(0),
m_keepAnyway(false),
m_pedestrianTurn(PedestrianDirection::None)
{
}
TurnItem(uint32_t idx, TurnDirection t, uint32_t exitNum = 0)
: m_index(idx), m_turn(t), m_exitNum(exitNum), m_keepAnyway(false)
, m_pedestrianTurn(PedestrianDirection::None)
{
}
TurnItem(uint32_t idx, PedestrianDirection p)
: m_index(idx), m_turn(TurnDirection::NoTurn), m_exitNum(0), m_keepAnyway(false)
, m_pedestrianTurn(p)
{
}
bool operator==(TurnItem const & rhs) const
{
return m_index == rhs.m_index && m_turn == rhs.m_turn && m_lanes == rhs.m_lanes &&
m_exitNum == rhs.m_exitNum && m_sourceName == rhs.m_sourceName &&
m_targetName == rhs.m_targetName && m_keepAnyway == rhs.m_keepAnyway &&
m_pedestrianTurn == rhs.m_pedestrianTurn;
}
uint32_t m_index; /*!< Index of point on polyline (number of segment + 1). */
TurnDirection m_turn; /*!< The turn instruction of the TurnItem */
vector<SingleLaneInfo> m_lanes; /*!< Lane information on the edge before the turn. */
uint32_t m_exitNum; /*!< Number of exit on roundabout. */
string m_sourceName; /*!< Name of the street which the ingoing edge belongs to */
string m_targetName; /*!< Name of the street which the outgoing edge belongs to */
/*!
* \brief m_keepAnyway is true if the turn shall not be deleted
* and shall be demonstrated to an end user.
*/
bool m_keepAnyway;
/*!
* \brief m_pedestrianTurn is type of corresponding direction for a pedestrian, or None
* if there is no pedestrian specific direction
*/
PedestrianDirection m_pedestrianTurn;
};
string DebugPrint(TurnItem const & turnItem);
struct TurnItemDist
{
TurnItem m_turnItem;
double m_distMeters;
};
string DebugPrint(TurnItemDist const & turnItemDist);
string const GetTurnString(TurnDirection turn);
bool IsLeftTurn(TurnDirection t);
bool IsRightTurn(TurnDirection t);
bool IsLeftOrRightTurn(TurnDirection t);
bool IsStayOnRoad(TurnDirection t);
bool IsGoStraightOrSlightTurn(TurnDirection t);
/*!
* \param l A variant of going along a lane.
* \param t A turn direction.
* \return True if @l corresponds with @t exactly. For example it returns true
* when @l equals to LaneWay::Right and @t equals to TurnDirection::TurnRight.
* Otherwise it returns false.
*/
bool IsLaneWayConformedTurnDirection(LaneWay l, TurnDirection t);
/*!
* \param l A variant of going along a lane.
* \param t A turn direction.
* \return True if @l corresponds with @t approximately. For example it returns true
* when @l equals to LaneWay::Right and @t equals to TurnDirection::TurnSlightRight.
* Otherwise it returns false.
*/
bool IsLaneWayConformedTurnDirectionApproximately(LaneWay l, TurnDirection t);
/*!
* \brief Parse lane information which comes from @lanesString
* \param lanesString lane information. Example through|through|through|through;right
* \param lanes the result of parsing.
* \return true if @lanesString parsed successfully, false otherwise.
* Note 1: if @lanesString is empty returns false.
* Note 2: @laneString is passed by value on purpose. It'll be used(changed) in the method.
*/
bool ParseLanes(string lanesString, vector<SingleLaneInfo> & lanes);
void SplitLanes(string const & lanesString, char delimiter, vector<string> & lanes);
bool ParseSingleLane(string const & laneString, char delimiter, TSingleLane & lane);
/*!
* \returns pi minus angle from vector [junctionPoint, ingoingPoint]
* to vector [junctionPoint, outgoingPoint]. A counterclockwise rotation.
* Angle is in range [-pi, pi].
*/
double PiMinusTwoVectorsAngle(m2::PointD const & junctionPoint, m2::PointD const & ingoingPoint,
m2::PointD const & outgoingPoint);
} // namespace turns
} // namespace routing<|fim▁end|>
|
#pragma once
#include "indexer/feature_decl.hpp"
|
<|file_name|>CEPMigrationTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.operator;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.SubEvent;
import org.apache.flink.cep.nfa.NFA;
import org.apache.flink.cep.nfa.compiler.NFACompiler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.watermark.Watermark;<|fim▁hole|>import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OperatorSnapshotUtil;
import org.apache.flink.streaming.util.migration.MigrationTestUtil;
import org.apache.flink.streaming.util.migration.MigrationVersion;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;
import static org.apache.flink.cep.operator.CepOperatorTestUtilities.getKeyedCepOpearator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests for checking whether the CEP operator can restore from snapshots that were taken
* using previous Flink versions.
*
* <p>For regenerating the binary snapshot file of previous versions you have to run the
* {@code write*()} method on the corresponding Flink release-* branch.
*/
@RunWith(Parameterized.class)
public class CEPMigrationTest {
/**
* TODO change this to the corresponding savepoint version to be written (e.g. {@link MigrationVersion#v1_3} for 1.3)
* TODO and remove all @Ignore annotations on write*Snapshot() methods to generate savepoints
*/
private final MigrationVersion flinkGenerateSavepointVersion = null;
private final MigrationVersion migrateVersion;
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<MigrationVersion> parameters () {
return Arrays.asList(MigrationVersion.v1_3, MigrationVersion.v1_4, MigrationVersion.v1_5);
}
public CEPMigrationTest(MigrationVersion migrateVersion) {
this.migrateVersion = migrateVersion;
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeAfterBranchingPatternSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<Event>(startEvent, 1));
harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
harness
.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
harness.processElement(new StreamRecord<Event>(middleEvent2, 3));
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-after-branching-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testRestoreAfterBranchingPattern() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
final Event endEvent = new Event(42, "end", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-after-branching-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4));
harness.processElement(new StreamRecord<>(endEvent, 5));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and 2 results
assertEquals(3, result.size());
Object resultObject1 = result.poll();
assertTrue(resultObject1 instanceof StreamRecord);
StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
assertTrue(resultRecord1.getValue() instanceof Map);
Object resultObject2 = result.poll();
assertTrue(resultObject2 instanceof StreamRecord);
StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
assertTrue(resultRecord2.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap1 =
(Map<String, List<Event>>) resultRecord1.getValue();
assertEquals(startEvent, patternMap1.get("start").get(0));
assertEquals(middleEvent1, patternMap1.get("middle").get(0));
assertEquals(endEvent, patternMap1.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap2 =
(Map<String, List<Event>>) resultRecord2.getValue();
assertEquals(startEvent, patternMap2.get("start").get(0));
assertEquals(middleEvent2, patternMap2.get("middle").get(0));
assertEquals(endEvent, patternMap2.get("end").get(0));
// and now go for a checkpoint with the new serializers
final Event startEvent1 = new Event(42, "start", 2.0);
final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
final Event endEvent1 = new Event(42, "end", 2.0);
harness.processElement(new StreamRecord<Event>(startEvent1, 21));
harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
// simulate snapshot/restore with some elements in internal sorting queue
OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
harness.close();
harness = new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
harness.setup();
harness.initializeState(snapshot);
harness.open();
harness.processElement(new StreamRecord<>(endEvent1, 25));
harness.processWatermark(new Watermark(50));
result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject3 = result.poll();
assertTrue(resultObject3 instanceof StreamRecord);
StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
assertTrue(resultRecord3.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap3 =
(Map<String, List<Event>>) resultRecord3.getValue();
assertEquals(startEvent1, patternMap3.get("start").get(0));
assertEquals(middleEvent3, patternMap3.get("middle").get(0));
assertEquals(endEvent1, patternMap3.get("end").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeStartingNewPatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<Event>(startEvent1, 1));
harness.processElement(new StreamRecord<Event>(new Event(42, "foobar", 1.0), 2));
harness
.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3));
harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-starting-new-pattern-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testRestoreStartingNewPatternAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
final SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
final Event startEvent2 = new Event(42, "start", 5.0);
final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
final Event endEvent = new Event(42, "end", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-starting-new-pattern-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(startEvent2, 5));
harness.processElement(new StreamRecord<Event>(middleEvent2, 6));
harness.processElement(new StreamRecord<>(endEvent, 7));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and 3 results
assertEquals(4, result.size());
Object resultObject1 = result.poll();
assertTrue(resultObject1 instanceof StreamRecord);
StreamRecord<?> resultRecord1 = (StreamRecord<?>) resultObject1;
assertTrue(resultRecord1.getValue() instanceof Map);
Object resultObject2 = result.poll();
assertTrue(resultObject2 instanceof StreamRecord);
StreamRecord<?> resultRecord2 = (StreamRecord<?>) resultObject2;
assertTrue(resultRecord2.getValue() instanceof Map);
Object resultObject3 = result.poll();
assertTrue(resultObject3 instanceof StreamRecord);
StreamRecord<?> resultRecord3 = (StreamRecord<?>) resultObject3;
assertTrue(resultRecord3.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap1 =
(Map<String, List<Event>>) resultRecord1.getValue();
assertEquals(startEvent1, patternMap1.get("start").get(0));
assertEquals(middleEvent1, patternMap1.get("middle").get(0));
assertEquals(endEvent, patternMap1.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap2 =
(Map<String, List<Event>>) resultRecord2.getValue();
assertEquals(startEvent1, patternMap2.get("start").get(0));
assertEquals(middleEvent2, patternMap2.get("middle").get(0));
assertEquals(endEvent, patternMap2.get("end").get(0));
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap3 =
(Map<String, List<Event>>) resultRecord3.getValue();
assertEquals(startEvent2, patternMap3.get("start").get(0));
assertEquals(middleEvent2, patternMap3.get("middle").get(0));
assertEquals(endEvent, patternMap3.get("end").get(0));
// and now go for a checkpoint with the new serializers
final Event startEvent3 = new Event(42, "start", 2.0);
final SubEvent middleEvent3 = new SubEvent(42, "foo", 1.0, 11.0);
final Event endEvent1 = new Event(42, "end", 2.0);
harness.processElement(new StreamRecord<Event>(startEvent3, 21));
harness.processElement(new StreamRecord<Event>(middleEvent3, 23));
// simulate snapshot/restore with some elements in internal sorting queue
OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);
harness.close();
harness = new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
harness.setup();
harness.initializeState(snapshot);
harness.open();
harness.processElement(new StreamRecord<>(endEvent1, 25));
harness.processWatermark(new Watermark(50));
result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject4 = result.poll();
assertTrue(resultObject4 instanceof StreamRecord);
StreamRecord<?> resultRecord4 = (StreamRecord<?>) resultObject4;
assertTrue(resultRecord4.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap4 =
(Map<String, List<Event>>) resultRecord4.getValue();
assertEquals(startEvent3, patternMap4.get("start").get(0));
assertEquals(middleEvent3, patternMap4.get("middle").get(0));
assertEquals(endEvent1, patternMap4.get("end").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeSinglePatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processWatermark(new Watermark(5));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-single-pattern-afterwards-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testSinglePatternAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new Event(42, "start", 1.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-single-pattern-afterwards-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
harness.processElement(new StreamRecord<>(startEvent1, 5));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject = result.poll();
assertTrue(resultObject instanceof StreamRecord);
StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
assertTrue(resultRecord.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap =
(Map<String, List<Event>>) resultRecord.getValue();
assertEquals(startEvent1, patternMap.get("start").get(0));
} finally {
harness.close();
}
}
/**
* Manually run this to write binary snapshot data.
*/
@Ignore
@Test
public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
harness.open();
harness.processElement(new StreamRecord<>(startEvent1, 5));
harness.processWatermark(new Watermark(6));
// do snapshot and save to file
OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
OperatorSnapshotUtil.writeStateHandle(snapshot,
"src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot");
} finally {
harness.close();
}
}
@Test
public void testAndOrSubtypeConditionsAfterMigration() throws Exception {
KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
private static final long serialVersionUID = -4873366487571254798L;
@Override
public Integer getKey(Event value) throws Exception {
return value.getId();
}
};
final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
new KeyedOneInputStreamOperatorTestHarness<>(
getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
keySelector,
BasicTypeInfo.INT_TYPE_INFO);
try {
harness.setup();
MigrationTestUtil.restoreFromSnapshot(
harness,
OperatorSnapshotUtil.getResourceFilename("cep-migration-conditions-flink" + migrateVersion + "-snapshot"),
migrateVersion);
harness.open();
final Event endEvent = new SubEvent(42, "end", 1.0, 2.0);
harness.processElement(new StreamRecord<>(endEvent, 9));
harness.processWatermark(new Watermark(20));
ConcurrentLinkedQueue<Object> result = harness.getOutput();
// watermark and the result
assertEquals(2, result.size());
Object resultObject = result.poll();
assertTrue(resultObject instanceof StreamRecord);
StreamRecord<?> resultRecord = (StreamRecord<?>) resultObject;
assertTrue(resultRecord.getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<String, List<Event>> patternMap =
(Map<String, List<Event>>) resultRecord.getValue();
assertEquals(startEvent1, patternMap.get("start").get(0));
assertEquals(endEvent, patternMap.get("start").get(1));
} finally {
harness.close();
}
}
private static class SinglePatternNFAFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private SinglePatternNFAFactory() {
this(false);
}
private SinglePatternNFAFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter())
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class NFAComplexConditionsFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private NFAComplexConditionsFactory() {
this(false);
}
private NFAComplexConditionsFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start")
.subtype(SubEvent.class)
.where(new MiddleFilter())
.or(new SubEventEndFilter())
.times(2)
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class NFAFactory implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private NFAFactory() {
this(false);
}
private NFAFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new StartFilter())
.followedByAny("middle")
.subtype(SubEvent.class)
.where(new MiddleFilter())
.followedByAny("end")
.where(new EndFilter())
// add a window timeout to test whether timestamps of elements in the
// priority queue in CEP operator are correctly checkpointed/restored
.within(Time.milliseconds(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static class StartFilter extends SimpleCondition<Event> {
private static final long serialVersionUID = 5726188262756267490L;
@Override
public boolean filter(Event value) throws Exception {
return value.getName().equals("start");
}
}
private static class MiddleFilter extends SimpleCondition<SubEvent> {
private static final long serialVersionUID = 6215754202506583964L;
@Override
public boolean filter(SubEvent value) throws Exception {
return value.getVolume() > 5.0;
}
}
private static class EndFilter extends SimpleCondition<Event> {
private static final long serialVersionUID = 7056763917392056548L;
@Override
public boolean filter(Event value) throws Exception {
return value.getName().equals("end");
}
}
private static class SubEventEndFilter extends SimpleCondition<SubEvent> {
private static final long serialVersionUID = 7056763917392056548L;
@Override
public boolean filter(SubEvent value) throws Exception {
return value.getName().equals("end");
}
}
}<|fim▁end|>
|
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
|
<|file_name|>jquery.timeago.am.js<|end_file_name|><|fim▁begin|>(function (factory) {
if (typeof define === 'function' && define.amd) {
define(['jquery'], factory);
} else if (typeof module === 'object' && typeof module.exports === 'object') {
factory(require('jquery'));
} else {
factory(jQuery);
}
}(function (jQuery) {
// Amharic
jQuery.timeago.settings.strings = {
prefixAgo: null,
prefixFromNow: null,
suffixAgo: "በፊት",
suffixFromNow: "በኋላ",<|fim▁hole|> minute: "ከአንድ ደቂቃ ገደማ",
minutes: "ከ%d ደቂቃ",
hour: "ከአንድ ሰዓት ገደማ",
hours: "ከ%d ሰዓት ገደማ",
day: "ከአንድ ቀን",
days: "ከ%d ቀን",
month: "ከአንድ ወር ገደማ",
months: "ከ%d ወር",
year: "ከአንድ ዓመት ገደማ",
years: "ከ%d ዓመት",
wordSeparator: " ",
numbers: []
};
}));<|fim▁end|>
|
seconds: "ከአንድ ደቂቃ በታች",
|
<|file_name|>test_cycles.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from nose.tools import raises
from openfisca_core import periods
from openfisca_core.columns import IntCol
from openfisca_core.formulas import CycleError, SimpleFormulaColumn
from openfisca_core.tests import dummy_country
from openfisca_core.tests.dummy_country import Individus, reference_formula
from openfisca_core.tools import assert_near
# 1 <--> 2 with same period
@reference_formula
class variable1(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
return period, simulation.calculate('variable2', period)
@reference_formula
class variable2(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
return period, simulation.calculate('variable1', period)
# 3 <--> 4 with a period offset, but without explicit cycle allowed
@reference_formula
class variable3(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
return period, simulation.calculate('variable4', period.last_year)
@reference_formula
class variable4(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
return period, simulation.calculate('variable3', period)
# 5 -f-> 6 with a period offset, with cycle flagged but not allowed
# <---
@reference_formula
class variable5(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
variable6 = simulation.calculate('variable6', period.last_year, max_nb_cycles = 0)
return period, 5 + variable6
@reference_formula
class variable6(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
variable5 = simulation.calculate('variable5', period)
return period, 6 + variable5
# december cotisation depending on november value
@reference_formula
class cotisation(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
period = period.this_month
if period.start.month == 12:
return period, 2 * simulation.calculate('cotisation', period.last_month, max_nb_cycles = 1)
else:
return period, self.zeros() + 1
# 7 -f-> 8 with a period offset, with explicit cycle allowed (1 level)
# <---
@reference_formula
class variable7(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
variable8 = simulation.calculate('variable8', period.last_year, max_nb_cycles = 1)
return period, 7 + variable8
@reference_formula
class variable8(SimpleFormulaColumn):
column = IntCol
entity_class = Individus
def function(self, simulation, period):
variable7 = simulation.calculate('variable7', period)
return period, 8 + variable7
# TaxBenefitSystem instance declared after formulas
tax_benefit_system = dummy_country.init_tax_benefit_system()
reference_period = periods.period(u'2013')
@raises(AssertionError)
def test_pure_cycle():
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period,
parent1 = dict(),
).new_simulation(debug = True)
simulation.calculate('variable1')
<|fim▁hole|> parent1 = dict(),
).new_simulation(debug = True)
simulation.calculate('variable3')
def test_allowed_cycle():
"""
Calculate variable5 then variable6, then in the other order, to verify that the first calculated variable
has no effect on the result.
"""
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period,
parent1 = dict(),
).new_simulation(debug = True)
variable6 = simulation.calculate('variable6')
variable5 = simulation.calculate('variable5')
variable6_last_year = simulation.calculate('variable6', reference_period.last_year)
assert_near(variable5, [5])
assert_near(variable6, [11])
assert_near(variable6_last_year, [0])
def test_allowed_cycle_different_order():
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period,
parent1 = dict(),
).new_simulation(debug = True)
variable5 = simulation.calculate('variable5')
variable6 = simulation.calculate('variable6')
variable6_last_year = simulation.calculate('variable6', reference_period.last_year)
assert_near(variable5, [5])
assert_near(variable6, [11])
assert_near(variable6_last_year, [0])
def test_cotisation_1_level():
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period.last_month, # December
parent1 = dict(),
).new_simulation(debug = True)
cotisation = simulation.calculate('cotisation')
assert_near(cotisation, [2])
def test_cycle_1_level():
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period,
parent1 = dict(),
).new_simulation(debug = True)
variable7 = simulation.calculate('variable7')
# variable8 = simulation.calculate('variable8')
assert_near(variable7, [22])<|fim▁end|>
|
@raises(CycleError)
def test_cycle_time_offset():
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period,
|
<|file_name|>rmTimeUtils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 RainMachine, Green Electronics LLC
# All rights reserved.
# Authors: Nicu Pavel <[email protected]>
# Codrin Juravle <[email protected]>
from datetime import datetime, timedelta, tzinfo
from math import sin, cos, asin, acos, sqrt
import time, calendar
import ctypes,os, fcntl, errno
from RMUtilsFramework.rmLogging import log
ZERO = timedelta(0)
Y2K38_MAX_YEAR = 2037
Y2K38_MAX_TIMESTAMP = 2147483647
# For monotonic time
class timespec(ctypes.Structure):
_fields_ = [
('tv_sec', ctypes.c_long),
('tv_nsec', ctypes.c_long)
]
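# Illustrative sketch (assumed usage, not the original implementation): the
# timespec struct above is typically paired with librt's clock_gettime() to
# read CLOCK_MONOTONIC. The constant value and library name below are
# Linux-specific assumptions.
def _monotonic_time_example():
    CLOCK_MONOTONIC = 1  # Linux value of the clock id
    librt = ctypes.CDLL('librt.so.1', use_errno=True)
    t = timespec()
    if librt.clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(t)) != 0:
        err = ctypes.get_errno()
        raise OSError(err, os.strerror(err))
    return t.tv_sec + t.tv_nsec * 1e-9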
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
utc = UTC()
utc_t0 = datetime(1970, 1, 1, tzinfo=utc)
def rmYMDToTimestamp(year, month, day):
if year > Y2K38_MAX_YEAR: #Y2K38
year = Y2K38_MAX_YEAR
try:
return int(datetime(year, month, day).strftime("%s"))
except ValueError:
return int(time.mktime(datetime(year, month, day).timetuple())) # Windows platform doesn't have strftime(%s)
def rmYMDFromTimestamp(timestamp):
if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38
timestamp = Y2K38_MAX_TIMESTAMP
d = datetime.fromtimestamp(timestamp)
return d.year, d.month, d.day
def rmTimestampToDate(timestamp):
if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38
timestamp = Y2K38_MAX_TIMESTAMP
return datetime.fromtimestamp(timestamp)
def rmTimestampToDateAsString(timestamp, format = None):
if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38
timestamp = Y2K38_MAX_TIMESTAMP
if format:
return datetime.fromtimestamp(timestamp).strftime(format)
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
def rmCurrentTimestampToDateAsString(format = None):
timestamp = int(time.time())
if format:
return datetime.fromtimestamp(timestamp).strftime(format)
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
def rmTimestampToUtcDateAsString(timestamp, format = None):
if timestamp > Y2K38_MAX_TIMESTAMP: #Y2K38
timestamp = Y2K38_MAX_TIMESTAMP
if format:
return datetime.utcfromtimestamp(timestamp).strftime(format)
return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
def rmTimestampFromDateAsString(dateString, format):
return int(datetime.strptime(dateString, format).strftime("%s"))
# Converts a date string in UTC format to a local timestamp (ex: 2019-05-20T12:00:00Z)
def rmTimestampFromUTCDateAsString(dateString, format):
dt = datetime.strptime(dateString, format)
return int((dt - datetime.utcfromtimestamp(0)).total_seconds())
def rmTimestampFromDateAsStringWithOffset(dateString):
# format in form of 2015-04-24T08:00:00-04:00 converted to UTC timestamp
if dateString is None:
return None
try:
sign = int(dateString[19:20] + '1')
(hour, minute) = [int(s) for s in dateString[20:].split(':')]
offset = sign * (hour * 60 * 60 + minute * 60)
except:
return None
try:
start_time = datetime.strptime(dateString[:19], "%Y-%m-%dT%H:%M:%S")
timestamp = int(calendar.timegm(start_time.timetuple())) - offset
except:
return None
return timestamp
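# Quick sanity sketch (illustrative addition, not part of the original module):
# the example format from the comment above, "2015-04-24T08:00:00-04:00",
# corresponds to 2015-04-24 12:00:00 UTC (epoch 1429876800).
def _offset_string_example():
    return rmTimestampFromDateAsStringWithOffset("2015-04-24T08:00:00-04:00") == 1429876800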
def rmTimestampToYearMonthDay(timestamp):
d = datetime.fromtimestamp(timestamp)
return d.year, d.month, d.day
def rmNowToYearMonthDay():
d = datetime.now()
return d.year, d.month, d.day
def rmNormalizeTimestamp(timestamp):
return int(datetime.fromtimestamp(timestamp).strftime('%s'))
def rmTimestampToDayOfYear(timestamp):
if timestamp is None:
timestamp = rmCurrentDayTimestamp()
d = datetime.fromtimestamp(timestamp).timetuple()
return d.tm_yday
def rmNowDateTime():
return datetime.now()
def rmCurrentTimestamp():
return int(time.time())
def rmCurrentDayTimestamp():
return rmGetStartOfDay(int(time.time()))
def rmCurrentMinuteTimestamp():
timestamp = int(time.time())
return timestamp - (timestamp % 60)
def rmGetStartOfDay(timestamp):
tuple = datetime.fromtimestamp(timestamp).timetuple()
return int(datetime(tuple.tm_year, tuple.tm_mon, tuple.tm_mday).strftime("%s"))
def rmGetStartOfDayUtc(timestamp):
tuple = datetime.utcfromtimestamp(timestamp).timetuple()
dt = datetime(tuple.tm_year, tuple.tm_mon, tuple.tm_mday, tzinfo=utc)
return int((dt-utc_t0).total_seconds())
def rmTimestampIsLeapYear(timestamp):
d = datetime.fromtimestamp(timestamp)
#try:
# datetime(d.year, 2, 29)
# return True
#except ValueError:
# return False
if d.year % 400 == 0:
return True
elif d.year % 100 == 0:
return False
elif d.year % 4 == 0:
return True
return False
def rmConvertDateStringToFormat(dateString, inputFormat, outputFormat):
return datetime.strptime(dateString, inputFormat).strftime(outputFormat)
def rmDayRange(startDayTimestamp, numDays):
d = datetime.fromtimestamp(startDayTimestamp)
if numDays >=0:
dateList = [int(time.mktime( (d + timedelta(days=x)).timetuple() )) for x in range(0, numDays)]
else:
numDays = -numDays
dateList = [int(time.mktime( (d - timedelta(days=x)).timetuple() )) for x in range(0, numDays)]
return dateList
def rmDeltaDayFromTimestamp(startDayTimeStamp, deltaDays):
d = datetime.fromtimestamp(startDayTimeStamp)
if deltaDays < 0:
d = d - timedelta(days=-deltaDays)
else:
d = d + timedelta(days=deltaDays)
return int(time.mktime(d.timetuple()))
def rmGetNumberOfDaysBetweenTimestamps(startTimestamp, endTimestamp):
d1 = datetime.fromtimestamp(startTimestamp)
d2 = datetime.fromtimestamp(endTimestamp)
delta = d2-d1
return delta.days
# Sunrise and sunset for specific location and elevation
def computeSuntransitAndDayLengthForDayTs(ts, lat, lon, elevation):
ts = rmGetStartOfDayUtc(ts)
n = julianDayFromTimestamp(ts)
J = __computeMeanSolarNoon(n, lon)
    M = __computeSolarMeanAnomaly(J)
C = __equationOfTheCenter(M)
L = __computeEclipticLongitude(M, C)
Jtr = computeSolarTransit(J, M, L)
delta = __computeSinSunDeclination(L)
w0 = computeHourAngle(lat, delta, elevation)
return Jtr, w0
def rmGetSunsetTimestampForDayTimestamp(ts, lat, lon, elevation):
    Jtr, w0 = computeSuntransitAndDayLengthForDayTs(ts, lat, -lon, elevation)
Jset = Jtr+w0/360
tsJset = julianDayToUTC(Jset)
return tsJset
def rmGetSunriseTimestampForDayTimestamp(ts, lat, lon, elevation):
if lat is None or lon is None:
log.debug("Latitude or longitude is not set. Returning same timestamp")
return ts
    Jtr, w0 = computeSuntransitAndDayLengthForDayTs(ts, lat, -lon, elevation)
Jrise = Jtr-w0/360
tsJrise = julianDayToUTC(Jrise)
return tsJrise
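# Note on the constants below: 2440587.5 is the Julian Day of the Unix epoch
# (1970-01-01 00:00 UTC), 2451545.0 is J2000 (2000-01-01 12:00 TT), and 0.0008
# is the fractional-day leap-second/terrestrial-time correction used by the
# standard sunrise equation.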
def julianDayFromTimestamp(ts):
ts = rmGetStartOfDayUtc(ts) + 12*3600
JD = float(ts)/86400 + 2440587.5
return JD - 2451545.0 + 0.0008
def julianDayToUTC(JD):
return (JD - 2440587.5)*86400
def __cosa(degree):
radian = degree/180*3.14159265359
return cos(radian)
def __sina(degree):
radian = degree/180*3.14159265359
return sin(radian)
def __acosa(x):
if abs(x) > 1:
return 180. if x< 0 else 0.
radian = acos(x)
return radian/3.14159265359*180.
def __asina(x):
if abs(x) > 1:
return -90. if x< 0 else 90.
radian = asin(x)
return radian/(3.14159265359)*180.
def __computeMeanSolarNoon(jd, wlon):
J = wlon/360 + jd
return J
def __computeSolarMeanAnomaly(solarNoon): # degrees
return (357.5291 + 0.98560028*solarNoon)%360
def __equationOfTheCenter(solarMeanAnomaly): # degrees (sum of weighted sine terms)
M = solarMeanAnomaly
return 1.9148*__sina(M) + 0.0200*__sina(2*M) + 0.0003*__sina(3*M)
def __computeEclipticLongitude(solarMeanAnomaly, eqCenter): # degrees (mean anomaly + equation of the center + constants)
L = (solarMeanAnomaly + eqCenter + 180 + 102.9372) % 360
return L
def computeSolarTransit(meanSolarNoon, solarMeanAnomaly, eclipticLongitude): # subtract the sine corrections from mean solar noon
Jtr = 2451545.0 + meanSolarNoon + (0.0053*__sina(solarMeanAnomaly) - 0.0069*__sina(2*eclipticLongitude))
return Jtr
def __computeSinSunDeclination(L):
delta = __sina(L)*__sina(23.439 )
return delta
def computeHourAngle(nlat, sdelta, elevation):
if elevation < 0:
elevation = 0
elevCoef = -2.076*sqrt(elevation)/60
cosw0 = (__sina(-0.83+elevCoef) - __sina(nlat)*sdelta)/ ( sqrt(1-sdelta*sdelta) * __cosa(nlat))
return __acosa(cosw0)
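# Illustrative helper (a sketch, not part of the original API): the hour angle
# w0 is returned in degrees, so day length is 2*w0/15 hours. At the equator on
# an equinox (sin(declination) ~ 0, sea level) this yields slightly more than
# 12 hours because of the -0.83 degree refraction/solar-disc term.
def _exampleDayLengthHours(nlat, sdelta, elevation=0):
    return 2.0 * computeHourAngle(nlat, sdelta, elevation) / 15.0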
def rmNTPFetch(server = "pool.ntp.org", withRequestDrift = False):
import struct
from socket import socket, AF_INET, SOCK_DGRAM
requestPacket = '\x1b' + 47 * '\0'
startTime = time.time()
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(5)
except Exception, e:
log.error("NTPFetch: Can't create socket")
return None
try:
sock.sendto(requestPacket, (server, 123))
data, ip = sock.recvfrom(1024)
except Exception, e:
#log.error("NTPFetch: Error receiving data: %s" % e)
return None
try:
if data:
timestamp = struct.unpack('!12I', data)[10]
timestamp -= 2208988800L # = date in sec since epoch
# http://stackoverflow.com/questions/1599060/how-can-i-get-an-accurate-utc-time-with-python
if withRequestDrift:
reqTime = time.time() - startTime
timestamp += reqTime / 2
return timestamp
except:
log.error("NTPFetch: Conversion failed.")
return None
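# Illustrative helper (a sketch, not part of the original API):
def _exampleNtpClockSkew():
    # Returns the local clock error in seconds (positive when the box runs
    # fast), or None when the query against the default NTP pool fails.
    ntpTs = rmNTPFetch(withRequestDrift=True)
    if ntpTs is None:
        return None
    return time.time() - ntpTs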
def getAlarmElapsedRealTime():
### DEPRECATED: This method was used on Android to get the UP_TIME (replaced by monotonicTime())
elapsedTime = -1
try:
alarmFile = open("/dev/alarm", 'r')
if alarmFile:
t = timespec()
# ANDROID_ALARM_GET_TIME(ANDROID_ALARM_ELAPSED_REALTIME) = 0x40086134
result = fcntl.ioctl(alarmFile.fileno(), 0x40086134, t)
if result == 0:
elapsedTime = t.tv_sec
alarmFile.close()
except Exception, e:
log.error(e)
return elapsedTime
class rmMonotonicTime:
CLOCK_MONOTONIC_RAW = 4 # see <linux/time.h>
def __init__(self, fallback = True):
self.fallback = fallback
self.clock_gettime = None
self.get = None
self.monotonicInit()
def monotonicInit(self):
try:
from RMOSGlue.rmOSPlatform import RMOSPlatform
if RMOSPlatform().AUTODETECTED == RMOSPlatform.ANDROID:
librt = ctypes.CDLL('libc.so', use_errno=True)
log.info("Initialised Android monotonic clock")
elif RMOSPlatform().AUTODETECTED == RMOSPlatform.OPENWRT:
librt = ctypes.CDLL('librt.so.0', use_errno=True)
log.info("Initialised OpenWRT monotonic clock")
else:
librt = ctypes.CDLL('librt.so.1', use_errno=True)
log.info("Initialised generic monotonic clock")
self.clock_gettime = librt.clock_gettime
self.clock_gettime.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]
self.get = self.monotonicTime
except Exception, e:
self.get = self.monotonicFallback
log.error("Cannot initialise monotonicClock will use fallback time.time() method !")
<|fim▁hole|>
def monotonicFallback(self, asSeconds = True):
if asSeconds:
return int(time.time())
return time.time()
def monotonicTime(self, asSeconds = True):
t = timespec()
if self.clock_gettime(rmMonotonicTime.CLOCK_MONOTONIC_RAW , ctypes.pointer(t)) != 0:
errno_ = ctypes.get_errno()
if self.fallback:
log.info("Monotonic Clock Error ! Reverting to time.time() fallback")
return self.monotonicFallback(asSeconds)
else:
raise OSError(errno_, os.strerror(errno_))
if asSeconds:
return t.tv_sec
return t.tv_sec + t.tv_nsec * 1e-9
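# Illustrative helper (a sketch, not part of the original API):
def _exampleStopwatch(mono):
    # Returns a closure measuring elapsed seconds with whichever clock
    # monotonicInit() selected; pass the module-level globalMonotonicTime below.
    start = mono.get(False)
    return lambda: mono.get(False) - start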
#-----------------------------------------------------------------------------------------------
#
#
#
globalMonotonicTime = rmMonotonicTime()<|fim▁end|>
| |
<|file_name|>searches.py<|end_file_name|><|fim▁begin|>"""JSON implementations of relationship searches."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import objects
from . import queries
from .. import utilities
from ..osid import searches as osid_searches
from ..primitives import Id
from ..utilities import get_registry
from dlkit.abstract_osid.osid import errors
from dlkit.abstract_osid.relationship import searches as abc_relationship_searches
class RelationshipSearch(abc_relationship_searches.RelationshipSearch, osid_searches.OsidSearch):
"""The search interface for governing relationship searches."""
def __init__(self, runtime):
self._namespace = 'relationship.Relationship'
self._runtime = runtime
record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime)
self._record_type_data_sets = record_type_data_sets
self._all_supported_record_type_data_sets = record_type_data_sets
self._all_supported_record_type_ids = []
self._id_list = None
for data_set in record_type_data_sets:
self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
osid_searches.OsidSearch.__init__(self, runtime)
@utilities.arguments_not_none
def search_among_relationships(self, relationship_ids):
"""Execute this search among the given list of relationships.
arg: relationship_ids (osid.id.IdList): list of relationships
raise: NullArgument - ``relationship_ids`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._id_list = relationship_ids
@utilities.arguments_not_none
def order_relationship_results(self, relationship_search_order):
"""Specify an ordering to the search results.
arg: relationship_search_order
(osid.relationship.RelationshipSearchOrder):
relationship search order
raise: NullArgument - ``relationship_search_order`` is ``null``
raise: Unsupported - ``relationship_search_order`` is not of
this service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_relationship_search_record(self, relationship_search_record_type):
"""Gets the relationship search record corresponding to the given relationship search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: relationship_search_record_type (osid.type.Type): a
relationship search record type
return: (osid.relationship.records.RelationshipSearchRecord) -
the relationship search record
raise: NullArgument - ``relationship_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(relationship_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class RelationshipSearchResults(abc_relationship_searches.RelationshipSearchResults, osid_searches.OsidSearchResults):
"""This interface provides a means to capture results of a search."""
def __init__(self, results, query_terms, runtime):
# if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip
# self._results = [r for r in results]
self._namespace = 'relationship.Relationship'
self._results = results
self._query_terms = query_terms
self._runtime = runtime
self.retrieved = False
def get_relationships(self):
"""Gets the relationship list resulting from a search.
return: (osid.relationship.RelationshipList) - the relationship
list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.RelationshipList(self._results, runtime=self._runtime)
relationships = property(fget=get_relationships)
def get_relationship_query_inspector(self):
"""Gets the inspector for the query to examine the terms used in the search.
return: (osid.relationship.RelationshipQueryInspector) - the
relationship query inspector
*compliance: mandatory -- This method must be implemented.*
"""
return queries.RelationshipQueryInspector(self._query_terms, runtime=self._runtime)
relationship_query_inspector = property(fget=get_relationship_query_inspector)
@utilities.arguments_not_none
def get_relationship_search_results_record(self, relationship_search_record_type):
"""Gets the relationship search results record corresponding to the given relationship search record ``Type``.
This method must be used to retrieve an object implementing the
requested record interface along with all of its ancestor
interfaces.
arg: relationship_search_record_type (osid.type.Type): a
relationship search record type
return:
(osid.relationship.records.RelationshipSearchResultsReco
rd) - the relationship search results record
raise: NullArgument - ``relationship_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(relationship_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
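# Usage sketch (hypothetical; assumes the runtime, id list, query terms and
# results cursor are supplied by the surrounding dlkit session machinery):
#
#     search = RelationshipSearch(runtime)
#     search.search_among_relationships(relationship_ids)
#     results = RelationshipSearchResults(cursor, query_terms, runtime)
#     relationships = results.get_relationships()  # a second call raises IllegalState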
class FamilySearch(abc_relationship_searches.FamilySearch, osid_searches.OsidSearch):
"""The search interface for governing family searches."""
def __init__(self, runtime):
self._namespace = 'relationship.Family'
self._runtime = runtime
record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime)
self._record_type_data_sets = record_type_data_sets
self._all_supported_record_type_data_sets = record_type_data_sets
self._all_supported_record_type_ids = []
self._id_list = None
for data_set in record_type_data_sets:
self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
osid_searches.OsidSearch.__init__(self, runtime)
@utilities.arguments_not_none
def search_among_families(self, family_ids):
"""Execute this search among the given list of families.
arg: family_ids (osid.id.IdList): list of families
raise: NullArgument - ``family_ids`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._id_list = family_ids
@utilities.arguments_not_none
def order_family_results(self, family_search_order):
"""Specify an ordering to the search results.
arg: family_search_order
(osid.relationship.FamilySearchOrder): family search
order
raise: NullArgument - ``family_search_order`` is ``null``
raise: Unsupported - ``family_search_order`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_family_search_record(self, family_search_record_type):
"""Gets the family search record corresponding to the given family search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: family_search_record_type (osid.type.Type): a family
search record type
return: (osid.relationship.records.FamilySearchRecord) - the
family search record
raise: NullArgument - ``family_search_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred<|fim▁hole|>
"""
raise errors.Unimplemented()
class FamilySearchResults(abc_relationship_searches.FamilySearchResults, osid_searches.OsidSearchResults):
"""This interface provides a means to capture results of a search and is used as a vehicle to perform a search within a previous result set."""
def __init__(self, results, query_terms, runtime):
# if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip
# self._results = [r for r in results]
self._namespace = 'relationship.Family'
self._results = results
self._query_terms = query_terms
self._runtime = runtime
self.retrieved = False
def get_families(self):
"""Gets the family list resulting from a search.
return: (osid.relationship.FamilyList) - the family list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.FamilyList(self._results, runtime=self._runtime)
families = property(fget=get_families)
def get_family_query_inspector(self):
"""Gets the inspector for the query to examine the terms used in the search.
return: (osid.relationship.FamilyQueryInspector) - the family
query inspector
*compliance: mandatory -- This method must be implemented.*
"""
return queries.FamilyQueryInspector(self._query_terms, runtime=self._runtime)
family_query_inspector = property(fget=get_family_query_inspector)
@utilities.arguments_not_none
def get_family_search_results_record(self, family_search_record_type):
"""Gets the family search results record corresponding to the given family search record Type.
This method is used to retrieve an object implementing the
requested record.
arg: family_search_record_type (osid.type.Type): a family
search record type
return: (osid.relationship.records.FamilySearchResultsRecord) -
the family search results record
raise: NullArgument - ``FamilySearchRecordType`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(family_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()<|fim▁end|>
|
raise: Unsupported -
``has_record_type(family_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
WAGTAIL_ROOT = os.path.dirname(__file__)
STATIC_ROOT = os.path.join(WAGTAIL_ROOT, 'test-static')
MEDIA_ROOT = os.path.join(WAGTAIL_ROOT, 'test-media')
MEDIA_URL = '/media/'
DATABASES = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3'),
'NAME': os.environ.get('DATABASE_NAME', 'wagtail'),
'USER': os.environ.get('DATABASE_USER', None),
'PASSWORD': os.environ.get('DATABASE_PASS', None),
'HOST': os.environ.get('DATABASE_HOST', None),
'TEST': {
'NAME': os.environ.get('DATABASE_NAME', None),
}
}
}
SECRET_KEY = 'not needed'
ROOT_URLCONF = 'wagtail.tests.urls'
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
USE_TZ = True
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'wagtail.tests.context_processors.do_not_use_static_url',
'wagtail.contrib.settings.context_processors.settings',
],
},
},
{
'BACKEND': 'django.template.backends.jinja2.Jinja2',
'APP_DIRS': True,
'OPTIONS': {
'extensions': [
'wagtail.wagtailcore.jinja2tags.core',
'wagtail.wagtailadmin.jinja2tags.userbar',
'wagtail.wagtailimages.jinja2tags.images',
],
},
},
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'wagtail.wagtailcore.middleware.SiteMiddleware',
'wagtail.wagtailredirects.middleware.RedirectMiddleware',
)
INSTALLED_APPS = (
# Install wagtailredirects with its appconfig
# There's nothing special about wagtailredirects, we just need to have one
# app which uses AppConfigs to test that hooks load properly
'wagtail.wagtailredirects.apps.WagtailRedirectsAppConfig',
'wagtail.tests.testapp',
'wagtail.tests.demosite',
'wagtail.tests.customuser',
'wagtail.tests.snippets',
'wagtail.tests.routablepage',
'wagtail.tests.search',
'wagtail.contrib.wagtailstyleguide',
'wagtail.contrib.wagtailsitemaps',
'wagtail.contrib.wagtailroutablepage',
'wagtail.contrib.wagtailfrontendcache',
'wagtail.contrib.wagtailapi',
'wagtail.contrib.wagtailsearchpromotions',
'wagtail.contrib.settings',
'wagtail.wagtailforms',
'wagtail.wagtailsearch',
'wagtail.wagtailembeds',
'wagtail.wagtailimages',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailsnippets',
'wagtail.wagtaildocs',
'wagtail.wagtailadmin',
'wagtail.wagtailcore',
'taggit',
'rest_framework',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
# Using DatabaseCache to make sure that the cache is cleared between tests.
# This prevents false-positives in some wagtail core tests where we are
# changing the 'wagtail_root_paths' key which may cause future tests to fail.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'cache',
}
}
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher', # don't use the intentionally slow default password hasher
)
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
AUTH_USER_MODEL = 'customuser.CustomUser'
if 'ELASTICSEARCH_URL' in os.environ:
WAGTAILSEARCH_BACKENDS['elasticsearch'] = {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch',
'URLS': [os.environ['ELASTICSEARCH_URL']],
'TIMEOUT': 10,
'max_retries': 1,
'AUTO_UPDATE': False,
}<|fim▁hole|><|fim▁end|>
|
WAGTAIL_SITE_NAME = "Test Site"
|
<|file_name|>hark.py<|end_file_name|><|fim▁begin|># -*- coding: latin-1 -*-
import re
import json
from .common import InfoExtractor
from ..utils import determine_ext
class HarkIE(InfoExtractor):
_VALID_URL = r'https?://www\.hark\.com/clips/(.+?)-.+'
_TEST = {
u'url': u'http://www.hark.com/clips/mmbzyhkgny-obama-beyond-the-afghan-theater-we-only-target-al-qaeda-on-may-23-2013',
u'file': u'mmbzyhkgny.mp3',
u'md5': u'6783a58491b47b92c7c1af5a77d4cbee',
u'info_dict': {
u'title': u"Obama: 'Beyond The Afghan Theater, We Only Target Al Qaeda' on May 23, 2013",
u'description': u'President Barack Obama addressed the nation live on May 23, 2013 in a speech aimed at addressing counter-terrorism policies including the use of drone strikes, detainees at Guantanamo Bay prison facility, and American citizens who are terrorists.',
u'duration': 11,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group(1)
json_url = "http://www.hark.com/clips/%s.json" %(video_id)
info_json = self._download_webpage(json_url, video_id)
info = json.loads(info_json)
final_url = info['url']
return {'id': video_id,
'url' : final_url,
'title': info['name'],
'ext': determine_ext(final_url),
'description': info['description'],<|fim▁hole|><|fim▁end|>
|
'thumbnail': info['image_original'],
'duration': info['duration'],
}
|
<|file_name|>api_python.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys, urllib2
def main():
if len(sys.argv) < 2:
print("Error, usage: {0} <your url>".format(sys.argv[0]))
return 1
url = sys.argv[1]
print(urllib2.urlopen('http://t34.me/api/?u=' + url).read())
return 0<|fim▁hole|>if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>vertexdomain.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-<|fim▁hole|>#
# vertexdomain.py
#
# Copyright 2016 notna <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#<|fim▁end|>
| |
<|file_name|>assessments.py<|end_file_name|><|fim▁begin|># Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to manage all aspects of student assessments."""
__author__ = '[email protected] (Philip Guo)'
import datetime
import logging
from models import courses
from models import models
from models import review
from models import student_work
from models import transforms
from models import utils
from models.models import Student
from models.models import ValidStudent
from models.models import Profile
from models.models import StudentAnswersEntity
from tools import verify
from utils import BaseHandler
from utils import HUMAN_READABLE_DATETIME_FORMAT
from google.appengine.ext import db
# questions per module - training 2 - 12 modules
# last is postcourse
# training
#MODULE_QUESTIONS = [4,10,7,5,5,5,5,7,5,5,5,11,7]
# recertification
MODULE_QUESTIONS = [2,4,5,4,3,7]
# mandatory modules 1 to 8 - needed?
#MANDATORY_MODULES = 8
# number of question modules
#MAX_MODULES = 6
MAX_MODULES = len(MODULE_QUESTIONS)-1
def calc_total_score(student):
#
mn = MODULE_QUESTIONS
# mm = MANDATORY_MODULES
#
overall_score = -1
ms = []
for i in range(1,MAX_MODULES+1):
course = 'a'+str(i)+'course'
ms.append(utils.get_score(student, course))
    # get profile for this user - mandatory modules
valid = ValidStudent.get_valid(student.key().name())
prof = Profile.get_by_key_name(valid.profile)
auth = eval(prof.auth)
# complete = mandatory modules are done (have scores)
complete = True
i = 0
for score in ms[:MAX_MODULES]:<|fim▁hole|> # compute overall score after mandatory modules are done
if complete:
part_score = 0
tq = 0
for i in range(MAX_MODULES):
if ms[i] <> None:
part_score += mn[i] * ms[i]
tq += mn[i]
# todo - somar 0.5 antes do int?
overall_score = int((part_score/tq)+0.5)
return overall_score
def store_score(course, student, assessment_name, assessment_type,score):
"""Stores a student's score on a particular assessment.
Args:
course: the course containing the assessment.
student: the student whose data is stored.
assessment_type: the type of the assessment.
score: the student's score on this assessment.
Returns:
the result of the assessment, if appropriate.
"""
# FIXME: Course creators can edit this code to implement custom
# assessment scoring and storage behavior
# TODO(pgbovine): Note that the latest version of answers are always saved,
# but scores are only saved if they're higher than the previous attempt.
# This can lead to unexpected analytics behavior. Resolve this.
existing_score = course.get_score(student, assessment_name)
# remember to cast to int for comparison
# logging.error('assessment name : %s exist score : %s score %s ',assessment_name,existing_score, score)
if assessment_name != 'postcourse':
if (existing_score is None) or (score > int(existing_score)):
utils.set_score(student, assessment_name, score)
# special handling for computing final score:
if assessment_name == 'postcourse':
# midcourse_score = utils.get_score(student, 'midcourse')
# if midcourse_score is None:
# midcourse_score = 0
# else:
# midcourse_score = int(midcourse_score)
if existing_score is None:
postcourse_score = score
else:
postcourse_score = int(existing_score)
if score > postcourse_score:
postcourse_score = score
# Calculate overall score based on a formula
overall_score = calc_total_score(student)
# logging.error('overall_score : %s ', overall_score)
# if utils.get_score(student, 'postcourse') == 0 and (overall_score > -1) :
# utils.set_score(student, 'postcourse', overall_score)
# utils.set_score(student, 'overall_score', overall_score)
# TODO(pgbovine): this changing of assessment_type is ugly ...
if overall_score == 100:
assessment_name = 'postcourse_100'
else:
if overall_score >= 90:
assessment_name = 'postcourse_pass'
else:
if overall_score > 0:
assessment_name = 'postcourse_fail'
else:
assessment_name = 'not_complete'
# utils.set_score(student, 'overall_score', overall_score)
# store the overall_score of the first run of training in post_course
# post_s= utils.get_score(student, 'postcourse')
# logging.error('postcourse : %s ', utils.get_score(student, 'postcourse'))
if utils.get_score(student, 'postcourse') == None and (overall_score > -1):
utils.set_score(student, 'postcourse', overall_score)
utils.set_score(student, 'overall_score', overall_score)
over_s= utils.get_score(student, 'overall_score')
if over_s <> None:
overall_score = calc_total_score(student)
utils.set_score(student, 'overall_score', overall_score)
return assessment_name
class AnswerHandler(BaseHandler):
"""Handler for saving assessment answers."""
# Find student entity and save answers
@db.transactional(xg=True)
def update_assessment_transaction(
self, email, assessment_name,assessment_type,new_answers, score):
"""Stores answer and updates user scores.
Args:
email: the student's email address.
assessment_type: the type of the assessment (as stated in unit.csv).
new_answers: the latest set of answers supplied by the student.
score: the numerical assessment score.
Returns:
the student instance.
"""
student = Student.get_enrolled_student_by_email(email)
course = self.get_course()
# It may be that old Student entities don't have user_id set; fix it.
if not student.user_id:
student.user_id = self.get_user().user_id()
answers = StudentAnswersEntity.get_by_key_name(student.user_id)
if not answers:
answers = StudentAnswersEntity(key_name=student.user_id)
answers.updated_on = datetime.datetime.now()
utils.set_answer(answers, assessment_name, new_answers)
store_score(course, student, assessment_name, assessment_type,score)
student.put()
answers.put()
# Also record the event, which is useful for tracking multiple
# submissions and history.
models.EventEntity.record(
'submit-assessment', self.get_user(), transforms.dumps({
'type': 'assessment-%s' % assessment_name,
'values': new_answers, 'location': 'AnswerHandler'}))
return student
def post(self):
"""Handles POST requests."""
student = self.personalize_page_and_get_enrolled()
if not student:
return
if not self.assert_xsrf_token_or_fail(self.request, 'assessment-post'):
return
course = self.get_course()
assessment_type = self.request.get('assessment_type')
assessment_name = self.request.get('assessment_name')
if not assessment_type:
self.error(404)
logging.error('No assessment type supplied.')
return
unit = course.find_unit_by_id(assessment_type)
if unit is None or unit.type != verify.UNIT_TYPE_ASSESSMENT:
self.error(404)
logging.error('No assessment named %s exists.', assessment_type)
return
self.template_value['navbar'] = {'course': True}
self.template_value['assessment'] = assessment_name
# self.template_value['assessment'] = self.request.get('assessment_name')
self.template_value['assessment_name'] = unit.title
self.template_value['is_last_assessment'] = (
course.is_last_assessment(unit))
# Convert answers from JSON to dict.
answers = self.request.get('answers')
answers = transforms.loads(answers) if answers else []
grader = unit.workflow.get_grader()
# Scores are not recorded for human-reviewed assignments.
score = 0
if grader == courses.AUTO_GRADER:
score = int(round(float(self.request.get('score'))))
# Record assessment transaction.
student = self.update_assessment_transaction(
student.key().name(), assessment_name, assessment_type, answers, score)
if grader == courses.HUMAN_GRADER:
rp = course.get_reviews_processor()
# Guard against duplicate submissions of a human-graded assessment.
previously_submitted = rp.does_submission_exist(
unit.unit_id, student.get_key())
if not previously_submitted:
# Check that the submission due date has not passed.
time_now = datetime.datetime.now()
submission_due_date = unit.workflow.get_submission_due_date()
if time_now > submission_due_date:
self.template_value['time_now'] = time_now.strftime(
HUMAN_READABLE_DATETIME_FORMAT)
self.template_value['submission_due_date'] = (
submission_due_date.strftime(
HUMAN_READABLE_DATETIME_FORMAT))
self.template_value['error_code'] = (
'assignment_deadline_exceeded')
self.render('error.html')
return
submission_key = student_work.Submission.write(
unit.unit_id, student.get_key(), answers)
rp.start_review_process_for(
unit.unit_id, submission_key, student.get_key())
# Record completion event in progress tracker.
course.get_progress_tracker().put_assessment_completed(
student, assessment_type)
self.template_value['previously_submitted'] = previously_submitted
matcher = unit.workflow.get_matcher()
self.template_value['matcher'] = matcher
if matcher == review.PEER_MATCHER:
self.template_value['review_dashboard_url'] = (
'reviewdashboard?unit=%s' % unit.unit_id
)
self.render('reviewed_assessment_confirmation.html')
return
else:
# Record completion event in progress tracker.
course.get_progress_tracker().put_assessment_completed(
student, assessment_type)
# Save the submission in the datastore, overwriting the earlier
# version if it exists.
submission_key = student_work.Submission.write(
unit.unit_id, student.get_key(), answers)
self.template_value['result'] = course.get_overall_result(student)
self.template_value['score'] = score
self.template_value['overall_score'] = course.get_overall_score(
student)
self.render('test_confirmation.html')<|fim▁end|>
|
if auth[i]:
complete = complete and (score <> None)
i += 1
|
<|file_name|>connect.rs<|end_file_name|><|fim▁begin|>//! CONNECT
use std::io::{self, Read, Write};
use crate::control::variable_header::protocol_level::SPEC_3_1_1;
use crate::control::variable_header::{ConnectFlags, KeepAlive, ProtocolLevel, ProtocolName, VariableHeaderError};
use crate::control::{ControlType, FixedHeader, PacketType};
use crate::encodable::VarBytes;
use crate::packet::{DecodablePacket, PacketError};
use crate::topic_name::{TopicName, TopicNameDecodeError, TopicNameError};
use crate::{Decodable, Encodable};
/// `CONNECT` packet
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct ConnectPacket {
fixed_header: FixedHeader,
protocol_name: ProtocolName,
protocol_level: ProtocolLevel,
flags: ConnectFlags,
keep_alive: KeepAlive,<|fim▁hole|>
payload: ConnectPacketPayload,
}
encodable_packet!(ConnectPacket(protocol_name, protocol_level, flags, keep_alive, payload));
impl ConnectPacket {
pub fn new<C>(client_identifier: C) -> ConnectPacket
where
C: Into<String>,
{
ConnectPacket::with_level("MQTT", client_identifier, SPEC_3_1_1).expect("SPEC_3_1_1 should always be valid")
}
pub fn with_level<P, C>(protoname: P, client_identifier: C, level: u8) -> Result<ConnectPacket, VariableHeaderError>
where
P: Into<String>,
C: Into<String>,
{
let protocol_level = ProtocolLevel::from_u8(level).ok_or(VariableHeaderError::InvalidProtocolVersion)?;
let mut pk = ConnectPacket {
fixed_header: FixedHeader::new(PacketType::with_default(ControlType::Connect), 0),
protocol_name: ProtocolName(protoname.into()),
protocol_level,
flags: ConnectFlags::empty(),
keep_alive: KeepAlive(0),
payload: ConnectPacketPayload::new(client_identifier.into()),
};
pk.fix_header_remaining_len();
Ok(pk)
}
pub fn set_keep_alive(&mut self, keep_alive: u16) {
self.keep_alive = KeepAlive(keep_alive);
}
pub fn set_user_name(&mut self, name: Option<String>) {
self.flags.user_name = name.is_some();
self.payload.user_name = name;
self.fix_header_remaining_len();
}
pub fn set_will(&mut self, topic_message: Option<(TopicName, Vec<u8>)>) {
self.flags.will_flag = topic_message.is_some();
self.payload.will = topic_message.map(|(t, m)| (t, VarBytes(m)));
self.fix_header_remaining_len();
}
pub fn set_password(&mut self, password: Option<String>) {
self.flags.password = password.is_some();
self.payload.password = password;
self.fix_header_remaining_len();
}
pub fn set_client_identifier<I: Into<String>>(&mut self, id: I) {
self.payload.client_identifier = id.into();
self.fix_header_remaining_len();
}
pub fn set_will_retain(&mut self, will_retain: bool) {
self.flags.will_retain = will_retain;
}
pub fn set_will_qos(&mut self, will_qos: u8) {
assert!(will_qos <= 2);
self.flags.will_qos = will_qos;
}
pub fn set_clean_session(&mut self, clean_session: bool) {
self.flags.clean_session = clean_session;
}
pub fn user_name(&self) -> Option<&str> {
self.payload.user_name.as_ref().map(|x| &x[..])
}
pub fn password(&self) -> Option<&str> {
self.payload.password.as_ref().map(|x| &x[..])
}
pub fn will(&self) -> Option<(&str, &[u8])> {
self.payload.will.as_ref().map(|(topic, msg)| (&topic[..], &*msg.0))
}
pub fn will_retain(&self) -> bool {
self.flags.will_retain
}
pub fn will_qos(&self) -> u8 {
self.flags.will_qos
}
pub fn client_identifier(&self) -> &str {
&self.payload.client_identifier[..]
}
pub fn protocol_name(&self) -> &str {
&self.protocol_name.0
}
pub fn protocol_level(&self) -> ProtocolLevel {
self.protocol_level
}
pub fn clean_session(&self) -> bool {
self.flags.clean_session
}
/// Read back the "reserved" Connect flag bit 0. For compliant implementations this should
/// always be false.
pub fn reserved_flag(&self) -> bool {
self.flags.reserved
}
}
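// Usage sketch (mirrors the round-trip tests at the bottom of this file; not
// additional API):
//
//     let mut pkt = ConnectPacket::new("client-1");
//     pkt.set_keep_alive(30);
//     pkt.set_clean_session(true);
//     let mut buf = Vec::new();
//     pkt.encode(&mut buf).unwrap(); // writing into a Vec cannot fail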
impl DecodablePacket for ConnectPacket {
type DecodePacketError = ConnectPacketError;
fn decode_packet<R: Read>(reader: &mut R, fixed_header: FixedHeader) -> Result<Self, PacketError<Self>> {
let protoname: ProtocolName = Decodable::decode(reader)?;
let protocol_level: ProtocolLevel = Decodable::decode(reader)?;
let flags: ConnectFlags = Decodable::decode(reader)?;
let keep_alive: KeepAlive = Decodable::decode(reader)?;
let payload: ConnectPacketPayload =
Decodable::decode_with(reader, Some(flags)).map_err(PacketError::PayloadError)?;
Ok(ConnectPacket {
fixed_header,
protocol_name: protoname,
protocol_level,
flags,
keep_alive,
payload,
})
}
}
/// Payloads for connect packet
#[derive(Debug, Eq, PartialEq, Clone)]
struct ConnectPacketPayload {
client_identifier: String,
will: Option<(TopicName, VarBytes)>,
user_name: Option<String>,
password: Option<String>,
}
impl ConnectPacketPayload {
pub fn new(client_identifier: String) -> ConnectPacketPayload {
ConnectPacketPayload {
client_identifier,
will: None,
user_name: None,
password: None,
}
}
}
impl Encodable for ConnectPacketPayload {
fn encode<W: Write>(&self, writer: &mut W) -> Result<(), io::Error> {
self.client_identifier.encode(writer)?;
if let Some((will_topic, will_message)) = &self.will {
will_topic.encode(writer)?;
will_message.encode(writer)?;
}
if let Some(ref user_name) = self.user_name {
user_name.encode(writer)?;
}
if let Some(ref password) = self.password {
password.encode(writer)?;
}
Ok(())
}
fn encoded_length(&self) -> u32 {
self.client_identifier.encoded_length()
+ self
.will
.as_ref()
.map(|(a, b)| a.encoded_length() + b.encoded_length())
.unwrap_or(0)
+ self.user_name.as_ref().map(|t| t.encoded_length()).unwrap_or(0)
+ self.password.as_ref().map(|t| t.encoded_length()).unwrap_or(0)
}
}
impl Decodable for ConnectPacketPayload {
type Error = ConnectPacketError;
type Cond = Option<ConnectFlags>;
fn decode_with<R: Read>(
reader: &mut R,
rest: Option<ConnectFlags>,
) -> Result<ConnectPacketPayload, ConnectPacketError> {
let mut need_will = false;
let mut need_user_name = false;
let mut need_password = false;
if let Some(r) = rest {
need_will = r.will_flag;
need_user_name = r.user_name;
need_password = r.password;
}
let ident = String::decode(reader)?;
let will = if need_will {
let topic = TopicName::decode(reader).map_err(|e| match e {
TopicNameDecodeError::IoError(e) => ConnectPacketError::from(e),
TopicNameDecodeError::InvalidTopicName(e) => e.into(),
})?;
let msg = VarBytes::decode(reader)?;
Some((topic, msg))
} else {
None
};
let uname = if need_user_name {
Some(String::decode(reader)?)
} else {
None
};
let pwd = if need_password {
Some(String::decode(reader)?)
} else {
None
};
Ok(ConnectPacketPayload {
client_identifier: ident,
will,
user_name: uname,
password: pwd,
})
}
}
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub enum ConnectPacketError {
IoError(#[from] io::Error),
TopicNameError(#[from] TopicNameError),
}
#[cfg(test)]
mod test {
use super::*;
use std::io::Cursor;
use crate::{Decodable, Encodable};
#[test]
fn test_connect_packet_encode_basic() {
let packet = ConnectPacket::new("12345".to_owned());
let expected = b"\x10\x11\x00\x04MQTT\x04\x00\x00\x00\x00\x0512345";
let mut buf = Vec::new();
packet.encode(&mut buf).unwrap();
assert_eq!(&expected[..], &buf[..]);
}
#[test]
fn test_connect_packet_decode_basic() {
let encoded_data = b"\x10\x11\x00\x04MQTT\x04\x00\x00\x00\x00\x0512345";
let mut buf = Cursor::new(&encoded_data[..]);
let packet = ConnectPacket::decode(&mut buf).unwrap();
let expected = ConnectPacket::new("12345".to_owned());
assert_eq!(expected, packet);
}
#[test]
fn test_connect_packet_user_name() {
let mut packet = ConnectPacket::new("12345".to_owned());
packet.set_user_name(Some("mqtt_player".to_owned()));
let mut buf = Vec::new();
packet.encode(&mut buf).unwrap();
let mut decode_buf = Cursor::new(buf);
let decoded_packet = ConnectPacket::decode(&mut decode_buf).unwrap();
assert_eq!(packet, decoded_packet);
}
}<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const express = require('express');
const app = express();
app.use(express.static('./src/'))
app.listen(8000, () => {<|fim▁hole|><|fim▁end|>
|
console.log('The server is running on the http://localhost:8000/......');
});
|
<|file_name|>apphtml_settings.py<|end_file_name|><|fim▁begin|># vim: fileencoding=utf-8
"""
AppHtml settings
@author Toshiya NISHIO(http://www.toshiya240.com)
"""
defaultTemplate = {
'1) 小さいボタン': '${badgeS}',
'2) 大きいボタン': '${badgeL}',
'3) テキストのみ': '${textonly}',
"4) アイコン付き(小)": u"""<span class="appIcon"><img class="appIconImg" height="60" src="${icon60url}" style="float:left;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeS" style="display:inline-block; margin:6px">${badgeS}</span><br style="clear:both;">
""",
"5) アイコン付き(大)": u"""<span class="appIcon"><img class="appIconImg" height="100" src="${icon100url}" style="float:left;;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeL" style="display:inline-block; margin:4px">${badgeL}</span><br style="clear:both;">
"""
}
<|fim▁hole|>settings = {
'phg': "",
'cnt': 8,
'scs': {
'iphone': 320,
'ipad': 320,
'mac': 480
},
'template': {
'software': defaultTemplate,
'iPadSoftware': defaultTemplate,
'macSoftware': defaultTemplate,
'song': defaultTemplate,
'album': defaultTemplate,
'movie': defaultTemplate,
'ebook': defaultTemplate
}
}<|fim▁end|>
| |
<|file_name|>task-move-position.js<|end_file_name|><|fim▁begin|>KB.component('task-move-position', function (containerElement, options) {
function getSelectedValue(id) {
var element = KB.dom(document).find('#' + id);
if (element) {
return parseInt(element.options[element.selectedIndex].value);
}
return null;
}
function getSwimlaneId() {
var swimlaneId = getSelectedValue('form-swimlanes');
return swimlaneId === null ? options.board[0].id : swimlaneId;
}
function getColumnId() {
var columnId = getSelectedValue('form-columns');
return columnId === null ? options.board[0].columns[0].id : columnId;
}
function getPosition() {
var position = getSelectedValue('form-position');
return position === null ? 1 : position;
}
function getPositionChoice() {
var element = KB.find('input[name=positionChoice]:checked');
if (element) {
return element.value;
}
return 'before';
}
function onSwimlaneChanged() {
var columnSelect = KB.dom(document).find('#form-columns');
KB.dom(columnSelect).replace(buildColumnSelect());
var taskSection = KB.dom(document).find('#form-tasks');
KB.dom(taskSection).replace(buildTasks());
}
function onColumnChanged() {
var taskSection = KB.dom(document).find('#form-tasks');
KB.dom(taskSection).replace(buildTasks());
}
function onError(message) {
KB.trigger('modal.stop');
KB.find('#message-container')
.replace(KB.dom('div')
.attr('id', 'message-container')
.attr('class', 'alert alert-error')
.text(message)
.build()
);
}
function onSubmit() {
var position = getPosition();
var positionChoice = getPositionChoice();
if (positionChoice === 'after') {
position++;
}
KB.find('#message-container').replace(KB.dom('div').attr('id', 'message-container').build());
KB.http.postJson(options.saveUrl, {
"column_id": getColumnId(),
"swimlane_id": getSwimlaneId(),
"position": position
}).success(function () {
window.location.reload(true);
}).error(function (response) {
if (response) {
onError(response.message);
}
});
}
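// Note: choosing "after" submits position N+1 (see onSubmit above), so the
// server-side move places the task directly below the reference task.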
function buildSwimlaneSelect() {
var swimlanes = [];
options.board.forEach(function(swimlane) {
swimlanes.push({'value': swimlane.id, 'text': swimlane.name});
});
return KB.dom('select')
.attr('id', 'form-swimlanes')
.change(onSwimlaneChanged)
.for('option', swimlanes)
.build();
}
function buildColumnSelect() {
var columns = [];
var swimlaneId = getSwimlaneId();
options.board.forEach(function(swimlane) {
if (swimlaneId === swimlane.id) {
swimlane.columns.forEach(function(column) {
columns.push({'value': column.id, 'text': column.title});
});
}
});
return KB.dom('select')
.attr('id', 'form-columns')
.change(onColumnChanged)
.for('option', columns)
.build();
}
function buildTasks() {
var tasks = [];
var swimlaneId = getSwimlaneId();
var columnId = getColumnId();
var container = KB.dom('div').attr('id', 'form-tasks');
options.board.forEach(function (swimlane) {
if (swimlaneId === swimlane.id) {
swimlane.columns.forEach(function (column) {
if (columnId === column.id) {
column.tasks.forEach(function (task) {
tasks.push({'value': task.position, 'text': '#' + task.id + ' - ' + task.title});
});
}
});
}
});
if (tasks.length > 0) {
container
.add(KB.html.label(options.positionLabel, 'form-position'))
.add(KB.dom('select').attr('id', 'form-position').for('option', tasks).build())
.add(KB.html.radio(options.beforeLabel, 'positionChoice', 'before'))
.add(KB.html.radio(options.afterLabel, 'positionChoice', 'after'))
;
}
return container.build();
}
this.render = function () {
KB.on('modal.submit', onSubmit);
var form = KB.dom('div')<|fim▁hole|> .add(KB.html.label(options.swimlaneLabel, 'form-swimlanes'))
.add(buildSwimlaneSelect())
.add(KB.html.label(options.columnLabel, 'form-columns'))
.add(buildColumnSelect())
.add(buildTasks())
.build();
containerElement.appendChild(form);
};
});<|fim▁end|>
|
.on('submit', onSubmit)
.add(KB.dom('div').attr('id', 'message-container').build())
|
<|file_name|>association-set-test.js<|end_file_name|><|fim▁begin|>import Helper, { states } from './_helper';
import { module, test } from 'qunit';
module('Integration | ORM | Has Many | Named Reflexive | association #set', function(hooks) {
hooks.beforeEach(function() {
this.helper = new Helper();
});
/*
The model can update its association via parent, for all states
*/
states.forEach((state) => {
test(`a ${state} can update its association to a list of saved children`, function(assert) {
let [ tag, originalTags ] = this.helper[state]();
let savedTag = this.helper.savedChild();
tag.labels = [ savedTag ];
<|fim▁hole|> tag.save();
originalTags.forEach(originalTag => {
originalTag.reload();
assert.notOk(originalTag.labels.includes(tag), 'old inverses were cleared');
});
});
test(`a ${state} can update its association to a new parent`, function(assert) {
let [ tag, originalTags ] = this.helper[state]();
let newTag = this.helper.newChild();
tag.labels = [ newTag ];
assert.ok(tag.labels.includes(newTag));
assert.equal(tag.labelIds[0], undefined);
assert.ok(newTag.labels.includes(tag), 'the inverse was set');
tag.save();
originalTags.forEach(originalTag => {
originalTag.reload();
assert.notOk(originalTag.labels.includes(tag), 'old inverses were cleared');
});
});
test(`a ${state} can clear its association via an empty list`, function(assert) {
let [ tag, originalTags ] = this.helper[state]();
tag.labels = [ ];
assert.deepEqual(tag.labelIds, [ ]);
assert.equal(tag.labels.models.length, 0);
tag.save();
originalTags.forEach(originalTag => {
originalTag.reload();
assert.notOk(originalTag.labels.includes(tag), 'old inverses were cleared');
});
});
    test(`a ${state} can clear its association via a null value`, function(assert) {
let [ tag, originalTags ] = this.helper[state]();
tag.labels = null;
assert.deepEqual(tag.labelIds, [ ]);
assert.equal(tag.labels.models.length, 0);
tag.save();
originalTags.forEach(originalTag => {
originalTag.reload();
assert.notOk(originalTag.labels.includes(tag), 'old inverses were cleared');
});
});
});
});<|fim▁end|>
|
assert.ok(tag.labels.includes(savedTag));
assert.equal(tag.labelIds[0], savedTag.id);
assert.ok(savedTag.labels.includes(tag), 'the inverse was set');
|
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|>##############################################################################<|fim▁hole|>#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Products Management Group',
'version': '13.0.1.0.0',
'category': 'base.module_category_knowledge_management',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'depends': [
'sale',
],
'data': [
'security/product_management_security.xml',
],
'installable': False,
}<|fim▁end|>
| |
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>"""Support for Abode Security System cameras."""
from datetime import timedelta
import logging
import requests
from homeassistant.components.camera import Camera
from homeassistant.util import Throttle
from . import DOMAIN as ABODE_DOMAIN, AbodeDevice
DEPENDENCIES = ['abode']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Abode camera devices."""
import abodepy.helpers.constants as CONST
import abodepy.helpers.timeline as TIMELINE
data = hass.data[ABODE_DOMAIN]
devices = []
for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA):
if data.is_excluded(device):
continue
devices.append(AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE))
data.devices.extend(devices)
add_entities(devices)
class AbodeCamera(AbodeDevice, Camera):
"""Representation of an Abode camera."""
def __init__(self, data, device, event):
"""Initialize the Abode device."""
AbodeDevice.__init__(self, data, device)
Camera.__init__(self)
self._event = event
self._response = None
async def async_added_to_hass(self):
"""Subscribe Abode events."""
await super().async_added_to_hass()
self.hass.async_add_job(
self._data.abode.events.add_timeline_callback,
self._event, self._capture_callback
)
def capture(self):
"""Request a new image capture."""
return self._device.capture()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def refresh_image(self):
"""Find a new image on the timeline."""
if self._device.refresh_image():
self.get_image()
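    # Note: @Throttle turns extra refresh_image() calls made within
    # MIN_TIME_BETWEEN_UPDATES (90 s) into no-ops, so camera_image() below can
    # invoke it unconditionally on every frame request.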
def get_image(self):
"""Attempt to download the most recent capture."""
if self._device.image_url:
try:
self._response = requests.get(
self._device.image_url, stream=True)
self._response.raise_for_status()
except requests.HTTPError as err:
_LOGGER.warning("Failed to get camera image: %s", err)
self._response = None
else:
self._response = None
def camera_image(self):
"""Get a camera image."""
self.refresh_image()
if self._response:
return self._response.content
return None
<|fim▁hole|> """Update the image with the device then refresh device."""
self._device.update_image_location(capture)
self.get_image()
self.schedule_update_ha_state()<|fim▁end|>
|
def _capture_callback(self, capture):
|
<|file_name|>meeting_rooms.py<|end_file_name|><|fim▁begin|>intervals = [[10,20],[6,15],[0,22]]<|fim▁hole|>
print(sorted(intervals))<|fim▁end|>
| |
<|file_name|>MC_AudioPlayer.py<|end_file_name|><|fim▁begin|>from enigma import eTimer, iServiceInformation, iPlayableService, ePicLoad, RT_VALIGN_CENTER, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_HALIGN_CENTER, gFont, eListbox, ePoint, eListboxPythonMultiContent, eServiceCenter, getDesktop
from Components.MenuList import MenuList
from Screens.Screen import Screen
from Screens.ServiceInfo import ServiceInfoList, ServiceInfoListEntry
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Components.Pixmap import Pixmap
from Components.Label import Label
from Screens.ChoiceBox import ChoiceBox
from ServiceReference import ServiceReference
from Components.Button import Button
from Components.ScrollLabel import ScrollLabel
from Components.Sources.List import List
from Screens.MessageBox import MessageBox
from Screens.HelpMenu import HelpableScreen
from twisted.internet import reactor, defer
from twisted.web import client
from twisted.web.client import HTTPClientFactory, downloadPage
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Playlist import PlaylistIOInternal, PlaylistIOM3U, PlaylistIOPLS
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.config import *
from Tools.Directories import resolveFilename, fileExists, pathExists, createDir, SCOPE_MEDIA, SCOPE_PLAYLIST, SCOPE_SKIN_IMAGE
from MC_Filelist import FileList
from Screens.InfoBarGenerics import InfoBarSeek
import os
from os import path as os_path, remove as os_remove, listdir as os_listdir
from __init__ import _
config.plugins.mc_ap = ConfigSubsection()
sorts = [('default',_("default")),('alpha',_("alphabet")), ('alphareverse',_("alphabet backward")),('date',_("date")),('datereverse',_("date backward")),('size',_("size")),('sizereverse',_("size backward"))]
config.plugins.mc_ap_sortmode = ConfigSubsection()
config.plugins.mc_ap_sortmode.enabled = ConfigSelection(sorts)
config.plugins.mc_ap.showJpg = ConfigYesNo(default=True)
config.plugins.mc_ap.jpg_delay = ConfigInteger(default=10, limits=(5, 999))
config.plugins.mc_ap.repeat = ConfigSelection(default="off", choices = [("off", "off"),("single", "single"),("all", "all")])
config.plugins.mc_ap.lastDir = ConfigText(default=resolveFilename(SCOPE_MEDIA))
screensaverlist = [('default',_("default"))]
hddpath="/hdd/saver/"
if pathExists(hddpath):
files = os_listdir(hddpath)
for x in files:
if pathExists(hddpath + x):
screensaverlist += [(hddpath +'%s/' % (x),_("%s") % (x))]
config.plugins.mc_ap.whichjpg = ConfigSelection(screensaverlist)
playlist = []
#try:
# from enigma import evfd
#except Exception, e:
# print "Media Center: Import evfd failed"
radirl = "http://ipkserver.hdmedia-universe.com/bmcradio/"
#for lyrics
def getEncodedString(value):
returnValue = ""
try:
returnValue = value.encode("utf-8", 'ignore')
except UnicodeDecodeError:
try:
returnValue = value.encode("iso8859-1", 'ignore')
except UnicodeDecodeError:
try:
returnValue = value.decode("cp1252").encode("utf-8")
except UnicodeDecodeError:
returnValue = "n/a"
return returnValue
class myHTTPClientFactory(HTTPClientFactory):
def __init__(self, url, method='GET', postdata=None, headers=None,
agent="SHOUTcast", timeout=0, cookies=None,
followRedirect=1, lastModified=None, etag=None):
HTTPClientFactory.__init__(self, url, method=method, postdata=postdata,
headers=headers, agent=agent, timeout=timeout, cookies=cookies,followRedirect=followRedirect)
def sendUrlCommand(url, contextFactory=None, timeout=50, *args, **kwargs):
scheme, host, port, path = client._parse(url)
factory = myHTTPClientFactory(url, *args, **kwargs)
reactor.connectTCP(host, port, factory, timeout=timeout)
return factory.deferred
mcpath = "/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/"
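# These playback-state constants are used by PlayList below but are defined
# elsewhere in the original plugin; the values here are an assumption kept only
# so this excerpt stands alone.
STATE_PLAY = 1
STATE_PAUSE = 2
STATE_STOP = 3
STATE_REWIND = 4
STATE_FORWARD = 5
STATE_NONE = 6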
def PlaylistEntryComponent(serviceref, state = None):
res = [ serviceref ]
text = serviceref.getName()
if text is "":
text = os_path.split(serviceref.getPath().split('/')[-1])[1]
res.append((eListboxPythonMultiContent.TYPE_TEXT,25, 1, 470, 22, 0, RT_VALIGN_CENTER, text))
return res
class PlayList(MenuList):
def __init__(self, enableWrapAround = False):
MenuList.__init__(self, playlist, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", 15))
self.l.setItemHeight(23)
MC_AudioPlayer.currPlaying = -1
self.oldCurrPlaying = -1
self.serviceHandler = eServiceCenter.getInstance()
def clear(self):
del self.list[:]
self.l.setList(self.list)
MC_AudioPlayer.currPlaying = -1
self.oldCurrPlaying = -1
def getSelection(self):
return self.l.getCurrentSelection()[0]
def addFile(self, serviceref):
self.list.append(PlaylistEntryComponent(serviceref))
def updateFile(self, index, newserviceref):
if index < len(self.list):
self.list[index] = PlaylistEntryComponent(newserviceref, STATE_NONE)
def deleteFile(self, index):
if MC_AudioPlayer.currPlaying >= index:
MC_AudioPlayer.currPlaying -= 1
del self.list[index]
def setCurrentPlaying(self, index):
self.oldCurrPlaying = MC_AudioPlayer.currPlaying
MC_AudioPlayer.currPlaying = index
self.moveToIndex(index)
def updateState(self, state):
if len(self.list) > self.oldCurrPlaying and self.oldCurrPlaying != -1:
self.list[self.oldCurrPlaying] = PlaylistEntryComponent(self.list[self.oldCurrPlaying][0], STATE_NONE)
if MC_AudioPlayer.currPlaying != -1 and MC_AudioPlayer.currPlaying < len(self.list):
self.list[MC_AudioPlayer.currPlaying] = PlaylistEntryComponent(self.list[MC_AudioPlayer.currPlaying][0], state)
self.updateList()
def playFile(self):
self.updateState(STATE_PLAY)
def pauseFile(self):
self.updateState(STATE_PAUSE)
def stopFile(self):
self.updateState(STATE_STOP)
def rewindFile(self):
self.updateState(STATE_REWIND)
def forwardFile(self):
self.updateState(STATE_FORWARD)
GUI_WIDGET = eListbox
def updateList(self):
self.l.setList(self.list)
def getCurrentIndex(self):
return MC_AudioPlayer.currPlaying
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
return l and self.serviceHandler.info(l[0]).getEvent(l[0])
def getCurrent(self):
l = self.l.getCurrentSelection()
return l and l[0]
def getServiceRefList(self):
return [ x[0] for x in self.list ]
def __len__(self):
return len(self.list)
class MC_AudioPlayer(Screen, HelpableScreen, InfoBarSeek):
def __init__(self, session):
Screen.__init__(self, session)
HelpableScreen.__init__(self)
InfoBarSeek.__init__(self, actionmap = "MediaPlayerSeekActions")
self.jpgList = []
self.jpgIndex = 0
self.jpgLastIndex = -1
self.isVisible = True
self.coverArtFileName = ""
self["fileinfo"] = Label()
self["text"] = Label(_("Lyrics"))
self["coverArt"] = MediaPixmap()
self["currentfolder"] = Label()
self["currentfavname"] = Label()
self.standardInfoBar = False
try:
if config.av.downmix_ac3.value == False:
config.av.downmix_ac3.value = True
config.av.downmix_ac3.save()
os.system("touch /tmp/.ac3on")
except Exception, e:
print "Media Center: no ac3"
self["play"] = Pixmap()
self["green"] = Pixmap()
self["screensaver"] = MediaPixmap()
self.PlaySingle = 0
MC_AudioPlayer.STATE = "NONE"
self.playlist = PlayList()
MC_AudioPlayer.playlistplay = 0
MC_AudioPlayer.currPlaying = -1
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEOF: self.doEOF,
iPlayableService.evStopped: self.StopPlayback,
iPlayableService.evUser+11: self.__evDecodeError,
iPlayableService.evUser+12: self.__evPluginError,
iPlayableService.evUser+13: self["coverArt"].embeddedCoverArt,
iPlayableService.evUser+14: self["screensaver"].screensaver
})
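# evUser+11/+12 carry decode and plugin errors; evUser+13 fires once embedded ID3
# cover art has been extracted (read back from /tmp/.id3coverart), evUser+14
# drives the screensaver pixmap (offsets as used by the Enigma2 media framework).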
self["actions"] = HelpableActionMap(self, "MC_AudioPlayerActions",
{
"ok": (self.KeyOK, "Play selected file"),
"playpause": (self.PlayPause, "Play / Pause"),
"cancel": (self.Exit, "Exit Audio Player"),
"left": (self.leftUp, "List Top"),
"right": (self.rightDown, "List Bottom"),
"up": (self.up, "List up"),
"down": (self.down, "List down"),
"menu": (self.showMenu, "File / Folder Options"),
"video": (self.visibility, "Show / Hide Player"),
"info": (self.showLyrics, "Lyrics"),
"stop": (self.StopPlayback, "Stop Playback"),
"red": (self.Playlists, "Playlists"),
"green": (self.Repeat, "Repeat"),
"yellow": (self.addFiletoPls, "Add file to playlist"),
"blue": (self.Settings, "Settings"),
"next": (self.KeyNext, "Next song"),
"previous": (self.KeyPrevious, "Previous song"),
}, -2)
self.playlistparsers = {}
self.addPlaylistParser(PlaylistIOM3U, "m3u")
self.addPlaylistParser(PlaylistIOPLS, "pls")
self.addPlaylistParser(PlaylistIOInternal, "e2pls")
currDir = config.plugins.mc_ap.lastDir.value
if not pathExists(currDir):
currDir = "/"
sort = config.plugins.mc_ap_sortmode.enabled.value
self["currentfolder"].setText(str(currDir))
self.filelist = []
self["filelist"] = []
inhibitDirs = ["/bin", "/boot", "/dev", "/dev.static", "/etc", "/lib" , "/proc", "/ram", "/root" , "/sbin", "/sys", "/tmp", "/usr", "/var"]
self.filelist = FileList(currDir, useServiceRef = True, showDirectories = True, showFiles = True, matchingPattern = "(?i)^.*\.(mp2|mp3|wav|wave|wma|m4a|ogg|ra|flac|m3u|pls|e2pls)", inhibitDirs = inhibitDirs, sort = sort)
self["filelist"] = self.filelist
self["filelist"].show()
self.JpgTimer = eTimer()
self.JpgTimer.callback.append(self.showBackgroundJPG)
self.getJPG()
self.FileInfoTimer = eTimer()
self.FileInfoTimer.callback.append(self.updateFileInfo)
self.onLayoutFinish.append(self.updategreen)
def Repeat(self):
if config.plugins.mc_ap.repeat.getValue() == "off":
config.plugins.mc_ap.repeat.value = "single"
self["green"].instance.setPixmapFromFile(mcpath +"icons/repeatonegreen.png")
elif config.plugins.mc_ap.repeat.getValue() == "single":
config.plugins.mc_ap.repeat.value = "all"
self["green"].instance.setPixmapFromFile(mcpath +"icons/repeatallgreen.png")
else:
config.plugins.mc_ap.repeat.value = "off"
self["green"].instance.setPixmapFromFile(mcpath +"icons/repeatoffgreen.png")
config.plugins.mc_ap.save()
def updategreen(self):
if config.plugins.mc_ap.repeat.getValue() == "all":
self["green"].instance.setPixmapFromFile(mcpath +"icons/repeatallgreen.png")
elif config.plugins.mc_ap.repeat.getValue() == "single":
self["green"].instance.setPixmapFromFile(mcpath +"icons/repeatonegreen.png")
else:
return
def unlockShow(self):
return
def lockShow(self):
return
def up(self):
self["filelist"].up()
# if config.plugins.mc_global.vfd.value == "on":
# evfd.getInstance().vfd_write_string(self["filelist"].getName())
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def down(self):
self["filelist"].down()
# if config.plugins.mc_global.vfd.value == "on":
# evfd.getInstance().vfd_write_string(self["filelist"].getName())
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def leftUp(self):
self["filelist"].pageUp()
# if config.plugins.mc_global.vfd.value == "on":
# evfd.getInstance().vfd_write_string(self["filelist"].getName())
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def rightDown(self):
self["filelist"].pageDown()
# if config.plugins.mc_global.vfd.value == "on":
# evfd.getInstance().vfd_write_string(self["filelist"].getName())
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def KeyOK(self):
if self["filelist"].canDescent():
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
self.filelist.descent()
self["currentfolder"].setText(str(self.filelist.getCurrentDirectory()))
else:
if self.filelist.getServiceRef().type == 4098: # playlist
ServiceRef = self.filelist.getServiceRef()
extension = ServiceRef.getPath()[ServiceRef.getPath().rfind('.') + 1:]
if self.playlistparsers.has_key(extension):
self.playlist.clear()
playlist = self.playlistparsers[extension]()
list = playlist.open(ServiceRef.getPath())
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
MC_AudioPlayer.currPlaying = 0
self.PlayServicepls()
else:
self.PlaySingle = 1
self.PlayService()
def PlayPause(self):
if MC_AudioPlayer.STATE == "PLAY":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.pause()
MC_AudioPlayer.STATE = "PAUSED"
self["play"].instance.setPixmapFromFile(mcpath +"icons/pause_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "PAUSED":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.unpause()
MC_AudioPlayer.STATE = "PLAY"
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
else:
self.KeyOK()
def KeyNext(self):
if MC_AudioPlayer.STATE != "NONE":
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
if MC_AudioPlayer.playlistplay == 1:
next = self.playlist.getCurrentIndex() + 1
if next < len(self.playlist):
MC_AudioPlayer.currPlaying = MC_AudioPlayer.currPlaying + 1
else:
MC_AudioPlayer.currPlaying = 0
self.PlayServicepls()
else:
self.down()
self.PlayService()
def KeyPrevious(self):
if MC_AudioPlayer.STATE != "NONE":
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
if MC_AudioPlayer.playlistplay == 1:
next = self.playlist.getCurrentIndex() - 1
if next != -1:
MC_AudioPlayer.currPlaying = MC_AudioPlayer.currPlaying - 1
else:
MC_AudioPlayer.currPlaying = 0
self.PlayServicepls()
else:
self.up()
self.PlayService()
def visibility(self, force=1):
if self.isVisible == True:
self.isVisible = False
self.hide()
else:
self.isVisible = True
self.show()
def Playlists(self):
self.session.openWithCallback(self.updd, MC_AudioPlaylist)
def updd(self):
self.updateFileInfo()
sort = config.plugins.mc_ap_sortmode.enabled.value
self.filelist.refresh(sort)
if MC_AudioPlayer.STATE == "PLAY":
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "PAUSED":
self["play"].instance.setPixmapFromFile(mcpath +"icons/pause_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "NONE":
self["play"].instance.setPixmapFromFile(mcpath +"icons/stop_enabled.png")
else:
return
def PlayService(self):
MC_AudioPlayer.playlistplay = 0 # reset the class-level flag (a bare local assignment would be a no-op)
self.JpgTimer.stop()
self.session.nav.playService(self["filelist"].getServiceRef())
MC_AudioPlayer.STATE = "PLAY"
self.FileInfoTimer.start(2000, True)
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
path = self["filelist"].getCurrentDirectory()
self["coverArt"].updateCoverArt(path)
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
def PlayServicepls(self):
MC_AudioPlayer.playlistplay = 1
self.session.nav.playService(self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()])
MC_AudioPlayer.STATE = "PLAY"
self.FileInfoTimer.start(2000, True)
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
#path = self["filelist"].getCurrentDirectory() + self["filelist"].getFilename()
#self["coverArt"].updateCoverArt(path)
def StopPlayback(self):
if self.isVisible == False:
self.show()
self.isVisible = True
if self.session.nav.getCurrentService() is None:
return
else:
self.session.nav.stopService()
if config.plugins.mc_ap.showJpg.getValue():
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
MC_AudioPlayer.STATE = "NONE"
self["play"].instance.setPixmapFromFile(mcpath +"icons/stop_enabled.png")
def JumpToFolder(self, jumpto = None):
if jumpto is None:
return
else:
self["filelist"].changeDir(jumpto)
self["currentfolder"].setText(("%s") % (jumpto))
def updateFileInfo(self):
currPlay = self.session.nav.getCurrentService()
if currPlay is not None:
sTitle = currPlay.info().getInfoString(iServiceInformation.sTagTitle)
sArtist = currPlay.info().getInfoString(iServiceInformation.sTagArtist)
sAlbum = currPlay.info().getInfoString(iServiceInformation.sTagAlbum)
sGenre = currPlay.info().getInfoString(iServiceInformation.sTagGenre)
sComment = currPlay.info().getInfoString(iServiceInformation.sTagComment)
sYear = currPlay.info().getInfoString(iServiceInformation.sTagDate)
if sTitle == "":
sTitle = currPlay.info().getName().split('/')[-1]
self["fileinfo"].setText(_("Title: ") + sTitle + _("\nArtist: ") + sArtist + _("\nAlbum: ") + sAlbum + _("\nYear: ") + sYear + _("\nGenre: ") + sGenre + _("\nComment: ") + sComment)
def addFiletoPls(self):
if self.filelist.canDescent():
x = self.filelist.getName()
if x == "..":
return
self.addDirtoPls(self.filelist.getSelection()[0])
elif self.filelist.getServiceRef().type == 4098: # playlist
ServiceRef = self.filelist.getServiceRef()
extension = ServiceRef.getPath()[ServiceRef.getPath().rfind('.') + 1:]
if self.playlistparsers.has_key(extension):
playlist = self.playlistparsers[extension]()
list = playlist.open(ServiceRef.getPath())
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
else:
self.playlist.addFile(self.filelist.getServiceRef())
self.playlist.updateList()
def addDirtoPls(self, directory, recursive = True):
if directory == '/':
return
filelist = FileList(directory, useServiceRef = True, showMountpoints = False, isTop = True)
for x in filelist.getFileList():
if x[0][1] == True: #isDir -> recurse instead of aborting the whole add
if recursive and x[0][0] != directory:
self.addDirtoPls(x[0][0])
elif filelist.getServiceRef() and filelist.getServiceRef().type == 4097:
self.playlist.addFile(x[0][0])
self.playlist.updateList()
def deleteFile(self):
self.service = self.filelist.getServiceRef()
if self.service.type != 4098 and self.session.nav.getCurrentlyPlayingServiceOrGroup() is not None:
if self.service == self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.StopPlayback()
self.session.openWithCallback(self.deleteFileConfirmed, MessageBox, _("Do you really want to delete this file?"))
def deleteFileConfirmed(self, confirmed):
if confirmed:
delfile = self["filelist"].getFilename()
os.remove(delfile)
sort = config.plugins.mc_ap_sortmode.enabled.value
self.filelist.refresh(sort)
def deleteDir(self):
self.session.openWithCallback(self.deleteDirConfirmed, MessageBox, _("Do you really want to delete this directory and its content?"))
def deleteDirConfirmed(self, confirmed):
if confirmed:
import shutil
deldir = self.filelist.getSelection()[0]
shutil.rmtree(deldir)
sort = config.plugins.mc_ap_sortmode.enabled.value
self.filelist.refresh(sort)
def getJPG(self):
if config.plugins.mc_ap.whichjpg.value == "default":
path = mcpath +"saver/"
else:
path = config.plugins.mc_ap.whichjpg.value
for root, dirs, files in os.walk(path):
for name in files:
if name.endswith(".jpg"):
self.jpgList.append(name)
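# note: only bare filenames are collected, so images must sit directly in the
# saver directory (showBackgroundJPG joins them back onto the top-level path)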
def showBackgroundJPG(self):
if len(self.jpgList) > 0:
if self.jpgIndex < len(self.jpgList) -1:
self.jpgIndex += 1
else:
self.jpgIndex = 0
print "MediaCenter: Last JPG Index: " + str(self.jpgLastIndex)
if self.jpgLastIndex != self.jpgIndex or self.jpgLastIndex == -1:
if config.plugins.mc_ap.whichjpg.value == "default":
path = mcpath +"saver/" + self.jpgList[self.jpgIndex]
else:
path = config.plugins.mc_ap.whichjpg.value + self.jpgList[self.jpgIndex]
self["screensaver"].screensaver(path)
self.jpgLastIndex = self.jpgIndex
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
else:
print "MediaCenter: No Background Files found ..."
def doEOF(self):
if MC_AudioPlayer.playlistplay == 1:
next = self.playlist.getCurrentIndex() + 1
if next < len(self.playlist):
MC_AudioPlayer.currPlaying = MC_AudioPlayer.currPlaying + 1
self.PlayServicepls()
elif config.plugins.mc_ap.repeat.getValue() == "single":
self.StopPlayback()
self.PlayService()
elif config.plugins.mc_ap.repeat.getValue() == "all":
self.down()
if self.filelist.getName() == "..":
self.down()
self.checkisdir()
self.PlayService()
else:
self.down()
self.PlayService()
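# doEOF may land on a directory entry when auto-advancing; skip until a file is found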
def checkisdir(self):
if self["filelist"].canDescent():
self.down()
self.checkisdir()
else:
self.PlayService()
def __evDecodeError(self):
currPlay = self.session.nav.getCurrentService()
sVideoType = currPlay.info().getInfoString(iServiceInformation.sVideoType)
self.session.open(MessageBox, _("This Dreambox can't decode %s video streams!") % sVideoType, type = MessageBox.TYPE_INFO,timeout = 20 )
def __evPluginError(self):
currPlay = self.session.nav.getCurrentService()
message = currPlay.info().getInfoString(iServiceInformation.sUser+12)
self.session.open(MessageBox, message, type = MessageBox.TYPE_INFO,timeout = 20 )
def addPlaylistParser(self, parser, extension):
self.playlistparsers[extension] = parser
def Shuffle(self):
if self.currPlaying == 1:
return
sort = "shuffle"
self.filelist.refresh(sort)
def showMenu(self):
menu = []
menu.append((_("shuffle"), "shuffle"))
if self.filelist.canDescent():
x = self.filelist.getName()
if x == "..":
return
menu.append((_("add directory to playlist"), "copydir"))
menu.append((_("delete directory"), "deletedir"))
else:
menu.append((_("add file to playlist"), "copyfile"))
menu.append((_("add file to playlist and play"), "copyandplay"))
menu.append((_("add all files in directory to playlist"), "copyfiles"))
menu.append((_("delete file"), "deletefile"))
self.session.openWithCallback(self.menuCallback, ChoiceBox, title="", list=menu)
def menuCallback(self, choice):
if choice is None:
return
if choice[1] == "copydir":
self.addDirtoPls(self.filelist.getSelection()[0])
elif choice[1] == "deletedir":
self.deleteDir()
elif choice[1] == "copyfile":
self.addFiletoPls()
elif choice[1] == "copyandplay":
self.addFiletoPls()
MC_AudioPlayer.currPlaying = len(self.playlist) - 1
self.PlayServicepls()
elif choice[1] == "copyfiles":
self.addDirtoPls(os_path.dirname(self.filelist.getSelection()[0].getPath()) + "/", recursive = False)
elif choice[1] == "deletefile":
self.deleteFile()
elif choice[1] == "shuffle":
self.Shuffle()
def Settings(self):
self.session.openWithCallback(self.updd, AudioPlayerSettings)
def Exit(self):
if self.isVisible == False:
self.visibility()
return
if self.filelist.getCurrentDirectory() is None:
config.plugins.mc_ap.lastDir.value = "devicelist"
else:
config.plugins.mc_ap.lastDir.value = self.filelist.getCurrentDirectory()
self.FileInfoTimer.stop()
del self["coverArt"].picload
del self["screensaver"].picload
if os.path.isfile("/tmp/.ac3on"):
config.av.downmix_ac3.value = False
config.av.downmix_ac3.save()
os.remove("/tmp/.ac3on")
config.plugins.mc_ap.save()
if self.session.nav.getCurrentService() is not None:
self.session.nav.stopService()
MC_AudioPlayer.STATE = "NONE"
# if config.plugins.mc_global.vfd.value == "on":
# evfd.getInstance().vfd_write_string(_("My Music"))
self.close()
def screensavercheckup(self):
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
def showLyrics(self):
if MC_AudioPlayer.STATE == "PLAY":
self.session.openWithCallback(self.updd, Lyrics)
class MC_WebRadio(Screen, HelpableScreen):
def __init__(self, session):
Screen.__init__(self, session)
HelpableScreen.__init__(self)
self.jpgList = []
self.jpgIndex = 0
self.jpgLastIndex = -1
self.isVisible = True
self["key_blue"] = Button(_("Settings"))
self["fileinfo"] = Label()
try:
if config.av.downmix_ac3.value == False:
config.av.downmix_ac3.value = True
config.av.downmix_ac3.save()
os.system("touch /tmp/.ac3on")
except Exception, e:
print "Media Center: no ac3"
self["play"] = Pixmap()
self["screensaver"] = MediaPixmap()
MC_AudioPlayer.STATE = "NONE"
self.playlist = PlayList()
MC_AudioPlayer.playlistplay = 0
MC_AudioPlayer.currPlaying = -1
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEOF: self.doEOF,
iPlayableService.evStopped: self.StopPlayback,
iPlayableService.evUser+11: self.__evDecodeError,
iPlayableService.evUser+12: self.__evPluginError,
iPlayableService.evUser+14: self["screensaver"].screensaver
})
self["actions"] = HelpableActionMap(self, "MC_AudioPlayerActions",
{
"ok": (self.KeyOK, "Play selected file"),
"playpause": (self.PlayPause, "Play / Pause"),
"cancel": (self.Exit, "Exit Audio Player"),
"left": (self.leftUp, "List Top"),
"right": (self.rightDown, "List Bottom"),
"up": (self.up, "List up"),
"down": (self.down, "List down"),
"video": (self.visibility, "Show / Hide Player"),
"green": (self.showMenu, "Menu"),
"stop": (self.StopPlayback, "Stop Playback"),
"red": (self.deleteFile, "Delete"),
"blue": (self.Settings, "Settings"),
}, -2)
self.playlistparsers = {}
self.addPlaylistParser(PlaylistIOM3U, "m3u")
self.addPlaylistParser(PlaylistIOPLS, "pls")
self.addPlaylistParser(PlaylistIOInternal, "e2pls")
currDir = mcpath +"radio/"
if not pathExists(currDir):
currDir = "/"
self.filelist = []
self["filelist"] = []
inhibitDirs = ["/bin", "/boot", "/dev", "/dev.static", "/etc", "/lib" , "/proc", "/ram", "/root" , "/sbin", "/sys", "/tmp", "/usr", "/var"]
self.filelist = FileList(currDir, useServiceRef = True, showDirectories = False, showFiles = True, matchingPattern = "(?i)^.*\.(m3u|pls|e2pls)", additionalExtensions = "4098:m3u 4098:e2pls 4098:pls")
self["filelist"] = self.filelist
self["filelist"].show()
self.JpgTimer = eTimer()
self.JpgTimer.callback.append(self.showBackgroundJPG)
self.getJPG()
self.FileInfoTimer = eTimer()
self.FileInfoTimer.callback.append(self.updateFileInfo)
def unlockShow(self):
return
def lockShow(self):
return
def up(self):
self["filelist"].up()
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def down(self):
self["filelist"].down()
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def leftUp(self):
self["filelist"].pageUp()
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def rightDown(self):
self["filelist"].pageDown()
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
def KeyOK(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
ServiceRef = self.filelist.getServiceRef()
extension = ServiceRef.getPath()[ServiceRef.getPath().rfind('.') + 1:]
if self.playlistparsers.has_key(extension):
self.playlist.clear()
playlist = self.playlistparsers[extension]()
list = playlist.open(ServiceRef.getPath())
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
MC_AudioPlayer.currPlaying = 0
self.PlayServicepls()
def PlayPause(self):
if MC_AudioPlayer.STATE == "PLAY":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.pause()
MC_AudioPlayer.STATE = "PAUSED"
self["play"].instance.setPixmapFromFile(mcpath +"icons/pause_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "PAUSED":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.unpause()
MC_AudioPlayer.STATE = "PLAY"
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
else:
self.KeyOK()
def visibility(self, force=1):
if self.isVisible == True:
self.isVisible = False
self.hide()
else:
self.isVisible = True
self.show()
def updd(self):
self.updateFileInfo()
sort = config.plugins.mc_ap_sortmode.enabled.value
self.filelist.refresh(sort)
if MC_AudioPlayer.STATE == "PLAY":
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "PAUSED":
self["play"].instance.setPixmapFromFile(mcpath +"icons/pause_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
elif MC_AudioPlayer.STATE == "NONE":
self["play"].instance.setPixmapFromFile(mcpath +"icons/stop_enabled.png")
else:
return
def PlayServicepls(self):
MC_AudioPlayer.playlistplay = 1
self.session.nav.playService(self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()])
MC_AudioPlayer.STATE = "PLAY"
self.FileInfoTimer.start(2000, True)
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
self.playlist.clear()
def StopPlayback(self):
if self.isVisible == False:
self.show()
self.isVisible = True
if self.session.nav.getCurrentService() is None:
return
else:
self.session.nav.stopService()
if config.plugins.mc_ap.showJpg.getValue():
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
MC_AudioPlayer.STATE = "NONE"
self["play"].instance.setPixmapFromFile(mcpath +"icons/stop_enabled.png")
def updateFileInfo(self):
currPlay = self.session.nav.getCurrentService()
if currPlay is not None:
sTitle = currPlay.info().getInfoString(iServiceInformation.sTagTitle)
sArtist = currPlay.info().getInfoString(iServiceInformation.sTagArtist)
sAlbum = currPlay.info().getInfoString(iServiceInformation.sTagAlbum)
sGenre = currPlay.info().getInfoString(iServiceInformation.sTagGenre)
sComment = currPlay.info().getInfoString(iServiceInformation.sTagComment)
sYear = currPlay.info().getInfoString(iServiceInformation.sTagDate)
if sTitle == "":
sTitle = currPlay.info().getName().split('/')[-1]
self["fileinfo"].setText(_("Title: ") + sTitle + _("\nArtist: ") + sArtist + _("\nAlbum: ") + sAlbum + _("\nYear: ") + sYear + _("\nGenre: ") + sGenre + _("\nComment: ") + sComment)
self.FileInfoTimer.start(10000, True)
def deleteFile(self):
self.service = self.filelist.getServiceRef()
if self.service.type != 4098 and self.session.nav.getCurrentlyPlayingServiceOrGroup() is not None:
if self.service == self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.StopPlayback()
self.session.openWithCallback(self.deleteFileConfirmed, MessageBox, _("Do you really want to delete this file?"))
def deleteFileConfirmed(self, confirmed):
if confirmed:
delfile = self["filelist"].getFilename()
os.remove(delfile)
sort = config.plugins.mc_ap_sortmode.enabled.value
self.filelist.refresh(sort)
def getJPG(self):
if config.plugins.mc_ap.whichjpg.value == "default":
path = mcpath +"saver/"
else:
path = config.plugins.mc_ap.whichjpg.value
for root, dirs, files in os.walk(path):
for name in files:
if name.endswith(".jpg"):
self.jpgList.append(name)
def showBackgroundJPG(self):
if len(self.jpgList) > 0:
if self.jpgIndex < len(self.jpgList) -1:
self.jpgIndex += 1
else:
self.jpgIndex = 0
if self.jpgLastIndex != self.jpgIndex or self.jpgLastIndex == -1:
if config.plugins.mc_ap.whichjpg.value == "default":
path = mcpath +"saver/" + self.jpgList[self.jpgIndex]
else:
path = config.plugins.mc_ap.whichjpg.value + self.jpgList[self.jpgIndex]
self["screensaver"].screensaver(path)
self.jpgLastIndex = self.jpgIndex
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
else:
print "MediaCenter: No Background Files found ..."
def doEOF(self):
self.StopPlayback()
if config.plugins.mc_ap.showJpg.getValue():
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
def __evDecodeError(self):
currPlay = self.session.nav.getCurrentService()
sVideoType = currPlay.info().getInfoString(iServiceInformation.sVideoType)
self.session.open(MessageBox, _("This Dreambox can't decode %s video streams!") % sVideoType, type = MessageBox.TYPE_INFO,timeout = 20 )
def __evPluginError(self):
currPlay = self.session.nav.getCurrentService()
message = currPlay.info().getInfoString(iServiceInformation.sUser+12)
self.session.open(MessageBox, message, type = MessageBox.TYPE_INFO,timeout = 20 )
def addPlaylistParser(self, parser, extension):
self.playlistparsers[extension] = parser
def Settings(self):
self.session.openWithCallback(self.updd, AudioPlayerSettings)
def Exit(self):
if self.isVisible == False:
self.visibility()
return
self.FileInfoTimer.stop()
del self["screensaver"].picload
if os.path.isfile("/tmp/.ac3on"):
config.av.downmix_ac3.value = False
config.av.downmix_ac3.save()
os.remove("/tmp/.ac3on")
if self.session.nav.getCurrentService() is not None:
self.session.nav.stopService()
MC_AudioPlayer.STATE = "NONE"
self.close()
def screensavercheckup(self):
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
def showMenu(self):
if fileExists("/tmp/index.html"):
os.remove("/tmp/index.html")
menu = []
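# (label, serverpath) pairs: the second element is the directory fragment fetched
# from the radio list server (radirl); spaces are pre-encoded as %20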
menu.append((_("70-80er"), "70-80er/"))
menu.append((_("Alternative"), "Alternative/"))
menu.append((_("Ambient"), "Ambient/"))
menu.append((_("Artist"), "Artist/"))
menu.append((_("Big Band"), "Big%20Band/"))
menu.append((_("Blues"), "Blues/"))
menu.append((_("Bluegrass"), "Bluegrass/"))
menu.append((_("Chillout"), "Chillout/"))
menu.append((_("Classic"), "classical/"))
menu.append((_("Classic Rock"), "classic%20rock/"))
menu.append((_("Countrymusic"), "Countrymusik/"))
menu.append((_("Hip Hop"), "HipHop/"))
menu.append((_("Hits"), "Hits/"))
menu.append((_("Moviemusic"), "Moviemusik/"))
menu.append((_("Oldies"), "Oldies/"))
menu.append((_("Party"), "Party/"))
menu.append((_("Reggae"), "Reggae/"))
menu.append((_("Rock"), "Rock/"))
menu.append((_("Rundfunk"), "Rundfunk/"))
menu.append((_("Smooth"), "Smooth/"))
menu.append((_("Soul"), "Soul/"))
menu.append((_("Techno/House"), "Techno/"))
menu.append((_("Worldmusic"), "Worldmusik/"))
self.session.openWithCallback(self.menuCallback, ChoiceBox, title="", list=menu)
def menuCallback(self, choice):
if choice is None:
return
os.system("echo "+ choice[1] +" > /tmp/.webselect | wget -O /tmp/index.html "+ radirl +""+ choice[1])
self.session.openWithCallback(self.updd, MC_WebDown)
class MC_WebDown(Screen):
def __init__(self, session):
Screen.__init__(self, session)
list = []
if fileExists("/tmp/index.html"):
names = open("/tmp/index.html").read().split('\n')
for x in names:
list.append((x, _(x)))
self["menu"] = List(list)
self["actions"] = ActionMap(["OkCancelActions", "DirectionActions"],
{
"cancel": self.exit,
"ok": self.okbuttonClick
}, -1)
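# the selected genre fragment was stashed in /tmp/.webselect by MC_WebRadio.menuCallback;
# the chosen playlist file is downloaded into the plugin's radio/ directory below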
def okbuttonClick(self):
selection = self["menu"].getCurrent()
if selection is not None:
gen = open("/tmp/.webselect").read().split('\n')
os.system("wget -O '"+ mcpath +"radio/"+ selection[1] +"' '"+ radirl +""+ gen[0] +""+ selection[1].replace(" ", "%20") +"'")
os.remove("/tmp/index.html")
self.close()
def exit(self):
os.remove("/tmp/index.html")
self.close()
class MC_AudioPlaylist(Screen, InfoBarSeek):
def __init__(self, session):
Screen.__init__(self, session)
InfoBarSeek.__init__(self, actionmap = "MediaPlayerSeekActions")
self["key_red"] = Button("Back")
self["key_green"] = Button(" ")
self["key_yellow"] = Button(" ")
self["key_blue"] = Button(_("File Browser"))
self.jpgList = []
self.jpgIndex = 0
self.jpgLastIndex = -1
self["play"] = Pixmap()
self.isVisible = True
self["fileinfo"] = Label()
#self["coverArt"] = MediaPixmap()
self["screensaver"] = MediaPixmap()
self.FileInfoTimer = eTimer()
self.FileInfoTimer.callback.append(self.updateFileInfo)
self.PlaySingle = 0
self.playlist = PlayList()
self["playlist"] = self.playlist
self.playlistIOInternal = PlaylistIOInternal()
self.playlistparsers = {}
self.addPlaylistParser(PlaylistIOM3U, "m3u")
self.addPlaylistParser(PlaylistIOPLS, "pls")
self.addPlaylistParser(PlaylistIOInternal, "e2pls")
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEOF: self.fileupdate,
#iPlayableService.evStopped: self.StopPlayback,
#iPlayableService.evUser+13: self["coverArt"].embeddedCoverArt,
iPlayableService.evUser+14: self["screensaver"].screensaver
})
self["actions"] = HelpableActionMap(self, "MC_AudioPlayerActions",
{
"ok": (self.KeyOK, "Play from selected file"),
"cancel": (self.Exit, "Exit Audio Player"),
"left": (self.leftUp, "List Top"),
"right": (self.rightDown, "List Bottom"),
"up": (self.up, "List up"),
"down": (self.down, "List down"),
"menu": (self.showMenu, "File / Folder Options"),
"video": (self.visibility, "Show / Hide Player"),
"info": (self.showLyrics, "Lyrics"),
"stop": (self.StopPlayback, "Stop Playback"),
"red": (self.Exit, "Close Playlist"),
#"green": (self.close, "Play All"),
#"yellow": (self.Exit, "Playlists"),
"blue": (self.Exit, "Close Playlist"),
"next": (self.KeyNext, "Next song"),
"previous": (self.KeyPrevious, "Previous song"),
"playpause": (self.PlayPause, "Play / Pause"),
"stop": (self.StopPlayback, "Stop"),
}, -2)
self.JpgTimer = eTimer()
self.JpgTimer.callback.append(self.showBackgroundJPG)
self.getJPG()
if MC_AudioPlayer.STATE != "NONE":
self.updateFileInfo()
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
def unlockShow(self):
return
def lockShow(self):
return
def up(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
self["playlist"].up()
def down(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
self["playlist"].down()
def leftUp(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
self["playlist"].pageUp()
def rightDown(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
self["playlist"].pageDown()
def KeyOK(self):
if len(self.playlist.getServiceRefList()):
self.playlist.setCurrentPlaying(self.playlist.getSelectionIndex())
self.PlayService()
def PlayPause(self):
if MC_AudioPlayer.STATE != "NONE":
if MC_AudioPlayer.STATE == "PLAY":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.pause()
MC_AudioPlayer.STATE = "PAUSED"
elif MC_AudioPlayer.STATE == "PAUSED":
service = self.session.nav.getCurrentService()
pausable = service.pause()
pausable.unpause()
MC_AudioPlayer.STATE = "PLAY"
else:
self.KeyOK()
def KeyNext(self):
if MC_AudioPlayer.STATE != "NONE":
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
if MC_AudioPlayer.playlistplay == 1:
next = self.playlist.getCurrentIndex() + 1
if next < len(self.playlist):
MC_AudioPlayer.currPlaying = MC_AudioPlayer.currPlaying + 1
else:
MC_AudioPlayer.currPlaying = 0
self.PlayService()
else:
self.session.open(MessageBox, _("You have to close playlist before you can go to the next song while playing from file browser."), MessageBox.TYPE_ERROR)
def KeyPrevious(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
if MC_AudioPlayer.playlistplay == 1:
next = self.playlist.getCurrentIndex() - 1
if next != -1:
MC_AudioPlayer.currPlaying = MC_AudioPlayer.currPlaying - 1
else:
MC_AudioPlayer.currPlaying = 0
self.PlayService()
else:
self.session.open(MessageBox, _("You have to close playlist before you can go to the previous song while playing from file browser."), MessageBox.TYPE_ERROR)
def PlayService(self):
MC_AudioPlayer.playlistplay = 1
self.session.nav.playService(self.playlist.getServiceRefList()[self.playlist.getCurrentIndex()])
MC_AudioPlayer.STATE = "PLAY"
self.FileInfoTimer.start(2000, True)
self["play"].instance.setPixmapFromFile(mcpath +"icons/play_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
# path = self["filelist"].getCurrentDirectory()
# self["coverArt"].updateCoverArt(path)
def StopPlayback(self):
if self.isVisible == False:
self.show()
self.isVisible = True
if self.session.nav.getCurrentService() is None:
return
else:
self.session.nav.stopService()
MC_AudioPlayer.STATE = "NONE"
self["play"].instance.setPixmapFromFile(mcpath +"icons/stop_enabled.png")
if config.plugins.mc_ap.showJpg.getValue():
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
def visibility(self, force=1):
if self.isVisible == True:
self.isVisible = False
self.hide()
else:
self.isVisible = True
self.show()
def Settings(self):
self.session.openWithCallback(self.updd, AudioPlayerSettings)
def updd(self):
if MC_AudioPlayer.STATE != "NONE" and config.plugins.mc_ap.showJpg.getValue():
self.screensavercheckup()
else:
return
def Exit(self):
del self["screensaver"].picload
if config.plugins.mc_ap.showJpg.getValue():
self.JpgTimer.stop()
self.close()
def fileupdate(self):
self.FileInfoTimer.start(2000, True)
if config.plugins.mc_ap.showJpg.getValue():
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
def updateFileInfo(self):
currPlay = self.session.nav.getCurrentService()
if currPlay is not None:<|fim▁hole|> sArtist = currPlay.info().getInfoString(iServiceInformation.sTagArtist)
sAlbum = currPlay.info().getInfoString(iServiceInformation.sTagAlbum)
sGenre = currPlay.info().getInfoString(iServiceInformation.sTagGenre)
sComment = currPlay.info().getInfoString(iServiceInformation.sTagComment)
sYear = currPlay.info().getInfoString(iServiceInformation.sTagDate)
if sTitle == "":
sTitle = currPlay.info().getName().split('/')[-1]
self["fileinfo"].setText("Title: " + sTitle + "\nArtist: " + sArtist + "\nAlbum: " + sAlbum + "\nYear: " + sYear + "\nGenre: " + sGenre + "\nComment: " + sComment)
def save_playlist(self):
from Screens.InputBox import InputBox
self.session.openWithCallback(self.save_pls,InputBox, title=_("Please enter filename (empty = use current date)"),windowTitle = _("Save Playlist"))
def save_pls(self, name):
if name is not None:
name = name.strip()
if name == "":
name = strftime("%y%m%d_%H%M%S")
name += ".e2pls"
self.playlistIOInternal.clear()
for x in self.playlist.list:
self.playlistIOInternal.addService(ServiceReference(x[0]))
self.playlistIOInternal.save(resolveFilename(SCOPE_PLAYLIST) + name)
def load_playlist(self):
listpath = []
playlistdir = resolveFilename(SCOPE_PLAYLIST)
try:
for i in os_listdir(playlistdir):
listpath.append((i,playlistdir + i))
except IOError,e:
print "Error while scanning subdirs ",e
self.session.openWithCallback(self.load_pls, ChoiceBox, title=_("Please select a playlist..."), list = listpath)
def load_pls(self,path):
if path is not None:
self.playlist.clear()
extension = path[0].rsplit('.',1)[-1]
if self.playlistparsers.has_key(extension):
playlist = self.playlistparsers[extension]()
list = playlist.open(path[1])
for x in list:
self.playlist.addFile(x.ref)
self.playlist.updateList()
def delete_saved_playlist(self):
listpath = []
playlistdir = resolveFilename(SCOPE_PLAYLIST)
try:
for i in os_listdir(playlistdir):
listpath.append((i,playlistdir + i))
except IOError,e:
print "Error while scanning subdirs ",e
self.session.openWithCallback(self.delete_saved_pls, ChoiceBox, title=_("Please select a playlist to delete..."), list = listpath)
def delete_saved_pls(self,path):
if path is not None:
self.delname = path[1]
self.session.openWithCallback(self.delete_saved_pls_conf, MessageBox, _("Do you really want to delete %s?") % (path[1]))
def delete_saved_pls_conf(self, confirmed):
if confirmed:
try:
os_remove(self.delname)
except OSError,e:
self.session.open(MessageBox, _("Delete failed!"), MessageBox.TYPE_ERROR)
def addPlaylistParser(self, parser, extension):
self.playlistparsers[extension] = parser
def showMenu(self):
menu = []
menu.append((_("delete from playlist"), "deleteentry"))
menu.append((_("clear playlist"), "clear"))
menu.append((_("load playlist"), "loadplaylist"));
menu.append((_("save playlist"), "saveplaylist"));
menu.append((_("delete saved playlist"), "deleteplaylist"));
self.session.openWithCallback(self.menuCallback, ChoiceBox, title="", list=menu)
def menuCallback(self, choice):
if choice is None:
return
if choice[1] == "deleteentry":
self.playlist.deleteFile(self.playlist.getSelectionIndex())
self.playlist.updateList()
elif choice[1] == "clear":
self.playlist.clear()
elif choice[1] == "loadplaylist":
self.load_playlist()
elif choice[1] == "saveplaylist":
self.save_playlist()
elif choice[1] == "deleteplaylist":
self.delete_saved_playlist()
def getJPG(self):
if config.plugins.mc_ap.whichjpg.value == "default":
path = mcpath +"saver/"
else:
path = config.plugins.mc_ap.whichjpg.value
for root, dirs, files in os.walk(path):
for name in files:
if name.endswith(".jpg"):
self.jpgList.append(name)
def showBackgroundJPG(self):
if len(self.jpgList) > 0:
if self.jpgIndex < len(self.jpgList) -1:
self.jpgIndex += 1
else:
self.jpgIndex = 0
if self.jpgLastIndex != self.jpgIndex or self.jpgLastIndex == -1:
path = mcpath +"saver/" + self.jpgList[self.jpgIndex]
self["screensaver"].screensaver(path)
self.jpgLastIndex = self.jpgIndex
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
else:
print "MediaCenter: No Background Files found ..."
def showLyrics(self):
if MC_AudioPlayer.STATE == "PLAY":
self.session.openWithCallback(self.updd, Lyrics)
def screensavercheckup(self):
self.JpgTimer.stop()
self["screensaver"].showDefaultCover()
time = config.plugins.mc_ap.jpg_delay.getValue() * 1000
self.JpgTimer.start(time, True)
class Lyrics(Screen):
if getDesktop(0).size().width() == 1920:
skin = """
<screen name="Lyrics" position="0,0" size="1920,1080" flags="wfNoBorder" backgroundColor="#00000000" title="Lyrics">
<eLabel backgroundColor="#999999" position="50,50" size="620,2" zPosition="1"/>
<widget name="headertext" position="50,73" zPosition="1" size="620,23" font="Regular;20" transparent="1" foregroundColor="#fcc000" backgroundColor="#00000000"/>
<widget name="coverly" position="700,120" size="160,133" zPosition="9" valign="center" halign="center" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/skins/defaultHD/images/no_coverArt.png" transparent="1" alphatest="blend" />
<widget name="resulttext" position="50,100" zPosition="1" size="620,20" font="Regular;16" transparent="1" backgroundColor="#00000000"/>
<widget name="lyric_text" position="50,150" zPosition="2" size="620,350" font="Regular;18" transparent="0" backgroundColor="#00000000"/>
</screen>"""
else:
skin = """
<screen name="Lyrics" position="0,0" size="720,576" flags="wfNoBorder" backgroundColor="#00000000" title="Lyrics">
<eLabel backgroundColor="#999999" position="50,50" size="620,2" zPosition="1"/>
<widget name="headertext" position="50,73" zPosition="1" size="620,23" font="Regular;20" transparent="1" foregroundColor="#fcc000" backgroundColor="#00000000"/>
<widget name="coverly" position="700,120" size="160,133" zPosition="9" valign="center" halign="center" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/BMediaCenter/skins/defaultHD/images/no_coverArt.png" transparent="1" alphatest="blend" />
<widget name="resulttext" position="50,100" zPosition="1" size="620,20" font="Regular;16" transparent="1" backgroundColor="#00000000"/>
<widget name="lyric_text" position="50,150" zPosition="2" size="620,350" font="Regular;18" transparent="0" backgroundColor="#00000000"/>
</screen>"""
def __init__(self, session):
self.session = session
Screen.__init__(self, session)
self["headertext"] = Label(_("Lyrics"))
self["resulttext"] = Label()
self["coverly"] = MediaPixmap()
curPlay = self.session.nav.getCurrentService()
if curPlay is not None:
title = curPlay.info().getInfoString(iServiceInformation.sTagTitle)
os.system("echo '"+ str(title) +"' > /tmp/.oldplaying | echo '"+ str(title) +"' > /tmp/.curplaying ")
self.RFTimer = eTimer()
self.RFTimer.callback.append(self.refresh)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUser+11: self["coverly"].coverlyrics
})
self["actions"] = HelpableActionMap(self, "MC_AudioPlayerActions",
{
"cancel": self.Exit,
"up": self.pageUp,
"left": self.pageUp,
"down": self.pageDown,
"right": self.pageDown
}, -2)
self["lyric_text"] = ScrollLabel()
self.refresh()
self.onLayoutFinish.append(self.startRun)
def refresh(self):
time = 10000
self.RFTimer.start(time, True)
curPlay = self.session.nav.getCurrentService()
title = curPlay.info().getInfoString(iServiceInformation.sTagTitle)
os.system("echo '"+ str(title) +"' > /tmp/.curplaying")
old = open("/tmp/.oldplaying").read()
oldtitle = old.split('\r\n')
tit = open("/tmp/.curplaying").read()
titlee = tit.split('\r\n')
if oldtitle == titlee:
return
else:
self.startRun()
os.system("echo '"+ str(title) +"' > /tmp/.oldplaying")
def startRun(self):
text = getEncodedString(self.getLyricsFromID3Tag()).replace("\r\n","\n")
text = text.replace("\r","\n")
self["lyric_text"].setText(text)
def getLyricsFromID3Tag(self):
curPlay = self.session.nav.getCurrentService()
if curPlay is not None:
titlely = curPlay.info().getInfoString(iServiceInformation.sTagTitle)
artistly = curPlay.info().getInfoString(iServiceInformation.sTagArtist)
if titlely == "":
titlely = curPlay.info().getName().split('/')[-1]
if artistly == "":
artistly = titlely
from urllib import quote
url = "http://api.chartlyrics.com/apiv1.asmx/SearchLyricDirect?artist=%s&song=%s" % (quote(artistly), quote(titlely))
sendUrlCommand(url, None,10).addCallback(self.gotLyrics).addErrback(self.urlError)
return "No lyrics found in id3-tag, trying api.chartlyrics.com..."
def urlError(self, error = None):
if error is not None:
self["resulttext"].setText(str(error.getErrorMessage()))
self["lyric_text"].setText("")
def gotLyrics(self, xmlstring):
from xml.etree.cElementTree import fromstring as cet_fromstring
root = cet_fromstring(xmlstring)
lyrictext = ""
lyrictext = root.findtext("{http://api.chartlyrics.com/}Lyric").encode("utf-8", 'ignore')
self["lyric_text"].setText(lyrictext)
title = root.findtext("{http://api.chartlyrics.com/}LyricSong").encode("utf-8", 'ignore')
artist = root.findtext("{http://api.chartlyrics.com/}LyricArtist").encode("utf-8", 'ignore')
coverly = root.findtext("{http://api.chartlyrics.com/}LyricCovertArtUrl").encode("utf-8", 'ignore')
os.system("wget -O /tmp/.onlinecover "+ coverly +"")
self["coverly"].coverlyrics()
result = _("Response -> lyrics for: %s (%s)") % (title,artist)
self["resulttext"].setText(result)
if not lyrictext:
self["resulttext"].setText(_("No lyrics found"))
self["lyric_text"].setText("")
self["coverly"].showDefaultCover()
def pageUp(self):
self["lyric_text"].pageUp()
def pageDown(self):
self["lyric_text"].pageDown()
def Exit(self):
del self["coverly"].picload
if fileExists("/tmp/.onlinecover"):
os.remove("/tmp/.onlinecover")
if fileExists("/tmp/.curplaying") and fileExists("/tmp/.oldplaying"):
os.system("rm -rf /tmp/.*playing")
self.RFTimer.stop()
self.close()
class MediaPixmap(Pixmap):
def __init__(self):
Pixmap.__init__(self)
self.coverArtFileName = ""
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.paintCoverArtPixmapCB)
self.coverFileNames = ["cover.jpg", "folder.png", "folder.jpg"]
def applySkin(self, desktop, screen):
from Tools.LoadPixmap import LoadPixmap
noCoverFile = None
if self.skinAttributes is not None:
for (attrib, value) in self.skinAttributes:
if attrib == "pixmap":
noCoverFile = value
break
if noCoverFile is None:
noCoverFile = resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/no_coverArt.png")
self.noCoverPixmap = LoadPixmap(noCoverFile)
return Pixmap.applySkin(self, desktop, screen)
def onShow(self):
Pixmap.onShow(self)
from Components.AVSwitch import AVSwitch
sc = AVSwitch().getFramebufferScale()
#0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB)
self.picload.setPara((self.instance.size().width(), self.instance.size().height(), sc[0], sc[1], False, 1, "#00000000"))
def paintCoverArtPixmapCB(self, picInfo=None):
ptr = self.picload.getData()
if ptr is not None:
self.instance.setPixmap(ptr.__deref__())
def updateCoverArt(self, path):
while not path.endswith("/"):
path = path[:-1]
new_coverArtFileName = None
for filename in self.coverFileNames:
if fileExists(path + filename):
new_coverArtFileName = path + filename
if self.coverArtFileName != new_coverArtFileName:
self.coverArtFileName = new_coverArtFileName
if new_coverArtFileName:
self.picload.startDecode(self.coverArtFileName)
else:
self.showDefaultCover()
def showDefaultCover(self):
self.instance.setPixmap(self.noCoverPixmap)
def embeddedCoverArt(self):
self.coverArtFileName = "/tmp/.id3coverart"
self.picload.startDecode(self.coverArtFileName)
def coverlyrics(self):
self.coverArtFileName = "/tmp/.onlinecover"
self.picload.startDecode(self.coverArtFileName)
def screensaver(self, path):
self.picload.startDecode(path)
class AudioPlayerSettings(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.close,
"cancel": self.close,
"left": self.keyLeft,
"right": self.keyRight,
"0": self.keyNumber,
"1": self.keyNumber,
"2": self.keyNumber,
"3": self.keyNumber,
"4": self.keyNumber,
"5": self.keyNumber,
"6": self.keyNumber,
"7": self.keyNumber,
"8": self.keyNumber,
"9": self.keyNumber
}, -1)
self.list = []
self["configlist"] = ConfigList(self.list)
self.list.append(getConfigListEntry(_("Screensaver Enable:"), config.plugins.mc_ap.showJpg))
self.list.append(getConfigListEntry(_("Screensaver Interval"), config.plugins.mc_ap.jpg_delay))
self.list.append(getConfigListEntry(_("Screensaver Style:"), config.plugins.mc_ap.whichjpg))
self.list.append(getConfigListEntry(_("Filelist Sorting:"), config.plugins.mc_ap_sortmode.enabled))
def keyLeft(self):
self["configlist"].handleKey(KEY_LEFT)
def keyRight(self):
self["configlist"].handleKey(KEY_RIGHT)
def keyNumber(self, number):
self["configlist"].handleKey(KEY_0 + number)<|fim▁end|>
|
sTitle = currPlay.info().getInfoString(iServiceInformation.sTagTitle)
|
<|file_name|>VisitorOutput.java<|end_file_name|><|fim▁begin|>package org.xmlcml.ami.visitor;
import java.io.File;
import java.util.List;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.Logger;
import org.xmlcml.ami.util.AMIUtil;
import org.xmlcml.ami.visitable.VisitableInput;
/** manages the output.
*
* Decides whether to create files or directories. May map the structure onto the input structure.
*
* @author pm286
*
*/
public class VisitorOutput {
private static final Logger LOG = Logger.getLogger(VisitorOutput.class);
private static final String DEFAULT_OUTPUT_LOCATION = "target/";
private static final String DEFAULT_BASENAME = "dummy";
private static final String DEFAULT_OUTPUT_SUFFIX = ".xml";
private String outputLocation;
// private VisitableInput visitableInput;
private List<VisitableInput> visitableInputList;
private String extension;
private boolean isDirectory;
private File outputDirectory;
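// Illustrative use (hypothetical caller, not part of this class):
//   new VisitorOutput("target/out/").getOutputDirectoryFile()
// resolves to target/out/<basename>.<extension>; a trailing separator or empty
// extension marks the location as a directory, otherwise the parent directory is used.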
/** reads outputLocation and ducktypes the type (File, Directory, etc.).
*
* @param outputLocation
*/
public VisitorOutput(String outputLocation) {
setDefaults();
this.outputLocation = outputLocation;
generateOutputDirectoryName();
}
/** this creates a default outputLocation
*
*/
public VisitorOutput() {
setDefaults();
}
private void setDefaults() {
outputLocation = DEFAULT_OUTPUT_LOCATION;
extension = DEFAULT_OUTPUT_SUFFIX;
outputDirectory = new File(DEFAULT_OUTPUT_LOCATION);
}
/** not yet used
*
* @param visitableInput
*/
public void setVisitableInputList(List<VisitableInput> visitableInputList) {
this.visitableInputList = visitableInputList;
}
private void generateOutputDirectoryName() {
if (outputLocation == null) { // guard before dereferencing outputLocation
LOG.info("No explicit output location");
return;
}
if (outputLocation.startsWith(AMIUtil.HTTP)) {
throw new RuntimeException("Cannot output to URL: "+outputLocation);
}
if (outputLocation.startsWith(AMIUtil.DOI)) {
throw new RuntimeException("Cannot output to DOI: "+outputLocation);
}
outputLocation = FilenameUtils.normalize(new File(outputLocation).getAbsolutePath());
extension = FilenameUtils.getExtension(outputLocation);
isDirectory = AMIUtil.endsWithSeparator(outputLocation) || extension == null || extension.equals("");
outputDirectory = new File(outputLocation);
}
protected String getOutputLocation() {
return outputLocation;
}
protected String getExtension() {
return extension;
}
protected boolean isDirectory() {
return isDirectory;
}
public File getOutputDirectoryFile() {
if (outputDirectory != null) {
LOG.trace("outputDirectory: "+outputDirectory);
if (outputDirectory.exists() && !outputDirectory.isDirectory()) {
LOG.info("existing file is not a directory: "+outputDirectory);
} else {
ifNotEndsWithSlashUseParentAsOutputDirectory();
outputDirectory.mkdirs();
String baseName = (visitableInputList == null || visitableInputList.size() == 0) ? DEFAULT_BASENAME
: visitableInputList.get(0).getBaseName();
LOG.trace("basename "+baseName);
outputDirectory = new File(outputDirectory, baseName+"."+extension);
}
} else {
throw new RuntimeException("Null output directory");
}
return outputDirectory;
}
private void ifNotEndsWithSlashUseParentAsOutputDirectory() {
if (!outputDirectory.toString().endsWith("/")) {
File parent = outputDirectory.getParentFile();
outputDirectory = (parent == null) ? outputDirectory : parent;
}
}
public void setExtension(String extension) {
this.extension = extension;
}
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>knowledgebase.js<|end_file_name|><|fim▁begin|>/**
* Created by Dennis on 08/11/16.
*/
/*
* action types<|fim▁hole|>/*
* other constants
*/
/*
* action creators
*/
export function setKnowledgebase(k){
return function (dispatch) {
dispatch({type: KNOWLEDGEBASE_CHANGE, knowledgebase: k});
}
}<|fim▁end|>
|
*/
export const KNOWLEDGEBASE_CHANGE = 'KNOWLEDGEBASE_CHANGE'
|
<|file_name|>aggregation.py<|end_file_name|><|fim▁begin|>'''
TODO:
optimize adds, multiplies, 'or' and 'and' as they can accept more than two values
validate type info on specific functions
'''
from .matching import AstHandler, ParseError, DateTimeFunc
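# Illustrative translation (entry point assumed, see AstHandler in .matching):
#   AggregationParser().handle(ast.parse('a + b > 2', mode='eval').body)
#   -> {'$gt': [{'$add': ['$a', '$b']}, 2]}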
class AggregationParser(AstHandler):
FUNC_TO_ARGS = {'concat': '+', # more than 1
'strcasecmp': 2,
'substr': 3,
'toLower': 1,
'toUpper': 1,
'dayOfYear': 1,
'dayOfMonth': 1,
'dayOfWeek': 1,
'year': 1,
'month': 1,
'week': 1,
'hour': 1,
'minute': 1,
'second': 1,
'millisecond': 1,
'date': 1,
'cmp': 2,
'ifnull': 2}
SPECIAL_VALUES = {'False': False,
'false': False,
'True': True,
'true': True,
'None': None,
'null': None}
def handle_Str(self, node):
return node.s
def handle_Num(self, node):
return node.n
def handle_Name(self, node):
return self.SPECIAL_VALUES.get(node.id, '$' + node.id)
def handle_NameConstant(self,node):
return self.SPECIAL_VALUES.get(str(node.value),node.value)
def handle_Attribute(self, node):
return '${0}.{1}'.format(self.handle(node.value), node.attr).replace('$$', '$')
def handle_UnaryOp(self, op):
return {self.handle(op.op): self.handle(op.operand)}
def handle_IfExp(self, op):
return {'$cond': [self.handle(op.test),
self.handle(op.body),
self.handle(op.orelse)]}
def handle_Call(self, node):
name = node.func.id
if name == 'date':
return DateTimeFunc().handle_date(node)
if name not in self.FUNC_TO_ARGS:
raise ParseError('Unsupported function ({0}).'.format(name),
col_offset=node.col_offset)
if len(node.args) != self.FUNC_TO_ARGS[name] and \
self.FUNC_TO_ARGS[name] != '+' or len(node.args) == 0:
raise ParseError('Invalid number of arguments to function {0}'.format(name),
col_offset=node.col_offset)
# because of SERVER-9289 the following fails: {'$year': {'$add' :['$time_stamp', 1]}}
# wrapping both single arg functions in a list solves it: {'$year': [{'$add' :['$time_stamp', 1]}]}
return {'$' + node.func.id: list(map(self.handle, node.args))}
def handle_BinOp(self, node):
return {self.handle(node.op): [self.handle(node.left),
self.handle(node.right)]}
def handle_Not(self, not_node):
return '$not'
def handle_And(self, op):
return '$and'
def handle_Or(self, op):
return '$or'
def handle_BoolOp(self, op):
return {self.handle(op.op): list(map(self.handle, op.values))}
def handle_Compare(self, node):
if len(node.ops) != 1:
raise ParseError('Invalid number of comparators: {0}'.format(len(node.ops)),
col_offset=node.comparators[1].col_offset)
return {self.handle(node.ops[0]): [self.handle(node.left),
self.handle(node.comparators[0])]}
def handle_Gt(self, node):
return '$gt'
def handle_Lt(self,node):
return '$lt'
def handle_GtE(self, node):
return '$gte'
def handle_LtE(self, node):
return '$lte'
def handle_Eq(self, node):
return '$eq'
def handle_NotEq(self, node):
return '$ne'
def handle_Add(self, node):
return '$add'
def handle_Sub(self, node):
return '$subtract'
def handle_Mod(self, node):
return '$mod'
def handle_Mult(self, node):
return '$multiply'
def handle_Div(self, node):
return '$divide'
class AggregationGroupParser(AstHandler):
GROUP_FUNCTIONS = ['addToSet', 'push', 'first', 'last',
'max', 'min', 'avg', 'sum']
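# e.g. handle() of a push(price) call -> {'$push': '$price'}; each group
# function takes exactly one argument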
def handle_Call(self, node):
if len(node.args) != 1:
raise ParseError('The {0} group aggregation function accepts one argument'.format(node.func.id),<|fim▁hole|> col_offset=node.col_offset,
options=self.GROUP_FUNCTIONS)
return {'$' + node.func.id: AggregationParser().handle(node.args[0])}<|fim▁end|>
|
col_offset=node.col_offset)
if node.func.id not in self.GROUP_FUNCTIONS:
raise ParseError('Unsupported group function: {0}'.format(node.func.id),
|
<|file_name|>users.client.routes.js<|end_file_name|><|fim▁begin|>'use strict';
// Setting up route
angular.module('users').config(['$stateProvider',
function ($stateProvider) {
// Users state routing
$stateProvider
.state('settings', {
abstract: true,
url: '/settings',
templateUrl: 'modules/users/client/views/settings/settings.client.view.html',
data: {
// DL - adding supervisor and technician roles
roles: ['user', 'admin', 'technician', 'supervisor']
}<|fim▁hole|> .state('settings.profile', {
url: '/profile',
templateUrl: 'modules/users/client/views/settings/edit-profile.client.view.html'
})
.state('settings.password', {
url: '/password',
templateUrl: 'modules/users/client/views/settings/change-password.client.view.html'
})
.state('settings.accounts', {
url: '/accounts',
templateUrl: 'modules/users/client/views/settings/manage-social-accounts.client.view.html'
})
.state('settings.picture', {
url: '/picture',
templateUrl: 'modules/users/client/views/settings/change-profile-picture.client.view.html'
})
.state('authentication', {
abstract: true,
url: '/authentication',
templateUrl: 'modules/users/client/views/authentication/authentication.client.view.html'
})
.state('authentication.signup', {
url: '/signup',
templateUrl: 'modules/users/client/views/authentication/signup.client.view.html'
})
.state('authentication.signin', {
url: '/signin?err',
templateUrl: 'modules/users/client/views/authentication/signin.client.view.html'
})
.state('password', {
abstract: true,
url: '/password',
template: '<ui-view/>'
})
.state('password.forgot', {
url: '/forgot',
templateUrl: 'modules/users/client/views/password/forgot-password.client.view.html'
})
.state('password.reset', {
abstract: true,
url: '/reset',
template: '<ui-view/>'
})
.state('password.reset.invalid', {
url: '/invalid',
templateUrl: 'modules/users/client/views/password/reset-password-invalid.client.view.html'
})
.state('password.reset.success', {
url: '/success',
templateUrl: 'modules/users/client/views/password/reset-password-success.client.view.html'
})
.state('password.reset.form', {
url: '/:token',
templateUrl: 'modules/users/client/views/password/reset-password.client.view.html'
});
}
]);<|fim▁end|>
|
})
|
<|file_name|>hmm.py<|end_file_name|><|fim▁begin|>hmm = [<|fim▁hole|> "https://media4.giphy.com/media/MsWnkCVSXz73i/giphy.gif",
"https://media1.giphy.com/media/l2JJEIMLgrXPEbDGM/giphy.gif",
"https://media0.giphy.com/media/dgK22exekwOLm/giphy.gif"
]<|fim▁end|>
|
"https://media3.giphy.com/media/TPl5N4Ci49ZQY/giphy.gif",
"https://media0.giphy.com/media/l14qxlCgJ0zUk/giphy.gif",
|
<|file_name|>messages.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2015 Thomas Amland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
Start = object()
Restart = object()
Resume = object()
Terminate = object()
Restart = "restart"
Terminate = "terminate"<|fim▁hole|>
DeadLetter = namedtuple('DeadLetter', ['message', 'sender', 'recipient'])<|fim▁end|>
|
Failure = namedtuple('Failure', ['ref', 'exception', 'traceback'])
Supervise = namedtuple('Supervise', ['ref'])
Terminated = namedtuple('Terminated', ['ref'])
|
<|file_name|>PlayerManager.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012 MineStar.de
*
* This file is part of Contao.
<|fim▁hole|> * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* Contao is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Contao. If not, see <http://www.gnu.org/licenses/>.
*/
package de.minestar.contao.manager;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.bukkit.entity.Player;
import de.minestar.contao.core.Settings;
import de.minestar.contao.data.ContaoGroup;
import de.minestar.contao.data.User;
import de.minestar.core.MinestarCore;
import de.minestar.core.units.MinestarPlayer;
public class PlayerManager {
private Map<String, User> onlineUserMap = new HashMap<String, User>();
private Map<ContaoGroup, TreeSet<User>> groupMap = new HashMap<ContaoGroup, TreeSet<User>>();
public PlayerManager() {
for (ContaoGroup cGroup : ContaoGroup.values()) {
groupMap.put(cGroup, new TreeSet<User>());
}
}
public void addUser(User user) {
onlineUserMap.put(user.getMinecraftNickname().toLowerCase(), user);
groupMap.get(user.getGroup()).add(user);
}
public void removeUser(String userName) {
User user = onlineUserMap.remove(userName.toLowerCase());
groupMap.get(user.getGroup()).remove(user);
}
public User getUser(Player player) {
return getUser(player.getName());
}
public User getUser(String userName) {
return onlineUserMap.get(userName.toLowerCase());
}
public String getGroupAsString(ContaoGroup contaoGroup) {
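        // Illustrative (made-up names): produces something like
        // "<group color>Admins(2) : alice, bob" once the trailing ", " is removed.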
Set<User> groupMember = groupMap.get(contaoGroup);
if (groupMember.isEmpty())
return null;
StringBuilder sBuilder = new StringBuilder();
// BUILD HEAD
sBuilder.append(Settings.getColor(contaoGroup));
sBuilder.append(contaoGroup.getDisplayName());
sBuilder.append('(');
sBuilder.append(getGroupSize(contaoGroup));
sBuilder.append(") : ");
// ADD USER
for (User user : groupMember) {
sBuilder.append(user.getMinecraftNickname());
sBuilder.append(", ");
}
        // DELETE THE TRAILING ", "
        sBuilder.delete(sBuilder.length() - 2, sBuilder.length());
return sBuilder.toString();
}
public int getGroupSize(ContaoGroup contaoGroup) {
return groupMap.get(contaoGroup).size();
}
public void changeGroup(User user, ContaoGroup newGroup) {
groupMap.get(user.getGroup()).remove(user);
groupMap.get(newGroup).add(user);
setGroup(user, newGroup);
}
public void setGroup(User user, ContaoGroup newGroup) {
MinestarPlayer mPlayer = MinestarCore.getPlayer(user.getMinecraftNickname());
if (mPlayer != null) {
mPlayer.setGroup(newGroup.getMinestarGroup());
}
}
public boolean canBeFree(User probeUser) {
// TODO: Implement requirements
return false;
}
}<|fim▁end|>
|
*
* Contao is free software: you can redistribute it and/or modify
|
<|file_name|>json.py<|end_file_name|><|fim▁begin|>from io import StringIO
class TOKEN_TYPE:
OPERATOR = 0
STRING = 1
NUMBER = 2
BOOLEAN = 3
NULL = 4
class __TOKENIZER_STATE:
WHITESPACE = 0
INTEGER_0 = 1
INTEGER_SIGN = 2
INTEGER = 3
INTEGER_EXP = 4
INTEGER_EXP_0 = 5
FLOATING_POINT_0 = 6
FLOATING_POINT = 8
STRING = 9
STRING_ESCAPE = 10
STRING_END = 11
TRUE_1 = 12
TRUE_2 = 13
TRUE_3 = 14
FALSE_1 = 15
FALSE_2 = 16
FALSE_3 = 17
FALSE_4 = 18
NULL_1 = 19
NULL_2 = 20
NULL_3 = 21
UNICODE_1 = 22
UNICODE_2 = 23
UNICODE_3 = 24
UNICODE_4 = 25
def tokenize(stream):
def is_delimiter(char):
return char.isspace() or char in "{}[]:,"
token = []
charcode = 0
completed = False
now_token = ""
def process_char(char, charcode):
nonlocal token, completed, now_token
advance = True
add_char = False
next_state = state
if state == __TOKENIZER_STATE.WHITESPACE:
if char == "{":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, "{")
elif char == "}":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, "}")
elif char == "[":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, "[")
elif char == "]":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, "]")
elif char == ",":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, ",")
elif char == ":":
completed = True
now_token = (TOKEN_TYPE.OPERATOR, ":")
elif char == "\"":
next_state = __TOKENIZER_STATE.STRING
elif char in "123456789":
next_state = __TOKENIZER_STATE.INTEGER
add_char = True
elif char == "0":
next_state = __TOKENIZER_STATE.INTEGER_0
add_char = True
elif char == "-":
next_state = __TOKENIZER_STATE.INTEGER_SIGN
add_char = True
elif char == "f":
next_state = __TOKENIZER_STATE.FALSE_1
elif char == "t":
next_state = __TOKENIZER_STATE.TRUE_1
elif char == "n":
next_state = __TOKENIZER_STATE.NULL_1
elif not char.isspace():
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.INTEGER:
if char in "0123456789":
add_char = True
elif char == ".":
next_state = __TOKENIZER_STATE.FLOATING_POINT_0
add_char = True
elif char == "e" or char == 'E':
next_state = __TOKENIZER_STATE.INTEGER_EXP_0
add_char = True
elif is_delimiter(char):
next_state = __TOKENIZER_STATE.WHITESPACE
completed = True
now_token = (TOKEN_TYPE.NUMBER, int("".join(token)))
advance = False
else:
raise ValueError("A number must contain only digits. Got '{}'".format(char))
elif state == __TOKENIZER_STATE.INTEGER_0:
if char == ".":
next_state = __TOKENIZER_STATE.FLOATING_POINT_0
add_char = True
elif char == "e" or char == 'E':
next_state = __TOKENIZER_STATE.INTEGER_EXP_0
add_char = True
elif is_delimiter(char):
next_state = __TOKENIZER_STATE.WHITESPACE
completed = True
now_token = (TOKEN_TYPE.NUMBER, 0)
advance = False
else:
raise ValueError("A 0 must be followed by a '.' or a 'e'. Got '{0}'".format(char))
elif state == __TOKENIZER_STATE.INTEGER_SIGN:
if char == "0":
next_state = __TOKENIZER_STATE.INTEGER_0
add_char = True
elif char in "123456789":
next_state = __TOKENIZER_STATE.INTEGER
add_char = True
else:
raise ValueError("A - must be followed by a digit. Got '{0}'".format(char))
elif state == __TOKENIZER_STATE.INTEGER_EXP_0:
if char == "+" or char == "-" or char in "0123456789":
next_state = __TOKENIZER_STATE.INTEGER_EXP
add_char = True
else:
raise ValueError("An e in a number must be followed by a '+', '-' or digit. Got '{0}'".format(char))
elif state == __TOKENIZER_STATE.INTEGER_EXP:
if char in "0123456789":
add_char = True
elif is_delimiter(char):
completed = True
now_token = (TOKEN_TYPE.NUMBER, float("".join(token)))
next_state = __TOKENIZER_STATE.WHITESPACE
advance = False
else:
raise ValueError("A number exponent must consist only of digits. Got '{}'".format(char))
elif state == __TOKENIZER_STATE.FLOATING_POINT:
if char in "0123456789":
add_char = True
elif char == "e" or char == "E":
next_state = __TOKENIZER_STATE.INTEGER_EXP_0
add_char = True
elif is_delimiter(char):
completed = True
now_token = (TOKEN_TYPE.NUMBER, float("".join(token)))
next_state = __TOKENIZER_STATE.WHITESPACE
advance = False
else:
raise ValueError("A number must include only digits")
elif state == __TOKENIZER_STATE.FLOATING_POINT_0:
if char in "0123456789":
next_state = __TOKENIZER_STATE.FLOATING_POINT
add_char = True
else:
raise ValueError("A number with a decimal point must be followed by a fractional part")
elif state == __TOKENIZER_STATE.FALSE_1:
if char == "a":
next_state = __TOKENIZER_STATE.FALSE_2
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.FALSE_2:
if char == "l":
next_state = __TOKENIZER_STATE.FALSE_3
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.FALSE_3:
if char == "s":
next_state = __TOKENIZER_STATE.FALSE_4
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.FALSE_4:
if char == "e":
next_state = __TOKENIZER_STATE.WHITESPACE
completed = True
now_token = (TOKEN_TYPE.BOOLEAN, False)
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.TRUE_1:
if char == "r":
next_state = __TOKENIZER_STATE.TRUE_2
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.TRUE_2:
if char == "u":
next_state = __TOKENIZER_STATE.TRUE_3
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.TRUE_3:
if char == "e":
next_state = __TOKENIZER_STATE.WHITESPACE
completed = True
now_token = (TOKEN_TYPE.BOOLEAN, True)
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.NULL_1:
if char == "u":
next_state = __TOKENIZER_STATE.NULL_2
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.NULL_2:
if char == "l":
next_state = __TOKENIZER_STATE.NULL_3
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.NULL_3:
if char == "l":
next_state = __TOKENIZER_STATE.WHITESPACE
completed = True
now_token = (TOKEN_TYPE.NULL, None)
else:
raise ValueError("Invalid JSON character: '{0}'".format(char))
elif state == __TOKENIZER_STATE.STRING:
if char == "\"":
completed = True
now_token = (TOKEN_TYPE.STRING, "".join(token))
next_state = __TOKENIZER_STATE.STRING_END
elif char == "\\":
next_state = __TOKENIZER_STATE.STRING_ESCAPE
else:
add_char = True
elif state == __TOKENIZER_STATE.STRING_END:
if is_delimiter(char):
advance = False
next_state = __TOKENIZER_STATE.WHITESPACE
else:
raise ValueError("Expected whitespace or an operator after strin. Got '{}'".format(char))
elif state == __TOKENIZER_STATE.STRING_ESCAPE:
next_state = __TOKENIZER_STATE.STRING
if char == "\\" or char == "\"":
add_char = True
elif char == "b":
char = "\b"
add_char = True
elif char == "f":
char = "\f"
add_char = True
elif char == "n":
char = "\n"
add_char = True
elif char == "t":
char = "\t"
add_char = True
elif char == "r":
char = "\r"
add_char = True
elif char == "/":
char = "/"
add_char = True
elif char == "u":
next_state = __TOKENIZER_STATE.UNICODE_1
charcode = 0
else:
raise ValueError("Invalid string escape: {}".format(char))
elif state == __TOKENIZER_STATE.UNICODE_1:
if char in "0123456789":
charcode = (ord(char) - 48) * 4096
elif char in "abcdef":
charcode = (ord(char) - 87) * 4096
elif char in "ABCDEF":
charcode = (ord(char) - 55) * 4096
else:<|fim▁hole|> elif state == __TOKENIZER_STATE.UNICODE_2:
if char in "0123456789":
charcode += (ord(char) - 48) * 256
elif char in "abcdef":
charcode += (ord(char) - 87) * 256
elif char in "ABCDEF":
charcode += (ord(char) - 55) * 256
else:
raise ValueError("Invalid character code: {}".format(char))
next_state = __TOKENIZER_STATE.UNICODE_3
char = ""
elif state == __TOKENIZER_STATE.UNICODE_3:
if char in "0123456789":
charcode += (ord(char) - 48) * 16
elif char in "abcdef":
charcode += (ord(char) - 87) * 16
elif char in "ABCDEF":
charcode += (ord(char) - 55) * 16
else:
raise ValueError("Invalid character code: {}".format(char))
next_state = __TOKENIZER_STATE.UNICODE_4
char = ""
elif state == __TOKENIZER_STATE.UNICODE_4:
if char in "0123456789":
charcode += ord(char) - 48
elif char in "abcdef":
charcode += ord(char) - 87
elif char in "ABCDEF":
charcode += ord(char) - 55
else:
raise ValueError("Invalid character code: {}".format(char))
next_state = __TOKENIZER_STATE.STRING
char = chr(charcode)
add_char = True
if add_char:
token.append(char)
return advance, next_state, charcode
state = __TOKENIZER_STATE.WHITESPACE
char = stream.read(1)
index = 0
while char:
try:
advance, state, charcode = process_char(char, charcode)
except ValueError as e:
raise ValueError("".join([e.args[0], " at index {}".format(index)]))
if completed:
completed = False
token = []
yield now_token
if advance:
char = stream.read(1)
index += 1
process_char(" ", charcode)
if completed:
yield now_token
def parse_string(string):
return parse(StringIO(string))
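# Illustrative usage sketch (input is made up):
#   parse_string('{"a": [1, 2.5, true, null]}')
# should yield {'a': [1, 2.5, True, None]}.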
def parse(file):
token_stream = tokenize(file)
val, token_type, token = __parse(token_stream, next(token_stream))
if token is not None:
raise ValueError("Improperly closed JSON object")
try:
next(token_stream)
except StopIteration:
return val
raise ValueError("Additional string after end of JSON")
def __parse(token_stream, first_token):
class KVP:
def __init__(self, key):
self.key = key
self.value = None
self.set = False
def __str__(self):
if self.set:
return "{}: {}".format(self.key, self.value)
else:
return "{}: <NULL>".format(self.key)
stack = []
token_type, token = first_token
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
else:
raise ValueError("Expected object or array. Got '{}'".format(token))
else:
raise ValueError("Expected object or array. Got '{}'".format(token))
last_type, last_token = token_type, token
try:
token_type, token = next(token_stream)
except StopIteration as e:
raise ValueError("Too many opening braces") from e
try:
while True:
if isinstance(stack[-1], list):
if last_type == TOKEN_TYPE.OPERATOR:
if last_token == "[":
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
elif token != "]":
raise ValueError("Array must either be empty or contain a value. Got '{}'".
format(token))
else:
stack.append(token)
elif last_token == ",":
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
else:
raise ValueError("Array value expected. Got '{}'".format(token))
else:
stack.append(token)
elif last_token == "]":
value = stack.pop()
if len(stack) == 0:
return value, token_type, token
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
elif isinstance(stack[-1], KVP):
stack[-1].value = value
stack[-1].set = True
value = stack.pop()
if len(stack) == 0:
return value, token_type, token
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
else:
raise ValueError("Array items must be followed by a comma or closing bracket. "
"Got '{}'".format(value))
else:
raise ValueError("Array items must be followed by a comma or closing bracket. "
"Got '{}'".format(value))
elif last_token == "}":
raise ValueError("Array closed with a '}'")
else:
raise ValueError("Array should not contain ':'")
else:
raise ValueError("Unknown Error")
elif isinstance(stack[-1], dict):
if last_type == TOKEN_TYPE.OPERATOR:
if last_token == "{":
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
elif token != "}":
raise ValueError("Object must either be empty or contain key value pairs."
" Got '{}'".format(token))
elif token_type == TOKEN_TYPE.STRING:
stack.append(KVP(token))
else:
raise ValueError("Object keys must be strings. Got '{}'".format(token))
elif last_token == ",":
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
else:
raise ValueError("Object key expected. Got '{}'".format(token))
elif token_type == TOKEN_TYPE.STRING:
stack.append(KVP(token))
else:
raise ValueError("Object keys must be strings. Got '{}'".format(token))
elif last_token == "}":
value = stack.pop()
if len(stack) == 0:
return value, token_type, token
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
elif isinstance(stack[-1], KVP):
stack[-1].value = value
stack[-1].set = True
value = stack.pop()
if len(stack) == 0:
return value, token_type, token
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
else:
raise ValueError("Object key value pairs must be followed by a comma or "
"closing bracket. Got '{}'".format(value))
elif last_token == "]":
raise ValueError("Object closed with a ']'")
else:
raise ValueError("Object key value pairs should be separated by comma, not ':'")
elif isinstance(stack[-1], KVP):
if stack[-1].set:
if token_type == TOKEN_TYPE.OPERATOR:
if token != "}" and token != ",":
raise ValueError("Object key value pairs should be followed by ',' or '}'. Got '"
+ token + "'")
value = stack.pop()
if len(stack) == 0:
return value, token_type, token
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
else:
raise ValueError("Object key value pairs must be followed by a comma or closing bracket. "
"Got '{}'".format(value))
if token == "}" and len(stack) == 1:
return stack[0], None, None
else:
raise ValueError("Object key value pairs should be followed by ',' or '}'. Got '"
+ token + "'")
else:
if token_type == TOKEN_TYPE.OPERATOR and token == ":" and last_type == TOKEN_TYPE.STRING:
pass
elif last_type == TOKEN_TYPE.OPERATOR and last_token == ":":
if token_type == TOKEN_TYPE.OPERATOR:
if token == "{":
stack.append({})
elif token == "[":
stack.append([])
else:
raise ValueError("Object property value expected. Got '{}'".format(token))
else:
stack[-1].value = token
stack[-1].set = True
else:
raise ValueError("Object keys must be separated from values by a single ':'. "
"Got '{}'".format(token))
else:
value = stack.pop()
if isinstance(stack[-1], list):
stack[-1].append(value)
elif isinstance(stack[-1], dict):
stack[-1][value.key] = value.value
else:
raise ValueError("Array items must be followed by a comma or closing bracket. "
"Got '{}'".format(value))
last_type, last_token = token_type, token
token_type, token = next(token_stream)
except StopIteration as e:
if len(stack) == 1:
return stack[0], None, None
else:
raise ValueError("JSON Object not properly closed") from e
def stream_array(token_stream):
def process_token(token_type, token):
if token_type == TOKEN_TYPE.OPERATOR:
if token == ']':
return None, None, None
elif token == ",":
token_type, token = next(token_stream)
if token_type == TOKEN_TYPE.OPERATOR:
if token == "[" or token == "{":
return __parse(token_stream, (token_type, token))
else:
raise ValueError("Expected an array value. Got '{}'".format(token))
else:
return token, None, None
elif token == "[" or token == "{":
return __parse(token_stream, (token_type, token))
else:
raise ValueError("Array entries must be followed by ',' or ']'. Got '{}'".format(token))
else:
return token, None, None
token_type, token = next(token_stream)
if token_type != TOKEN_TYPE.OPERATOR or token != '[':
raise ValueError("Array must start with '['. Got '{}'".format(token))
token_type, token = next(token_stream)
while True:
while token is not None:
value, token_type, token = process_token(token_type, token)
if value is None:
return
yield value
token_type, token = next(token_stream)<|fim▁end|>
|
raise ValueError("Invalid character code: {}".format(char))
next_state = __TOKENIZER_STATE.UNICODE_2
char = ""
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from readSVCsig import readSVCdata
from readSVCsig import readSVCheader<|fim▁hole|><|fim▁end|>
|
from readSVCsig import readSVCsig
|
<|file_name|>extension.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#Copyright (C) Fiz Vazquez [email protected]
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os, sys
import logging
from .lib.xmlUtils import XMLParser
from .gui.windowextensions import WindowExtensions
class Extension:
def __init__(self, data_path = None, parent = None):
self.data_path=data_path
self.parent = parent
self.pytrainer_main = parent
def getActiveExtensions(self):
retorno = []
for extension in self.getExtensionList():
if self.getExtensionInfo(extension[0])[2] == "1":
retorno.append(extension[0])
return retorno
def manageExtensions(self):
ExtensionList = self.getExtensionList()
windowextension = WindowExtensions(self.data_path, self)
windowextension.setList(ExtensionList)
windowextension.run()
def getExtensionList(self):
extensiondir = self.data_path+"/extensions"
extensionList = []
for extension in os.listdir(extensiondir):
extensionxmlfile = extensiondir+"/"+extension+"/conf.xml"
if os.path.isfile(extensionxmlfile):
extensioninfo = XMLParser(extensionxmlfile)
name = extensioninfo.getValue("pytrainer-extension","name")
description = extensioninfo.getValue("pytrainer-extension","description")
extensionList.append((extensiondir+"/"+extension,name,description))
return extensionList
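    # A minimal conf.xml sketch consistent with the lookups above (assumed
    # layout, not taken from a real extension):
    #   <pytrainer-extension>
    #     <name>...</name>
    #     <description>...</description>
    #   </pytrainer-extension>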
def getExtensionInfo(self,pathExtension):
info = XMLParser(pathExtension+"/conf.xml")
name = info.getValue("pytrainer-extension","name")
description = info.getValue("pytrainer-extension","description")
code = info.getValue("pytrainer-extension","extensioncode")
extensiondir = self.pytrainer_main.profile.extensiondir
helpfile = pathExtension+"/"+info.getValue("pytrainer-extension","helpfile")
type = info.getValue("pytrainer-extension","type")
if not os.path.isfile(extensiondir+"/"+code+"/conf.xml"):
status = 0
else:
info = XMLParser(extensiondir+"/"+code+"/conf.xml")
status = info.getValue("pytrainer-extension","status")
#print name,description,status,helpfile,type
return name,description,status,helpfile,type
def getExtensionConfParams(self,pathExtension):
info = XMLParser(pathExtension+"/conf.xml")
code = info.getValue("pytrainer-extension","extensioncode")
extensiondir = self.pytrainer_main.profile.extensiondir
params = {}
if not os.path.isfile(extensiondir+"/"+code+"/conf.xml"):
prefs = info.getAllValues("conf-values")
prefs.append(("status","0"))
for pref in prefs:
params[pref[0]] = info.getValue("pytrainer-extension",pref[0])
else:<|fim▁hole|> info = XMLParser(extensiondir+"/"+code+"/conf.xml")
for pref in prefs:
params[pref[0]] = info.getValue("pytrainer-extension",pref[0])
#params.append((pref[0],info.getValue("pytrainer-extension",pref[0])))
return params
def setExtensionConfParams(self,pathExtension,savedOptions):
info = XMLParser(pathExtension+"/conf.xml")
code = info.getValue("pytrainer-extension","extensioncode")
extensiondir = self.pytrainer_main.profile.extensiondir+"/"+code
if not os.path.isdir(extensiondir):
os.mkdir(extensiondir)
if not os.path.isfile(extensiondir+"/conf.xml"):
savedOptions.append(("status","0"))
info = XMLParser(extensiondir+"/conf.xml")
info.createXMLFile("pytrainer-extension",savedOptions)
def loadExtension(self,pathExtension):
info = XMLParser(pathExtension+"/conf.xml")
txtbutton = info.getValue("pytrainer-extension","extensionbutton")
name = info.getValue("pytrainer-extension","name")
type = info.getValue("pytrainer-extension","type")
#print "Loading Extension %s" %name
return txtbutton,pathExtension,type
def getCodeConfValue(self,code,value):
extensiondir = self.pytrainer_main.profile.extensiondir
info = XMLParser(extensiondir+"/"+code+"/conf.xml")
return info.getValue("pytrainer-extension",value)
def importClass(self, pathExtension):
logging.debug('>>')
info = XMLParser(pathExtension+"/conf.xml")
#import extension
extension_dir = os.path.realpath(pathExtension)
extension_filename = info.getValue("pytrainer-extension","executable")
extension_classname = info.getValue("pytrainer-extension","extensioncode")
extension_type = info.getValue("pytrainer-extension","type")
options = self.getExtensionConfParams(pathExtension)
logging.debug("Extension Filename: %s", extension_filename )
logging.debug("Extension Classname: %s", extension_classname)
logging.debug("Extension Type: %s", extension_type)
logging.debug("Extension options: %s", options)
sys.path.insert(0, extension_dir)
module = __import__(extension_filename)
extensionMain = getattr(module, extension_classname)
logging.debug('<<')
return extensionMain(parent=self, pytrainer_main=self.parent, conf_dir=self.pytrainer_main.profile.confdir, options=options)<|fim▁end|>
|
prefs = info.getAllValues("conf-values")
prefs.append(("status","0"))
|
<|file_name|>mongodb.py<|end_file_name|><|fim▁begin|>#!/bin/python
from zeroos.core0.client import Client
import sys
import time
"""
This script expects you to know the IP of the core0 and to be able to access it from the machine running this script.
An easy way to do this is to build the initramfs with a custom zerotier network id (https://github.com/g8os/initramfs/tree/0.10.0#customize-build).
At boot, core0 will connect to the zerotier network and you can assign an IP to it.
"""
CORE0IP = "INSERT CORE0 IP HERE"
ZEROTIER = "INSERT ZEROTIER NETWORK ID HERE"
def main(init=False):
print("[+] connect to core0")
cl = Client(CORE0IP)
try:
cl.ping()
except Exception as e:
print("cannot connect to the core0: %s" % e)
return 1
print("[+] prepare data disks")
cl.system('mkdir -p /dev/mongodb_storage').get()
if init:
cl.btrfs.create('mongodb_storage', ['/dev/sda'])
disks = cl.disk.list().get('blockdevices', [])
if len(disks) < 1:
print("[-] need at least one data disk available")
return
disks_by_name = {d['name']: d for d in disks}
if disks_by_name['sda']['mountpoint'] is None:
print("[+] mount disk")
cl.disk.mount('/dev/sda', '/dev/mongodb_storage', [''])
try:
print("[+] create container")
container_id = cl.container.create('https://stor.jumpscale.org/stor2/flist/ubuntu-g8os-flist/mongodb-g8os.flist',
mount={"/dev/mongodb_storage": "/mnt/data"},
zerotier=ZEROTIER).get()
print("[+] container created, ID: %s" % container_id)<|fim▁hole|> return 1
container = cl.container.client(container_id)
print("[+] get zerotier ip")
container_ip = get_zerotier_ip(container)
print("[+] configure mongodb")
container.system("bash -c 'echo DAEMONUSER=\"root\" > /etc/default/mongodb'").get()
container.system("sed -i 's/dbpath.*/dbpath=\/mnt\/data/' /etc/mongodb.conf").get()
container.system("sed -i '/bind.*/d' /etc/mongodb.conf").get()
container.system("bash -c 'echo nounixsocket=true >> /etc/mongodb.conf'").get()
print("[+] starts mongod")
res = container.system('/etc/init.d/mongodb start').get()
print("[+] you can connect to mongodb at %s:27017" % container_ip)
def get_zerotier_ip(container):
i = 0
while i < 10:
addrs = container.info.nic()
ifaces = {a['name']: a for a in addrs}
for iface, info in ifaces.items():
if iface.startswith('zt'):
cidr = info['addrs'][0]['addr']
return cidr.split('/')[0]
time.sleep(2)
i += 1
raise TimeoutError("[-] couldn't get an ip on zerotier network")
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='attach disks to core0')
parser.add_argument('--init', type=bool, default=False, const=True, required=False,
help='creation filesystem and subvolume', nargs='?')
args = parser.parse_args()
# print(args.init)
main(init=args.init)<|fim▁end|>
|
except Exception as e:
print("[-] error during container creation: %s" % e)
|
<|file_name|>unique-move-drop.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let i = ~100;
let j = ~200;
let j = i;
assert!(*j == 100);
}<|fim▁end|>
| |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import sys
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(PROJECT_DIR + '/../'))
sys.path.append(os.path.abspath(PROJECT_DIR + '/../realestate/'))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|>
|
#!/usr/bin/env python
import os
|
<|file_name|>code.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
$("#bSort").hide();
$("#bShare").hide();
var movieCache = new Array();
//var movieRatingCache = new Array();
var isDebugMode = false;
var webSocketAddress, imdbServletUrl, treeUrl, sortUrl, shareServletUrl, sharedDataUrl;
if (isDebugMode) {
webSocketAddress = "ws://localhost:8080/RankixSocket";
} else {
webSocketAddress = "ws://theapache64.xyz:8080/rankix/RankixSocket";
}
imdbServletUrl = "/rankix/imdbServlet";
treeUrl = "/rankix/Tree";
sortUrl = "/rankix/sortServlet";
shareServletUrl = "/rankix/shareServlet";
sharedDataUrl = "/rankix/shared/";
var sharedLink = null;
var isWorking = true;
function showProgressBar() {
$("#divProgress").css({'display': 'block'});
$("#divProgress").slideDown(2000);
}
function getParameterByName(name) {
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
results = regex.exec(location.search);
return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
}
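    // Illustrative: with location.search === "?key=abc123",
    // getParameterByName('key') returns "abc123".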
function hideProgress() {
$("#divProgress").slideUp(2000);
}
function consoleData(data) {
$("#console_data").prepend('<p>' + data + '</p>');
}
    var webSocket = new WebSocket(webSocketAddress);
consoleData("CONNECTING...");
$("#bRankix").removeClass("btn-primary").addClass("btn-disabled");
webSocket.onopen = function (evt) {
hideProgress();
freeApp();
consoleData("CONNECTED TO SOCKET :)");
};
//Checking if the intent is to access shared data
var params = window.location.search;
var key = getParameterByName('key');
if (key.length == 10) {
postProgress(70, "Loading shared data... ")
//Valid key
$.ajax({
url: sharedDataUrl + key,
type: "get",
success: function (response) {
hideProgress();
if (response.error) {
alert(response.message);
} else {
$("div#results").html(response.data);
}
},
error: function (xhr) {
<|fim▁hole|> consoleData("Error " + xhr.data);
consoleData("Error " + xhr.data);
}
});
}
$("#bShare").click(function () {
if (sharedLink == null) {
var shareData = $("#results").html();
postProgress(70, "Sharing... This may take some time.")
$.ajax({
url: shareServletUrl,
xhr: function () {
var xhr = new window.XMLHttpRequest();
xhr.upload.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                            var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with upload progress here
postProgress(percentComplete, "Saving result...");
}
}, false);
xhr.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                            var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with download progress
postProgress(percentComplete, "Downloading shared link...");
}
}, false);
return xhr;
},
type: "post",
data: {share_data: shareData},
success: function (response) {
console.log(response);
hideProgress();
if (response.error) {
consoleData("Error :" + response.message);
} else {
sharedLink = response.shared_data_url;
window.prompt("Shared! Press Control+C to copy the link", sharedLink);
consoleData(response.shared_data_url);
}
},
error: function (xhr) {
console.log(xhr);
}
});
} else {
window.prompt("Press Control + C to copy the shareable link.", sharedLink);
}
});
$("#bSort").click(function () {
sharedLink = null;
var resultHtml = $("#results").html();
$.ajax({
url: sortUrl,
xhr: function () {
var xhr = new window.XMLHttpRequest();
xhr.upload.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                        var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with upload progress here
postProgress(percentComplete, "Sorting (UP)...");
}
}, false);
xhr.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                        var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with download progress
postProgress(percentComplete, "Sorting (DOWN)...");
}
}, false);
return xhr;
},
type: "post",
data: {results: resultHtml},
success: function (response) {
postProgress(100, "Finished");
hideProgress();
console.log(response);
if (!response.error) {
$("#bSort").fadeOut(1000);
$("#bShare").fadeIn(1000);
$("#results").html(response.results);
} else {
alert(response.message);
}
},
error: function (xhr) {
hideProgress();
console.log(xhr);
consoleData("Error while " + xhr.data);
}
});
});
function postProgress(perc, sentance) {
if (perc == 100) {
$("#pbProgress")
.removeClass("progress-bar-info progress-bar-striped active")
.addClass("progress-bar-success")
.attr('aria-valuenow', 100)
.css({width: "100%"})
.html("Finished (100%)");
} else if (perc == 10) {
$("#pbProgress")
.removeClass("progress-bar-success")
.addClass("progress-bar-info progress-bar-striped active")
.attr('aria-valuenow', 10)
.css({width: "10%"})
.html("Initializing...");
} else {
$("#pbProgress")
.attr('aria-valuenow', perc)
.css({width: perc + "%"})
.html(sentance);
}
}
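    // Note: postProgress expects perc on a 0-100 scale (it is written straight
    // into the bar's CSS width), hence the * 100 in the progress callbacks above.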
function freezeApp() {
isWorking = true;
$("#bRankix").removeClass("btn-primary").addClass("btn-disabled");
}
function freeApp() {
isWorking = false;
$("#bRankix").addClass("btn-primary").removeClass("btn-disabled");
}
$("#bRankix").click(function () {
if (isWorking) {
consoleData("Work in progress...")
return;
}
sharedLink = null;
$("#bSort").hide();
$("#bShare").hide();
var movieCache = new Array();
//TODO: X
//var movieRatingCache = new Array();
postProgress(20, "Contacting TREE Manager...");
if (webSocket == null || webSocket.readyState != 1) {
consoleData("Reopening new socket...");
webSocket = new WebSocket(webSocketAddress);
isWorking = false;
}
var treeData = $("#taTree").val();
function showError(errorReason) {
$("div#results").prepend('<p r="0" class="text-danger"><strong>Error: </strong>' + errorReason + '</p>\n');
}
if (treeData.trim().length == 0) {
alert("Tree data can't be empty!");
} else {
showProgressBar();
freezeApp();
$.ajax({
url: treeUrl,
xhr: function () {
var xhr = new window.XMLHttpRequest();
xhr.upload.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                        var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with upload progress here
postProgress(percentComplete, "Scanning tree ...");
}
}, false);
xhr.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
                        var percentComplete = (evt.loaded / evt.total) * 100;
//Do something with download progress
postProgress(percentComplete, "Downloading scan result...");
}
}, false);
return xhr;
},
type: "post",
data: {tree: treeData},
success: function (data) {
postProgress(100, "Tree scan completed, please wait...")
if (data.error) {
postProgress(0, "");
hideProgress();
freeApp();
showError(data.message);
consoleData(data.message);
} else {
$("#pbProgress")
.removeClass("progress-bar-success")
.addClass("progress-bar-info progress-bar-striped active");
var movieNameAndId = [];
/*{
"ignored_element_count":14,
"total_elements_found":18,
"rankixed_file_count":0,
"movie_file_count":4}*/
var totalElementsCount = data.total_elements_found;
var rankixedFileCount = data.rankixed_file_count;
var ignoredFileCount = data.ignored_element_count;
var fineFileCount = data.movie_file_count;
var date = new Date();
var startTime = date.getTime();
consoleData("---------------------------------");
consoleData("Requested at " + date.toLocaleTimeString());
consoleData("Total elements count: " + totalElementsCount);
consoleData("Rankixed file count: " + rankixedFileCount);
consoleData("Ignored file count: " + ignoredFileCount);
consoleData("Fine file count: " + fineFileCount);
if (fineFileCount == 0) {
freeApp();
showError("No fine file found!");
return;
}
$("div#results").html("");
function handleData(data) {
var movieName = movieNameAndId[data.id];
console.log(data);
function addResult(fontSize, movieName, imdbId, imdbRating) {
$("div#results").prepend('<p r="' + imdbRating + '" data-toggle="modal" data-target="#myModal" class="movieRow" id="' + imdbId + '" style="font-size:' + fontSize + 'px;">' + movieName + '<small class="text-muted"> has ' + imdbRating + '</small></p>\n');
}
if (!data.error) {
var fontSize = data.data * 5;
addResult(fontSize, movieName, data.imdb_id, data.data);
} else {
console.log('Data is ' + data.message);
var myRegexp = /^Invalid movie name (.+)$/;
var match = myRegexp.exec(data.message);
movieName = match[1];
$("div#results").prepend('<p r="0" class="text-danger"><strong>' + movieName + '</strong> has no rating</p>\n');
}
var scoreCount = $("#results p").length;
var perc = (scoreCount / movieNameAndId.length ) * 100;
postProgress(perc, parseInt(perc) + "%");
if (perc == 100) {
$("#bSort").fadeIn(1000);
$("#bShare").fadeIn(1000);
var finishTime = new Date().getTime();
consoleData("Took " + Math.round(((finishTime - startTime) / 1000)) + "s");
consoleData("---------------------------------");
freeApp();
/*
//SORTING REMOVED
$("div#results p").sort(function (a, b) {
console.log(a.id + " " + b.id);
return parseFloat(a.id) > parseFloat(b.id);
}).each(function(){
var elem = $(this);
elem.remove();
$(elem).prependTo("div#results");
});*/
}
}
data.results.forEach(function (obj) {
movieNameAndId[obj.id] = obj.name;
webSocket.send(JSON.stringify(obj));
/*
                            //Caching concept must be improved
if (obj.id in movieRatingCache) {
consoleData("Downloading from cache : " + obj.name);
handleData(movieRatingCache[obj.id]);
} else {
}*/
});
webSocket.onmessage = function (evt) {
var data = JSON.parse(evt.data);
//Adding to cache
//movieRatingCache[data.id] = data;
handleData(data);
};
webSocket.onclose = function (evt) {
consoleData("Socket closed");
freeApp();
$("div#results").prepend("<p r='0' class='text-info'>SOCKET Closed</p>\n");
};
webSocket.onerror = function (evt) {
freeApp();
$("div#results").prepend("<p r='0' class='text-danger'>" + evt.data + "</p>\n");
};
}
},
error: function () {
hideProgress();
freeApp();
showError("Network error occured, Please check your connection! ");
}
});
}
});
$('div#results').on('click', 'p.movieRow', function () {
var id = $(this).attr('id');
//Set loading
$("h4.modal-title").html("Loading...");
$("div.modal-body").hide();
$("#imgPoster").html("");
if (id in movieCache) {
consoleData("Data available in cache for " + movieCache[id].name);
showMovieDetailedDialog(movieCache[id]);
} else {
//Not available in cache so download
$.ajax({
url: imdbServletUrl,
type: "get",
data: {imdbId: id},
success: function (data) {
movieCache[id] = data;
consoleData("Movie loaded " + data.name);
showMovieDetailedDialog(data);
},
error: function (xhr) {
$("#bDismissDialog").click();
consoleData("Error occurred!");
}
});
}
function showMovieDetailedDialog(data) {
var img = $('<img />').load(function () {
$("#imgPoster").html("");
$("#imgPoster").append(img);
}).error(function () {
consoleData("Failed to load image");
}).attr('src', data.poster_url);
$("b#bRating").text(data.rating);
$("b#bGender").text(data.gender);
$("p#pPlot").text(data.plot);
$("h4.modal-title").text(data.name);
$("div.modal-body").slideDown(500);
}
});
});<|fim▁end|>
|
console.log(xhr);
|
<|file_name|>yf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import os
import sys
from optparse import OptionParser, OptionValueError
import math
import vtk
cohesion = 1.0
friction_angle = 20 * math.pi / 180.0
sinphi = math.sin(friction_angle)
cosphi = math.cos(friction_angle)
cohcos = cohesion * cosphi
dp_c = 3.0
dp_phi = math.pi / 6.0
dp_sinphi = math.sin(dp_phi)
dp_cosphi = math.cos(dp_phi)
dp_t = 0.0
dp_tc = 2.0
def ismoother(f_diff):
if (abs(f_diff) >= opts.smoothing_tol):
return 0.0
return 0.5 * (opts.smoothing_tol - abs(f_diff)) - opts.smoothing_tol / math.pi * math.cos(0.5 * math.pi * f_diff / opts.smoothing_tol)
def yield_function_2(yf1, yf2):
return max(yf1, yf2) + ismoother(yf1 - yf2)
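# yield_function_2 is a smoothed max: when |yf1 - yf2| >= smoothing_tol the
# ismoother() term vanishes and plain max(yf1, yf2) is returned; inside that
# band the corner of the max is rounded off.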
def yield_function(x, y, z):
yfs = []
if opts.twoD_example:
yfs += [- x, - y, y - 1.0, - y - 0.5 + 0.5 * x]
if opts.twoD_example_alternative:
yfs += [y - 1.0, - y - 0.5 + 0.5 * x, - x, - y]
if opts.dp:
yfs += [y + x * dp_sinphi - dp_c * dp_cosphi, x - dp_t, -x - dp_tc]
if opts.tensile:
yfs += [x - opts.tensile_strength, y - opts.tensile_strength, z - opts.tensile_strength]
if opts.mc:
yfs += [0.5 * (x - z) + 0.5 * (x + z) * sinphi - cohcos,
0.5 * (y - z) + 0.5 * (y + z) * sinphi - cohcos,
0.5 * (x - y) + 0.5 * (x + y) * sinphi - cohcos,
0.5 * (y - x) + 0.5 * (x + y) * sinphi - cohcos,
0.5 * (z - y) + 0.5 * (y + z) * sinphi - cohcos,
0.5 * (z - x) + 0.5 * (x + z) * sinphi - cohcos]
yf = yfs[0]
for i in range(1, len(yfs)):
yf = yield_function_2(yf, yfs[i])
return yf
# parse command line
p = OptionParser(usage="""usage: %prog [options] <vtk_file>
Inserts yield function values into <vtk_file>.
Only 3D input is accepted: this program assumes that the individual yield functions are functions of x, y, z.
""")
p.add_option("-v", action="store_true", dest="verbose", help="Verbose")
p.add_option("--name", action="store", type="string", default="yield_function", dest="name", help="The pointdata produced will have this name. Default=%default")
p.add_option("--smoothing_tol", action="store", type="float", default=0.1, dest="smoothing_tol", help="The smoothing tolerance (a) parameter. Default=%default")
p.add_option("-t", action="store_true", dest="tensile", help="Yield function will contain contributions from tensile (Rankine) failure")
p.add_option("--tensile_strength", action="store", type="float", default=0.7, dest="tensile_strength", help="Tensile strength")
p.add_option("-m", action="store_true", dest="mc", help="Yield function will contain contributions from Mohr-Coulomb failure")
p.add_option("-d", action="store_true", dest="dp", help="Yield function will contain contributions from Drucker-Prager failure")
p.add_option("-e", action="store_true", dest="twoD_example", help="Yield function will contain contributions from an example 2D yield function")<|fim▁hole|>
# get the com filename
if len(args) != 1:
p.print_help()
sys.exit(1)
in_file = args[0]
if opts.verbose: print "Reading", in_file
if in_file.endswith(".vtp"):
indata = vtk.vtkXMLPolyDataReader()
writer = vtk.vtkXMLPolyDataWriter()
elif in_file.endswith(".vtu"):
indata = vtk.vtkXMLUnstructuredGridReader()
writer = vtk.vtkXMLUnstructuredGridWriter()
elif in_file.endswith(".vtr"):
indata = vtk.vtkXMLRectilinearGridReader()
writer = vtk.vtkXMLRectilinearGridWriter()
else:
print "This program has not yet been configured to read files of type", in_file
sys.exit(2)
indata.SetFileName(in_file)
indata.Update()
indata = indata.GetOutput()
if opts.verbose: print "Generating", opts.name
yf = vtk.vtkDoubleArray()
yf.SetName(opts.name)
yf.SetNumberOfValues(indata.GetNumberOfPoints())
for ptid in range(indata.GetNumberOfPoints()):
(x, y, z) = indata.GetPoint(ptid)
yf.SetValue(ptid, yield_function(x, y, z))
indata.GetPointData().AddArray(yf)
if opts.verbose: print "Writing", in_file
writer.SetFileName(in_file)
writer.SetDataModeToBinary()
writer.SetInputConnection(indata.GetProducerPort())
writer.Write()
sys.exit(0)<|fim▁end|>
|
p.add_option("-a", action="store_true", dest="twoD_example_alternative", help="Yield function will contain contributions from an alternative example 2D yield function")
(opts, args) = p.parse_args()
|
<|file_name|>auditgroups.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Audit groups and removes inactive users.
"""
import datetime
from django.contrib.auth.models import Group, User
from django.contrib.admin.models import LogEntry, CHANGE
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from django.utils import timezone
from crashstats.authentication.models import PolicyException
VALID_EMAIL_DOMAINS = ("mozilla.com", "mozilla.org")
def get_or_create_auditgroups_user():
try:
return User.objects.get(username="auditgroups")
except User.DoesNotExist:
return User.objects.create_user(
username="auditgroups",
email="[email protected]",
first_name="SYSTEMUSER",
last_name="DONOTDELETE",
is_active=False,
)
<|fim▁hole|>def delta_days(since_datetime):
"""Return the delta in days between now and since_datetime"""
return (timezone.now() - since_datetime).days
class Command(BaseCommand):
help = "Audits Django groups and removes inactive users."
def add_arguments(self, parser):
parser.add_argument(
"--dry-run", action="store_true", help="Whether or not to do a dry run."
)
def is_employee_or_exception(self, user):
# If this user has a policy exception, then they're allowed
if PolicyException.objects.filter(user=user).exists():
return True
if user.email.endswith(VALID_EMAIL_DOMAINS):
return True
return False
def audit_hackers_group(self, dryrun=True):
# Figure out the cutoff date for inactivity
cutoff = timezone.now() - datetime.timedelta(days=365)
self.stdout.write("Using cutoff: %s" % cutoff)
# Get all users in the "Hackers" group
try:
hackers_group = Group.objects.get(name="Hackers")
except Group.DoesNotExist:
self.stdout.write('"Hackers" group does not exist.')
return
# Go through the users and mark the ones for removal
users_to_remove = []
for user in hackers_group.user_set.all():
if not user.is_active:
users_to_remove.append((user, "!is_active"))
elif not self.is_employee_or_exception(user):
users_to_remove.append((user, "not employee or exception"))
elif user.last_login and user.last_login < cutoff:
days = delta_days(user.last_login)
# This user is inactive. Check for active API tokens.
active_tokens = [
token for token in user.token_set.all() if not token.is_expired
]
if not active_tokens:
users_to_remove.append((user, "inactive %sd, no tokens" % days))
else:
self.stdout.write(
"SKIP: %s (inactive %sd, but has active tokens: %s)"
% (user.email, days, len(active_tokens))
)
auditgroups_user = get_or_create_auditgroups_user()
# Log or remove the users that have been marked
for user, reason in users_to_remove:
self.stdout.write("Removing: %s (%s)" % (user.email, reason))
if dryrun is False:
hackers_group.user_set.remove(user)
# Toss a LogEntry in so we can keep track of when people get
# de-granted and what did it
LogEntry.objects.log_action(
user_id=auditgroups_user.id,
content_type_id=ContentType.objects.get_for_model(User).pk,
object_id=user.pk,
object_repr=user.email,
action_flag=CHANGE,
change_message="Removed %s from hackers--%s."
% (user.email, reason),
)
self.stdout.write("Total removed: %s" % len(users_to_remove))
def handle(self, **options):
dryrun = options["dry_run"]
if dryrun:
self.stdout.write("Dry run--this is what we think should happen.")
self.audit_hackers_group(dryrun=dryrun)<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![allow(non_snake_case)]
mod counter;
use counter::Counter;
use std::mem::transmute;
#[no_mangle]
pub extern fn createCounter(val: u32) -> *mut Counter {
let _counter = unsafe { transmute(Box::new(Counter::new(val))) };
_counter
}
<|fim▁hole|>pub extern fn getCounterValue(ptr: *mut Counter) -> u32 {
let mut _counter = unsafe { &mut *ptr };
_counter.get()
}
#[no_mangle]
pub extern fn incrementCounterBy(ptr: *mut Counter, bys_ptr: *const u32, bys_len: usize) -> u32 {
let mut _counter = unsafe { &mut *ptr };
let bys = unsafe { std::slice::from_raw_parts(bys_ptr, bys_len) };
_counter.incr(bys)
}
#[no_mangle]
pub extern fn decrementCounterBy(ptr: *mut Counter, bys_ptr: *const u32, bys_len: usize) -> u32 {
let mut _counter = unsafe { &mut *ptr };
let bys = unsafe { std::slice::from_raw_parts(bys_ptr, bys_len) };
_counter.decr(bys)
}
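// Illustrative JS-side call sequence for these exports (names assumed from the
// #[no_mangle] functions above):
//   const c = createCounter(5);
//   incrementCounterBy(c, bysPtr, bysLen);
//   destroyCounter(c);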
#[no_mangle]
pub extern fn destroyCounter(ptr: *mut Counter) {
let _counter: Box<Counter> = unsafe{ transmute(ptr) };
// Drop
}<|fim▁end|>
|
#[no_mangle]
|
<|file_name|>test_generic.py<|end_file_name|><|fim▁begin|>#
# Copyright 2015 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
import yaml
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslotest import mockpatch
from ceilometer import declarative
from ceilometer.hardware.inspector import base as inspector_base
from ceilometer.hardware.pollsters import generic
from ceilometer import sample
from ceilometer.tests import base as test_base
class TestMeterDefinition(test_base.BaseTestCase):
def test_config_definition(self):
cfg = dict(name='test',
type='gauge',
unit='B',
snmp_inspector={})
definition = generic.MeterDefinition(cfg)
self.assertEqual('test', definition.name)
self.assertEqual('gauge', definition.type)
self.assertEqual('B', definition.unit)
self.assertEqual({}, definition.snmp_inspector)
def test_config_missing_field(self):
cfg = dict(name='test', type='gauge')
try:
generic.MeterDefinition(cfg)
except declarative.MeterDefinitionException as e:
self.assertEqual("Missing field unit", e.brief_message)
def test_config_invalid_field(self):
cfg = dict(name='test',
type='gauge',
unit='B',
invalid={})
definition = generic.MeterDefinition(cfg)
self.assertEqual("foobar", getattr(definition, 'invalid', 'foobar'))
def test_config_invalid_type_field(self):
cfg = dict(name='test',
type='invalid',
unit='B',
snmp_inspector={})
try:
generic.MeterDefinition(cfg)
except declarative.MeterDefinitionException as e:
self.assertEqual("Unrecognized type value invalid",
e.brief_message)
@mock.patch('ceilometer.hardware.pollsters.generic.LOG')
def test_bad_metric_skip(self, LOG):
cfg = {'metric': [dict(name='test1',
type='gauge',
unit='B',
snmp_inspector={}),
dict(name='test_bad',
type='invalid',
unit='B',
snmp_inspector={}),
dict(name='test2',
type='gauge',
unit='B',
snmp_inspector={})]}
data = generic.load_definition(cfg)<|fim▁hole|> LOG.error.assert_called_with(
"Error loading meter definition: %s",
"Unrecognized type value invalid")
class FakeInspector(inspector_base.Inspector):
net_metadata = dict(name='test.teest',
mac='001122334455',
ip='10.0.0.2',
speed=1000)
DATA = {
'test': (0.99, {}, {}),
'test2': (90, net_metadata, {}),
}
def inspect_generic(self, host, cache,
extra_metadata=None, param=None):
yield self.DATA[host.hostname]
class TestGenericPollsters(test_base.BaseTestCase):
@staticmethod
def faux_get_inspector(url, namespace=None):
return FakeInspector()
def setUp(self):
super(TestGenericPollsters, self).setUp()
self.conf = self.useFixture(fixture_config.Config()).conf
self.resources = ["snmp://test", "snmp://test2"]
self.useFixture(mockpatch.Patch(
'ceilometer.hardware.inspector.get_inspector',
self.faux_get_inspector))
self.conf(args=[])
self.pollster = generic.GenericHardwareDeclarativePollster()
def _setup_meter_def_file(self, cfg):
if six.PY3:
cfg = cfg.encode('utf-8')
meter_cfg_file = fileutils.write_to_tempfile(content=cfg,
prefix="snmp",
suffix="yaml")
self.conf.set_override(
'meter_definitions_file',
meter_cfg_file, group='hardware')
cfg = declarative.load_definitions(
self.conf, {}, self.conf.hardware.meter_definitions_file)
return cfg
def _check_get_samples(self, name, definition,
expected_value, expected_type, expected_unit=None):
self.pollster._update_meter_definition(definition)
cache = {}
samples = list(self.pollster.get_samples(None, cache,
self.resources))
self.assertTrue(samples)
self.assertIn(self.pollster.CACHE_KEY, cache)
for resource in self.resources:
self.assertIn(resource, cache[self.pollster.CACHE_KEY])
self.assertEqual(set([name]),
set([s.name for s in samples]))
match = [s for s in samples if s.name == name]
self.assertEqual(expected_value, match[0].volume)
self.assertEqual(expected_type, match[0].type)
if expected_unit:
self.assertEqual(expected_unit, match[0].unit)
def test_get_samples(self):
param = dict(matching_type='type_exact',
oid='1.3.6.1.4.1.2021.10.1.3.1',
type='lambda x: float(str(x))')
meter_def = generic.MeterDefinition(dict(type='gauge',
name='hardware.test1',
unit='process',
snmp_inspector=param))
self._check_get_samples('hardware.test1',
meter_def,
0.99, sample.TYPE_GAUGE,
expected_unit='process')
def test_get_pollsters_extensions(self):
param = dict(matching_type='type_exact',
oid='1.3.6.1.4.1.2021.10.1.3.1',
type='lambda x: float(str(x))')
meter_cfg = yaml.dump(
{'metric': [dict(type='gauge',
name='hardware.test1',
unit='process',
snmp_inspector=param),
dict(type='gauge',
name='hardware.test2.abc',
unit='process',
snmp_inspector=param)]})
self._setup_meter_def_file(meter_cfg)
pollster = generic.GenericHardwareDeclarativePollster
# Clear cached mapping
pollster.mapping = None
exts = pollster.get_pollsters_extensions()
self.assertEqual(2, len(exts))
self.assertIn(exts[0].name, ['hardware.test1', 'hardware.test2.abc'])
self.assertIn(exts[1].name, ['hardware.test1', 'hardware.test2.abc'])<|fim▁end|>
|
self.assertEqual(2, len(data))
|
<|file_name|>actor.go<|end_file_name|><|fim▁begin|>// Package sharedaction handles all operations that do not require a cloud
// controller
package sharedaction
type AuthActor interface {
IsLoggedIn() bool
}
// Actor handles all shared actions
type Actor struct {
Config Config
AuthActor
}
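// Illustrative: because AuthActor is embedded, actor.IsLoggedIn() forwards to
// whichever implementation NewActor picks below.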
// NewActor returns an Actor with default settings
func NewActor(config Config) *Actor {
var authActor AuthActor = NewDefaultAuthActor(config)
if config.IsCFOnK8s() {
authActor = NewK8sAuthActor(config)
}
<|fim▁hole|> }
}<|fim▁end|>
|
return &Actor{
AuthActor: authActor,
Config: config,
|
<|file_name|>quotes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import scrapy
class QuotesSpider(scrapy.Spider):
name = 'quotes'
    allowed_domains = ['quotes.toscrape.com']
    start_urls = ['http://quotes.toscrape.com/tag/humor/']
def parse(self, response):
for quote in response.css('div.quote'):
yield {
'text': quote.css('span.text::text').extract_first(),
'author': quote.xpath('span/small/text()').extract_first(),<|fim▁hole|> next_page = response.css('li.next a::attr("href")').extract_first()
if next_page is not None:
yield response.follow(next_page, self.parse)<|fim▁end|>
|
}
|
<|file_name|>manifest.rs<|end_file_name|><|fim▁begin|>use std::hash;
use std::fmt::{mod, Show, Formatter};
use semver::Version;
use serialize::{Encoder,Encodable};
use core::source::SourceId;
use core::{
Dependency,
PackageId,
Summary
};
use core::package_id::Metadata;
use core::dependency::SerializedDependency;
use util::{CargoResult, human};
#[deriving(PartialEq,Clone)]
pub struct Manifest {
summary: Summary,
authors: Vec<String>,
targets: Vec<Target>,
target_dir: Path,
doc_dir: Path,
sources: Vec<SourceId>,
build: Vec<String>,
warnings: Vec<String>,
exclude: Vec<String>,
}
impl Show for Manifest {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Manifest({}, authors={}, targets={}, target_dir={}, \
build={})",
self.summary, self.authors, self.targets,
self.target_dir.display(), self.build)
}
}
#[deriving(PartialEq,Clone,Encodable)]
pub struct SerializedManifest {
name: String,
version: String,
dependencies: Vec<SerializedDependency>,
authors: Vec<String>,
targets: Vec<Target>,
target_dir: String,
doc_dir: String,
build: Option<Vec<String>>,
}
impl<E, S: Encoder<E>> Encodable<S, E> for Manifest {
fn encode(&self, s: &mut S) -> Result<(), E> {
SerializedManifest {
name: self.summary.get_name().to_string(),<|fim▁hole|> SerializedDependency::from_dependency(d)
}).collect(),
authors: self.authors.clone(),
targets: self.targets.clone(),
target_dir: self.target_dir.display().to_string(),
doc_dir: self.doc_dir.display().to_string(),
build: if self.build.len() == 0 { None } else { Some(self.build.clone()) },
}.encode(s)
}
}
#[deriving(Show, Clone, PartialEq, Hash, Encodable)]
pub enum LibKind {
Lib,
Rlib,
Dylib,
StaticLib
}
impl LibKind {
pub fn from_str(string: &str) -> CargoResult<LibKind> {
match string {
"lib" => Ok(Lib),
"rlib" => Ok(Rlib),
"dylib" => Ok(Dylib),
"staticlib" => Ok(StaticLib),
_ => Err(human(format!("{} was not one of lib|rlib|dylib|staticlib",
string)))
}
}
pub fn from_strs<S: Str>(strings: Vec<S>) -> CargoResult<Vec<LibKind>> {
strings.iter().map(|s| LibKind::from_str(s.as_slice())).collect()
}
pub fn crate_type(&self) -> &'static str {
match *self {
Lib => "lib",
Rlib => "rlib",
Dylib => "dylib",
StaticLib => "staticlib"
}
}
}
#[deriving(Show, Clone, Hash, PartialEq, Encodable)]
pub enum TargetKind {
LibTarget(Vec<LibKind>),
BinTarget
}
#[deriving(Encodable, Decodable, Clone, PartialEq, Show)]
pub struct Profile {
env: String, // compile, test, dev, bench, etc.
opt_level: uint,
debug: bool,
test: bool,
doctest: bool,
doc: bool,
dest: Option<String>,
plugin: bool,
harness: bool, // whether to use the test harness (--test)
}
impl Profile {
fn default() -> Profile {
Profile {
env: String::new(),
opt_level: 0,
debug: false,
test: false,
doc: false,
dest: None,
plugin: false,
doctest: false,
harness: true,
}
}
pub fn default_dev() -> Profile {
Profile {
env: "compile".to_string(), // run in the default environment only
opt_level: 0,
debug: true,
.. Profile::default()
}
}
pub fn default_test() -> Profile {
Profile {
env: "test".to_string(),
debug: true,
test: true,
dest: None,
.. Profile::default()
}
}
pub fn default_bench() -> Profile {
Profile {
env: "bench".to_string(),
opt_level: 3,
test: true,
dest: Some("release".to_string()),
.. Profile::default()
}
}
pub fn default_release() -> Profile {
Profile {
env: "release".to_string(),
opt_level: 3,
dest: Some("release".to_string()),
.. Profile::default()
}
}
pub fn default_doc() -> Profile {
Profile {
env: "doc".to_string(),
dest: None,
doc: true,
.. Profile::default()
}
}
pub fn is_compile(&self) -> bool {
self.env.as_slice() == "compile"
}
pub fn is_doc(&self) -> bool {
self.doc
}
pub fn is_test(&self) -> bool {
self.test
}
pub fn uses_test_harness(&self) -> bool {
self.harness
}
pub fn is_doctest(&self) -> bool {
self.doctest
}
pub fn is_plugin(&self) -> bool {
self.plugin
}
pub fn get_opt_level(&self) -> uint {
self.opt_level
}
pub fn get_debug(&self) -> bool {
self.debug
}
pub fn get_env(&self) -> &str {
self.env.as_slice()
}
pub fn get_dest(&self) -> Option<&str> {
self.dest.as_ref().map(|d| d.as_slice())
}
pub fn opt_level(mut self, level: uint) -> Profile {
self.opt_level = level;
self
}
pub fn debug(mut self, debug: bool) -> Profile {
self.debug = debug;
self
}
pub fn test(mut self, test: bool) -> Profile {
self.test = test;
self
}
pub fn doctest(mut self, doctest: bool) -> Profile {
self.doctest = doctest;
self
}
pub fn doc(mut self, doc: bool) -> Profile {
self.doc = doc;
self
}
pub fn plugin(mut self, plugin: bool) -> Profile {
self.plugin = plugin;
self
}
pub fn harness(mut self, harness: bool) -> Profile {
self.harness = harness;
self
}
}
impl<H: hash::Writer> hash::Hash<H> for Profile {
fn hash(&self, into: &mut H) {
// Be sure to match all fields explicitly, but ignore those not relevant
// to the actual hash of a profile.
let Profile {
opt_level,
debug,
plugin,
dest: ref dest,
harness: harness,
// test flags are separated by file, not by profile hash, and
// env/doc also don't matter for the actual contents of the output
// file, just where the output file is located.
doc: _,
env: _,
test: _,
doctest: _,
} = *self;
(opt_level, debug, plugin, dest, harness).hash(into)
}
}
#[deriving(Clone, Hash, PartialEq)]
pub struct Target {
kind: TargetKind,
name: String,
src_path: Path,
profile: Profile,
metadata: Option<Metadata>,
}
#[deriving(Encodable)]
pub struct SerializedTarget {
kind: Vec<&'static str>,
name: String,
src_path: String,
profile: Profile,
metadata: Option<Metadata>
}
impl<E, S: Encoder<E>> Encodable<S, E> for Target {
fn encode(&self, s: &mut S) -> Result<(), E> {
let kind = match self.kind {
LibTarget(ref kinds) => kinds.iter().map(|k| k.crate_type()).collect(),
BinTarget => vec!("bin")
};
SerializedTarget {
kind: kind,
name: self.name.clone(),
src_path: self.src_path.display().to_string(),
profile: self.profile.clone(),
metadata: self.metadata.clone()
}.encode(s)
}
}
impl Show for Target {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}(name={}, path={}, profile={})", self.kind, self.name,
self.src_path.display(), self.profile)
}
}
impl Manifest {
pub fn new(summary: &Summary, targets: &[Target],
target_dir: &Path, doc_dir: &Path, sources: Vec<SourceId>,
build: Vec<String>, exclude: Vec<String>) -> Manifest {
Manifest {
summary: summary.clone(),
authors: Vec::new(),
targets: Vec::from_slice(targets),
target_dir: target_dir.clone(),
doc_dir: doc_dir.clone(),
sources: sources,
build: build,
warnings: Vec::new(),
exclude: exclude,
}
}
pub fn get_summary(&self) -> &Summary {
&self.summary
}
pub fn get_package_id(&self) -> &PackageId {
self.get_summary().get_package_id()
}
pub fn get_name(&self) -> &str {
self.get_package_id().get_name()
}
pub fn get_version(&self) -> &Version {
self.get_summary().get_package_id().get_version()
}
pub fn get_authors(&self) -> &[String] {
self.authors.as_slice()
}
pub fn get_dependencies(&self) -> &[Dependency] {
self.get_summary().get_dependencies()
}
pub fn get_targets(&self) -> &[Target] {
self.targets.as_slice()
}
pub fn get_target_dir(&self) -> &Path {
&self.target_dir
}
pub fn get_doc_dir(&self) -> &Path {
&self.doc_dir
}
pub fn get_source_ids(&self) -> &[SourceId] {
self.sources.as_slice()
}
pub fn get_build(&self) -> &[String] {
self.build.as_slice()
}
pub fn add_warning(&mut self, s: String) {
self.warnings.push(s)
}
pub fn get_warnings(&self) -> &[String] {
self.warnings.as_slice()
}
pub fn get_exclude(&self) -> &[String] {
self.exclude.as_slice()
}
}
impl Target {
pub fn file_stem(&self) -> String {
match self.metadata {
Some(ref metadata) => format!("{}{}", self.name, metadata.extra_filename),
None => self.name.clone()
}
}
pub fn lib_target(name: &str, crate_targets: Vec<LibKind>,
src_path: &Path, profile: &Profile,
metadata: Metadata) -> Target {
Target {
kind: LibTarget(crate_targets),
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
metadata: Some(metadata)
}
}
pub fn bin_target(name: &str, src_path: &Path, profile: &Profile,
metadata: Option<Metadata>) -> Target {
Target {
kind: BinTarget,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
metadata: metadata,
}
}
pub fn example_target(name: &str, src_path: &Path, profile: &Profile) -> Target {
Target {
kind: BinTarget,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
metadata: None,
}
}
pub fn test_target(name: &str, src_path: &Path,
profile: &Profile, metadata: Metadata) -> Target {
Target {
kind: BinTarget,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
metadata: Some(metadata),
}
}
pub fn bench_target(name: &str, src_path: &Path,
profile: &Profile, metadata: Metadata) -> Target {
Target {
kind: BinTarget,
name: name.to_string(),
src_path: src_path.clone(),
profile: profile.clone(),
metadata: Some(metadata),
}
}
pub fn get_name(&self) -> &str {
self.name.as_slice()
}
pub fn get_src_path(&self) -> &Path {
&self.src_path
}
pub fn is_lib(&self) -> bool {
match self.kind {
LibTarget(_) => true,
_ => false
}
}
pub fn is_dylib(&self) -> bool {
match self.kind {
LibTarget(ref kinds) => kinds.iter().any(|&k| k == Dylib),
_ => false
}
}
pub fn is_rlib(&self) -> bool {
match self.kind {
LibTarget(ref kinds) =>
kinds.iter().any(|&k| k == Rlib || k == Lib),
_ => false
}
}
pub fn is_staticlib(&self) -> bool {
match self.kind {
LibTarget(ref kinds) => kinds.iter().any(|&k| k == StaticLib),
_ => false
}
}
pub fn is_bin(&self) -> bool {
match self.kind {
BinTarget => true,
_ => false
}
}
pub fn get_profile(&self) -> &Profile {
&self.profile
}
pub fn get_metadata(&self) -> Option<&Metadata> {
self.metadata.as_ref()
}
pub fn rustc_crate_types(&self) -> Vec<&'static str> {
match self.kind {
LibTarget(ref kinds) => {
kinds.iter().map(|kind| kind.crate_type()).collect()
},
BinTarget => vec!("bin")
}
}
}<|fim▁end|>
|
version: self.summary.get_version().to_string(),
dependencies: self.summary.get_dependencies().iter().map(|d| {
|
<|file_name|>map.js<|end_file_name|><|fim▁begin|>/**
* @ngdoc filter
* @name map
* @kind function
*
* @description
* Returns a new collection of the results of each expression execution.
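 *
 * Example (illustrative): [{ n: 1 }, { n: 2 }] | map: 'n' evaluates to [1, 2].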
*/
angular.module('a8m.map', [])
.filter('map', ['$parse', function($parse) {
return function (collection, expression) {<|fim▁hole|>
if(!isArray(collection) || isUndefined(expression)) {
return collection;
}
return collection.map(function (elm) {
return $parse(expression)(elm);
});
}
}]);<|fim▁end|>
|
collection = (isObject(collection))
? toArray(collection)
: collection;
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
SITE_ROOT = os.path.dirname(os.path.abspath(__file__))
DEBUG = True # debug mode
TEMPLATE_DIR = os.path.join(SITE_ROOT, 'templates') # template directory
BASE_TEMPLATE = 'base' # base template
# URL mapping
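# (Each entry pairs a URL regex with a handler class name; e.g. a request to
#  /abc12 matches '/([0-9a-zA-Z]{5,})' and is handled by Expand, assuming
#  web.py-style dispatch.)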
URLS = (
'/', 'Index',
'(/j)?/shorten', 'Shorten',
'/([0-9a-zA-Z]{5,})', 'Expand',
'/j/expand', 'Expand',
'/.*', 'Index',
)
# Database configuration
DATABASES = {
'dbn': 'mysql',
'db': 'shorturl',<|fim▁hole|> 'port': 3306,
}<|fim▁end|>
|
'user': 'py',
'pw': 'py_passwd',
'host': 'localhost',
|
<|file_name|>util.test.ts<|end_file_name|><|fim▁begin|>import {
objectNameToFirestorePaths,
isValidDocumentName,
isValidDocumentId,
mirrorDocumentPathToTombstonePath,
} from "../src/util";
const root = process.env.FIRESTORE_ROOT;
const prefixes = process.env.PREFIXES_SUBCOLLECTION_NAME;
const prefixesTombstones = process.env.PREFIXES_TOMBSTONES_NAME;
const items = process.env.ITEMS_SUBCOLLECTION_NAME;
const itemsTombstones = process.env.ITEMS_TOMBSTONES_NAME;
describe("isValidDocumentName", () => {
test("valid names work", () => {
const deepNestedName = "a/b".repeat(50) + "c.jpg";<|fim▁hole|> });
test("cannot be more than 100 subcollections deep", () => {
const name = "a/".repeat(100) + "b.jpg";
expect(isValidDocumentName(name)).toBe(false);
});
test("cannot be larger than 6 KiB", () => {
const name = "a".repeat(6144 + 1);
expect(isValidDocumentName(name)).toBe(false);
});
});
describe("isValidDocumentId", () => {
test("valid names work", () => {
const name = "foo/bar/image.jpg";
const ids = name.split("/");
expect(ids.every(isValidDocumentId)).toBe(true);
});
test("double forward-slash fails", () => {
const name = "//";
const ids = name.split("/");
expect(ids.every(isValidDocumentId)).toBe(false);
});
test("cannot be larger than 1500 bytes", () => {
const id = "a".repeat(1500 + 1);
expect(isValidDocumentId(id)).toBe(false);
});
test("cannot match __.*__", () => {
const id = "__image.jpg__";
expect(isValidDocumentId(id)).toBe(false);
});
});
describe("objectNameToFirestorePaths", () => {
test("file at root", () => {
const name = "a.jpg";
const { prefixPaths, itemPath } = objectNameToFirestorePaths(name);
expect(prefixPaths.length).toBe(0);
expect(itemPath).toBe(`${root}/${items}/a.jpg`);
});
test("file in nested path", () => {
const name = "a/b/c.jpg";
const { prefixPaths, itemPath } = objectNameToFirestorePaths(name);
expect(prefixPaths[0]).toBe(`${root}/${prefixes}/a`);
expect(prefixPaths[1]).toBe(`${root}/${prefixes}/a/${prefixes}/b`);
expect(itemPath).toBe(`${root}/${prefixes}/a/${prefixes}/b/${items}/c.jpg`);
});
});
describe("mirrorDocumentPathToTombstonePath", () => {
test("file in nested path", () => {
const name = "a/b/c.jpg";
const { prefixPaths, itemPath } = objectNameToFirestorePaths(name);
expect(mirrorDocumentPathToTombstonePath(prefixPaths[0])).toBe(
`${root}/${prefixesTombstones}/a`
);
expect(mirrorDocumentPathToTombstonePath(prefixPaths[1])).toBe(
`${root}/${prefixes}/a/${prefixesTombstones}/b`
);
expect(mirrorDocumentPathToTombstonePath(itemPath)).toBe(
`${root}/${prefixes}/a/${prefixes}/b/${itemsTombstones}/c.jpg`
);
});
});<|fim▁end|>
|
const longName = "a".repeat(6144);
expect(isValidDocumentName(deepNestedName)).toBe(true);
expect(isValidDocumentName(longName)).toBe(true);
|
<|file_name|>36.rs<|end_file_name|><|fim▁begin|>/* Problem 36: Double-base palindromes
*
* The decimal number, 585 = 10010010012 (binary), is palindromic in both bases.
*
* Find the sum of all numbers, less than one million, which are palindromic in base 10 and base 2.
*
* (Please note that the palindromic number, in either base, may not include leading zeros.) */
use std::fmt::Write;
trait NumberFormatter {
fn format(number: u32, buffer: &mut String);
}
struct Binary;
struct Decimal;
impl NumberFormatter for Binary {
fn format(number: u32, buffer: &mut String) {
write!(buffer, "{:b}", number).unwrap();
}
}
impl NumberFormatter for Decimal {
fn format(number: u32, buffer: &mut String) {
write!(buffer, "{}", number).unwrap();
}
}
fn main() {
let mut b1 = String::with_capacity(50);
let mut b2 = String::with_capacity(50);
let result: u32 = (1..1_000_000)
.filter(|number| is_palindrome::<Decimal>(*number, &mut b1))
.filter(|number| is_palindrome::<Binary>(*number, &mut b2))
.sum();
println!("{}", result);
}
fn is_palindrome<T: NumberFormatter>(number: u32, buffer: &mut String) -> bool {
buffer.clear();
T::format(number, buffer);<|fim▁hole|> buffer
.chars()
.zip(buffer.chars().rev())
.all(|(from_start, from_end)| from_start == from_end)
}<|fim▁end|>
| |
<|file_name|>multilineArrow.ts<|end_file_name|><|fim▁begin|>// @onlyOwnGrammar - As this has type parameters that would be jsx in tsx files
const method1 = <T>(
param1: T,
param2: T
): Promise<T> => {
return param1;
};
const method2 = async <T>(
param1: T,
param2: T
): Promise<T> => {
<|fim▁hole|><|fim▁end|>
|
return param1;
};
|
<|file_name|>RecordTimer.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from enigma import eEPGCache, getBestPlayableServiceReference, \
eServiceReference, iRecordableService, quitMainloop, eActionMap, setPreferredTuner
from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
import Screens.Standby
import Screens.InfoBar
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML
import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference
from time import localtime, strftime, ctime, time
from bisect import insort
from sys import maxint
# ok, for descriptions etc we have:
# service reference (to get the service name)
# name (title)
# description (description)
# event data (ONLY for time adjustments etc.)
# parses an event, and gives out a (begin, end, name, description, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
if description:
name = ev.getEventName()
description = ev.getShortDescription()
if description == "":
description = ev.getExtendedDescription()
else:
name = ""
description = ""
begin = ev.getBeginTime()
end = begin + ev.getDuration()
eit = ev.getEventId()
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
return (begin, end, name, description, eit)
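# Illustrative use (the `event` object and `service_ref` below are hypothetical):
#   begin, end, name, description, eit = parseEvent(event)
#   entry = RecordTimerEntry(service_ref, begin, end, name, description, eit)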
class AFTEREVENT:
NONE = 0
STANDBY = 1
DEEPSTANDBY = 2
AUTO = 3
def findSafeRecordPath(dirname):
if not dirname:
return None
from Components import Harddisk
dirname = os.path.realpath(dirname)
mountpoint = Harddisk.findMountPoint(dirname)
if mountpoint in ('/', '/media'):
print '[RecordTimer] media is not mounted:', dirname
return None
if not os.path.isdir(dirname):
try:
os.makedirs(dirname)
except Exception, ex:
print '[RecordTimer] Failed to create dir "%s":' % dirname, ex
return None
return dirname
def checkForRecordings():
if NavigationInstance.instance.getRecordings():
return True
rec_time = NavigationInstance.instance.RecordTimer.getNextTimerTime(isWakeup=True)
return rec_time > 0 and (rec_time - time()) < 360
# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
wasInStandby = False
wasInDeepStandby = False
receiveRecordEvents = False
@staticmethod
def keypress(key=None, flag=1):
if flag and (RecordTimerEntry.wasInStandby or RecordTimerEntry.wasInDeepStandby):
RecordTimerEntry.wasInStandby = False
RecordTimerEntry.wasInDeepStandby = False
eActionMap.getInstance().unbindAction('', RecordTimerEntry.keypress)
@staticmethod
def setWasInDeepStandby():
RecordTimerEntry.wasInDeepStandby = True
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
@staticmethod
def setWasInStandby():
if not RecordTimerEntry.wasInStandby:
if not RecordTimerEntry.wasInDeepStandby:
eActionMap.getInstance().bindAction('', -maxint - 1, RecordTimerEntry.keypress)
RecordTimerEntry.wasInDeepStandby = False
RecordTimerEntry.wasInStandby = True
@staticmethod
def shutdown():
quitMainloop(1)
@staticmethod
def staticGotRecordEvent(recservice, event):
if event == iRecordableService.evEnd:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
if not checkForRecordings():
print "No recordings busy of sceduled within 6 minutes so shutdown"
RecordTimerEntry.shutdown() # immediate shutdown
elif event == iRecordableService.evStart:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"
@staticmethod
def stopTryQuitMainloop():
print "RecordTimer.stopTryQuitMainloop"
NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = False
@staticmethod
def TryQuitMainloop():
if not RecordTimerEntry.receiveRecordEvents and Screens.Standby.inStandby:
print "RecordTimer.TryQuitMainloop"
NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = True
			# send a fake event to check if other recordings are running or
# other timers start in a few seconds
RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd)
#################################################################
def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None, descramble = True, record_ecm = False, always_zap = False, zap_wakeup = "always", rename_repeat = True):
timer.TimerEntry.__init__(self, int(begin), int(end))
if checkOldTimers == True:
if self.begin < time() - 1209600:
self.begin = int(time())
if self.end < self.begin:
self.end = self.begin
assert isinstance(serviceref, ServiceReference)
if serviceref and serviceref.isRecordable():
self.service_ref = serviceref
else:
self.service_ref = ServiceReference(None)
self.eit = eit
self.dontSave = False
self.name = name
self.description = description
self.disabled = disabled
self.timer = None
self.__record_service = None
self.start_prepare = 0
self.justplay = justplay
self.always_zap = always_zap
self.zap_wakeup = zap_wakeup
self.afterEvent = afterEvent
self.dirname = dirname
self.dirnameHadToFallback = False
self.autoincrease = False
self.autoincreasetime = 3600 * 24 # 1 day
self.tags = tags or []
self.descramble = descramble
self.record_ecm = record_ecm
self.rename_repeat = rename_repeat
self.needChangePriorityFrontend = config.usage.recording_frontend_priority.value != "-2" and config.usage.recording_frontend_priority.value != config.usage.frontend_priority.value
self.change_frontend = False
self.InfoBarInstance = Screens.InfoBar.InfoBar.instance
self.ts_dialog = None
self.log_entries = []
self.resetState()
def __repr__(self):
return "RecordTimerEntry(name=%s, begin=%s, serviceref=%s, justplay=%s)" % (self.name, ctime(self.begin), self.service_ref, self.justplay)
def log(self, code, msg):
self.log_entries.append((int(time()), code, msg))
print "[TIMER]", msg
def calculateFilename(self, name=None):
service_name = self.service_ref.getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(self.begin))
name = name or self.name
filename = begin_date + " - " + service_name
if name:
if config.recording.filename_composition.value == "short":
filename = strftime("%Y%m%d", localtime(self.begin)) + " - " + name
elif config.recording.filename_composition.value == "long":
filename += " - " + name + " - " + self.description
else:
filename += " - " + name # standard
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
if not self.dirname:
dirname = findSafeRecordPath(defaultMoviePath())
else:
dirname = findSafeRecordPath(self.dirname)
if dirname is None:
dirname = findSafeRecordPath(defaultMoviePath())
self.dirnameHadToFallback = True
if not dirname:
return None
self.Filename = Directories.getRecordingFilename(filename, dirname)
self.log(0, "Filename calculated as: '%s'" % self.Filename)
return self.Filename
def tryPrepare(self):
if self.justplay:
return True
else:
if not self.calculateFilename():
self.do_backoff()
self.start_prepare = time() + self.backoff
return False
rec_ref = self.service_ref and self.service_ref.ref
if rec_ref and rec_ref.flags & eServiceReference.isGroup:
rec_ref = getBestPlayableServiceReference(rec_ref, eServiceReference())
if not rec_ref:
self.log(1, "'get best playable service for group... record' failed")
return False
self.setRecordingPreferredTuner()
self.record_service = rec_ref and NavigationInstance.instance.recordService(rec_ref)
if not self.record_service:
self.log(1, "'record service' failed")
self.setRecordingPreferredTuner(setdefault=True)
return False
name = self.name
description = self.description
if self.repeated:
epgcache = eEPGCache.getInstance()
queryTime=self.begin+(self.end-self.begin)/2
evt = epgcache.lookupEventTime(rec_ref, queryTime)
if evt:
if self.rename_repeat:
event_description = evt.getShortDescription()
if not event_description:
event_description = evt.getExtendedDescription()
if event_description and event_description != description:
description = event_description
event_name = evt.getEventName()
if event_name and event_name != name:
name = event_name
if not self.calculateFilename(event_name):
self.do_backoff()
self.start_prepare = time() + self.backoff
return False
event_id = evt.getEventId()
else:
event_id = -1
else:
event_id = self.eit
if event_id is None:
event_id = -1
prep_res=self.record_service.prepare(self.Filename + ".ts", self.begin, self.end, event_id, name.replace("\n", ""), description.replace("\n", ""), ' '.join(self.tags), bool(self.descramble), bool(self.record_ecm))
if prep_res:
if prep_res == -255:
self.log(4, "failed to write meta information")
else:
self.log(2, "'prepare' failed: error %d" % prep_res)
			# we must calc our start time before stopRecordService call because in Screens/Standby.py TryQuitMainloop tries to get
# the next start time in evEnd event handler...
self.do_backoff()
self.start_prepare = time() + self.backoff
NavigationInstance.instance.stopRecordService(self.record_service)
self.record_service = None
self.setRecordingPreferredTuner(setdefault=True)
return False
return True
def do_backoff(self):
if self.backoff == 0:
self.backoff = 5
else:
self.backoff *= 2
if self.backoff > 100:
self.backoff = 100
self.log(10, "backoff: retry in %d seconds" % self.backoff)
def activate(self):
next_state = self.state + 1
self.log(5, "activating state %d" % next_state)
if next_state == 1:
if self.always_zap:
if Screens.Standby.inStandby:
self.log(5, "wakeup and zap to recording service")
RecordTimerEntry.setWasInStandby()
#set service to zap after standby
Screens.Standby.inStandby.prev_running_service = self.service_ref.ref
Screens.Standby.inStandby.paused_service = None
#wakeup standby
Screens.Standby.inStandby.Power()
else:
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.setWasInStandby()
cur_zap_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference()
if cur_zap_ref and not cur_zap_ref.getPath():# we do not zap away if it is no live service
if self.checkingTimeshiftRunning():
if self.ts_dialog is None:
self.openChoiceActionBeforeZap()
else:
Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20)
self.setRecordingPreferredTuner()
self.failureCB(True)
self.log(5, "zap to recording service")
if next_state == self.StatePrepared:
if self.tryPrepare():
self.log(6, "prepare ok, waiting for begin")
# create file to "reserve" the filename
# because another recording at the same time on another service can try to record the same event
				# i.e. cable / sat... then the second recording needs its own extension; when we create the file
				# here, calculateFilename is happy
if not self.justplay:
open(self.Filename + ".ts", "w").close()
# Give the Trashcan a chance to clean up
try:
Trashcan.instance.cleanIfIdle(self.Filename)
except Exception, e:
print "[TIMER] Failed to call Trashcan.instance.cleanIfIdle()"
print "[TIMER] Error:", e
# fine. it worked, resources are allocated.
self.next_activation = self.begin
self.backoff = 0
return True
self.log(7, "prepare failed")
if self.first_try_prepare or (self.ts_dialog is not None and not self.checkingTimeshiftRunning()):
self.first_try_prepare = False
cur_ref = NavigationInstance.instance.getCurrentlyPlayingServiceReference()
if cur_ref and not cur_ref.getPath():
if self.always_zap:
return False
if Screens.Standby.inStandby:
self.setRecordingPreferredTuner()
self.failureCB(True)
elif self.checkingTimeshiftRunning():
if self.ts_dialog is None:
self.openChoiceActionBeforeZap()
elif not config.recording.asktozap.value:
self.log(8, "asking user to zap away")
Notifications.AddNotificationWithCallback(self.failureCB, MessageBox, _("A timer failed to record!\nDisable TV and try again?\n"), timeout=20, default=True)
else: # zap without asking
self.log(9, "zap without asking")
Notifications.AddNotification(MessageBox, _("In order to record a timer, the TV was switched to the recording service!\n"), type=MessageBox.TYPE_INFO, timeout=20)
self.setRecordingPreferredTuner()
self.failureCB(True)
elif cur_ref:
self.log(8, "currently running service is not a live service.. so stop it makes no sense")
else:
self.log(8, "currently no service running... so we dont need to stop it")
return False
elif next_state == self.StateRunning:
# if this timer has been cancelled, just go to "end" state.
if self.cancelled:
return True
if self.justplay:
if Screens.Standby.inStandby:
if RecordTimerEntry.wasInDeepStandby and self.zap_wakeup in ("always", "from_deep_standby") or self.zap_wakeup in ("always", "from_standby"):
self.log(11, "wakeup and zap")
RecordTimerEntry.setWasInStandby()
#set service to zap after standby
Screens.Standby.inStandby.prev_running_service = self.service_ref.ref
Screens.Standby.inStandby.paused_service = None
#wakeup standby
Screens.Standby.inStandby.Power()
else:
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.setWasInStandby()
if self.checkingTimeshiftRunning():
if self.ts_dialog is None:
self.openChoiceActionBeforeZap()
else:
self.log(11, "zapping")
NavigationInstance.instance.playService(self.service_ref.ref)
return True
else:
self.log(11, "start recording")
if RecordTimerEntry.wasInDeepStandby:
RecordTimerEntry.keypress()
if Screens.Standby.inStandby: #In case some plugin did put the receiver already in standby
config.misc.standbyCounter.value = 0
else:
Notifications.AddNotification(Screens.Standby.Standby, StandbyCounterIncrease=False)
record_res = self.record_service.start()
self.setRecordingPreferredTuner(setdefault=True)
if record_res:
self.log(13, "start record returned %d" % record_res)
self.do_backoff()
# retry
self.begin = time() + self.backoff
return False
# Tell the trashcan we started recording. The trashcan gets events,
# but cannot tell what the associated path is.
Trashcan.instance.markDirty(self.Filename)
return True
elif next_state == self.StateEnded:
old_end = self.end
self.ts_dialog = None
if self.setAutoincreaseEnd():
self.log(12, "autoincrase recording %d minute(s)" % int((self.end - old_end)/60))
self.state -= 1
return True
self.log(12, "stop recording")
if not self.justplay:
NavigationInstance.instance.stopRecordService(self.record_service)
self.record_service = None
if not checkForRecordings():
if self.afterEvent == AFTEREVENT.DEEPSTANDBY or self.afterEvent == AFTEREVENT.AUTO and (Screens.Standby.inStandby or RecordTimerEntry.wasInStandby) and not config.misc.standbyCounter.value:
if not Screens.Standby.inTryQuitMainloop:
if Screens.Standby.inStandby:
RecordTimerEntry.TryQuitMainloop()
else:
Notifications.AddNotificationWithCallback(self.sendTryQuitMainloopNotification, MessageBox, _("A finished record timer wants to shut down\nyour receiver. Shutdown now?"), timeout=20, default=True)
elif self.afterEvent == AFTEREVENT.STANDBY or self.afterEvent == AFTEREVENT.AUTO and RecordTimerEntry.wasInStandby:
if not Screens.Standby.inStandby:
Notifications.AddNotificationWithCallback(self.sendStandbyNotification, MessageBox, _("A finished record timer wants to set your\nreceiver to standby. Do that now?"), timeout=20, default=True)
else:
RecordTimerEntry.keypress()
return True
def setAutoincreaseEnd(self, entry = None):
if not self.autoincrease:
return False
if entry is None:
new_end = int(time()) + self.autoincreasetime
else:
new_end = entry.begin - 30
dummyentry = RecordTimerEntry(self.service_ref, self.begin, new_end, self.name, self.description, self.eit, disabled=True, justplay = self.justplay, afterEvent = self.afterEvent, dirname = self.dirname, tags = self.tags)
dummyentry.disabled = self.disabled
timersanitycheck = TimerSanityCheck(NavigationInstance.instance.RecordTimer.timer_list, dummyentry)
if not timersanitycheck.check():
simulTimerList = timersanitycheck.getSimulTimerList()
if simulTimerList is not None and len(simulTimerList) > 1:
new_end = simulTimerList[1].begin
				new_end -= 30 # leave 30 seconds of prepare time
if new_end <= time():
return False
self.end = new_end
return True
def setRecordingPreferredTuner(self, setdefault=False):
if self.needChangePriorityFrontend:
elem = None
if not self.change_frontend and not setdefault:
elem = config.usage.recording_frontend_priority.value
self.change_frontend = True
elif self.change_frontend and setdefault:
elem = config.usage.frontend_priority.value
self.change_frontend = False
if elem is not None:
setPreferredTuner(int(elem))
def checkingTimeshiftRunning(self):
return config.usage.check_timeshift.value and self.InfoBarInstance and self.InfoBarInstance.timeshiftEnabled() and self.InfoBarInstance.timeshift_was_activated
def openChoiceActionBeforeZap(self):
if self.ts_dialog is None:
type = _("record")
if self.justplay:
type = _("zap")
elif self.always_zap:
type = _("zap and record")
message = _("You must switch to the service %s (%s - '%s')!\n") % (type, self.service_ref.getServiceName(), self.name)
if self.repeated:
message += _("Attention, this is repeated timer!\n")
message += _("Timeshift is running. Select an action.\n")
choice = [(_("Zap"), "zap"), (_("Don't zap and disable timer"), "disable"), (_("Don't zap and remove timer"), "remove")]
if not self.InfoBarInstance.save_timeshift_file:
choice.insert(1, (_("Save timeshift in movie dir and zap"), "save_movie"))
if self.InfoBarInstance.timeshiftActivated():
choice.insert(0, (_("Save timeshift and zap"), "save"))
else:
choice.insert(1, (_("Save timeshift and zap"), "save"))
else:
message += _("Reminder, you have chosen to save timeshift file.")
#if self.justplay or self.always_zap:
# choice.insert(2, (_("Don't zap"), "continue"))
choice.insert(2, (_("Don't zap"), "continue"))
def zapAction(choice):
start_zap = True
if choice:
if choice in ("zap", "save", "save_movie"):
self.log(8, "zap to recording service")
if choice in ("save", "save_movie"):
ts = self.InfoBarInstance.getTimeshift()
if ts and ts.isTimeshiftEnabled():
if choice =="save_movie":
self.InfoBarInstance.save_timeshift_in_movie_dir = True
self.InfoBarInstance.save_timeshift_file = True
ts.saveTimeshiftFile()
del ts
self.InfoBarInstance.saveTimeshiftFiles()
elif choice == "disable":
self.disable()
NavigationInstance.instance.RecordTimer.timeChanged(self)
start_zap = False
self.log(8, "zap canceled by the user, timer disabled")
elif choice == "remove":
start_zap = False
self.afterEvent = AFTEREVENT.NONE
NavigationInstance.instance.RecordTimer.removeEntry(self)
self.log(8, "zap canceled by the user, timer removed")
elif choice == "continue":
if self.justplay:
self.end = self.begin
start_zap = False
self.log(8, "zap canceled by the user")
if start_zap:
if not self.justplay:
self.setRecordingPreferredTuner()
self.failureCB(True)
else:
self.log(8, "zapping")
NavigationInstance.instance.playService(self.service_ref.ref)
self.ts_dialog = self.InfoBarInstance.session.openWithCallback(zapAction, MessageBox, message, simple=True, list=choice, timeout=20)
def sendStandbyNotification(self, answer):
RecordTimerEntry.keypress()
if answer:
Notifications.AddNotification(Screens.Standby.Standby)
def sendTryQuitMainloopNotification(self, answer):
RecordTimerEntry.keypress()
if answer:
Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 1)
def getNextActivation(self):
if self.state == self.StateEnded:
return self.end
next_state = self.state + 1
return {self.StatePrepared: self.start_prepare,
self.StateRunning: self.begin,
self.StateEnded: self.end }[next_state]
def failureCB(self, answer):
self.ts_dialog = None
if answer == True:
self.log(13, "ok, zapped away")
#NavigationInstance.instance.stopUserServices()
NavigationInstance.instance.playService(self.service_ref.ref)
else:
self.log(14, "user didn't want to zap away, record will probably fail")
def timeChanged(self):
old_prepare = self.start_prepare
self.start_prepare = self.begin - self.prepare_time
self.backoff = 0
if int(old_prepare) != int(self.start_prepare):
self.log(15, "record time changed, start prepare is now: %s" % ctime(self.start_prepare))
def gotRecordEvent(self, record, event):
# TODO: this is not working (never true), please fix. (comparing two swig wrapped ePtrs)
if self.__record_service.__deref__() != record.__deref__():
return
self.log(16, "record event %d" % event)
if event == iRecordableService.evRecordWriteError:
print "WRITE ERROR on recording, disk full?"
# show notification. the 'id' will make sure that it will be
# displayed only once, even if more timers are failing at the
# same time. (which is very likely in case of disk fullness)
Notifications.AddPopup(text = _("Write error while recording. Disk full?\n"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "DiskFullMessage")
# ok, the recording has been stopped. we need to properly note
			# that in our state, while also keeping the possibility to re-try.
# TODO: this has to be done.
elif event == iRecordableService.evStart:
text = _("A record has been started:\n%s") % self.name
notify = config.usage.show_message_when_recording_starts.value and not Screens.Standby.inStandby and self.InfoBarInstance and self.InfoBarInstance.execing
if self.dirnameHadToFallback:
text = '\n'.join((text, _("Please note that the previously selected media could not be accessed and therefore the default directory is being used instead.")))
notify = True
if notify:
Notifications.AddPopup(text = text, type = MessageBox.TYPE_INFO, timeout = 3)
elif event == iRecordableService.evRecordAborted:
NavigationInstance.instance.RecordTimer.removeEntry(self)
# we have record_service as property to automatically subscribe to record service events
def setRecordService(self, service):
if self.__record_service is not None:
print "[remove callback]"
NavigationInstance.instance.record_event.remove(self.gotRecordEvent)
self.__record_service = service
if self.__record_service is not None:
print "[add callback]"
NavigationInstance.instance.record_event.append(self.gotRecordEvent)
record_service = property(lambda self: self.__record_service, setRecordService)
def createTimer(xml):
begin = int(xml.get("begin"))
end = int(xml.get("end"))
serviceref = ServiceReference(xml.get("serviceref").encode("utf-8"))
description = xml.get("description").encode("utf-8")
repeated = xml.get("repeated").encode("utf-8")
rename_repeat = long(xml.get("rename_repeat") or "1")
disabled = long(xml.get("disabled") or "0")
justplay = long(xml.get("justplay") or "0")
always_zap = long(xml.get("always_zap") or "0")
zap_wakeup = str(xml.get("zap_wakeup") or "always")
afterevent = str(xml.get("afterevent") or "nothing")
afterevent = {
"nothing": AFTEREVENT.NONE,
"standby": AFTEREVENT.STANDBY,
"deepstandby": AFTEREVENT.DEEPSTANDBY,
"auto": AFTEREVENT.AUTO
}[afterevent]
eit = xml.get("eit")
if eit and eit != "None":
eit = long(eit)
else:
eit = None
location = xml.get("location")
if location and location != "None":
location = location.encode("utf-8")
else:
location = None
tags = xml.get("tags")
if tags and tags != "None":
tags = tags.encode("utf-8").split(' ')
else:
tags = None
descramble = int(xml.get("descramble") or "1")
record_ecm = int(xml.get("record_ecm") or "0")
name = xml.get("name").encode("utf-8")
#filename = xml.get("filename").encode("utf-8")
entry = RecordTimerEntry(serviceref, begin, end, name, description, eit, disabled, justplay, afterevent, dirname = location, tags = tags, descramble = descramble, record_ecm = record_ecm, always_zap = always_zap, zap_wakeup = zap_wakeup, rename_repeat = rename_repeat)
entry.repeated = int(repeated)
for l in xml.findall("log"):
time = int(l.get("time"))
code = int(l.get("code"))
msg = l.text.strip().encode("utf-8")
entry.log_entries.append((time, code, msg))
return entry
class RecordTimer(timer.Timer):
def __init__(self):
timer.Timer.__init__(self)
self.Filename = Directories.resolveFilename(Directories.SCOPE_CONFIG, "timers.xml")
try:
self.loadTimer()
except IOError:
print "unable to load timers from file!"
def doActivate(self, w):
# when activating a timer which has already passed,
		# simply abort the timer. don't run through all the stages.
if w.shouldSkip():
w.state = RecordTimerEntry.StateEnded
else:
			# when activate() returns true, this means "accepted".
# otherwise, the current state is kept.
# the timer entry itself will fix up the delay then.
if w.activate():
w.state += 1
self.timer_list.remove(w)
		# did this timer reach the last state?
if w.state < RecordTimerEntry.StateEnded:
# no, sort it into active list
insort(self.timer_list, w)
else:
# yes. Process repeated, and re-add.
if w.repeated:
w.processRepeated()
w.state = RecordTimerEntry.StateWaiting
w.first_try_prepare = True
self.addTimerEntry(w)
else:
# Remove old timers as set in config
self.cleanupDaily(config.recording.keep_timers.value)
insort(self.processed_timers, w)
self.stateChanged(w)
def isRecording(self):
for timer in self.timer_list:
if timer.isRunning() and not timer.justplay:
return True
return False
def loadTimer(self):
# TODO: PATH!
if not Directories.fileExists(self.Filename):
return
try:
doc = xml.etree.cElementTree.parse(self.Filename)
except SyntaxError:
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
AddPopup(_("The timer file (timers.xml) is corrupt and could not be loaded."), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
print "timers.xml failed to load!"
try:
import os
os.rename(self.Filename, self.Filename + "_old")
except (IOError, OSError):
print "renaming broken timer failed"
return
except IOError:
print "timers.xml not found!"
return
root = doc.getroot()
# put out a message when at least one timer overlaps
checkit = True
for timer in root.findall("timer"):
newTimer = createTimer(timer)
if (self.record(newTimer, True, dosave=False) is not None) and (checkit == True):
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
AddPopup(_("Timer overlap in timers.xml detected!\nPlease recheck it!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "TimerLoadFailed")
				checkit = False # for the moment it is enough if the message is displayed once
def saveTimer(self):
#root_element = xml.etree.cElementTree.Element('timers')
#root_element.text = "\n"
#for timer in self.timer_list + self.processed_timers:
# some timers (instant records) don't want to be saved.
# skip them
#if timer.dontSave:
#continue
#t = xml.etree.cElementTree.SubElement(root_element, 'timers')
#t.set("begin", str(int(timer.begin)))
#t.set("end", str(int(timer.end)))
#t.set("serviceref", str(timer.service_ref))
#t.set("repeated", str(timer.repeated))
#t.set("name", timer.name)
#t.set("description", timer.description)
#t.set("afterevent", str({
# AFTEREVENT.NONE: "nothing",
# AFTEREVENT.STANDBY: "standby",
# AFTEREVENT.DEEPSTANDBY: "deepstandby",
# AFTEREVENT.AUTO: "auto"}))
#if timer.eit is not None:
# t.set("eit", str(timer.eit))
#if timer.dirname is not None:
# t.set("location", str(timer.dirname))
#t.set("disabled", str(int(timer.disabled)))
#t.set("justplay", str(int(timer.justplay)))
#t.text = "\n"
#t.tail = "\n"
#for time, code, msg in timer.log_entries:
#l = xml.etree.cElementTree.SubElement(t, 'log')
#l.set("time", str(time))
#l.set("code", str(code))
#l.text = str(msg)
#l.tail = "\n"
#doc = xml.etree.cElementTree.ElementTree(root_element)
#doc.write(self.Filename)
list = []
list.append('<?xml version="1.0" ?>\n')
list.append('<timers>\n')
for timer in self.timer_list + self.processed_timers:
if timer.dontSave:
continue
list.append('<timer')
list.append(' begin="' + str(int(timer.begin)) + '"')
list.append(' end="' + str(int(timer.end)) + '"')
list.append(' serviceref="' + stringToXML(str(timer.service_ref)) + '"')
list.append(' repeated="' + str(int(timer.repeated)) + '"')
list.append(' name="' + str(stringToXML(timer.name)) + '"')
list.append(' description="' + str(stringToXML(timer.description)) + '"')
list.append(' afterevent="' + str(stringToXML({
AFTEREVENT.NONE: "nothing",
AFTEREVENT.STANDBY: "standby",
AFTEREVENT.DEEPSTANDBY: "deepstandby",
AFTEREVENT.AUTO: "auto"
}[timer.afterEvent])) + '"')
if timer.eit is not None:
list.append(' eit="' + str(timer.eit) + '"')
if timer.dirname is not None:
list.append(' location="' + str(stringToXML(timer.dirname)) + '"')
if timer.tags is not None:
list.append(' tags="' + str(stringToXML(' '.join(timer.tags))) + '"')
list.append(' disabled="' + str(int(timer.disabled)) + '"')
list.append(' justplay="' + str(int(timer.justplay)) + '"')
list.append(' always_zap="' + str(int(timer.always_zap)) + '"')
list.append(' zap_wakeup="' + str(timer.zap_wakeup) + '"')
list.append(' rename_repeat="' + str(int(timer.rename_repeat)) + '"')
list.append(' descramble="' + str(int(timer.descramble)) + '"')
list.append(' record_ecm="' + str(int(timer.record_ecm)) + '"')
list.append('>\n')
if config.recording.debug.value:
for time, code, msg in timer.log_entries:
list.append('<log')
list.append(' code="' + str(code) + '"')
list.append(' time="' + str(time) + '"')
list.append('>')
list.append(str(stringToXML(msg)))
list.append('</log>\n')
list.append('</timer>\n')
list.append('</timers>\n')
file = open(self.Filename + ".writing", "w")
for x in list:
file.write(x)
file.flush()
import os
os.fsync(file.fileno())
file.close()
os.rename(self.Filename + ".writing", self.Filename)
def getNextZapTime(self, isWakeup=False):
now = time()
for timer in self.timer_list:
if not timer.justplay or timer.begin < now or isWakeup and timer.zap_wakeup in ("from_standby", "never"):
continue
return timer.begin
return -1
def getNextRecordingTime(self):
now = time()
for timer in self.timer_list:
next_act = timer.getNextActivation()
if timer.justplay or next_act < now:
continue
return next_act
return -1
def getNextTimerTime(self, isWakeup=False):
now = time()
for timer in self.timer_list:
next_act = timer.getNextActivation()
if next_act < now or isWakeup and timer.justplay and timer.zap_wakeup in ("from_standby", "never"):
continue
return next_act
return -1
def isNextRecordAfterEventActionAuto(self):
now = time()
t = None
for timer in self.timer_list:
if timer.justplay or timer.begin < now:
continue
if t is None or t.begin == timer.begin:
t = timer
if t.afterEvent == AFTEREVENT.AUTO:
return True
return False
	def record(self, entry, ignoreTSC=False, dosave=True): # called by loadTimer with dosave=False
timersanitycheck = TimerSanityCheck(self.timer_list,entry)
if not timersanitycheck.check():
if ignoreTSC != True:
print "timer conflict detected!"
print timersanitycheck.getSimulTimerList()
return timersanitycheck.getSimulTimerList()
else:
print "ignore timer conflict"
elif timersanitycheck.doubleCheck():
print "ignore double timer"
return None
entry.timeChanged()
print "[Timer] Record " + str(entry)
entry.Timer = self
self.addTimerEntry(entry)
if dosave:
self.saveTimer()
return None
def isInRepeatTimer(self, timer, event):
time_match = 0
is_editable = False
begin = event.getBeginTime()
duration = event.getDuration()
end = begin + duration
timer_end = timer.end
if timer.disabled and timer.isRunning():
if begin < timer.begin <= end or timer.begin <= begin <= timer_end:
return True
else:
return False
if timer.justplay and (timer_end - timer.begin) <= 1:
timer_end += 60
bt = localtime(begin)
bday = bt.tm_wday
begin2 = 1440 + bt.tm_hour * 60 + bt.tm_min
end2 = begin2 + duration / 60
xbt = localtime(timer.begin)
xet = localtime(timer_end)
offset_day = False
checking_time = timer.begin < begin or begin <= timer.begin <= end
if xbt.tm_yday != xet.tm_yday:
oday = bday - 1
if oday == -1: oday = 6
offset_day = timer.repeated & (1 << oday)
xbegin = 1440 + xbt.tm_hour * 60 + xbt.tm_min
xend = xbegin + ((timer_end - timer.begin) / 60)
if xend < xbegin:
xend += 1440
if timer.repeated & (1 << bday) and checking_time:
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
is_editable = True
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
summary_end = (xend - end2) * 60
is_editable = not summary_end and True or time_match >= summary_end
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
summary_end = (begin2 - xbegin) * 60
is_editable = not summary_end and True or time_match >= summary_end
else:
# recording whole event
time_match = (end2 - begin2) * 60
is_editable = True
elif offset_day:
xbegin -= 1440
xend -= 1440
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
is_editable = True
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
summary_end = (xend - end2) * 60
is_editable = not summary_end and True or time_match >= summary_end
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
summary_end = (begin2 - xbegin) * 60
is_editable = not summary_end and True or time_match >= summary_end
else:
# recording whole event
time_match = (end2 - begin2) * 60
is_editable = True
elif offset_day and checking_time:
xbegin -= 1440
xend -= 1440
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
is_editable = True
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
summary_end = (xend - end2) * 60
is_editable = not summary_end and True or time_match >= summary_end
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
summary_end = (begin2 - xbegin) * 60
is_editable = not summary_end and True or time_match >= summary_end
else:
# recording whole event
time_match = (end2 - begin2) * 60
is_editable = True
return time_match and is_editable
def isInTimer(self, eventid, begin, duration, service):
returnValue = None
type = 0
time_match = 0
bt = None
check_offset_time = not config.recording.margin_before.value and not config.recording.margin_after.value
end = begin + duration
refstr = ':'.join(service.split(':')[:11])
for x in self.timer_list:
check = ':'.join(x.service_ref.ref.toString().split(':')[:11]) == refstr
if not check:
sref = x.service_ref.ref
parent_sid = sref.getUnsignedData(5)
parent_tsid = sref.getUnsignedData(6)
if parent_sid and parent_tsid:
# check for subservice
sid = sref.getUnsignedData(1)
tsid = sref.getUnsignedData(2)
sref.setUnsignedData(1, parent_sid)
sref.setUnsignedData(2, parent_tsid)
sref.setUnsignedData(5, 0)
sref.setUnsignedData(6, 0)
check = sref.toCompareString() == refstr
num = 0
if check:
check = False
event = eEPGCache.getInstance().lookupEventId(sref, eventid)
num = event and event.getNumOfLinkageServices() or 0
sref.setUnsignedData(1, sid)
sref.setUnsignedData(2, tsid)
sref.setUnsignedData(5, parent_sid)
sref.setUnsignedData(6, parent_tsid)
for cnt in range(num):
subservice = event.getLinkageService(sref, cnt)
if sref.toCompareString() == subservice.toCompareString():
check = True
break
if check:
timer_end = x.end
timer_begin = x.begin
type_offset = 0
if not x.repeated and check_offset_time:
if 0 < end - timer_end <= 59:
timer_end = end
elif 0 < timer_begin - begin <= 59:
timer_begin = begin
if x.justplay:
type_offset = 5
if (timer_end - x.begin) <= 1:
timer_end += 60
if x.always_zap:
type_offset = 10
timer_repeat = x.repeated
# if set 'don't stop current event but disable coming events' for repeat timer
running_only_curevent = x.disabled and x.isRunning() and timer_repeat
if running_only_curevent:
timer_repeat = 0
type_offset += 15
if timer_repeat != 0:
type_offset += 15
if bt is None:
bt = localtime(begin)
bday = bt.tm_wday
begin2 = 1440 + bt.tm_hour * 60 + bt.tm_min
end2 = begin2 + duration / 60
xbt = localtime(x.begin)
xet = localtime(timer_end)
offset_day = False
checking_time = x.begin < begin or begin <= x.begin <= end
if xbt.tm_yday != xet.tm_yday:
oday = bday - 1
if oday == -1: oday = 6
offset_day = x.repeated & (1 << oday)
xbegin = 1440 + xbt.tm_hour * 60 + xbt.tm_min
xend = xbegin + ((timer_end - x.begin) / 60)
if xend < xbegin:
xend += 1440
if x.repeated & (1 << bday) and checking_time:
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
type = type_offset + 3
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
type = type_offset + 1
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
type = type_offset + 4
else:
# recording whole event
time_match = (end2 - begin2) * 60
type = type_offset + 2
elif offset_day:
xbegin -= 1440
xend -= 1440
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
type = type_offset + 3
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
type = type_offset + 1
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
type = type_offset + 4
else:
# recording whole event
time_match = (end2 - begin2) * 60
type = type_offset + 2
elif offset_day and checking_time:
xbegin -= 1440
xend -= 1440
if begin2 < xbegin <= end2:
if xend < end2:
# recording within event
time_match = (xend - xbegin) * 60
type = type_offset + 3
else:
# recording last part of event
time_match = (end2 - xbegin) * 60
type = type_offset + 1
elif xbegin <= begin2 <= xend:
if xend < end2:
# recording first part of event
time_match = (xend - begin2) * 60
type = type_offset + 4
else:
# recording whole event
time_match = (end2 - begin2) * 60
type = type_offset + 2
else:
if begin < timer_begin <= end:
if timer_end < end:
# recording within event
time_match = timer_end - timer_begin
type = type_offset + 3
else:
# recording last part of event
time_match = end - timer_begin
type = type_offset + 1
elif timer_begin <= begin <= timer_end:
if timer_end < end:
# recording first part of event
time_match = timer_end - begin
type = type_offset + 4
else:
# recording whole event
time_match = end - begin
type = type_offset + 2
if time_match:
if type in (2,7,12,17,22,27):
# When full recording do not look further
returnValue = (time_match, [type])
break
elif returnValue:
if type not in returnValue[1]:
returnValue[1].append(type)
else:
returnValue = (time_match, [type])
return returnValue
def removeEntry(self, entry):
print "[Timer] Remove " + str(entry)
# avoid re-enqueuing
entry.repeated = False
# abort timer.
# this sets the end time to current time, so timer will be stopped.
entry.autoincrease = False
entry.abort()
if entry.state != entry.StateEnded:
self.timeChanged(entry)
print "state: ", entry.state
print "in processed: ", entry in self.processed_timers
print "in running: ", entry in self.timer_list
# autoincrease instanttimer if possible
if not entry.dontSave:
for x in self.timer_list:
if x.setAutoincreaseEnd():
self.timeChanged(x)
# now the timer should be in the processed_timers list. remove it from there.
self.processed_timers.remove(entry)
self.saveTimer()
def shutdown(self):
self.saveTimer()<|fim▁end|>
|
import os
|
<|file_name|>Code.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import itk
itk.auto_progress(2)
if len(sys.argv) < 3:
print("Usage: " + sys.argv[0] + " <InputFileName> <OutputFileName> [Extension]")
sys.exit(1)
<|fim▁hole|> extension = sys.argv[3]
else:
extension = ".png"
fileNameFormat = outputFileName + "-%d" + extension
Dimension = 3
PixelType = itk.UC
InputImageType = itk.Image[PixelType, Dimension]
ReaderType = itk.ImageFileReader[InputImageType]
reader = ReaderType.New()
reader.SetFileName(inputFileName)
OutputPixelType = itk.UC
RescaleImageType = itk.Image[OutputPixelType, Dimension]
RescaleFilterType = itk.RescaleIntensityImageFilter[InputImageType, RescaleImageType]
rescale = RescaleFilterType.New()
rescale.SetInput(reader.GetOutput())
rescale.SetOutputMinimum(0)
rescale.SetOutputMaximum(255)
rescale.UpdateLargestPossibleRegion()
region = reader.GetOutput().GetLargestPossibleRegion()
size = region.GetSize()
fnames = itk.NumericSeriesFileNames.New()
fnames.SetStartIndex(0)
fnames.SetEndIndex(size[2] - 1)
fnames.SetIncrementIndex(1)
fnames.SetSeriesFormat(fileNameFormat)
OutputImageType = itk.Image[OutputPixelType, 2]
WriterType = itk.ImageSeriesWriter[RescaleImageType, OutputImageType]
writer = WriterType.New()
writer.SetInput(rescale.GetOutput())
writer.SetFileNames(fnames.GetFileNames())
writer.Update()<|fim▁end|>
|
inputFileName = sys.argv[1]
outputFileName = sys.argv[2]
if len(sys.argv) > 3:
|
<|file_name|>ChangeInventoryEvent.java<|end_file_name|><|fim▁begin|>/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org><|fim▁hole|> * Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.event.item.inventory;
import org.spongepowered.api.entity.living.Living;
import org.spongepowered.api.event.Cancellable;
import org.spongepowered.api.item.inventory.ItemStack;
public interface ChangeInventoryEvent extends TargetInventoryEvent, AffectSlotEvent, Cancellable {
/**
* Fired when a {@link Living} changes it's equipment.
*/
interface Equipment extends ChangeInventoryEvent {}
/**
* Fired when a {@link Living} changes it's held {@link ItemStack}.
*/
interface Held extends ChangeInventoryEvent {}
interface Transfer extends ChangeInventoryEvent {}
interface Pickup extends ChangeInventoryEvent {}
}<|fim▁end|>
|
* Copyright (c) contributors
*
|
<|file_name|>android.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
try:
from httplib import HTTPSConnection
from urlparse import urlparse
except ImportError:
from http.client import HTTPSConnection
from urllib.parse import urlparse
from json import dumps, loads
from django.conf import settings
class GCMError(Exception):
pass
def send(user, message, **kwargs):
"""
Site: https://developers.google.com
API: https://developers.google.com/cloud-messaging/
Desc: Android notifications
"""
headers = {
"Content-type": "application/json",
"Authorization": "key=" + kwargs.pop("gcm_key", settings.GCM_KEY)
}
hook_url = 'https://android.googleapis.com/gcm/send'
data = {
"registration_ids": [user],
"data": {<|fim▁hole|> }
}
data['data'].update(kwargs)
up = urlparse(hook_url)
http = HTTPSConnection(up.netloc)
http.request(
"POST", up.path,
headers=headers,
body=dumps(data))
response = http.getresponse()
if response.status != 200:
raise GCMError(response.reason)
body = response.read()
if loads(body).get("failure") > 0:
raise GCMError(repr(body))
return True<|fim▁end|>
|
"title": kwargs.pop("event"),
'message': message,
|
<|file_name|>TabsMenu.java<|end_file_name|><|fim▁begin|>package com.ilad.teamwork;
import org.openqa.selenium.WebElement;
import io.appium.java_client.android.AndroidDriver;
public class TabsMenu extends AbstractTeamWork {
public TabsMenu(AndroidDriver<WebElement> driver) {
super(driver);
}
public AllProjectsPage goToProjects() {
driver.findElementByXPath("//android.widget.TextView[@text='Projects']").click();<|fim▁hole|><|fim▁end|>
|
return new AllProjectsPage(driver);
}
}
|
<|file_name|>YieldAssignThrow.js<|end_file_name|><|fim▁begin|>function* f() {
var x;
try {
x = yield 1;
} catch (ex) {
yield ex;
}
return 2;<|fim▁hole|>expect(g.next()).toEqual({value: 2, done: true});
g = f();
expect(g.next()).toEqual({value: 1, done: false});
expect(g.throw(3)).toEqual({value: 3, done: false});
expect(g.next()).toEqual({value: 2, done: true});<|fim▁end|>
|
}
var g = f();
expect(g.next()).toEqual({value: 1, done: false});
|
<|file_name|>ClassicState2DDrawer.ts<|end_file_name|><|fim▁begin|>module Engine {
export class ClassicState2DDrawer implements State2DDrawer {
draw(gl: WebGLRenderingContext, canvas: HTMLCanvasElement, frameManager: FrameManager, frameTimeInfo: FrameTimeInfo, cam: CameraState, state: State2D): void {
var helper = new States.ClassicGLStateHelper();
helper.startFrame(gl, canvas);
state.drawFrame2D(gl, canvas, frameManager, frameTimeInfo, cam);
}
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>0015_pattern_json.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pattern', '0014_pattern_editnumber'),
]<|fim▁hole|> migrations.AddField(
model_name='pattern',
name='json',
field=models.TextField(null=True),
),
]<|fim▁end|>
|
operations = [
|
<|file_name|>decorators.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------<|fim▁hole|> * Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { done } from './util';
export function debounce(delay: number): Function {
return decorate((fn, key) => {
const timerKey = `$debounce$${key}`;
return function (this: any, ...args: any[]) {
clearTimeout(this[timerKey]);
this[timerKey] = setTimeout(() => fn.apply(this, args), delay);
};
});
}
export const throttle = decorate(_throttle);
function _throttle<T>(fn: Function, key: string): Function {
const currentKey = `$throttle$current$${key}`;
const nextKey = `$throttle$next$${key}`;
const trigger = function (this: any, ...args: any[]) {
if (this[nextKey]) {
return this[nextKey];
}
if (this[currentKey]) {
this[nextKey] = done(this[currentKey]).then(() => {
this[nextKey] = undefined;
return trigger.apply(this, args);
});
return this[nextKey];
}
this[currentKey] = fn.apply(this, args) as Promise<T>;
const clear = () => this[currentKey] = undefined;
done(this[currentKey]).then(clear, clear);
return this[currentKey];
};
return trigger;
}
function decorate(decorator: (fn: Function, key: string) => Function): Function {
return (_target: any, key: string, descriptor: any) => {
let fnKey: string | null = null;
let fn: Function | null = null;
if (typeof descriptor.value === 'function') {
fnKey = 'value';
fn = descriptor.value;
} else if (typeof descriptor.get === 'function') {
fnKey = 'get';
fn = descriptor.get;
}
if (!fn || !fnKey) {
throw new Error('not supported');
}
descriptor[fnKey] = decorator(fn, key);
};
}<|fim▁end|>
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
<|file_name|>linux_groups.go<|end_file_name|><|fim▁begin|>/*
* Github Authorized Keys - Use GitHub teams to manage system user accounts and authorized_keys
*
* Copyright 2016 Cloud Posse, LLC <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package api
import (
"errors"
"os/user"
)
const (
// Group file contains one row per group
	// Format of the row consists of at least 3 columns
// https://en.wikipedia.org/wiki/Group_identifier
countOfColumnsInGroup = 3
// Group name stored in 0 column
nameColumnNumberInGroup = 0
// Group password flag stored in 1 column
passwordFlagColumnNumberInGroup = 1
// Group GID stored in 2 column
gidColumnNumberInGroup = 2
	// Group user list stored in 3 column
usersColumnNumberInGroup = 3
)
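// An /etc/group row mapped onto the columns above (illustrative values):
//   "wheel:x:10:alice,bob" -> name "wheel", password flag "x", GID "10", users "alice,bob"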
func (linux *Linux) groupLookup(groupName string) (*user.Group, error) {
groupInfo, err := linux.getEntity("group", groupName)
if err != nil {
return nil, user.UnknownGroupError(groupName)
}
<|fim▁hole|> if len(groupInfo) < countOfColumnsInGroup {
return nil, errors.New("Wrong format of /etc/group")
}
group := user.Group{
Gid: groupInfo[gidColumnNumberInGroup],
Name: groupInfo[nameColumnNumberInGroup],
}
return &group, err
}
func (linux *Linux) groupLookupByID(groupID string) (*user.Group, error) {
groupInfo, err := linux.getEntity("group", groupID)
if err != nil {
return nil, user.UnknownGroupIdError(groupID)
}
if len(groupInfo) < countOfColumnsInGroup {
return nil, errors.New("Wrong format of /etc/group")
}
group := user.Group{
Gid: groupInfo[gidColumnNumberInGroup],
Name: groupInfo[nameColumnNumberInGroup],
}
return &group, err
}
// GroupExists - check if group {groupName} exists
func (linux *Linux) GroupExists(groupName string) bool {
group, _ := linux.groupLookup(groupName)
return group != nil
}
func (linux *Linux) groupExistsByID(groupID string) bool {
group, _ := linux.groupLookupByID(groupID)
return group != nil
}<|fim▁end|>
| |
<|file_name|>udp_server.go<|end_file_name|><|fim▁begin|>// Copyright 2019, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package transport // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver/transport"
import (
"bytes"
"context"
"io"
"net"
"strings"
"sync"
metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
"go.opentelemetry.io/collector/consumer"
internaldata "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/opencensus"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/carbonreceiver/protocol"
)
type udpServer struct {
wg sync.WaitGroup
packetConn net.PacketConn
reporter Reporter
}
var _ Server = (*udpServer)(nil)
// NewUDPServer creates a transport.Server using UDP as its transport.
func NewUDPServer(addr string) (Server, error) {
packetConn, err := net.ListenPacket("udp", addr)
if err != nil {
return nil, err
}
u := udpServer{
packetConn: packetConn,
}
return &u, nil
}
func (u *udpServer) ListenAndServe(
parser protocol.Parser,
nextConsumer consumer.Metrics,
reporter Reporter,
) error {
if parser == nil || nextConsumer == nil || reporter == nil {
return errNilListenAndServeParameters
}
u.reporter = reporter
buf := make([]byte, 65527) // max size for udp packet body (assuming ipv6)
for {
n, _, err := u.packetConn.ReadFrom(buf)
if n > 0 {
u.wg.Add(1)
bufCopy := make([]byte, n)
copy(bufCopy, buf)
go func() {
u.handlePacket(parser, nextConsumer, bufCopy)
u.wg.Done()
}()
}
if err != nil {
u.reporter.OnDebugf(
"UDP Transport (%s) - ReadFrom error: %v",
u.packetConn.LocalAddr(),
err)
if netErr, ok := err.(net.Error); ok {
if netErr.Temporary() {
continue
}
}
return err
}
}
}
func (u *udpServer) Close() error {
err := u.packetConn.Close()
u.wg.Wait()
return err
}
func (u *udpServer) handlePacket(
p protocol.Parser,
nextConsumer consumer.Metrics,
data []byte,
) {
ctx := u.reporter.OnDataReceived(context.Background())
var numReceivedMetricPoints int
var metrics []*metricspb.Metric
buf := bytes.NewBuffer(data)
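	// Each line is expected to be a Carbon plaintext metric such as
	// "my.service.latency 42 1616161616" (path, value, timestamp); the exact
	// grammar is whatever the supplied protocol.Parser accepts.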
for {
bytes, err := buf.ReadBytes((byte)('\n'))
if err == io.EOF {
if len(bytes) == 0 {
// Completed without errors.
break
}
}
line := strings.TrimSpace(string(bytes))
if line != "" {<|fim▁hole|> u.reporter.OnTranslationError(ctx, err)
continue
}
metrics = append(metrics, metric)
}
}
err := nextConsumer.ConsumeMetrics(ctx, internaldata.OCToMetrics(nil, nil, metrics))
u.reporter.OnMetricsProcessed(ctx, numReceivedMetricPoints, err)
}<|fim▁end|>
|
numReceivedMetricPoints++
metric, err := p.Parse(line)
if err != nil {
|
<|file_name|>example.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> // Une calculatrice simple avec des entiers :
// `+` ou `-` signifie ajouter ou soustraire 1
// `*` ou `/` signifie multiplier ou diviser par 2
let programme = "+ + * - /";
let mut accumulateur = 0;
for symbole in programme.chars() {
match symbole {
'+' => accumulateur += 1,
'-' => accumulateur -= 1,
'*' => accumulateur *= 2,
'/' => accumulateur /= 2,
_ => { /* on ne tient pas compte des autres symboles */ }
}
}
println!("Le programme \"{}\" a calculé la valeur {}",
programme, accumulateur);
}<|fim▁end|>
|
// Ce code est modifiable et exécutable !
fn main() {
|
<|file_name|>chunker.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Split the input into chunks.
exports.default = (function (input, fail) {
var len = input.length;
var level = 0;
var parenLevel = 0;
var lastOpening;
var lastOpeningParen;
var lastMultiComment;
var lastMultiCommentEndBrace;
var chunks = [];
var emitFrom = 0;
var chunkerCurrentIndex;
var currentChunkStartIndex;
var cc;
var cc2;
var matched;
function emitChunk(force) {
var len = chunkerCurrentIndex - emitFrom;
if (((len < 512) && !force) || !len) {
return;
}
chunks.push(input.slice(emitFrom, chunkerCurrentIndex + 1));
emitFrom = chunkerCurrentIndex + 1;
}
for (chunkerCurrentIndex = 0; chunkerCurrentIndex < len; chunkerCurrentIndex++) {
cc = input.charCodeAt(chunkerCurrentIndex);
if (((cc >= 97) && (cc <= 122)) || (cc < 34)) {
// a-z or whitespace
continue;
}
switch (cc) {
case 40: // (
parenLevel++;
lastOpeningParen = chunkerCurrentIndex;
continue;
case 41: // )
if (--parenLevel < 0) {
return fail('missing opening `(`', chunkerCurrentIndex);
}
continue;
case 59: // ;
if (!parenLevel) {
emitChunk();
}
continue;
case 123: // {
level++;
lastOpening = chunkerCurrentIndex;
continue;
case 125: // }
if (--level < 0) {
return fail('missing opening `{`', chunkerCurrentIndex);
}
if (!level && !parenLevel) {
emitChunk();
}
continue;
case 92: // \
if (chunkerCurrentIndex < len - 1) {
chunkerCurrentIndex++;
continue;
}
return fail('unescaped `\\`', chunkerCurrentIndex);
case 34:
case 39:<|fim▁hole|> cc2 = input.charCodeAt(chunkerCurrentIndex);
if (cc2 > 96) {
continue;
}
if (cc2 == cc) {
matched = 1;
break;
}
if (cc2 == 92) { // \
if (chunkerCurrentIndex == len - 1) {
return fail('unescaped `\\`', chunkerCurrentIndex);
}
chunkerCurrentIndex++;
}
}
if (matched) {
continue;
}
return fail("unmatched `" + String.fromCharCode(cc) + "`", currentChunkStartIndex);
case 47: // /, check for comment
if (parenLevel || (chunkerCurrentIndex == len - 1)) {
continue;
}
cc2 = input.charCodeAt(chunkerCurrentIndex + 1);
if (cc2 == 47) {
// //, find lnfeed
for (chunkerCurrentIndex = chunkerCurrentIndex + 2; chunkerCurrentIndex < len; chunkerCurrentIndex++) {
cc2 = input.charCodeAt(chunkerCurrentIndex);
if ((cc2 <= 13) && ((cc2 == 10) || (cc2 == 13))) {
break;
}
}
}
else if (cc2 == 42) {
// /*, find */
lastMultiComment = currentChunkStartIndex = chunkerCurrentIndex;
for (chunkerCurrentIndex = chunkerCurrentIndex + 2; chunkerCurrentIndex < len - 1; chunkerCurrentIndex++) {
cc2 = input.charCodeAt(chunkerCurrentIndex);
if (cc2 == 125) {
lastMultiCommentEndBrace = chunkerCurrentIndex;
}
if (cc2 != 42) {
continue;
}
if (input.charCodeAt(chunkerCurrentIndex + 1) == 47) {
break;
}
}
if (chunkerCurrentIndex == len - 1) {
return fail('missing closing `*/`', currentChunkStartIndex);
}
chunkerCurrentIndex++;
}
continue;
case 42: // *, check for unmatched */
if ((chunkerCurrentIndex < len - 1) && (input.charCodeAt(chunkerCurrentIndex + 1) == 47)) {
return fail('unmatched `/*`', chunkerCurrentIndex);
}
continue;
}
}
if (level !== 0) {
if ((lastMultiComment > lastOpening) && (lastMultiCommentEndBrace > lastMultiComment)) {
return fail('missing closing `}` or `*/`', lastOpening);
}
else {
return fail('missing closing `}`', lastOpening);
}
}
else if (parenLevel !== 0) {
return fail('missing closing `)`', lastOpeningParen);
}
emitChunk(true);
return chunks;
});
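// Illustrative call (hypothetical fail callback): returns an array of chunks,
// each at least ~512 bytes except the last, split only at top-level `;` and
// `}` boundaries.
//   const chunks = require('./chunker').default(css,
//       (msg, index) => { throw new Error(msg + ' at ' + index); });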
//# sourceMappingURL=chunker.js.map<|fim▁end|>
|
case 96: // ", ' and `
matched = 0;
currentChunkStartIndex = chunkerCurrentIndex;
for (chunkerCurrentIndex = chunkerCurrentIndex + 1; chunkerCurrentIndex < len; chunkerCurrentIndex++) {
|
<|file_name|>vpxor.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;<|fim▁hole|> run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 217, 239, 230], OperandSize::Dword)
}
fn vpxor_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM0)), operand3: Some(IndirectScaledIndexedDisplaced(ESI, ESI, Eight, 2013244843, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 239, 132, 246, 171, 173, 255, 119], OperandSize::Dword)
}
fn vpxor_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 239, 231], OperandSize::Qword)
}
fn vpxor_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledDisplaced(RAX, Two, 523638493, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 201, 239, 36, 69, 221, 22, 54, 31], OperandSize::Qword)
}
fn vpxor_5() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM5)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 239, 197], OperandSize::Dword)
}
fn vpxor_6() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledIndexedDisplaced(EDX, ESI, Four, 504873840, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 237, 239, 156, 178, 112, 195, 23, 30], OperandSize::Dword)
}
fn vpxor_7() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM5)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 239, 245], OperandSize::Qword)
}
fn vpxor_8() {
run_test(&Instruction { mnemonic: Mnemonic::VPXOR, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM6)), operand3: Some(IndirectScaledIndexedDisplaced(RBX, RDX, Two, 848752970, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 239, 188, 83, 74, 241, 150, 50], OperandSize::Qword)
}<|fim▁end|>
|
fn vpxor_1() {
|
<|file_name|>pat-lt-bracket-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// compile-flags: -Z parse-only
struct Foo<T>(T, T);
impl<T> Foo<T> {
fn foo(&self) {
match *self {
Foo<T>(x, y) => {
//~^ error: expected one of `=>`, `@`, `if`, or `|`, found `<`
println!("Goodbye, World!")
}
}
}
}<|fim▁end|>
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>Controller.py<|end_file_name|><|fim▁begin|><|fim▁hole|> def __init__(self, model):
self._model = model
self._view = None
def register_view(self, view):
self._view = view
def on_quit(self, *args):
raise NotImplementedError
def on_keybinding_activated(self, core, time):
raise NotImplementedError
def on_show_about(self, sender):
raise NotImplementedError
def on_toggle_history(self, sender):
raise NotImplementedError
def on_show_preferences(self, sender):
raise NotImplementedError
def on_query_entry_changed(self, entry):
raise NotImplementedError
def on_query_entry_key_press_event(self, entry, event):
raise NotImplementedError
def on_query_entry_activate(self, entry):
raise NotImplementedError
def on_treeview_cursor_changed(self, treeview):
raise NotImplementedError
def on_match_selected(self, treeview, text, match_obj, event):
raise NotImplementedError
def on_do_default_action(self, treeview, text, match_obj, event):
raise NotImplementedError
def on_action_selected(self, treeview, text, action, event):
raise NotImplementedError
def on_clear_history(self, sender):
raise NotImplementedError
def on_history_match_selected(self, history, text, match):
raise NotImplementedError<|fim▁end|>
|
class Controller(object):
|
<|file_name|>c.js<|end_file_name|><|fim▁begin|>define(function (require) {
var a = require('a');
return {
name: 'c',<|fim▁hole|> };
});<|fim▁end|>
|
aName: a.name
|
<|file_name|>tabmenu.ts<|end_file_name|><|fim▁begin|>import {NgModule, Component, OnDestroy, Input, EventEmitter} from "@angular/core";
import {CommonModule} from "@angular/common";
import {DomHandler} from "../dom/domhandler";
import {MenuItem} from "../common/api";
import {Router} from "@angular/router";
@Component({
selector: 'p-tabMenu',
template: `
<div [ngClass]="'ui-tabmenu ui-widget ui-widget-content ui-corner-all'" [ngStyle]="style" [class]="styleClass">
<ul class="ui-tabmenu-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all" role="tablist">
<li *ngFor="let item of model"
[ngClass]="{'ui-tabmenuitem ui-state-default ui-corner-top':true,'ui-state-disabled':item.disabled,
'ui-tabmenuitem-hasicon':item.icon,'ui-state-active':activeItem==item}">
<a [href]="item.url||'#'" class="ui-menuitem-link ui-corner-all" (click)="itemClick($event,item)">
<span class="ui-menuitem-icon fa" [ngClass]="item.icon"></span>
<span class="ui-menuitem-text">{{item.label}}</span>
</a>
</li>
</ul>
</div>
`,
providers: [DomHandler]
})
export class TabMenu implements OnDestroy {
@Input() model: MenuItem[];
@Input() activeItem: MenuItem;
@Input() popup: boolean;
@Input() style: any;
@Input() styleClass: string;
constructor(public router: Router) {
}
ngOnInit() {
if (!this.activeItem && this.model && this.model.length) {
this.activeItem = this.model[0];
}
}
itemClick(event: Event, item: MenuItem) {
if (item.disabled) {
event.preventDefault();
return;
}
if (!item.url || item.routerLink) {
event.preventDefault();
}
if (item.command) {
if (!item.eventEmitter) {
item.eventEmitter = new EventEmitter();
item.eventEmitter.subscribe(item.command);
}
item.eventEmitter.emit({
originalEvent: event,<|fim▁hole|> });
}
if (item.routerLink) {
this.router.navigate(item.routerLink);
}
this.activeItem = item;
}
ngOnDestroy() {
if (this.model) {
for (let item of this.model) {
this.unsubscribe(item);
}
}
}
unsubscribe(item: any) {
if (item.eventEmitter) {
item.eventEmitter.unsubscribe();
}
if (item.items) {
for (let childItem of item.items) {
this.unsubscribe(childItem);
}
}
}
}
@NgModule({
imports: [CommonModule],
exports: [TabMenu],
declarations: [TabMenu]
})
export class TabMenuModule {
}<|fim▁end|>
|
item: item
|
<|file_name|>ReferenceEditor.js<|end_file_name|><|fim▁begin|>import core from 'comindware/core';
import CanvasView from 'demoPage/views/CanvasView';
export default function() {
const model = new Backbone.Model({
referenceValue: {
<|fim▁hole|> }
});
return new CanvasView({
view: new core.form.editors.ReferenceBubbleEditor({
model,
key: 'referenceValue',
autocommit: true,
showEditButton: true,
controller: new core.form.editors.reference.controllers.DemoReferenceEditorController()
}),
presentation: "{{#if referenceValue}}{ id: '{{referenceValue.id}}', text: '{{referenceValue.text}}' }{{else}}null{{/if}}"
});
}<|fim▁end|>
|
id: 'test.1',
text: 'Test Reference 1'
|
<|file_name|>kill-setuid.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
fn main() {
unsafe {
let context = seccomp_sys::seccomp_init(seccomp_sys::SCMP_ACT_ALLOW);
let comparator = seccomp_sys::scmp_arg_cmp {
arg: 0,
op: seccomp_sys::scmp_compare::SCMP_CMP_EQ,
datum_a: 1000,
datum_b: 0,
}; /* arg[0] equals 1000 */
let syscall_number = 105; /* setuid on x86_64 */
assert!(
seccomp_sys::seccomp_rule_add(
context,
seccomp_sys::SCMP_ACT_KILL,
syscall_number,
1,
comparator
) == 0
);
assert!(seccomp_sys::seccomp_load(context) == 0);
assert!(libc::setuid(1000) == 0); /* process would be killed here */
}
}<|fim▁end|>
|
extern crate libc;
extern crate seccomp_sys;
|
<|file_name|>SagaFactory.ts<|end_file_name|><|fim▁begin|>import {AssociationValues} from "./AssociationValues";
import {Saga, SagaType} from "./Saga";
export interface SagaFactory<T extends Saga> {
createSaga(sagaType: SagaType<T>, id: string, associationValues: AssociationValues): T;<|fim▁hole|><|fim▁end|>
|
supports(sagaType: SagaType<T>): boolean;
hydrate(saga: T): void;
dehydrate(saga: T): void;
}
|
<|file_name|>S15.7.3.5_A1.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* Number.NEGATIVE_INFINITY is -Infinity<|fim▁hole|> * @path ch15/15.7/15.7.3/15.7.3.5/S15.7.3.5_A1.js
* @description Checking sign and finiteness of Number.NEGATIVE_INFINITY
*/
// CHECK#1
if (isFinite(Number.NEGATIVE_INFINITY) !== false) {
$ERROR('#1: Number.NEGATIVE_INFINITY === Not-a-Finite');
} else {
if ((Number.NEGATIVE_INFINITY < 0) !== true) {
$ERROR('#1: Number.NEGATIVE_INFINITY === -Infinity');
}
}<|fim▁end|>
|
*
|
<|file_name|>DynamicControl.java<|end_file_name|><|fim▁begin|>package net.happybrackets.core.control;
import com.google.gson.Gson;
import de.sciss.net.OSCMessage;
import net.happybrackets.core.Device;
import net.happybrackets.core.OSCVocabulary;
import net.happybrackets.core.scheduling.HBScheduler;
import net.happybrackets.core.scheduling.ScheduledEventListener;
import net.happybrackets.core.scheduling.ScheduledObject;
import net.happybrackets.device.HB;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
/**
* This class facilitates sending message values between sketches,
* devices, and a graphical environment.
* The values can be represented as sliders, text boxes, check boxes, and buttons
*
* A message can either be an integer, a double, a string, a boolean, a trigger or a complete class.
*
* Although similar to the send and receive objects in Max in that the name and type
* parameter of the {@link DynamicControl} determines message interconnection,
* DynamicControls also have an attribute called {@link ControlScope}, which dictates how far (in
* a topological sense) the object can reach in order to communicate with other
* DynamicControls. DynamicControls can be bound to different objects, the default being the class that instantiated it.
*
* <br>The classes are best accessed through {@link DynamicControlParent} abstractions
*
*/
public class DynamicControl implements ScheduledEventListener {
static Gson gson = new Gson();
// flag for testing
static boolean ignoreName = false;
private boolean isPersistentControl = false;
/**
* Set ignore name for testing
* @param ignore true to ignore
*/
static void setIgnoreName(boolean ignore){
        ignoreName = ignore;
}
static int deviceSendId = 0; // we will use this to number all messages we send. They can be filtered at receiver by testing last message mapped
/**
* Define a list of target devices. Can be either device name or IP address
* If it is a device name, there will be a lookup of stored device names
*/
Set<String> targetDevices = new HashSet<>();
// we will map Message ID to device name. If the last ID is in this map, we will ignore message
static Map<String, Integer> messageIdMap = new Hashtable<>();
/**
* See if we will process a control message based on device name and message_id
* If the message_id is mapped against the device_name, ignore message, otherwise store mapping and return true;
* @param device_name the device name
* @param message_id the message_id
* @return true if we are going to process this message
*/
public static boolean enableProcessControlMessage(String device_name, int message_id){
boolean ret = true;
if (messageIdMap.containsKey(device_name)) {
if (messageIdMap.get(device_name) == message_id) {
ret = false;
}
}
if (ret){
messageIdMap.put(device_name, message_id);
}
return ret;
}
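    // Illustrative behaviour: the first enableProcessControlMessage("device-1", 5)
    // returns true and records the id; repeats with the same id return false
    // until a different id arrives for that device.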
    // The device name that sent the last message to this control
// A Null value will indicate that it was this device
String sendingDevice = null;
/**
* Get the name of the device that sent the message. If the message was local, will return this device name
* @return name of device that sent message
*/
public String getSendingDevice(){
String ret = sendingDevice;
if (ret == null) {
ret = deviceName;
}
return ret;
}
/**
* Define how we want the object displayed in the plugin
*/
public enum DISPLAY_TYPE {
DISPLAY_DEFAULT,
DISPLAY_HIDDEN,
DISPLAY_DISABLED,
DISPLAY_ENABLED_BUDDY,
DISPLAY_DISABLED_BUDDY
}
/**
* Return all mapped device addresses for this control
* @return returns the set of mapped targeted devices
*/
public Set<String> getTargetDeviceAddresses(){
return targetDevices;
}
@Override
public void doScheduledEvent(double scheduledTime, Object param) {
FutureControlMessage message = (FutureControlMessage) param;
this.objVal = message.controlValue;
this.executionTime = 0;
this.sendingDevice = message.sourceDevice;
notifyLocalListeners();
if (!message.localOnly) {
notifyValueSetListeners();
}
synchronized (futureMessageListLock) {
futureMessageList.remove(message);
}
}
/**
* Add one or more device names or addresses as strings to use in {@link ControlScope#TARGET} Message
* @param deviceNames device name or IP Address
*/
public synchronized void addTargetDevice(String... deviceNames){
for (String name:
deviceNames) {
targetDevices.add(name);
}
}
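    // Illustrative call (hypothetical names): both device names and raw IP
    // addresses are accepted, e.g. control.addTargetDevice("hb-device-1", "192.168.1.20");
    // names are resolved via the stored device-name map.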
/**
     * Remove all set target devices and replace with those provided as arguments
* Adds device address as a string or device name to {@link ControlScope#TARGET} Message
* @param deviceNames device name or IP Address
*/
public synchronized void setTargetDevice(String... deviceNames){
targetDevices.clear();
addTargetDevice(deviceNames);
}
/**
     * Remove all set target devices and replace with those provided as arguments
* Adds device addresses to {@link ControlScope#TARGET} Message
* @param inetAddresses device name or IP Address
*/
public synchronized void setTargetDevice(InetAddress... inetAddresses){
targetDevices.clear();
addTargetDevice(inetAddresses);
}
/**
* Add one or more device {@link InetAddress} for use in {@link ControlScope#TARGET} Message
* @param inetAddresses the target addresses to add
*/
public void addTargetDevice(InetAddress... inetAddresses){
for (InetAddress address:
inetAddresses) {
targetDevices.add(address.getHostAddress());
}
}
/**
* Clear all devices as Targets
*/
public synchronized void clearTargetDevices(){
targetDevices.clear();
}
/**
* Remove one or more device names or addresses as a string.
* For use in {@link ControlScope#TARGET} Messages
* @param deviceNames device names or IP Addresses to remove
*/
public synchronized void removeTargetDevice(String... deviceNames){
for (String name:
deviceNames) {
targetDevices.remove(name);
}
}
/**
* Remove one or more {@link InetAddress} for use in {@link ControlScope#TARGET} Message
* @param inetAddresses the target addresses to remove
*/
public void removeTargetDevice(InetAddress... inetAddresses){
for (InetAddress address:
inetAddresses) {
targetDevices.remove(address.getHostAddress());
}
}
/**
* Create an Interface to listen to
*/
public interface DynamicControlListener {
void update(DynamicControl control);
}
public interface ControlScopeChangedListener {
void controlScopeChanged(ControlScope new_scope);
}
/**
* The way Create Messages are sent
*/
private enum CREATE_MESSAGE_ARGS {
DEVICE_NAME,
MAP_KEY,
CONTROL_NAME,
PARENT_SKETCH_NAME,
PARENT_SKETCH_ID,
CONTROL_TYPE,
OBJ_VAL,
MIN_VAL,
MAX_VAL,
CONTROL_SCOPE,
DISPLAY_TYPE_VAL
}
// Define the Arguments used in an Update message
private enum UPDATE_MESSAGE_ARGS {
DEVICE_NAME,
CONTROL_NAME,
CONTROL_TYPE,
MAP_KEY,
OBJ_VAL,
CONTROL_SCOPE,
DISPLAY_TYPE_VAL,
MIN_VALUE,
MAX_VALUE
}
// Define Global Message arguments
public enum NETWORK_TRANSMIT_MESSAGE_ARGS {
DEVICE_NAME,
CONTROL_NAME,
CONTROL_TYPE,
OBJ_VAL,
EXECUTE_TIME_MLILI_MS, // Most Significant Int of Milliseconds - stored as int
        EXECUTE_TIME_MLILI_LS, // Least Significant Int of Milliseconds - stored as int
        EXECUTE_TIME_NANO, // Number of Nano Seconds - stored as int
MESSAGE_ID // we will increment an integer and send the message multiple times. We will ignore message if last message was this one
}
// Define Device Name Message arguments
private enum DEVICE_NAME_ARGS {
DEVICE_NAME
}
// Define where our first Array type global dynamic control message is in OSC
final static int OSC_TRANSMIT_ARRAY_ARG = NETWORK_TRANSMIT_MESSAGE_ARGS.MESSAGE_ID.ordinal() + 1;
// When an event is scheduled in the future, we will create one of these and schedule it
class FutureControlMessage{
/**
* Create a Future Control message
* @param source_device the source device name
* @param value the value to be executed
* @param execution_time the time the value needs to be executed
*/
public FutureControlMessage(String source_device, Object value, double execution_time){
sourceDevice = source_device;
controlValue = value;
executionTime = execution_time;
}
Object controlValue;
double executionTime;
        boolean localOnly = false; // if we are local only, we will not notify value-set listeners
String sourceDevice;
/// have a copy of our pending scheduled object in case we want to cancel it
ScheduledObject pendingSchedule = null;
}
static ControlMap controlMap = ControlMap.getInstance();
private static final Object controlMapLock = new Object();
private static int instanceCounter = 0; // we will use this to order the creation of our objects and give them a unique number on device
private final Object instanceCounterLock = new Object();
private final Object valueChangedLock = new Object();
private final String controlMapKey;
private List<DynamicControlListener> controlListenerList = new ArrayList<>();
private List<DynamicControlListener> globalControlListenerList = new ArrayList<>();
private List<ControlScopeChangedListener> controlScopeChangedList = new ArrayList<>();
private List<FutureControlMessage> futureMessageList = new ArrayList<>();
// This listener is only called when value on control set
private List<DynamicControlListener> valueSetListenerList = new ArrayList<>();
// Create Object to lock shared resources
private final Object controlScopeChangedLock = new Object();
private final Object controlListenerLock = new Object();
private final Object globalListenerLock = new Object();
private final Object valueSetListenerLock = new Object();
private final Object futureMessageListLock = new Object();
static boolean disableScheduler = false; // set flag if we are going to disable scheduler - eg, in GUI
/**
* Create the text we will display at the beginning of tooltip
* @param tooltipPrefix The starting text of the tooltip
* @return this object
*/
public DynamicControl setTooltipPrefix(String tooltipPrefix) {
this.tooltipPrefix = tooltipPrefix;
return this;
}
private String tooltipPrefix = "";
// The Object sketch that this control was created in
private Object parentSketch = null;
final int parentId;
private final String deviceName;
private String parentSketchName;
private ControlType controlType;
final String controlName;
private ControlScope controlScope = ControlScope.SKETCH;
private Object objVal = 0;
private Object maximumDisplayValue = 0;
private Object minimumDisplayValue = 0;
// This is the time we want to execute the control value
private double executionTime = 0;
DISPLAY_TYPE displayType = DISPLAY_TYPE.DISPLAY_DEFAULT; // Whether the control is displayType on control Screen
/**
     * Set whether we disable the scheduler and apply all values immediately
* @param disabled set true to disable
*/
public static void setDisableScheduler(boolean disabled){
disableScheduler = disabled;
}
/**
     * How the control will be displayed on the screen
     * @return the display type of the control
*/
public DISPLAY_TYPE getDisplayType(){
return displayType;
}
/**
* Set how we will display control object on the screen
* @param display_type how we will display control
* @return this
*/
public DynamicControl setDisplayType(DISPLAY_TYPE display_type){
displayType = display_type;
notifyValueSetListeners();
//notifyLocalListeners();
return this;
}
/**
* Returns the JVM execution time we last used when we set the value
     * @return the last execution time set
*/
public double getExecutionTime(){
return executionTime;
}
/**
* Convert a float or int into required number type based on control. If not a FLOAT or INT, will just return value
* @param control_type the control type
* @param source_value the value we want
* @return the converted value
*/
static private Object convertValue (ControlType control_type, Object source_value) {
Object ret = source_value;
// Convert if we are a float control
if (control_type == ControlType.FLOAT) {
if (source_value == null){
ret = 0.0;
}else if (source_value instanceof Integer) {
Integer i = (Integer) source_value;
double f = i.doubleValue();
ret = f;
}else if (source_value instanceof Double) {
Double d = (Double) source_value;
ret = d;
}else if (source_value instanceof Long) {
Long l = (Long) source_value;
double f = l.doubleValue();
ret = f;
} else if (source_value instanceof Float) {
double f = (Float) source_value;
ret = f;
} else if (source_value instanceof String) {
double f = Double.parseDouble((String)source_value);
ret = f;
}
// Convert if we are an int control
} else if (control_type == ControlType.INT) {
if (source_value == null){
ret = 0;
}else if (source_value instanceof Float) {
Float f = (Float) source_value;
Integer i = f.intValue();
ret = i;
}else if (source_value instanceof Double) {
Double d = (Double) source_value;
Integer i = d.intValue();
ret = i;
}else if (source_value instanceof Long) {
Long l = (Long) source_value;
Integer i = l.intValue();
ret = i;
}
// Convert if we are a BOOLEAN control
} else if (control_type == ControlType.BOOLEAN) {
if (source_value == null){
ret = 0;
}if (source_value instanceof Integer) {
Integer i = (Integer) source_value;
Boolean b = i != 0;
ret = b;
}else if (source_value instanceof Long) {
Long l = (Long) source_value;
Integer i = l.intValue();
Boolean b = i != 0;
ret = b;
}
// Convert if we are a TRIGGER control
}else if (control_type == ControlType.TRIGGER) {
if (source_value == null) {
ret = System.currentTimeMillis();
}
// Convert if we are a TEXT control
}else if (control_type == ControlType.TEXT) {
if (source_value == null) {
ret = "";
}
}
return ret;
}
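    // Illustrative conversions: convertValue(ControlType.INT, 3.7f) yields Integer 3
    // (Float.intValue truncates), and convertValue(ControlType.BOOLEAN, 1) yields Boolean true.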
/**
* Get the Sketch or class object linked to this control
* @return the parentSketch or Object
*/
public Object getParentSketch() {
return parentSketch;
}
/**
* This is a private constructor used to initialise constant attributes of this object
*
* @param parent_sketch the object calling - typically this
* @param control_type The type of control you want to create
* @param name The name we will give to differentiate between different controls in this class
* @param initial_value The initial value of the control
* @param display_type how we want to display the object
*
*/
private DynamicControl(Object parent_sketch, ControlType control_type, String name, Object initial_value, DISPLAY_TYPE display_type) {
if (parent_sketch == null){
parent_sketch = new Object();
}
displayType = display_type;
parentSketch = parent_sketch;
parentSketchName = parent_sketch.getClass().getName();
controlType = control_type;
controlName = name;
objVal = convertValue (control_type, initial_value);
parentId = parent_sketch.hashCode();
deviceName = Device.getDeviceName();
synchronized (instanceCounterLock) {
controlMapKey = Device.getDeviceName() + instanceCounter;
instanceCounter++;
}
}
/**
* Ascertain the Control Type based on the Value
     * @param value the value from which the control type is determined
* @return a control type
*/
public static ControlType getControlType(Object value){
ControlType ret = ControlType.OBJECT;
if (value == null){
ret = ControlType.TRIGGER;
}
else if (value instanceof Float || value instanceof Double){
ret = ControlType.FLOAT;
}
else if (value instanceof Boolean){
ret = ControlType.BOOLEAN;
}
else if (value instanceof String){
ret = ControlType.TEXT;
}
else if (value instanceof Integer || value instanceof Long){
ret = ControlType.INT;
}
return ret;
}
/**
* A dynamic control that can be accessed from outside this sketch
* it is created with the sketch object that contains it along with the type
*
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
*/
public DynamicControl(String name, Object initial_value) {
this(new Object(), getControlType(initial_value), name, initial_value, DISPLAY_TYPE.DISPLAY_DEFAULT);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside this sketch
* it is created with the sketch object that contains it along with the type
*
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
*/
public DynamicControl(ControlType control_type, String name, Object initial_value) {
this(new Object(), control_type, name, initial_value, DISPLAY_TYPE.DISPLAY_DEFAULT);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside this sketch
* it is created with the sketch object that contains it along with the type
* @param parent_sketch the object calling - typically this, however, you can use any class object
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
*/
public DynamicControl(Object parent_sketch, ControlType control_type, String name) {
this(parent_sketch, control_type, name, null, DISPLAY_TYPE.DISPLAY_DEFAULT);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside this sketch
* it is created with the sketch object that contains it along with the type
*
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
*/
public DynamicControl(ControlType control_type, String name) {
this(new Object(), control_type, name, convertValue(control_type, null), DISPLAY_TYPE.DISPLAY_DEFAULT);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside this sketch
* it is created with the sketch object that contains it along with the type
*
* @param parent_sketch the object calling - typically this, however, you can use any class object
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
*/
public DynamicControl(Object parent_sketch, ControlType control_type, String name, Object initial_value) {
this(parent_sketch, control_type, name, initial_value, DISPLAY_TYPE.DISPLAY_DEFAULT);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
     * Set this control as a persistent control so it does not get removed on reset
* @return this
*/
public DynamicControl setPersistentController(){
controlMap.addPersistentControl(this);
isPersistentControl = true;
return this;
}
/**
* See if control is a persistent control
     * @return true if this is a persistent control
*/
public boolean isPersistentControl() {
return isPersistentControl;
}
/**
* A dynamic control that can be accessed from outside
* it is created with the sketch object that contains it along with the type
*
* @param parent_sketch the object calling - typically this, however, you can use any class object
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
* @param min_value The minimum display value of the control. Only used for display purposes
* @param max_value The maximum display value of the control. Only used for display purposes
*/
public DynamicControl(Object parent_sketch, ControlType control_type, String name, Object initial_value, Object min_value, Object max_value) {
this(parent_sketch, control_type, name, initial_value, DISPLAY_TYPE.DISPLAY_DEFAULT);
minimumDisplayValue = convertValue (control_type, min_value);
maximumDisplayValue = convertValue (control_type, max_value);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside
* it is created with the sketch object that contains it along with the type
*
* @param parent_sketch the object calling - typically this, however, you can use any class object
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
* @param min_value The minimum display value of the control. Only used for display purposes
* @param max_value The maximum display value of the control. Only used for display purposes
* @param display_type The way we want the control displayed
*/
public DynamicControl(Object parent_sketch, ControlType control_type, String name, Object initial_value, Object min_value, Object max_value, DISPLAY_TYPE display_type) {
this(parent_sketch, control_type, name, initial_value, display_type);
minimumDisplayValue = convertValue (control_type, min_value);
maximumDisplayValue = convertValue (control_type, max_value);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* A dynamic control that can be accessed from outside
* it is created with the sketch object that contains it along with the type
*
* @param control_type The type of control message you want to send
* @param name The name we will give to associate it with other DynamicControls with identical ControlScope and type.
* @param initial_value The initial value of the control
* @param min_value The minimum display value of the control. Only used for display purposes
* @param max_value The maximum display value of the control. Only used for display purposes
*/
public DynamicControl(ControlType control_type, String name, Object initial_value, Object min_value, Object max_value) {
this(new Object(), control_type, name, initial_value, DISPLAY_TYPE.DISPLAY_DEFAULT);
minimumDisplayValue = convertValue (control_type, min_value);
maximumDisplayValue = convertValue (control_type, max_value);
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
* Get the type of control we want
* @return The type of value this control is
*/
public ControlType getControlType(){
return controlType;
}
/**
* Get the scope of this control. Can be Sketch, Class, Device, or global
* @return The Scope
*/
public ControlScope getControlScope(){
return controlScope;
}
/**
* Changed the scope that the control has. It will update control map so the correct events will be generated based on its scope
* @param new_scope The new Control Scope
* @return this object
*/
public synchronized DynamicControl setControlScope(ControlScope new_scope)
{
ControlScope old_scope = controlScope;
if (old_scope != new_scope) {
controlScope = new_scope;
notifyValueSetListeners();
// prevent control scope from changing the value
//notifyLocalListeners();
notifyControlChangeListeners();
}
return this;
}
/**
* Get the Dynamic control based on Map key
*
* @param map_key the string that we are using as the key
     * @return the DynamicControl associated with this key
*/
public static DynamicControl getControl(String map_key) {
DynamicControl ret = null;
synchronized (controlMapLock) {
ret = controlMap.getControl(map_key);
}
return ret;
}
/**
* Update the parameters of this control with another. This would have been caused by an object having other than SKETCH control scope
* If the parameters are changed, this object will notify it's listeners that a change has occurred
* @param mirror_control The control that we are copying from
* @return this object
*/
public DynamicControl updateControl(DynamicControl mirror_control){
if (mirror_control != null) {
// first check our scope and type are the same
boolean scope_matches = getControlScope() == mirror_control.getControlScope() && getControlType() == mirror_control.getControlType();
if (scope_matches)
{
// Now we need to check whether the scope matches us
if (getControlScope() == ControlScope.SKETCH)
{
scope_matches = this.parentSketch == mirror_control.parentSketch && this.parentSketch != null;
}
// Now we need to check whether the scope matches us
else if (getControlScope() == ControlScope.CLASS)
{
scope_matches = this.parentSketchName.equals(mirror_control.parentSketchName);
}
else if (getControlScope() == ControlScope.DEVICE){
scope_matches = this.deviceName.equals(mirror_control.deviceName);
}
else if (getControlScope() == ControlScope.TARGET){
// check if our mirror has this address
scope_matches = mirror_control.targetsThisDevice();
}
// Otherwise it must be global. We have a match
}
if (scope_matches) {
// do not use setters as we only want to generate one notifyLocalListeners
boolean changed = false;
                if (mirror_control.executionTime <= 0.0) { // this needs to be done now
if (!objVal.equals(mirror_control.objVal)) {
//objVal = mirror_control.objVal; // let this get done inside the scheduleValue return
changed = true;
}
if (changed) {
scheduleValue(null, mirror_control.objVal, 0);
}
}
else
{
scheduleValue(null, mirror_control.objVal, mirror_control.executionTime);
}
}
}
return this;
}
/**
     * Check whether this device is targeted by checking the loopback, localhost and device names
     * @return true if this device is among the targets
*/
private boolean targetsThisDevice() {
boolean ret = false;
String device_name = Device.getDeviceName();
String loopback = InetAddress.getLoopbackAddress().getHostAddress();
for (String device:
targetDevices) {
if (device_name.equalsIgnoreCase(device)){
return true;
}
            if (loopback.equalsIgnoreCase(device)){ // a loopback target addresses this device
return true;
}
try {
if (InetAddress.getLocalHost().getHostAddress().equalsIgnoreCase(device)){
return true;
}
} catch (UnknownHostException e) {
//e.printStackTrace();
}
}
return ret;
}
/**
* Schedule this control to change its value in context of scheduler
* @param source_device the device name that was the source of this message - can be null
* @param value the value to send
* @param execution_time the time it needs to be executed
     * @param local_only if true, will not notify value-set listeners
*/
void scheduleValue(String source_device, Object value, double execution_time, boolean local_only){
        // We need to convert the Object value into the exact type. EG, integer must be cast to boolean if that is the control type
Object converted_value = convertValue(controlType, value);
if (disableScheduler || execution_time == 0){
this.objVal = converted_value;
this.executionTime = 0;
this.sendingDevice = source_device;
notifyLocalListeners();
if (!local_only) {
notifyValueSetListeners();
}
}
else {
FutureControlMessage message = new FutureControlMessage(source_device, converted_value, execution_time);
message.localOnly = local_only;
message.pendingSchedule = HBScheduler.getGlobalScheduler().addScheduledObject(execution_time, message, this);
synchronized (futureMessageListLock) {
futureMessageList.add(message);
}
}
}
/**
     * Schedule this control to send a value to its local listeners at a scheduled time. Will also notify value listeners (eg GUI controls)
* @param source_device the device name that was the source of this message - can be null
* @param value the value to send
* @param execution_time the time it needs to be executed
*/
void scheduleValue(String source_device, Object value, double execution_time) {
scheduleValue(source_device, value, execution_time, false);
}
/**
* Process the DynamicControl deviceName message and map device name to IPAddress
* We ignore our own device
* @param src_address The address of the device
* @param msg The OSC Message that has device name
*/
public static void processDeviceNameMessage(InetAddress src_address, OSCMessage msg) {
// do some error checking here
if (src_address != null) {
String device_name = (String) msg.getArg(DEVICE_NAME_ARGS.DEVICE_NAME.ordinal());
try {
if (!Device.getDeviceName().equalsIgnoreCase(device_name)) {
HB.HBInstance.addDeviceAddress(device_name, src_address);
}
}
catch(Exception ex){}
}
}
/**
* Process the DynamicControl deviceRequest message
* Send a deviceName back to src. Test that their name is mapped correctly
* If name is not mapped we will request from all devices globally
* @param src_address The address of the device
* @param msg The OSC Message that has device name
*/
public static void processRequestNameMessage(InetAddress src_address, OSCMessage msg) {
String device_name = (String) msg.getArg(DEVICE_NAME_ARGS.DEVICE_NAME.ordinal());
// ignore ourself
if (!Device.getDeviceName().equalsIgnoreCase(device_name)) {
// send them our message
OSCMessage nameMessage = buildDeviceNameMessage();
ControlMap.getInstance().sendGlobalDynamicControlMessage(nameMessage, null);
// See if we have them mapped the same
boolean address_changed = HB.HBInstance.addDeviceAddress(device_name, src_address);
if (address_changed){
// request all
postRequestNamesMessage();
}
}
}
/**
* Post a request device name message to other devices so we can target them specifically and update our map
*/
public static void postRequestNamesMessage(){
OSCMessage requestMessage = buildDeviceRequestNameMessage();
ControlMap.getInstance().sendGlobalDynamicControlMessage(requestMessage, null);
}
/**
* Build OSC Message that gives our device name
* @return OSC Message that has name
*/
public static OSCMessage buildDeviceNameMessage(){
return new OSCMessage(OSCVocabulary.DynamicControlMessage.DEVICE_NAME,
new Object[]{
Device.getDeviceName(),
});
}
/**
* Build OSC Message that requests devices send us their name
* @return OSC Message to request name
*/
public static OSCMessage buildDeviceRequestNameMessage(){
return new OSCMessage(OSCVocabulary.DynamicControlMessage.REQUEST_NAME,
new Object[]{
Device.getDeviceName(),
});
}
/**
* Convert two halves of a long stored integer values into a long value
* @param msi most significant integer
* @param lsi least significant integer
* @return a long value consisting of the concatenation of both int values
*/
public static long integersToLong(int msi, int lsi){
return (long) msi << 32 | lsi & 0xFFFFFFFFL;
}
/**
* Convert a long into two integers in an array of two integers
     * @param l_value the Long value that needs to be encoded
     * @return an array of two integers. ret[0] will be the most significant integer and ret[1] the least significant
*/
public static int [] longToIntegers (long l_value){
int msi = (int) (l_value >> 32); // this is most significant integer
int lsi = (int) l_value; // This is LSB that has been trimmed down;
return new int[]{msi, lsi};
}
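    // Illustrative round trip: for any long value,
    //   int[] parts = longToIntegers(value);
    //   integersToLong(parts[0], parts[1]) == value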
    // We keep a single cached encoded array so we know the size of an int array for a scheduled time
// This is used in numberIntsForScheduledTime
private static int [] intArrayCache = null;
/**
* Return the array size of Integers that would be required to encode a scheduled time
* @return the Array
*/
public static int numberIntsForScheduledTime(){
if (intArrayCache == null) {
intArrayCache = scheduleTimeToIntegers(0);
}
return intArrayCache.length;
}
/**
     * Convert a scheduler time into an array of three integers
     * @param d_val the double value that needs to be encoded
     * @return an array of three integers. ret[0] will be the most significant integer of the millisecond value, ret[1] the least significant, and ret[2] the number of nanoseconds
*/
public static int [] scheduleTimeToIntegers (double d_val){
long lval = (long)d_val;
int msi = (int) (lval >> 32); // this is most significant integer
        int lsi = (int) lval; // least significant half, truncated from the long
double nano = d_val - lval;
nano *= 1000000;
int n = (int) nano;
return new int[]{msi, lsi, n};
}
/**
* Convert three integers to a double representing scheduler time
     * @param msi the most significant half of the millisecond value
     * @param lsi the least significant half of the millisecond value
* @param nano the number of nanoseconds
* @return a double representing the scheduler time
*/
public static double integersToScheduleTime(int msi, int lsi, int nano){
long milliseconds = integersToLong(msi, lsi);
double ret = milliseconds;
double nanoseconds = nano;
return ret + nanoseconds / 1000000d;
}
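    /*
     * Worked example (illustrative): a scheduler time of 1234.5 milliseconds
     *   scheduleTimeToIntegers(1234.5)           => {0, 1234, 500000}
     *   integersToScheduleTime(0, 1234, 500000)  => 1234.5
     * The fractional millisecond travels as an integer nanosecond count, so
     * the value survives an int-only transport without loss.
     */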
/**
     * Process the {@link ControlScope#GLOBAL} or {@link ControlScope#TARGET} message from an OSC Message. Examine buildNetworkSendMessage for the parameters inside the message
* We will not process messages that have come from this device because they will be actioned through local listeners
* @param msg OSC message with new value
* @param controlScope the type of {@link ControlScope};
*/
public static void processOSCControlMessage(OSCMessage msg, ControlScope controlScope) {
String device_name = (String) msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.DEVICE_NAME.ordinal());
int message_id = (int)msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.MESSAGE_ID.ordinal());
// Make sure we ignore messages from this device
if (ignoreName || !device_name.equals(Device.getDeviceName())) {
if (enableProcessControlMessage(device_name, message_id)) {
String control_name = (String) msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.CONTROL_NAME.ordinal());
ControlType control_type = ControlType.values()[(int) msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.CONTROL_TYPE.ordinal())];
Object obj_val = msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.OBJ_VAL.ordinal());
Object ms_max = msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.EXECUTE_TIME_MLILI_MS.ordinal());
Object ms_min = msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.EXECUTE_TIME_MLILI_LS.ordinal());
Object nano = msg.getArg(NETWORK_TRANSMIT_MESSAGE_ARGS.EXECUTE_TIME_NANO.ordinal());
double execution_time = integersToScheduleTime((int) ms_max, (int) ms_min, (int) nano);
boolean data_converted = false; // we only want to do data conversion once
synchronized (controlMapLock) {
List<DynamicControl> named_controls = controlMap.getControlsByName(control_name);
for (DynamicControl named_control : named_controls) {
if (named_control.controlScope == controlScope && control_type.equals(named_control.controlType)) {
                            // we must NOT call setValue as this would trigger another global send.
// Just notifyListeners specific to this control but not globally
if (!data_converted) {
// we need to see if this is a boolean Object as OSC does not support that
if (control_type == ControlType.BOOLEAN) {
int osc_val = (int) obj_val;
Boolean bool_val = osc_val != 0;
obj_val = bool_val;
data_converted = true;
} else if (control_type == ControlType.OBJECT) {
if (!(obj_val instanceof String)) {
// This is not a Json Message
// We will need to get all the remaining OSC arguments after the schedule time and store that as ObjVal
int num_args = msg.getArgCount() - OSC_TRANSMIT_ARRAY_ARG;
Object[] restore_args = new Object[num_args];
for (int i = 0; i < num_args; i++) {
restore_args[i] = msg.getArg(OSC_TRANSMIT_ARRAY_ARG + i);
}
obj_val = restore_args;
data_converted = true;
}
}
}
// We need to schedule this value
named_control.scheduleValue(device_name, obj_val, execution_time);
}<|fim▁hole|> }
}
/**
* Process the Update Message from an OSC Message. Examine buildUpdateMessage for parameters inside Message
     * The message is directed at a specific control defined by the MAP_KEY parameter in the OSC Message
* @param msg OSC message with new value
*/
public static void processUpdateMessage(OSCMessage msg){
String map_key = (String) msg.getArg(UPDATE_MESSAGE_ARGS.MAP_KEY.ordinal());
String control_name = (String) msg.getArg(UPDATE_MESSAGE_ARGS.CONTROL_NAME.ordinal());
Object obj_val = msg.getArg(UPDATE_MESSAGE_ARGS.OBJ_VAL.ordinal());
ControlScope control_scope = ControlScope.values ()[(int) msg.getArg(UPDATE_MESSAGE_ARGS.CONTROL_SCOPE.ordinal())];
DISPLAY_TYPE display_type = DISPLAY_TYPE.DISPLAY_DEFAULT;
DynamicControl control = getControl(map_key);
if (control != null)
{
Object display_min = control.getMinimumDisplayValue();
Object display_max = control.getMaximumDisplayValue();
if (msg.getArgCount() > UPDATE_MESSAGE_ARGS.DISPLAY_TYPE_VAL.ordinal())
{
int osc_val = (int) msg.getArg(UPDATE_MESSAGE_ARGS.DISPLAY_TYPE_VAL.ordinal());
display_type = DISPLAY_TYPE.values ()[osc_val];
}
if (msg.getArgCount() > UPDATE_MESSAGE_ARGS.MAX_VALUE.ordinal()){
display_max = msg.getArg(UPDATE_MESSAGE_ARGS.MAX_VALUE.ordinal());
}
if (msg.getArgCount() > UPDATE_MESSAGE_ARGS.MIN_VALUE.ordinal()){
display_min = msg.getArg(UPDATE_MESSAGE_ARGS.MIN_VALUE.ordinal());
}
// do not use setters as we only want to generate one notifyLocalListeners
boolean changed = false;
boolean control_scope_changed = false;
if (control.displayType != display_type)
{
changed = true;
}
control.displayType = display_type;
obj_val = convertValue(control.controlType, obj_val);
display_max = convertValue(control.controlType, display_max);
display_min = convertValue(control.controlType, display_min);
if (!obj_val.equals(control.objVal) ||
!display_max.equals(control.maximumDisplayValue) ||
!display_min.equals(control.minimumDisplayValue)
) {
changed = true;
}
if (!control_scope.equals(control.controlScope)) {
control.controlScope = control_scope;
//control.executionTime = execution_time;
changed = true;
control_scope_changed = true;
}
if (changed) {
control.maximumDisplayValue = display_max;
control.minimumDisplayValue = display_min;
control.scheduleValue(null, obj_val, 0, true);
if (control.getControlScope() != ControlScope.UNIQUE){
control.objVal = obj_val;
control.notifyGlobalListeners();
}
}
if (control_scope_changed)
{
control.notifyControlChangeListeners();
}
}
}
/**
* Build OSC Message that specifies a removal of a control
* @return OSC Message to notify removal
*/
public OSCMessage buildRemoveMessage(){
return new OSCMessage(OSCVocabulary.DynamicControlMessage.DESTROY,
new Object[]{
deviceName,
controlMapKey
});
}
/**
     * Return an object that can be sent by OSC based on the control type
     * @param obj_val The object value we want to send
     * @return the object we will actually send over OSC
*/
private Object OSCArgumentObject (Object obj_val){
Object ret = obj_val;
if (obj_val instanceof Boolean)
{
boolean b = (Boolean) obj_val;
return b? 1:0;
}
else if (obj_val instanceof Double){
String s = ((Double)obj_val).toString();
ret = s;
}
return ret;
}
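    /*
     * Examples (illustrative): OSCArgumentObject(true) => 1 (int),
     * OSCArgumentObject(2.5d) => "2.5" (String). Booleans become ints because
     * OSC has no boolean type (see processOSCControlMessage); doubles are sent
     * as strings, presumably to avoid precision loss in a float-only transport.
     */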
/**
* Build OSC Message that specifies an update
* @return OSC Message To send to specific control
*/
public OSCMessage buildUpdateMessage(){
Object sendObjType = objVal;
if (controlType == ControlType.OBJECT){
sendObjType = objVal.toString();
}
return new OSCMessage(OSCVocabulary.DynamicControlMessage.UPDATE,
new Object[]{
deviceName,
controlName,
controlType.ordinal(),
controlMapKey,
OSCArgumentObject(sendObjType),
controlScope.ordinal(),
displayType.ordinal(),
OSCArgumentObject(minimumDisplayValue),
OSCArgumentObject(maximumDisplayValue),
});
}
/**
* Build OSC Message that specifies a Network update
* @return OSC Message directed to controls with same name, scope, but on different devices
*/
public OSCMessage buildNetworkSendMessage(){
deviceSendId++;
String OSC_MessageName = OSCVocabulary.DynamicControlMessage.GLOBAL;
// define the arguments for send time
int [] execution_args = scheduleTimeToIntegers(executionTime);
if (controlScope == ControlScope.TARGET){
OSC_MessageName = OSCVocabulary.DynamicControlMessage.TARGET;
}
if (controlType == ControlType.OBJECT){
/*
DEVICE_NAME,
CONTROL_NAME,
CONTROL_TYPE,
OBJ_VAL,
            EXECUTE_TIME_MLILI_MS, // Most Significant Int of Milliseconds - stored as int
            EXECUTE_TIME_MLILI_LS, // Least Significant Int of Milliseconds - stored as int
            EXECUTE_TIME_NANO // Number of Nanoseconds - stored as int
*/
// we need to see if we have a custom encode function
if (objVal instanceof CustomGlobalEncoder){
Object [] encode_data = ((CustomGlobalEncoder)objVal).encodeGlobalMessage();
int num_args = OSC_TRANSMIT_ARRAY_ARG + encode_data.length;
Object [] osc_args = new Object[num_args];
osc_args[0] = deviceName;
osc_args[1] = controlName;
osc_args[2] = controlType.ordinal();
osc_args[3] = 0; // by defining zero we are going to say this is NOT json
osc_args[4] = execution_args [0];
osc_args[5] = execution_args [1];
osc_args[6] = execution_args [2];
osc_args[7] = deviceSendId;
// now encode the object parameters
for (int i = 0; i < encode_data.length; i++){
osc_args[OSC_TRANSMIT_ARRAY_ARG + i] = encode_data[i];
}
return new OSCMessage(OSC_MessageName,
osc_args);
}
else
{
String jsonString = gson.toJson(objVal);
return new OSCMessage(OSC_MessageName,
new Object[]{
deviceName,
controlName,
controlType.ordinal(),
jsonString,
execution_args[0],
execution_args[1],
execution_args[2],
deviceSendId
});
}
}
else {
return new OSCMessage(OSC_MessageName,
new Object[]{
deviceName,
controlName,
controlType.ordinal(),
OSCArgumentObject(objVal),
execution_args[0],
execution_args[1],
execution_args[2],
deviceSendId
});
}
}
/**
* Build the OSC Message for a create message
* @return OSC Message required to create the object
*/
public OSCMessage buildCreateMessage() {
Object sendObjType = objVal;
if (controlType == ControlType.OBJECT){
sendObjType = objVal.toString();
}
return new OSCMessage(OSCVocabulary.DynamicControlMessage.CREATE,
new Object[]{
deviceName,
controlMapKey,
controlName,
parentSketchName,
parentId,
controlType.ordinal(),
OSCArgumentObject(sendObjType),
OSCArgumentObject(minimumDisplayValue),
OSCArgumentObject(maximumDisplayValue),
controlScope.ordinal(),
displayType.ordinal()
});
}
/**
* Create a DynamicControl based on OSC Message. This will keep OSC implementation inside this class
     * buildCreateMessage shows how these messages are constructed
* @param msg the OSC Message with the parameters to make Control
*/
public DynamicControl (OSCMessage msg)
{
deviceName = (String) msg.getArg(CREATE_MESSAGE_ARGS.DEVICE_NAME.ordinal());
controlMapKey = (String) msg.getArg(CREATE_MESSAGE_ARGS.MAP_KEY.ordinal());
controlName = (String) msg.getArg(CREATE_MESSAGE_ARGS.CONTROL_NAME.ordinal());
parentSketchName = (String) msg.getArg(CREATE_MESSAGE_ARGS.PARENT_SKETCH_NAME.ordinal());
parentId = (int) msg.getArg(CREATE_MESSAGE_ARGS.PARENT_SKETCH_ID.ordinal());
controlType = ControlType.values ()[(int) msg.getArg(CREATE_MESSAGE_ARGS.CONTROL_TYPE.ordinal())];
objVal = convertValue (controlType, msg.getArg(CREATE_MESSAGE_ARGS.OBJ_VAL.ordinal()));
minimumDisplayValue = convertValue (controlType, msg.getArg(CREATE_MESSAGE_ARGS.MIN_VAL.ordinal()));
maximumDisplayValue = convertValue (controlType, msg.getArg(CREATE_MESSAGE_ARGS.MAX_VAL.ordinal()));
controlScope = ControlScope.values ()[(int) msg.getArg(CREATE_MESSAGE_ARGS.CONTROL_SCOPE.ordinal())];
if (msg.getArgCount() > CREATE_MESSAGE_ARGS.DISPLAY_TYPE_VAL.ordinal())
{
int osc_val = (int) msg.getArg(CREATE_MESSAGE_ARGS.DISPLAY_TYPE_VAL.ordinal());
displayType = DISPLAY_TYPE.values ()[osc_val];
}
synchronized (controlMapLock) {
controlMap.addControl(this);
}
}
/**
     * Get the unique map key created on the device, used to map messages back to this control
* @return The unique key to identify this object
*/
public String getControlMapKey(){
return controlMapKey;
}
/**
* Set the value of the object and notify any listeners
* Additionally, the value will propagate to any controls that match the control scope
* If we are using a trigger, send a random number or a unique value
* @param val the value to set
* @return this object
*/
public DynamicControl setValue(Object val)
{
return setValue(val, 0);
}
/**
* Set the value of the object and notify any listeners
* Additionally, the value will propagate to any controls that match the control scope
* If we are using a trigger, send a random number or a unique value
* @param val the value to set
* @param execution_time the Scheduler time we want this to occur
* @return this object
*/
public DynamicControl setValue(Object val, double execution_time)
{
executionTime = execution_time;
val = convertValue (controlType, val);
if (!objVal.equals(val)) {
if (controlType == ControlType.FLOAT)
{
objVal = (Double) val;
}
else {
objVal = val;
}
notifyGlobalListeners();
scheduleValue(null, val, execution_time);
}
return this;
}
/**
* Gets the value of the control. The type needs to be cast to the required type in the listener
* @return Control Value
*/
public Object getValue(){
return objVal;
}
/**
* The maximum value that we want as a display, for example, in a slider control. Does not limit values in the messages
* @return The maximum value we want a graphical display to be set to
*/
public Object getMaximumDisplayValue(){
return maximumDisplayValue;
}
/**
* Set the minimum display range for display
* @param min minimum display value
*
* @return this
*/
public DynamicControl setMinimumValue(Object min) {minimumDisplayValue = min; return this;}
/**
* Set the maximum display range for display
* @param max maximum display value
* @return this
*/
public DynamicControl setMaximumDisplayValue(Object max) {maximumDisplayValue = max; return this;}
/**
* The minimum value that we want as a display, for example, in a slider control. Does not limit values in the messages
* @return The minimum value we want a graphical display to be set to
*/
public Object getMinimumDisplayValue(){
return minimumDisplayValue;
}
/**
* Get the name of the control used for ControlScope matching. Also displayed in GUI
* @return The name of the control for scope matching
*/
public String getControlName(){
return controlName;
}
/**
* Register Listener to receive changed values in the control
* @param listener Listener to register for events
* @return this
*/
public DynamicControl addControlListener(DynamicControlListener listener)
{
if (listener != null) {
synchronized (controlListenerLock) {
controlListenerList.add(listener);
}
}
return this;
}
/**
* Register Listener to receive changed values in the control that need to be global type messages
* @param listener Listener to register for events
     * @return this object
*/
public DynamicControl addGlobalControlListener(DynamicControlListener listener)
{
if (listener != null) {
synchronized (globalListenerLock) {
globalControlListenerList.add(listener);
}
}
return this;
}
/**
     * Register Listener to receive changed values in the control that need to be received when the value is
     * specifically set from within the sketch
* @param listener Listener to register for events
* @return this
*/
public DynamicControl addValueSetListener(DynamicControlListener listener)
{
if (listener != null) {
synchronized (valueSetListenerLock) {
valueSetListenerList.add(listener);
}
}
return this;
}
/**
* Deregister listener so it no longer receives messages from this control
     * @param listener The listener we are removing
* @return this object
*/
public DynamicControl removeControlListener(DynamicControlListener listener) {
if (listener != null) {
synchronized (controlListenerLock) {
controlListenerList.remove(listener);
}
}
return this;
}
/**
* Deregister listener so it no longer receives messages from this control
     * @param listener the listener we are removing
* @return this object
*/
public DynamicControl removeGlobalControlListener(DynamicControlListener listener) {
if (listener != null) {
synchronized (globalListenerLock) {
globalControlListenerList.remove(listener);
}
}
return this;
}
/**
* Register Listener to receive changed values in the control scope
* @param listener Listener to register for events
* @return this object
*/
public DynamicControl addControlScopeListener(ControlScopeChangedListener listener){
if (listener != null) {
synchronized (controlScopeChangedLock) {
controlScopeChangedList.add(listener);
}
}
return this;
}
/**
* Deregister listener so it no longer receives messages from this control
* @param listener the listener
* @return this object
*/
public DynamicControl removeControlScopeChangedListener(ControlScopeChangedListener listener) {
if (listener != null) {
synchronized (controlScopeChangedLock) {
controlScopeChangedList.remove(listener);
}
}
return this;
}
/**
* Erase all listeners from this control
* @return this object
*/
public DynamicControl eraseListeners()
{
        // We need to cancel any pending scheduled messages before clearing the listeners
synchronized (futureMessageListLock){
for (FutureControlMessage message:
futureMessageList) {
message.pendingSchedule.setCancelled(true);
}
futureMessageList.clear();
}
synchronized (controlListenerLock) {controlListenerList.clear();}
synchronized (controlScopeChangedLock) {controlScopeChangedList.clear();}
return this;
}
/**
* Notify all registered listeners of object value on this device
* @return this object
*/
public DynamicControl notifyLocalListeners()
{
synchronized (controlListenerLock)
{
controlListenerList.forEach(listener ->
{
try
{
listener.update(this);
}
catch (Exception ex)
{
ex.printStackTrace();
}
});
}
return this;
}
/**
* Send Update Message when value set
*/
public void notifyValueSetListeners(){
synchronized (valueSetListenerLock)
{
valueSetListenerList.forEach(listener ->
{
try
{
listener.update(this);
}
catch (Exception ex)
{
ex.printStackTrace();
}
});
}
}
/**
* Send Global Update Message
*/
public void notifyGlobalListeners(){
synchronized (globalListenerLock)
{
globalControlListenerList.forEach(listener ->
{
try
{
listener.update(this);
}
catch (Exception ex)
{
ex.printStackTrace();
}
});
}
}
/**
     * Notify all registered listeners that the control scope has changed
* @return this object
*/
public DynamicControl notifyControlChangeListeners()
{
synchronized (controlScopeChangedLock)
{
controlScopeChangedList.forEach(listener ->
{
try
{
listener.controlScopeChanged(this.getControlScope());
}
catch (Exception ex)
{
ex.printStackTrace();
}
});
}
return this;
}
/**
* Get the tooltip to display
* @return the tooltip to display
*/
public String getTooltipText(){
String control_scope_text = "";
if (getControlScope() == ControlScope.UNIQUE)
{
control_scope_text = "UNIQUE scope";
}
else if (getControlScope() == ControlScope.SKETCH)
{
control_scope_text = "SKETCH scope";
}
else if (getControlScope() == ControlScope.CLASS)
{
control_scope_text = "CLASS scope - " + parentSketchName;
}
else if (getControlScope() == ControlScope.DEVICE)
{
control_scope_text = "DEVICE scope - " + deviceName;
}
else if (getControlScope() == ControlScope.GLOBAL)
{
control_scope_text = "GLOBAL scope";
}
return tooltipPrefix + "\n" + control_scope_text;
}
}<|fim▁end|>
|
}
}
}
|
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>module.exports = NotImplementedError => {
const throwNotImplemented = function () {
throw new NotImplementedError()
};
<|fim▁hole|> * @interface
* @memberof module:client.synth
*/
var Controller = throwNotImplemented;
/**
* Asks the controller to start capturing control events
*
* @memberof module:client.synth.Controller
*
* @return {Promise}
*/
Controller.prototype.attach = throwNotImplemented;
/**
* Asks the controller to stop capturing control events
*
* @memberof module:client.synth.Controller
*
* @return {Promise}
*/
Controller.prototype.detach = throwNotImplemented;
return Controller;
};<|fim▁end|>
|
/**
* Controller control interface
*
|
<|file_name|>scripts.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
function test(){return 5}
|
<|file_name|>test_ja.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os.path import dirname, abspath, join
from nose.tools import with_setup
from tests.asserts import prepare_stdout
from tests.asserts import assert_stdout_lines
from lettuce import Runner
current_dir = abspath(dirname(__file__))
join_path = lambda *x: join(current_dir, *x)
@with_setup(prepare_stdout)
def test_output_with_success_colorless():
"Language: ja -> sucess colorless"
runner = Runner(join_path('ja', 'success', 'dumb.feature'), verbosity=3, no_color=True)
runner.run()
assert_stdout_lines(
u"\n"
u"フィーチャ: ダムフィーチャ # tests/functional/language_specific_features/ja/success/dumb.feature:3\n"
u" テストをグリーンになればテスト成功 # tests/functional/language_specific_features/ja/success/dumb.feature:4\n"
u"\n"
u" シナリオ: 何もしない # tests/functional/language_specific_features/ja/success/dumb.feature:6\n"
u" 前提 何もしない # tests/functional/language_specific_features/ja/success/dumb_steps.py:6\n"
u"\n"
u"1 feature (1 passed)\n"
u"1 scenario (1 passed)\n"
u"1 step (1 passed)\n"
)
@with_setup(prepare_stdout)
def test_output_of_table_with_success_colorless():
"Language: ja -> sucess table colorless"
runner = Runner(join_path('ja', 'success', 'table.feature'), verbosity=3, no_color=True)
runner.run()
assert_stdout_lines(
u"\n"
u"フィーチャ: テーブル記法 # tests/functional/language_specific_features/ja/success/table.feature:3\n"
u" 日本語でのテーブル記法がパスするかのテスト # tests/functional/language_specific_features/ja/success/table.feature:4\n"
u"\n"
u" シナリオ: 何もしないテーブル # tests/functional/language_specific_features/ja/success/table.feature:6\n"
u" 前提 データは以下: # tests/functional/language_specific_features/ja/success/table_steps.py:6\n"
u" | id | 定義 |\n"
u" | 12 | 何かの定義 |\n"
u" | 64 | 別の定義 |\n"
u"\n"
u"1 feature (1 passed)\n"
u"1 scenario (1 passed)\n"
u"1 step (1 passed)\n"
)
@with_setup(prepare_stdout)
def test_output_outlines_success_colorless():
"Language: ja -> sucess outlines colorless"
runner = Runner(join_path('ja', 'success', 'outlines.feature'), verbosity=3, no_color=True)
runner.run()
assert_stdout_lines(
u"\n"
u"フィーチャ: アウトラインを日本語で書く # tests/functional/language_specific_features/ja/success/outlines.feature:3\n"
u" 図表のテストをパスすること # tests/functional/language_specific_features/ja/success/outlines.feature:4\n"
u"\n"
u" シナリオアウトライン: 全てのテストで何もしない # tests/functional/language_specific_features/ja/success/outlines.feature:6\n"
u" 前提 入力値を <データ1> とし # tests/functional/language_specific_features/ja/success/outlines_steps.py:13\n"
u" もし 処理 <方法> を使って # tests/functional/language_specific_features/ja/success/outlines_steps.py:22\n"
u" ならば 表示は <結果> である # tests/functional/language_specific_features/ja/success/outlines_steps.py:31\n"
u"\n"
u" 例:\n"
u" | データ1 | 方法 | 結果 |\n"
u" | 何か | これ | 機能 |\n"
u" | その他 | ここ | 同じ |\n"
u" | データ | 動く | unicodeで! |\n"
u"\n"
u"1 feature (1 passed)\n"
u"3 scenarios (3 passed)\n"
u"9 steps (9 passed)\n"
)
@with_setup(prepare_stdout)
def test_output_outlines_success_colorful():
"Language: ja -> sucess outlines colorful"
runner = Runner(join_path('ja', 'success', 'outlines.feature'), verbosity=3, no_color=False)
runner.run()
assert_stdout_lines(
u'\n'<|fim▁hole|> u'\n'
u"\033[1;37m シナリオアウトライン: 全てのテストで何もしない \033[1;30m# tests/functional/language_specific_features/ja/success/outlines.feature:6\033[0m\n"
u"\033[0;36m 前提 入力値を <データ1> とし \033[1;30m# tests/functional/language_specific_features/ja/success/outlines_steps.py:13\033[0m\n"
u"\033[0;36m もし 処理 <方法> を使って \033[1;30m# tests/functional/language_specific_features/ja/success/outlines_steps.py:22\033[0m\n"
u"\033[0;36m ならば 表示は <結果> である \033[1;30m# tests/functional/language_specific_features/ja/success/outlines_steps.py:31\033[0m\n"
u'\n'
u"\033[1;37m 例:\033[0m\n"
u"\033[0;36m \033[1;37m |\033[0;36m データ1\033[1;37m |\033[0;36m 方法\033[1;37m |\033[0;36m 結果 \033[1;37m |\033[0;36m\033[0m\n"
u"\033[1;32m \033[1;37m |\033[1;32m 何か \033[1;37m |\033[1;32m これ\033[1;37m |\033[1;32m 機能 \033[1;37m |\033[1;32m\033[0m\n"
u"\033[1;32m \033[1;37m |\033[1;32m その他 \033[1;37m |\033[1;32m ここ\033[1;37m |\033[1;32m 同じ \033[1;37m |\033[1;32m\033[0m\n"
u"\033[1;32m \033[1;37m |\033[1;32m データ \033[1;37m |\033[1;32m 動く\033[1;37m |\033[1;32m unicodeで!\033[1;37m |\033[1;32m\033[0m\n"
u'\n'
u"\033[1;37m1 feature (\033[1;32m1 passed\033[1;37m)\033[0m\n"
u"\033[1;37m3 scenarios (\033[1;32m3 passed\033[1;37m)\033[0m\n"
u"\033[1;37m9 steps (\033[1;32m9 passed\033[1;37m)\033[0m\n"
)<|fim▁end|>
|
u"\033[1;37mフィーチャ: アウトラインを日本語で書く \033[1;30m# tests/functional/language_specific_features/ja/success/outlines.feature:3\033[0m\n"
u"\033[1;37m 図表のテストをパスすること \033[1;30m# tests/functional/language_specific_features/ja/success/outlines.feature:4\033[0m\n"
|
<|file_name|>marked.js<|end_file_name|><|fim▁begin|>/**
* marked - a markdown parser
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*/
;(function() {
/**
* Block-Level Grammar
*/
var block = {
newline: /^\n+/,
code: /^( {4}[^\n]+\n*)+/,
fences: noop,
hr: /^( *[-*_]){3,} *(?:\n+|$)/,
heading: /^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,
nptable: noop,
lheading: /^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/,
blockquote: /^( *>[^\n]+(\n(?!def)[^\n]+)*\n*)+/,
list: /^( *)(bull) [\s\S]+?(?:hr|def|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,
html: /^ *(?:comment *(?:\n|\s*$)|closed *(?:\n{2,}|\s*$)|closing *(?:\n{2,}|\s*$))/,
def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,
table: noop,
paragraph: /^((?:[^\n]+\n?(?!hr|heading|lheading|blockquote|tag|def))+)\n*/,
text: /^[^\n]+/
};
block.bullet = /(?:[*+-]|\d+\.)/;
block.item = /^( *)(bull) [^\n]*(?:\n(?!\1bull )[^\n]*)*/;
block.item = replace(block.item, 'gm')
(/bull/g, block.bullet)
();
block.list = replace(block.list)
(/bull/g, block.bullet)
('hr', '\\n+(?=\\1?(?:[-*_] *){3,}(?:\\n+|$))')
('def', '\\n+(?=' + block.def.source + ')')
();
block.blockquote = replace(block.blockquote)
('def', block.def)
();
block._tag = '(?!(?:'
+ 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code'
+ '|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo'
+ '|span|br|wbr|ins|del|img)\\b)\\w+(?!:/|[^\\w\\s@]*@)\\b';
block.html = replace(block.html)
('comment', /<!--[\s\S]*?-->/)
('closed', /<(tag)[\s\S]+?<\/\1>/)
('closing', /<tag(?:"[^"]*"|'[^']*'|[^'">])*?>/)
(/tag/g, block._tag)
();
block.paragraph = replace(block.paragraph)
('hr', block.hr)
('heading', block.heading)
('lheading', block.lheading)
('blockquote', block.blockquote)
('tag', '<' + block._tag)
('def', block.def)
();
/**
* Normal Block Grammar
*/
block.normal = merge({}, block);
/**
* GFM Block Grammar
*/
block.gfm = merge({}, block.normal, {
fences: /^ *(`{3,}|~{3,})[ \.]*(\S+)? *\n([\s\S]*?)\s*\1 *(?:\n+|$)/,
paragraph: /^/,
heading: /^ *(#{1,6}) +([^\n]+?) *#* *(?:\n+|$)/
});
block.gfm.paragraph = replace(block.paragraph)
('(?!', '(?!'
+ block.gfm.fences.source.replace('\\1', '\\2') + '|'
+ block.list.source.replace('\\1', '\\3') + '|')
();
/**
* GFM + Tables Block Grammar
*/
block.tables = merge({}, block.gfm, {
nptable: /^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*/,
table: /^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*/
});
/**
* Block Lexer
*/
function Lexer(options) {
this.tokens = [];
this.tokens.links = {};
this.options = options || marked.defaults;
this.rules = block.normal;
if (this.options.gfm) {
if (this.options.tables) {
this.rules = block.tables;
} else {
this.rules = block.gfm;
}
}
}
/**
* Expose Block Rules
*/
Lexer.rules = block;
/**
* Static Lex Method
*/
Lexer.lex = function(src, options) {
var lexer = new Lexer(options);
return lexer.lex(src);
};
/**
* Preprocessing
*/
Lexer.prototype.lex = function(src) {
src = src
.replace(/\r\n|\r/g, '\n')
.replace(/\t/g, ' ')
.replace(/\u00a0/g, ' ')
.replace(/\u2424/g, '\n');<|fim▁hole|>/**
* Lexing
*/
Lexer.prototype.token = function(src, top, bq) {
var src = src.replace(/^ +$/gm, '')
, next
, loose
, cap
, bull
, b
, item
, space
, i
, l;
while (src) {
// newline
if (cap = this.rules.newline.exec(src)) {
src = src.substring(cap[0].length);
if (cap[0].length > 1) {
this.tokens.push({
type: 'space'
});
}
}
// code
if (cap = this.rules.code.exec(src)) {
src = src.substring(cap[0].length);
cap = cap[0].replace(/^ {4}/gm, '');
this.tokens.push({
type: 'code',
text: !this.options.pedantic
? cap.replace(/\n+$/, '')
: cap
});
continue;
}
// fences (gfm)
if (cap = this.rules.fences.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'code',
lang: cap[2],
text: cap[3] || ''
});
continue;
}
// heading
if (cap = this.rules.heading.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'heading',
depth: cap[1].length,
text: cap[2]
});
continue;
}
// table no leading pipe (gfm)
if (top && (cap = this.rules.nptable.exec(src))) {
src = src.substring(cap[0].length);
item = {
type: 'table',
header: cap[1].replace(/^ *| *\| *$/g, '').split(/ *\| */),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
cells: cap[3].replace(/\n$/, '').split('\n')
};
for (i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
} else {
item.align[i] = null;
}
}
for (i = 0; i < item.cells.length; i++) {
item.cells[i] = item.cells[i].split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// lheading
if (cap = this.rules.lheading.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'heading',
depth: cap[2] === '=' ? 1 : 2,
text: cap[1]
});
continue;
}
// hr
if (cap = this.rules.hr.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'hr'
});
continue;
}
// blockquote
if (cap = this.rules.blockquote.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'blockquote_start'
});
cap = cap[0].replace(/^ *> ?/gm, '');
// Pass `top` to keep the current
// "toplevel" state. This is exactly
// how markdown.pl works.
this.token(cap, top, true);
this.tokens.push({
type: 'blockquote_end'
});
continue;
}
// list
if (cap = this.rules.list.exec(src)) {
src = src.substring(cap[0].length);
bull = cap[2];
this.tokens.push({
type: 'list_start',
ordered: bull.length > 1
});
// Get each top-level item.
cap = cap[0].match(this.rules.item);
next = false;
l = cap.length;
i = 0;
for (; i < l; i++) {
item = cap[i];
// Remove the list item's bullet
// so it is seen as the next token.
space = item.length;
item = item.replace(/^ *([*+-]|\d+\.) +/, '');
// Outdent whatever the
// list item contains. Hacky.
if (~item.indexOf('\n ')) {
space -= item.length;
item = !this.options.pedantic
? item.replace(new RegExp('^ {1,' + space + '}', 'gm'), '')
: item.replace(/^ {1,4}/gm, '');
}
// Determine whether the next list item belongs here.
// Backpedal if it does not belong in this list.
if (this.options.smartLists && i !== l - 1) {
b = block.bullet.exec(cap[i + 1])[0];
if (bull !== b && !(bull.length > 1 && b.length > 1)) {
src = cap.slice(i + 1).join('\n') + src;
i = l - 1;
}
}
// Determine whether item is loose or not.
// Use: /(^|\n)(?! )[^\n]+\n\n(?!\s*$)/
// for discount behavior.
loose = next || /\n\n(?!\s*$)/.test(item);
if (i !== l - 1) {
next = item.charAt(item.length - 1) === '\n';
if (!loose) loose = next;
}
this.tokens.push({
type: loose
? 'loose_item_start'
: 'list_item_start'
});
// Recurse.
this.token(item, false, bq);
this.tokens.push({
type: 'list_item_end'
});
}
this.tokens.push({
type: 'list_end'
});
continue;
}
// html
if (cap = this.rules.html.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: this.options.sanitize
? 'paragraph'
: 'html',
pre: !this.options.sanitizer
&& (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
text: cap[0]
});
continue;
}
// def
if ((!bq && top) && (cap = this.rules.def.exec(src))) {
src = src.substring(cap[0].length);
this.tokens.links[cap[1].toLowerCase()] = {
href: cap[2],
title: cap[3]
};
continue;
}
// table (gfm)
if (top && (cap = this.rules.table.exec(src))) {
src = src.substring(cap[0].length);
item = {
type: 'table',
header: cap[1].replace(/^ *| *\| *$/g, '').split(/ *\| */),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
cells: cap[3].replace(/(?: *\| *)?\n$/, '').split('\n')
};
for (i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
} else {
item.align[i] = null;
}
}
for (i = 0; i < item.cells.length; i++) {
item.cells[i] = item.cells[i]
.replace(/^ *\| *| *\| *$/g, '')
.split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// top-level paragraph
if (top && (cap = this.rules.paragraph.exec(src))) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'paragraph',
text: cap[1].charAt(cap[1].length - 1) === '\n'
? cap[1].slice(0, -1)
: cap[1]
});
continue;
}
// text
if (cap = this.rules.text.exec(src)) {
// Top-level should never reach here.
src = src.substring(cap[0].length);
this.tokens.push({
type: 'text',
text: cap[0]
});
continue;
}
if (src) {
throw new
Error('Infinite loop on byte: ' + src.charCodeAt(0));
}
}
return this.tokens;
};
/**
* Inline-Level Grammar
*/
var inline = {
escape: /^\\([\\`*{}\[\]()#+\-.!_>])/,
autolink: /^<([^ >]+(@|:\/)[^ >]+)>/,
url: noop,
tag: /^<!--[\s\S]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^'">])*?>/,
link: /^!?\[(inside)\]\(href\)/,
reflink: /^!?\[(inside)\]\s*\[([^\]]*)\]/,
nolink: /^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,
strong: /^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)/,
em: /^\b_((?:[^_]|__)+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)/,
code: /^(`+)([\s\S]*?[^`])\1(?!`)/,
br: /^ {2,}\n(?!\s*$)/,
del: noop,
text: /^[\s\S]+?(?=[\\<!\[_*`]| {2,}\n|$)/
};
inline._inside = /(?:\[[^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*/;
inline._href = /\s*<?([\s\S]*?)>?(?:\s+['"]([\s\S]*?)['"])?\s*/;
inline.link = replace(inline.link)
('inside', inline._inside)
('href', inline._href)
();
inline.reflink = replace(inline.reflink)
('inside', inline._inside)
();
/**
* Normal Inline Grammar
*/
inline.normal = merge({}, inline);
/**
* Pedantic Inline Grammar
*/
inline.pedantic = merge({}, inline.normal, {
strong: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
em: /^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/
});
/**
* GFM Inline Grammar
*/
inline.gfm = merge({}, inline.normal, {
escape: replace(inline.escape)('])', '~|])')(),
url: /^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,
del: /^~~(?=\S)([\s\S]*?\S)~~/,
text: replace(inline.text)
(']|', '~]|')
('|', '|https?://|')
()
});
/**
* GFM + Line Breaks Inline Grammar
*/
inline.breaks = merge({}, inline.gfm, {
br: replace(inline.br)('{2,}', '*')(),
text: replace(inline.gfm.text)('{2,}', '*')()
});
/**
* Inline Lexer & Compiler
*/
function InlineLexer(links, options) {
this.options = options || marked.defaults;
this.links = links;
this.rules = inline.normal;
this.renderer = this.options.renderer || new Renderer;
this.renderer.options = this.options;
if (!this.links) {
throw new
Error('Tokens array requires a `links` property.');
}
if (this.options.gfm) {
if (this.options.breaks) {
this.rules = inline.breaks;
} else {
this.rules = inline.gfm;
}
} else if (this.options.pedantic) {
this.rules = inline.pedantic;
}
}
/**
* Expose Inline Rules
*/
InlineLexer.rules = inline;
/**
* Static Lexing/Compiling Method
*/
InlineLexer.output = function(src, links, options) {
var inline = new InlineLexer(links, options);
return inline.output(src);
};
/**
* Lexing/Compiling
*/
InlineLexer.prototype.output = function(src) {
var out = ''
, link
, text
, href
, cap;
while (src) {
// escape
if (cap = this.rules.escape.exec(src)) {
src = src.substring(cap[0].length);
out += cap[1];
continue;
}
// autolink
if (cap = this.rules.autolink.exec(src)) {
src = src.substring(cap[0].length);
if (cap[2] === '@') {
text = cap[1].charAt(6) === ':'
? this.mangle(cap[1].substring(7))
: this.mangle(cap[1]);
href = this.mangle('mailto:') + text;
} else {
text = escape(cap[1]);
href = text;
}
out += this.renderer.link(href, null, text);
continue;
}
// url (gfm)
if (!this.inLink && (cap = this.rules.url.exec(src))) {
src = src.substring(cap[0].length);
text = escape(cap[1]);
href = text;
out += this.renderer.link(href, null, text);
continue;
}
// tag
if (cap = this.rules.tag.exec(src)) {
if (!this.inLink && /^<a /i.test(cap[0])) {
this.inLink = true;
} else if (this.inLink && /^<\/a>/i.test(cap[0])) {
this.inLink = false;
}
src = src.substring(cap[0].length);
out += this.options.sanitize
? this.options.sanitizer
? this.options.sanitizer(cap[0])
: escape(cap[0])
        : cap[0];
continue;
}
// link
if (cap = this.rules.link.exec(src)) {
src = src.substring(cap[0].length);
this.inLink = true;
out += this.outputLink(cap, {
href: cap[2],
title: cap[3]
});
this.inLink = false;
continue;
}
// reflink, nolink
if ((cap = this.rules.reflink.exec(src))
|| (cap = this.rules.nolink.exec(src))) {
src = src.substring(cap[0].length);
link = (cap[2] || cap[1]).replace(/\s+/g, ' ');
link = this.links[link.toLowerCase()];
if (!link || !link.href) {
out += cap[0].charAt(0);
src = cap[0].substring(1) + src;
continue;
}
this.inLink = true;
out += this.outputLink(cap, link);
this.inLink = false;
continue;
}
// strong
if (cap = this.rules.strong.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.strong(this.output(cap[2] || cap[1]));
continue;
}
// em
if (cap = this.rules.em.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.em(this.output(cap[2] || cap[1]));
continue;
}
// code
if (cap = this.rules.code.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.codespan(escape(cap[2].trim(), true));
continue;
}
// br
if (cap = this.rules.br.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.br();
continue;
}
// del (gfm)
if (cap = this.rules.del.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.del(this.output(cap[1]));
continue;
}
// text
if (cap = this.rules.text.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.text(escape(this.smartypants(cap[0])));
continue;
}
if (src) {
throw new
Error('Infinite loop on byte: ' + src.charCodeAt(0));
}
}
return out;
};
/**
* Compile Link
*/
InlineLexer.prototype.outputLink = function(cap, link) {
var href = escape(link.href)
, title = link.title ? escape(link.title) : null;
return cap[0].charAt(0) !== '!'
? this.renderer.link(href, title, this.output(cap[1]))
: this.renderer.image(href, title, escape(cap[1]));
};
/**
* Smartypants Transformations
*/
InlineLexer.prototype.smartypants = function(text) {
if (!this.options.smartypants) return text;
return text
// em-dashes
.replace(/---/g, '\u2014')
// en-dashes
.replace(/--/g, '\u2013')
// opening singles
.replace(/(^|[-\u2014/(\[{"\s])'/g, '$1\u2018')
// closing singles & apostrophes
.replace(/'/g, '\u2019')
// opening doubles
.replace(/(^|[-\u2014/(\[{\u2018\s])"/g, '$1\u201c')
// closing doubles
.replace(/"/g, '\u201d')
// ellipses
.replace(/\.{3}/g, '\u2026');
};
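  // Example (illustrative), with options.smartypants enabled:
  //   smartypants('"Hi" -- it\'s fine...')
  //     => '\u201cHi\u201d \u2013 it\u2019s fine\u2026'
  // i.e. curly quotes, an en dash, a curly apostrophe and an ellipsis.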
/**
* Mangle Links
*/
InlineLexer.prototype.mangle = function(text) {
if (!this.options.mangle) return text;
var out = ''
, l = text.length
, i = 0
, ch;
for (; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
out += '&#' + ch + ';';
}
return out;
};
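  // Example (illustrative): mangle('a@b') yields entity escapes such as
  // '&#97;&#64;&#98;', with roughly half the characters randomly emitted in
  // hex form ('&#x61;'), making mailto addresses harder for scrapers to read.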
/**
* Renderer
*/
function Renderer(options) {
this.options = options || {};
}
Renderer.prototype.code = function(code, lang, escaped) {
if (this.options.highlight) {
var out = this.options.highlight(code, lang);
if (out != null && out !== code) {
escaped = true;
code = out;
}
}
if (!lang) {
return '<pre><code>'
+ (escaped ? code : escape(code, true))
+ '\n</code></pre>';
}
return '<pre><code class="'
+ this.options.langPrefix
+ escape(lang, true)
+ '">'
+ (escaped ? code : escape(code, true))
+ '\n</code></pre>\n';
};
Renderer.prototype.blockquote = function(quote) {
return '<blockquote>\n' + quote + '</blockquote>\n';
};
Renderer.prototype.html = function(html) {
return html;
};
Renderer.prototype.heading = function(text, level, raw) {
return '<h'
+ level
+ ' id="'
+ this.options.headerPrefix
+ raw.toLowerCase().replace(/[^\w]+/g, '-')
+ '">'
+ text
+ '</h'
+ level
+ '>\n';
};
Renderer.prototype.hr = function() {
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
};
Renderer.prototype.list = function(body, ordered) {
var type = ordered ? 'ol' : 'ul';
return '<' + type + '>\n' + body + '</' + type + '>\n';
};
Renderer.prototype.listitem = function(text) {
return '<li>' + text + '</li>\n';
};
Renderer.prototype.paragraph = function(text) {
return '<p>' + text + '</p>\n';
};
Renderer.prototype.table = function(header, body) {
return '<table>\n'
+ '<thead>\n'
+ header
+ '</thead>\n'
+ '<tbody>\n'
+ body
+ '</tbody>\n'
+ '</table>\n';
};
Renderer.prototype.tablerow = function(content) {
return '<tr>\n' + content + '</tr>\n';
};
Renderer.prototype.tablecell = function(content, flags) {
var type = flags.header ? 'th' : 'td';
var tag = flags.align
? '<' + type + ' style="text-align:' + flags.align + '">'
: '<' + type + '>';
return tag + content + '</' + type + '>\n';
};
// span level renderer
Renderer.prototype.strong = function(text) {
return '<strong>' + text + '</strong>';
};
Renderer.prototype.em = function(text) {
return '<em>' + text + '</em>';
};
Renderer.prototype.codespan = function(text) {
return '<code>' + text + '</code>';
};
Renderer.prototype.br = function() {
return this.options.xhtml ? '<br/>' : '<br>';
};
Renderer.prototype.del = function(text) {
return '<del>' + text + '</del>';
};
Renderer.prototype.link = function(href, title, text) {
if (this.options.sanitize) {
try {
var prot = decodeURIComponent(unescape(href))
.replace(/[^\w:]/g, '')
.toLowerCase();
} catch (e) {
return '';
}
if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0) {
return '';
}
}
if (this.options.baseUrl && !originIndependentUrl.test(href)) {
href = resolveUrl(this.options.baseUrl, href);
}
var out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
}
out += '>' + text + '</a>';
return out;
};
Renderer.prototype.image = function(href, title, text) {
if (this.options.baseUrl && !originIndependentUrl.test(href)) {
href = resolveUrl(this.options.baseUrl, href);
}
var out = '<img src="' + href + '" alt="' + text + '"';
if (title) {
out += ' title="' + title + '"';
}
out += this.options.xhtml ? '/>' : '>';
return out;
};
Renderer.prototype.text = function(text) {
return text;
};
/**
* Parsing & Compiling
*/
function Parser(options) {
this.tokens = [];
this.token = null;
this.options = options || marked.defaults;
this.options.renderer = this.options.renderer || new Renderer;
this.renderer = this.options.renderer;
this.renderer.options = this.options;
}
/**
* Static Parse Method
*/
Parser.parse = function(src, options, renderer) {
var parser = new Parser(options, renderer);
return parser.parse(src);
};
/**
* Parse Loop
*/
Parser.prototype.parse = function(src) {
this.inline = new InlineLexer(src.links, this.options, this.renderer);
this.tokens = src.reverse();
var out = '';
while (this.next()) {
out += this.tok();
}
return out;
};
/**
* Next Token
*/
Parser.prototype.next = function() {
return this.token = this.tokens.pop();
};
/**
* Preview Next Token
*/
Parser.prototype.peek = function() {
return this.tokens[this.tokens.length - 1] || 0;
};
/**
* Parse Text Tokens
*/
Parser.prototype.parseText = function() {
var body = this.token.text;
while (this.peek().type === 'text') {
body += '\n' + this.next().text;
}
return this.inline.output(body);
};
/**
* Parse Current Token
*/
Parser.prototype.tok = function() {
switch (this.token.type) {
case 'space': {
return '';
}
case 'hr': {
return this.renderer.hr();
}
case 'heading': {
return this.renderer.heading(
this.inline.output(this.token.text),
this.token.depth,
this.token.text);
}
case 'code': {
return this.renderer.code(this.token.text,
this.token.lang,
this.token.escaped);
}
case 'table': {
var header = ''
, body = ''
, i
, row
, cell
, flags
, j;
// header
cell = '';
for (i = 0; i < this.token.header.length; i++) {
        flags = { header: true, align: this.token.align[i] };
        cell += this.renderer.tablecell(
          this.inline.output(this.token.header[i]),
          flags
        );
}
header += this.renderer.tablerow(cell);
for (i = 0; i < this.token.cells.length; i++) {
row = this.token.cells[i];
cell = '';
for (j = 0; j < row.length; j++) {
cell += this.renderer.tablecell(
this.inline.output(row[j]),
{ header: false, align: this.token.align[j] }
);
}
body += this.renderer.tablerow(cell);
}
return this.renderer.table(header, body);
}
case 'blockquote_start': {
var body = '';
while (this.next().type !== 'blockquote_end') {
body += this.tok();
}
return this.renderer.blockquote(body);
}
case 'list_start': {
var body = ''
, ordered = this.token.ordered;
while (this.next().type !== 'list_end') {
body += this.tok();
}
return this.renderer.list(body, ordered);
}
case 'list_item_start': {
var body = '';
while (this.next().type !== 'list_item_end') {
body += this.token.type === 'text'
? this.parseText()
: this.tok();
}
return this.renderer.listitem(body);
}
case 'loose_item_start': {
var body = '';
while (this.next().type !== 'list_item_end') {
body += this.tok();
}
return this.renderer.listitem(body);
}
case 'html': {
var html = !this.token.pre && !this.options.pedantic
? this.inline.output(this.token.text)
: this.token.text;
return this.renderer.html(html);
}
case 'paragraph': {
return this.renderer.paragraph(this.inline.output(this.token.text));
}
case 'text': {
return this.renderer.paragraph(this.parseText());
}
}
};
/**
* Helpers
*/
function escape(html, encode) {
return html
    .replace(!encode ? /&(?!#?\w+;)/g : /&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
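// Example (illustrative): escape('<a href="x">') => '&lt;a href=&quot;x&quot;&gt;'.
// With encode=true every '&' is escaped, so existing entities are double-escaped.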
function unescape(html) {
// explicitly match decimal, hex, and named HTML entities
return html.replace(/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/g, function(_, n) {
n = n.toLowerCase();
if (n === 'colon') return ':';
if (n.charAt(0) === '#') {
return n.charAt(1) === 'x'
? String.fromCharCode(parseInt(n.substring(2), 16))
: String.fromCharCode(+n.substring(1));
}
return '';
});
}
function replace(regex, opt) {
regex = regex.source;
opt = opt || '';
return function self(name, val) {
if (!name) return new RegExp(regex, opt);
val = val.source || val;
val = val.replace(/(^|[^\[])\^/g, '$1');
regex = regex.replace(name, val);
return self;
};
}
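// Usage sketch (illustrative): replace() returns a curried builder.
//   var re = replace(/^(bull) /)(/bull/g, /[*+-]/)();  // => /^([*+-]) /
// Each call substitutes `name` in the pattern source with `val`; a final call
// with no arguments compiles and returns the finished RegExp.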
function resolveUrl(base, href) {
if (!baseUrls[' ' + base]) {
// we can ignore everything in base after the last slash of its path component,
// but we might need to add _that_
// https://tools.ietf.org/html/rfc3986#section-3
if (/^[^:]+:\/*[^/]*$/.test(base)) {
baseUrls[' ' + base] = base + '/';
} else {
baseUrls[' ' + base] = base.replace(/[^/]*$/, '');
}
}
base = baseUrls[' ' + base];
if (href.slice(0, 2) === '//') {
return base.replace(/:[^]*/, ':') + href;
} else if (href.charAt(0) === '/') {
return base.replace(/(:\/*[^/]*)[^]*/, '$1') + href;
} else {
return base + href;
}
}
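// Worked examples (illustrative), with base = 'http://example.com/docs/page.html':
//   resolveUrl(base, '//cdn.example.com/x.png') => 'http://cdn.example.com/x.png'
//   resolveUrl(base, '/img/x.png')              => 'http://example.com/img/x.png'
//   resolveUrl(base, 'x.png')                   => 'http://example.com/docs/x.png'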
var baseUrls = {};
var originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
function noop() {}
noop.exec = noop;
function merge(obj) {
var i = 1
, target
, key;
for (; i < arguments.length; i++) {
target = arguments[i];
for (key in target) {
if (Object.prototype.hasOwnProperty.call(target, key)) {
obj[key] = target[key];
}
}
}
return obj;
}
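// Example (illustrative): merge({}, {a: 1}, {a: 2, b: 3}) => {a: 2, b: 3};
// later sources win, which is how user options override marked.defaults below.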
/**
* Marked
*/
function marked(src, opt, callback) {
if (callback || typeof opt === 'function') {
if (!callback) {
callback = opt;
opt = null;
}
opt = merge({}, marked.defaults, opt || {});
var highlight = opt.highlight
, tokens
, pending
, i = 0;
try {
tokens = Lexer.lex(src, opt)
} catch (e) {
return callback(e);
}
pending = tokens.length;
var done = function(err) {
if (err) {
opt.highlight = highlight;
return callback(err);
}
var out;
try {
out = Parser.parse(tokens, opt);
} catch (e) {
err = e;
}
opt.highlight = highlight;
return err
? callback(err)
: callback(null, out);
};
if (!highlight || highlight.length < 3) {
return done();
}
delete opt.highlight;
if (!pending) return done();
for (; i < tokens.length; i++) {
(function(token) {
if (token.type !== 'code') {
return --pending || done();
}
return highlight(token.text, token.lang, function(err, code) {
if (err) return done(err);
if (code == null || code === token.text) {
return --pending || done();
}
token.text = code;
token.escaped = true;
--pending || done();
});
})(tokens[i]);
}
return;
}
try {
if (opt) opt = merge({}, marked.defaults, opt);
return Parser.parse(Lexer.lex(src, opt), opt);
} catch (e) {
e.message += '\nPlease report this to https://github.com/chjj/marked.';
if ((opt || marked.defaults).silent) {
      return '<p>An error occurred:</p><pre>'
+ escape(e.message + '', true)
+ '</pre>';
}
throw e;
}
}
/**
* Options
*/
marked.options =
marked.setOptions = function(opt) {
merge(marked.defaults, opt);
return marked;
};
marked.defaults = {
gfm: true,
tables: true,
breaks: false,
pedantic: false,
sanitize: false,
sanitizer: null,
mangle: true,
smartLists: false,
silent: false,
highlight: null,
langPrefix: 'lang-',
smartypants: false,
headerPrefix: '',
renderer: new Renderer,
xhtml: false,
baseUrl: null
};
/**
* Expose
*/
marked.Parser = Parser;
marked.parser = Parser.parse;
marked.Renderer = Renderer;
marked.Lexer = Lexer;
marked.lexer = Lexer.lex;
marked.InlineLexer = InlineLexer;
marked.inlineLexer = InlineLexer.output;
marked.parse = marked;
if (typeof module !== 'undefined' && typeof exports === 'object') {
module.exports = marked;
} else if (typeof define === 'function' && define.amd) {
define(function() { return marked; });
} else {
this.marked = marked;
}
}).call(function() {
return this || (typeof window !== 'undefined' ? window : global);
}());<|fim▁end|>
|
return this.token(src, true);
};
|
<|file_name|>Image.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
#
# Copyright © 2003 - 2018 Michal Čihař <[email protected]>
#
# This file is part of Wammu <https://wammu.eu/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
'''
Wammu - Phone manager
Image displaying classes to be embdeded inside wxHTML
'''
import io<|fim▁hole|>
defaultbmp = [
'20 20 2 1',
'. c Black',
' c None',
' ',
' .. .. ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ...... ',
' .... ',
' .... ',
' ...... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' ... ... ',
' .. .. ',
' ']
class MemoryInputStream(wx.InputStream):
def __init__(self, data):
        wx.InputStream.__init__(self, io.BytesIO(data))  # binary data needs BytesIO, not StringIO
class EncodedBitmap(wx.StaticBitmap):
def __init__(self, parent, tooltip='Image', image=None, size=None, scale=1):
if image is None:
image = defaultbmp
image = wx.ImageFromStream(MemoryInputStream(base64.b64decode(image)))
if scale > 1:
bitmap = wx.BitmapFromImage(image.Scale(image.GetWidth() * scale, image.GetHeight() * scale))
else:
bitmap = wx.BitmapFromImage(image)
wx.StaticBitmap.__init__(self, parent, -1, bitmap, (0, 0))
self.SetToolTipString(tooltip)
class Bitmap(wx.StaticBitmap):
def __init__(self, parent, tooltip='Image', image=None, size=None, scale=1):
if image is None:
image = defaultbmp
bitmap = wx.BitmapFromXPMData(image)
if scale > 1:
img = wx.ImageFromBitmap(bitmap)
bitmap = wx.BitmapFromImage(img.Scale(bitmap.GetWidth() * scale, bitmap.GetHeight() * scale))
wx.StaticBitmap.__init__(self, parent, -1, bitmap, (0, 0))
self.SetToolTipString(tooltip)
class Throbber(wx.lib.throbber.Throbber):
def __init__(self, parent, tooltip='Animation', images=None, size=None, scale=1, delay=0.1):
if images is None:
images = [defaultbmp]
bitmaps = []
for im in images:
bitmap = wx.BitmapFromXPMData(im)
if scale > 1:
img = wx.ImageFromBitmap(bitmap)
bitmap = wx.BitmapFromImage(img.Scale(bitmap.GetWidth() * scale, bitmap.GetHeight() * scale))
bitmaps.append(bitmap)
wx.lib.throbber.Throbber.__init__(self, parent, -1, bitmaps, frameDelay=delay)
self.SetToolTipString(tooltip)<|fim▁end|>
|
import wx
import wx.lib.throbber
import base64
|
<|file_name|>jquery.countto.js<|end_file_name|><|fim▁begin|>(function ($) {
$.fn.countTo = function (options) {
options = options || {};
return $(this).each(function () {
// set options for current element
var settings = $.extend({}, $.fn.countTo.defaults, {
from: $(this).data('from'),
to: $(this).data('to'),
speed: $(this).data('speed'),
refreshInterval: $(this).data('refresh-interval'),
decimals: $(this).data('decimals')
}, options);
// how many times to update the value, and how much to increment the value on each update
var loops = Math.ceil(settings.speed / settings.refreshInterval),
increment = (settings.to - settings.from) / loops;
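			// Worked example (illustrative): with the default speed of 1000ms and
			// refreshInterval of 100ms, loops = 10; counting from 0 to 50 then
			// advances the value by increment = 5 on every tick.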
<|fim▁hole|> $self = $(this),
loopCount = 0,
value = settings.from,
data = $self.data('countTo') || {};
$self.data('countTo', data);
// if an existing interval can be found, clear it first
if (data.interval) {
clearInterval(data.interval);
}
data.interval = setInterval(updateTimer, settings.refreshInterval);
// initialize the element with the starting value
render(value);
function updateTimer() {
value += increment;
loopCount++;
render(value);
if (typeof(settings.onUpdate) == 'function') {
settings.onUpdate.call(self, value);
}
if (loopCount >= loops) {
// remove the interval
$self.removeData('countTo');
clearInterval(data.interval);
value = settings.to;
if (typeof(settings.onComplete) == 'function') {
settings.onComplete.call(self, value);
}
}
}
function render(value) {
var formattedValue = settings.formatter.call(self, value, settings);
$self.html(formattedValue);
}
});
};
$.fn.countTo.defaults = {
from: 0, // the number the element should start at
to: 0, // the number the element should end at
speed: 1000, // how long it should take to count between the target numbers
refreshInterval: 100, // how often the element should be updated
decimals: 0, // the number of decimal places to show
formatter: formatter, // handler for formatting the value before rendering
onUpdate: null, // callback method for every time the element is updated
onComplete: null // callback method for when the element finishes updating
};
function formatter(value, settings) {
return value.toFixed(settings.decimals);
}
}(jQuery));<|fim▁end|>
|
// references & variables that will change with each update
var self = this,
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// in the event of macro failure, uncomment these lines:
//#![feature(trace_macros)]
//trace_macros!(true);
//use super::*;
pub mod employee_setup;
pub mod generic;<|fim▁hole|>pub mod general;<|fim▁end|>
| |
<|file_name|>claimcode.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*************************************************************
* Current structure of a Player document:<|fim▁hole|> * AvatarUrl - [public] image url
* Allegiance - [public] player's allegiance/team
*
************************************************************/
import * as Defaults from './defaults';
import * as Universal from './universal';
export const COLLECTION_PATH = "claimCodes";
export const FIELD__ID = Universal.FIELD__USER_ID;
export const FIELD__CODE = "code";
export const FIELD__REDEEMER = "redeemer";
export const FIELD__TIMESTAMP = "timestamp";
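// Example (illustrative): create("ABC123") below returns
//   { code: "ABC123", redeemer: Defaults.EMPTY_REWARD_REDEEMER },
// i.e. an unredeemed claim-code document for the COLLECTION_PATH collection.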
export function create(code: string): { [key: string]: any; } {
return {
[FIELD__CODE]: code,
[FIELD__REDEEMER]: Defaults.EMPTY_REWARD_REDEEMER
};
}<|fim▁end|>
|
*
* Player
* UserId - [private] user associated with this player
* Name - [public] nickname
|
<|file_name|>test_resample.py<|end_file_name|><|fim▁begin|># pylint: disable=E1101
from datetime import datetime, timedelta
from functools import partial
from pandas.compat import range, lrange, zip, product
import numpy as np
from pandas import (Series, TimeSeries, DataFrame, Panel, Index,
isnull, notnull, Timestamp)
from pandas.core.groupby import DataError
from pandas.tseries.index import date_range
from pandas.tseries.tdi import timedelta_range
from pandas.tseries.offsets import Minute, BDay
from pandas.tseries.period import period_range, PeriodIndex, Period
from pandas.tseries.resample import DatetimeIndex, TimeGrouper
from pandas.tseries.frequencies import MONTHS, DAYS
import pandas.tseries.offsets as offsets
import pandas as pd
import nose
from pandas.util.testing import (assert_series_equal, assert_almost_equal,
assert_frame_equal)
import pandas.util.testing as tm
bday = BDay()
class TestResample(tm.TestCase):
_multiprocess_can_split_ = True
def setUp(self):
dti = DatetimeIndex(start=datetime(2005, 1, 1),
end=datetime(2005, 1, 10), freq='Min')
self.series = Series(np.random.rand(len(dti)), dti)
def test_custom_grouper(self):
dti = DatetimeIndex(freq='Min', start=datetime(2005, 1, 1),
end=datetime(2005, 1, 10))
s = Series(np.array([1] * len(dti)), index=dti, dtype='int64')
b = TimeGrouper(Minute(5))
g = s.groupby(b)
# check all cython functions work
funcs = ['add', 'mean', 'prod', 'ohlc', 'min', 'max', 'var']
for f in funcs:
g._cython_agg_general(f)
b = TimeGrouper(Minute(5), closed='right', label='right')
g = s.groupby(b)
# check all cython functions work
funcs = ['add', 'mean', 'prod', 'ohlc', 'min', 'max', 'var']
for f in funcs:
g._cython_agg_general(f)
self.assertEqual(g.ngroups, 2593)
self.assertTrue(notnull(g.mean()).all())
# construct expected val
arr = [1] + [5] * 2592
idx = dti[0:-1:5]
idx = idx.append(dti[-1:])
expect = Series(arr, index=idx)
        # GH2763 - return input dtype if we can
result = g.agg(np.sum)
assert_series_equal(result, expect)
df = DataFrame(np.random.rand(len(dti), 10), index=dti, dtype='float64')
r = df.groupby(b).agg(np.sum)
self.assertEqual(len(r.columns), 10)
self.assertEqual(len(r.index), 2593)
def test_resample_basic(self):
rng = date_range('1/1/2000 00:00:00', '1/1/2000 00:13:00', freq='min',
name='index')
s = Series(np.random.randn(14), index=rng)
result = s.resample('5min', how='mean', closed='right', label='right')
exp_idx = date_range('1/1/2000', periods=4, freq='5min', name='index')
expected = Series([s[0], s[1:6].mean(), s[6:11].mean(), s[11:].mean()],
index=exp_idx)
assert_series_equal(result, expected)
self.assertEqual(result.index.name, 'index')
result = s.resample('5min', how='mean', closed='left', label='right')
exp_idx = date_range('1/1/2000 00:05', periods=3, freq='5min', name='index')
expected = Series([s[:5].mean(), s[5:10].mean(), s[10:].mean()], index=exp_idx)
assert_series_equal(result, expected)
s = self.series
result = s.resample('5Min', how='last')
grouper = TimeGrouper(Minute(5), closed='left', label='left')
expect = s.groupby(grouper).agg(lambda x: x[-1])
assert_series_equal(result, expect)
def test_resample_how(self):
rng = date_range('1/1/2000 00:00:00', '1/1/2000 00:13:00',
freq='min', name='index')
s = Series(np.random.randn(14), index=rng)
grouplist = np.ones_like(s)
grouplist[0] = 0
grouplist[1:6] = 1
grouplist[6:11] = 2
grouplist[11:] = 3
args = ['sum', 'mean', 'std', 'sem', 'max', 'min',
'median', 'first', 'last', 'ohlc']
def _ohlc(group):
if isnull(group).all():
return np.repeat(np.nan, 4)
return [group[0], group.max(), group.min(), group[-1]]
inds = date_range('1/1/2000', periods=4, freq='5min', name='index')
for arg in args:
if arg == 'ohlc':
func = _ohlc
else:
func = arg
try:
result = s.resample('5min', how=arg,
closed='right', label='right')
expected = s.groupby(grouplist).agg(func)
self.assertEqual(result.index.name, 'index')
if arg == 'ohlc':
expected = DataFrame(expected.values.tolist())
expected.columns = ['open', 'high', 'low', 'close']
expected.index = Index(inds, name='index')
assert_frame_equal(result, expected)
else:
expected.index = inds
assert_series_equal(result, expected)
except BaseException as exc:
exc.args += ('how=%s' % arg,)
raise
def test_resample_how_callables(self):
# GH 7929
data = np.arange(5, dtype=np.int64)
ind = pd.DatetimeIndex(start='2014-01-01', periods=len(data), freq='d')
df = pd.DataFrame({"A": data, "B": data}, index=ind)
def fn(x, a=1):
return str(type(x))
class fn_class:
def __call__(self, x):
return str(type(x))
df_standard = df.resample("M", how=fn)
df_lambda = df.resample("M", how=lambda x: str(type(x)))
df_partial = df.resample("M", how=partial(fn))
df_partial2 = df.resample("M", how=partial(fn, a=2))
df_class = df.resample("M", how=fn_class())
assert_frame_equal(df_standard, df_lambda)
assert_frame_equal(df_standard, df_partial)
assert_frame_equal(df_standard, df_partial2)
assert_frame_equal(df_standard, df_class)
def test_resample_with_timedeltas(self):
expected = DataFrame({'A' : np.arange(1480)})
expected = expected.groupby(expected.index // 30).sum()
expected.index = pd.timedelta_range('0 days',freq='30T',periods=50)
df = DataFrame({'A' : np.arange(1480)},index=pd.to_timedelta(np.arange(1480),unit='T'))
result = df.resample('30T',how='sum')
assert_frame_equal(result, expected)
def test_resample_rounding(self):
# GH 8371
# odd results when rounding is needed
data = """date,time,value
11-08-2014,00:00:01.093,1
11-08-2014,00:00:02.159,1
11-08-2014,00:00:02.667,1
11-08-2014,00:00:03.175,1
11-08-2014,00:00:07.058,1
11-08-2014,00:00:07.362,1
11-08-2014,00:00:08.324,1
11-08-2014,00:00:08.830,1
11-08-2014,00:00:08.982,1
11-08-2014,00:00:09.815,1
11-08-2014,00:00:10.540,1
11-08-2014,00:00:11.061,1
11-08-2014,00:00:11.617,1
11-08-2014,00:00:13.607,1
11-08-2014,00:00:14.535,1
11-08-2014,00:00:15.525,1
11-08-2014,00:00:17.960,1
11-08-2014,00:00:20.674,1
11-08-2014,00:00:21.191,1"""
from pandas.compat import StringIO
df = pd.read_csv(StringIO(data), parse_dates={'timestamp': ['date', 'time']}, index_col='timestamp')
df.index.name = None
result = df.resample('6s', how='sum')
expected = DataFrame({'value' : [4,9,4,2]},index=date_range('2014-11-08',freq='6s',periods=4))
assert_frame_equal(result,expected)
result = df.resample('7s', how='sum')
expected = DataFrame({'value' : [4,10,4,1]},index=date_range('2014-11-08',freq='7s',periods=4))
assert_frame_equal(result,expected)
result = df.resample('11s', how='sum')
expected = DataFrame({'value' : [11,8]},index=date_range('2014-11-08',freq='11s',periods=2))
assert_frame_equal(result,expected)
result = df.resample('13s', how='sum')
expected = DataFrame({'value' : [13,6]},index=date_range('2014-11-08',freq='13s',periods=2))
assert_frame_equal(result,expected)
result = df.resample('17s', how='sum')
expected = DataFrame({'value' : [16,3]},index=date_range('2014-11-08',freq='17s',periods=2))
assert_frame_equal(result,expected)
def test_resample_basic_from_daily(self):
# from daily
dti = DatetimeIndex(
start=datetime(2005, 1, 1), end=datetime(2005, 1, 10),
freq='D', name='index')
s = Series(np.random.rand(len(dti)), dti)
# to weekly
result = s.resample('w-sun', how='last')
self.assertEqual(len(result), 3)
self.assertTrue((result.index.dayofweek == [6, 6, 6]).all())
self.assertEqual(result.iloc[0], s['1/2/2005'])
self.assertEqual(result.iloc[1], s['1/9/2005'])
self.assertEqual(result.iloc[2], s.iloc[-1])
result = s.resample('W-MON', how='last')
self.assertEqual(len(result), 2)
self.assertTrue((result.index.dayofweek == [0, 0]).all())
self.assertEqual(result.iloc[0], s['1/3/2005'])
self.assertEqual(result.iloc[1], s['1/10/2005'])
result = s.resample('W-TUE', how='last')
self.assertEqual(len(result), 2)
self.assertTrue((result.index.dayofweek == [1, 1]).all())
self.assertEqual(result.iloc[0], s['1/4/2005'])
self.assertEqual(result.iloc[1], s['1/10/2005'])
result = s.resample('W-WED', how='last')
self.assertEqual(len(result), 2)
self.assertTrue((result.index.dayofweek == [2, 2]).all())
self.assertEqual(result.iloc[0], s['1/5/2005'])
self.assertEqual(result.iloc[1], s['1/10/2005'])
result = s.resample('W-THU', how='last')
self.assertEqual(len(result), 2)
self.assertTrue((result.index.dayofweek == [3, 3]).all())
self.assertEqual(result.iloc[0], s['1/6/2005'])
self.assertEqual(result.iloc[1], s['1/10/2005'])
result = s.resample('W-FRI', how='last')
self.assertEqual(len(result), 2)
self.assertTrue((result.index.dayofweek == [4, 4]).all())
self.assertEqual(result.iloc[0], s['1/7/2005'])
self.assertEqual(result.iloc[1], s['1/10/2005'])
# to biz day
result = s.resample('B', how='last')
self.assertEqual(len(result), 7)
self.assertTrue((result.index.dayofweek == [4, 0, 1, 2, 3, 4, 0]).all())
self.assertEqual(result.iloc[0], s['1/2/2005'])
self.assertEqual(result.iloc[1], s['1/3/2005'])
self.assertEqual(result.iloc[5], s['1/9/2005'])
self.assertEqual(result.index.name, 'index')
def test_resample_upsampling_picked_but_not_correct(self):
# Test for issue #3020
dates = date_range('01-Jan-2014','05-Jan-2014', freq='D')
series = Series(1, index=dates)
result = series.resample('D')
self.assertEqual(result.index[0], dates[0])
# GH 5955
        # incorrectly deciding to upsample when the axis frequency matches the resample frequency
import datetime
s = Series(np.arange(1.,6),index=[datetime.datetime(1975, 1, i, 12, 0) for i in range(1, 6)])
expected = Series(np.arange(1.,6),index=date_range('19750101',periods=5,freq='D'))
result = s.resample('D',how='count')
assert_series_equal(result,Series(1,index=expected.index))
result1 = s.resample('D',how='sum')
result2 = s.resample('D',how='mean')
result3 = s.resample('D')
assert_series_equal(result1,expected)
assert_series_equal(result2,expected)
assert_series_equal(result3,expected)
def test_resample_frame_basic(self):
df = tm.makeTimeDataFrame()
b = TimeGrouper('M')
g = df.groupby(b)
# check all cython functions work
funcs = ['add', 'mean', 'prod', 'min', 'max', 'var']
for f in funcs:
g._cython_agg_general(f)
result = df.resample('A')
assert_series_equal(result['A'], df['A'].resample('A'))
result = df.resample('M')
assert_series_equal(result['A'], df['A'].resample('M'))
df.resample('M', kind='period')
df.resample('W-WED', kind='period')
def test_resample_loffset(self):
rng = date_range('1/1/2000 00:00:00', '1/1/2000 00:13:00', freq='min')
s = Series(np.random.randn(14), index=rng)
result = s.resample('5min', how='mean', closed='right', label='right',
loffset=timedelta(minutes=1))
idx = date_range('1/1/2000', periods=4, freq='5min')
expected = Series([s[0], s[1:6].mean(), s[6:11].mean(), s[11:].mean()],
index=idx + timedelta(minutes=1))
assert_series_equal(result, expected)
expected = s.resample(
'5min', how='mean', closed='right', label='right',
loffset='1min')
assert_series_equal(result, expected)
expected = s.resample(
'5min', how='mean', closed='right', label='right',
loffset=Minute(1))
assert_series_equal(result, expected)
self.assertEqual(result.index.freq, Minute(5))
# from daily
dti = DatetimeIndex(
start=datetime(2005, 1, 1), end=datetime(2005, 1, 10),
freq='D')
ser = Series(np.random.rand(len(dti)), dti)
# to weekly
result = ser.resample('w-sun', how='last')
expected = ser.resample('w-sun', how='last', loffset=-bday)
self.assertEqual(result.index[0] - bday, expected.index[0])
def test_resample_upsample(self):
# from daily
dti = DatetimeIndex(
start=datetime(2005, 1, 1), end=datetime(2005, 1, 10),
freq='D', name='index')
s = Series(np.random.rand(len(dti)), dti)
# to minutely, by padding
result = s.resample('Min', fill_method='pad')
self.assertEqual(len(result), 12961)
self.assertEqual(result[0], s[0])
self.assertEqual(result[-1], s[-1])
self.assertEqual(result.index.name, 'index')
def test_resample_extra_index_point(self):
# GH 9756
index = DatetimeIndex(start='20150101', end='20150331', freq='BM')
expected = DataFrame({'A' : Series([21,41,63], index=index)})
index = DatetimeIndex(start='20150101', end='20150331', freq='B')
df = DataFrame({'A' : Series(range(len(index)),index=index)},dtype='int64')
result = df.resample('BM', how='last')
assert_frame_equal(result, expected)
def test_upsample_with_limit(self):
rng = date_range('1/1/2000', periods=3, freq='5t')
ts = Series(np.random.randn(len(rng)), rng)
result = ts.resample('t', fill_method='ffill', limit=2)
expected = ts.reindex(result.index, method='ffill', limit=2)
assert_series_equal(result, expected)
def test_resample_ohlc(self):
s = self.series
grouper = TimeGrouper(Minute(5))
expect = s.groupby(grouper).agg(lambda x: x[-1])
result = s.resample('5Min', how='ohlc')
self.assertEqual(len(result), len(expect))
self.assertEqual(len(result.columns), 4)
xs = result.iloc[-2]
self.assertEqual(xs['open'], s[-6])
self.assertEqual(xs['high'], s[-6:-1].max())
self.assertEqual(xs['low'], s[-6:-1].min())
self.assertEqual(xs['close'], s[-2])
xs = result.iloc[0]
self.assertEqual(xs['open'], s[0])
self.assertEqual(xs['high'], s[:5].max())
self.assertEqual(xs['low'], s[:5].min())
self.assertEqual(xs['close'], s[4])
def test_resample_ohlc_dataframe(self):
df = (pd.DataFrame({'PRICE': {Timestamp('2011-01-06 10:59:05', tz=None): 24990,
Timestamp('2011-01-06 12:43:33', tz=None): 25499,
Timestamp('2011-01-06 12:54:09', tz=None): 25499},
'VOLUME': {Timestamp('2011-01-06 10:59:05', tz=None): 1500000000,
Timestamp('2011-01-06 12:43:33', tz=None): 5000000000,
Timestamp('2011-01-06 12:54:09', tz=None): 100000000}})
).reindex_axis(['VOLUME', 'PRICE'], axis=1)
res = df.resample('H', how='ohlc')
exp = pd.concat([df['VOLUME'].resample('H', how='ohlc'),
df['PRICE'].resample('H', how='ohlc')],
axis=1,
keys=['VOLUME', 'PRICE'])
assert_frame_equal(exp, res)
df.columns = [['a', 'b'], ['c', 'd']]
res = df.resample('H', how='ohlc')
exp.columns = pd.MultiIndex.from_tuples([('a', 'c', 'open'), ('a', 'c', 'high'),
('a', 'c', 'low'), ('a', 'c', 'close'), ('b', 'd', 'open'),
('b', 'd', 'high'), ('b', 'd', 'low'), ('b', 'd', 'close')])
assert_frame_equal(exp, res)
# dupe columns fail atm
# df.columns = ['PRICE', 'PRICE']
def test_resample_dup_index(self):
# GH 4812
# dup columns with resample raising
df = DataFrame(np.random.randn(4,12),index=[2000,2000,2000,2000],columns=[ Period(year=2000,month=i+1,freq='M') for i in range(12) ])
df.iloc[3,:] = np.nan
result = df.resample('Q',axis=1)
expected = df.groupby(lambda x: int((x.month-1)/3),axis=1).mean()
expected.columns = [ Period(year=2000,quarter=i+1,freq='Q') for i in range(4) ]
assert_frame_equal(result, expected)
def test_resample_reresample(self):
dti = DatetimeIndex(
start=datetime(2005, 1, 1), end=datetime(2005, 1, 10),
freq='D')
s = Series(np.random.rand(len(dti)), dti)
bs = s.resample('B', closed='right', label='right')
result = bs.resample('8H')
self.assertEqual(len(result), 22)
tm.assertIsInstance(result.index.freq, offsets.DateOffset)
self.assertEqual(result.index.freq, offsets.Hour(8))
def test_resample_timestamp_to_period(self):
ts = _simple_ts('1/1/1990', '1/1/2000')
result = ts.resample('A-DEC', kind='period')
expected = ts.resample('A-DEC')
expected.index = period_range('1990', '2000', freq='a-dec')
assert_series_equal(result, expected)
result = ts.resample('A-JUN', kind='period')
expected = ts.resample('A-JUN')
expected.index = period_range('1990', '2000', freq='a-jun')
assert_series_equal(result, expected)
result = ts.resample('M', kind='period')
expected = ts.resample('M')
expected.index = period_range('1990-01', '2000-01', freq='M')
assert_series_equal(result, expected)
result = ts.resample('M', kind='period')
expected = ts.resample('M')
expected.index = period_range('1990-01', '2000-01', freq='M')
assert_series_equal(result, expected)
def test_ohlc_5min(self):
def _ohlc(group):
if isnull(group).all():
return np.repeat(np.nan, 4)
return [group[0], group.max(), group.min(), group[-1]]
rng = date_range('1/1/2000 00:00:00', '1/1/2000 5:59:50',
freq='10s')
ts = Series(np.random.randn(len(rng)), index=rng)
resampled = ts.resample('5min', how='ohlc', closed='right',
label='right')
self.assertTrue((resampled.ix['1/1/2000 00:00'] == ts[0]).all())
exp = _ohlc(ts[1:31])
self.assertTrue((resampled.ix['1/1/2000 00:05'] == exp).all())
exp = _ohlc(ts['1/1/2000 5:55:01':])
self.assertTrue((resampled.ix['1/1/2000 6:00:00'] == exp).all())
def test_downsample_non_unique(self):
rng = date_range('1/1/2000', '2/29/2000')
rng2 = rng.repeat(5).values
ts = Series(np.random.randn(len(rng2)), index=rng2)
result = ts.resample('M', how='mean')
expected = ts.groupby(lambda x: x.month).mean()
self.assertEqual(len(result), 2)
assert_almost_equal(result[0], expected[1])
assert_almost_equal(result[1], expected[2])
def test_asfreq_non_unique(self):
# GH #1077
rng = date_range('1/1/2000', '2/29/2000')
rng2 = rng.repeat(2).values
ts = Series(np.random.randn(len(rng2)), index=rng2)
self.assertRaises(Exception, ts.asfreq, 'B')
def test_resample_axis1(self):
rng = date_range('1/1/2000', '2/29/2000')
df = DataFrame(np.random.randn(3, len(rng)), columns=rng,
index=['a', 'b', 'c'])
result = df.resample('M', axis=1)
expected = df.T.resample('M').T
tm.assert_frame_equal(result, expected)
def test_resample_panel(self):
rng = date_range('1/1/2000', '6/30/2000')
n = len(rng)
panel = Panel(np.random.randn(3, n, 5),
items=['one', 'two', 'three'],
major_axis=rng,
minor_axis=['a', 'b', 'c', 'd', 'e'])
result = panel.resample('M', axis=1)
def p_apply(panel, f):
result = {}
for item in panel.items:
result[item] = f(panel[item])
return Panel(result, items=panel.items)
expected = p_apply(panel, lambda x: x.resample('M'))
tm.assert_panel_equal(result, expected)
panel2 = panel.swapaxes(1, 2)
result = panel2.resample('M', axis=2)
expected = p_apply(panel2, lambda x: x.resample('M', axis=1))
tm.assert_panel_equal(result, expected)
def test_resample_panel_numpy(self):
rng = date_range('1/1/2000', '6/30/2000')
n = len(rng)
panel = Panel(np.random.randn(3, n, 5),
items=['one', 'two', 'three'],
major_axis=rng,
minor_axis=['a', 'b', 'c', 'd', 'e'])
result = panel.resample('M', how=lambda x: x.mean(1), axis=1)
expected = panel.resample('M', how='mean', axis=1)
tm.assert_panel_equal(result, expected)
panel = panel.swapaxes(1, 2)
result = panel.resample('M', how=lambda x: x.mean(2), axis=2)
expected = panel.resample('M', how='mean', axis=2)
tm.assert_panel_equal(result, expected)
def test_resample_anchored_ticks(self):
# If a fixed delta (5 minute, 4 hour) evenly divides a day, we should
# "anchor" the origin at midnight so we get regular intervals rather
# than starting from the first timestamp which might start in the middle
# of a desired interval
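        # e.g. a series starting at 04:00:00 resampled to '4h' should produce bins
        # anchored at 00:00, 04:00, 08:00, ... rather than bins offset from the
        # first observation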
rng = date_range('1/1/2000 04:00:00', periods=86400, freq='s')
ts = Series(np.random.randn(len(rng)), index=rng)
ts[:2] = np.nan # so results are the same
freqs = ['t', '5t', '15t', '30t', '4h', '12h']
for freq in freqs:
result = ts[2:].resample(freq, closed='left', label='left')
expected = ts.resample(freq, closed='left', label='left')
assert_series_equal(result, expected)
def test_resample_single_group(self):
mysum = lambda x: x.sum()
rng = date_range('2000-1-1', '2000-2-10', freq='D')
ts = Series(np.random.randn(len(rng)), index=rng)
assert_series_equal(ts.resample('M', how='sum'),
ts.resample('M', how=mysum))
rng = date_range('2000-1-1', '2000-1-10', freq='D')
ts = Series(np.random.randn(len(rng)), index=rng)
assert_series_equal(ts.resample('M', how='sum'),
ts.resample('M', how=mysum))
# GH 3849
s = Series([30.1, 31.6], index=[Timestamp('20070915 15:30:00'),
Timestamp('20070915 15:40:00')])
expected = Series([0.75], index=[Timestamp('20070915')])
result = s.resample('D', how=lambda x: np.std(x))
assert_series_equal(result, expected)
def test_resample_base(self):
rng = date_range('1/1/2000 00:00:00', '1/1/2000 02:00', freq='s')
ts = Series(np.random.randn(len(rng)), index=rng)
resampled = ts.resample('5min', base=2)
exp_rng = date_range('12/31/1999 23:57:00', '1/1/2000 01:57',
freq='5min')
self.assertTrue(resampled.index.equals(exp_rng))
def test_resample_base_with_timedeltaindex(self):
# GH 10530
rng = timedelta_range(start = '0s', periods = 25, freq = 's')
ts = Series(np.random.randn(len(rng)), index = rng)
with_base = ts.resample('2s', base = 5)
without_base = ts.resample('2s')
exp_without_base = timedelta_range(start = '0s', end = '25s', freq = '2s')
exp_with_base = timedelta_range(start = '5s', end = '29s', freq = '2s')
self.assertTrue(without_base.index.equals(exp_without_base))
self.assertTrue(with_base.index.equals(exp_with_base))
def test_resample_daily_anchored(self):
rng = date_range('1/1/2000 0:00:00', periods=10000, freq='T')
ts = Series(np.random.randn(len(rng)), index=rng)
ts[:2] = np.nan # so results are the same
result = ts[2:].resample('D', closed='left', label='left')
expected = ts.resample('D', closed='left', label='left')
assert_series_equal(result, expected)
def test_resample_to_period_monthly_buglet(self):
# GH #1259
rng = date_range('1/1/2000', '12/31/2000')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts.resample('M', kind='period')
exp_index = period_range('Jan-2000', 'Dec-2000', freq='M')
self.assertTrue(result.index.equals(exp_index))
def test_resample_empty(self):
ts = _simple_ts('1/1/2000', '2/1/2000')[:0]
result = ts.resample('A')
self.assertEqual(len(result), 0)
self.assertEqual(result.index.freqstr, 'A-DEC')
result = ts.resample('A', kind='period')
self.assertEqual(len(result), 0)
self.assertEqual(result.index.freqstr, 'A-DEC')
xp = DataFrame()
rs = xp.resample('A')
assert_frame_equal(xp, rs)
# Empty series were sometimes causing a segfault (for the functions
# with Cython bounds-checking disabled) or an IndexError. We just run
# them to ensure they no longer do. (GH #10228)
for index in tm.all_timeseries_index_generator(0):
for dtype in (np.float, np.int, np.object, 'datetime64[ns]'):
for how in ('count', 'mean', 'min', 'ohlc', 'last', 'prod'):
empty_series = pd.Series([], index, dtype)
try:
empty_series.resample('d', how)
except DataError:
# Ignore these since some combinations are invalid
# (ex: doing mean with dtype of np.object)
pass
def test_weekly_resample_buglet(self):
# #1327
rng = date_range('1/1/2000', freq='B', periods=20)
ts = Series(np.random.randn(len(rng)), index=rng)
resampled = ts.resample('W')
expected = ts.resample('W-SUN')
assert_series_equal(resampled, expected)
def test_monthly_resample_error(self):
# #1451
dates = date_range('4/16/2012 20:00', periods=5000, freq='h')
ts = Series(np.random.randn(len(dates)), index=dates)
# it works!
result = ts.resample('M')
def test_resample_anchored_intraday(self):
# #1471, #1458
rng = date_range('1/1/2012', '4/1/2012', freq='100min')
df = DataFrame(rng.month, index=rng)
result = df.resample('M')
expected = df.resample('M', kind='period').to_timestamp(how='end')
tm.assert_frame_equal(result, expected)
result = df.resample('M', closed='left')
exp = df.tshift(1, freq='D').resample('M', kind='period')
exp = exp.to_timestamp(how='end')
tm.assert_frame_equal(result, exp)
rng = date_range('1/1/2012', '4/1/2012', freq='100min')
df = DataFrame(rng.month, index=rng)
result = df.resample('Q')
expected = df.resample('Q', kind='period').to_timestamp(how='end')
tm.assert_frame_equal(result, expected)
result = df.resample('Q', closed='left')
expected = df.tshift(1, freq='D').resample('Q', kind='period',
closed='left')
expected = expected.to_timestamp(how='end')
tm.assert_frame_equal(result, expected)
ts = _simple_ts('2012-04-29 23:00', '2012-04-30 5:00', freq='h')
resampled = ts.resample('M')
self.assertEqual(len(resampled), 1)
def test_resample_anchored_monthstart(self):
ts = _simple_ts('1/1/2000', '12/31/2002')
freqs = ['MS', 'BMS', 'QS-MAR', 'AS-DEC', 'AS-JUN']
for freq in freqs:
result = ts.resample(freq, how='mean')
def test_resample_anchored_multiday(self):
# When resampling a range spanning multiple days, ensure that the
# start date gets used to determine the offset. Fixes issue where
# a one day period is not a multiple of the frequency.
#
# See: https://github.com/pydata/pandas/issues/8683
s = pd.Series(np.random.randn(5),
index=pd.date_range('2014-10-14 23:06:23.206',
periods=3, freq='400L')
| pd.date_range('2014-10-15 23:00:00',
periods=2, freq='2200L'))
# Ensure left closing works
result = s.resample('2200L', 'mean')
self.assertEqual(result.index[-1],
pd.Timestamp('2014-10-15 23:00:02.000'))
# Ensure right closing works
result = s.resample('2200L', 'mean', label='right')
self.assertEqual(result.index[-1],
pd.Timestamp('2014-10-15 23:00:04.200'))
def test_corner_cases(self):
# miscellaneous test coverage
rng = date_range('1/1/2000', periods=12, freq='t')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts.resample('5t', closed='right', label='left')
ex_index = date_range('1999-12-31 23:55', periods=4, freq='5t')
self.assertTrue(result.index.equals(ex_index))
len0pts = _simple_pts('2007-01', '2010-05', freq='M')[:0]
# it works
result = len0pts.resample('A-DEC')
self.assertEqual(len(result), 0)
# resample to periods
ts = _simple_ts('2000-04-28', '2000-04-30 11:00', freq='h')
result = ts.resample('M', kind='period')
self.assertEqual(len(result), 1)
self.assertEqual(result.index[0], Period('2000-04', freq='M'))
def test_anchored_lowercase_buglet(self):
dates = date_range('4/16/2012 20:00', periods=50000, freq='s')
ts = Series(np.random.randn(len(dates)), index=dates)
# it works!
ts.resample('d')
def test_upsample_apply_functions(self):
# #1596
rng = pd.date_range('2012-06-12', periods=4, freq='h')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts.resample('20min', how=['mean', 'sum'])
tm.assertIsInstance(result, DataFrame)
def test_resample_not_monotonic(self):
rng = pd.date_range('2012-06-12', periods=200, freq='h')
ts = Series(np.random.randn(len(rng)), index=rng)
ts = ts.take(np.random.permutation(len(ts)))
result = ts.resample('D', how='sum')
exp = ts.sort_index().resample('D', how='sum')
assert_series_equal(result, exp)
def test_resample_median_bug_1688(self):
for dtype in ['int64','int32','float64','float32']:
df = DataFrame([1, 2], index=[datetime(2012, 1, 1, 0, 0, 0),
datetime(2012, 1, 1, 0, 5, 0)],
dtype = dtype)
result = df.resample("T", how=lambda x: x.mean())
exp = df.asfreq('T')
tm.assert_frame_equal(result, exp)
result = df.resample("T", how="median")
exp = df.asfreq('T')
tm.assert_frame_equal(result, exp)
def test_how_lambda_functions(self):
ts = _simple_ts('1/1/2000', '4/1/2000')
result = ts.resample('M', how=lambda x: x.mean())
exp = ts.resample('M', how='mean')
tm.assert_series_equal(result, exp)
self.assertRaises(Exception, ts.resample, 'M',
how=[lambda x: x.mean(), lambda x: x.std(ddof=1)])
result = ts.resample('M', how={'foo': lambda x: x.mean(),
'bar': lambda x: x.std(ddof=1)})
foo_exp = ts.resample('M', how='mean')
foo_exp.name = 'foo'
bar_exp = ts.resample('M', how='std')
bar_exp.name = 'bar'
tm.assert_series_equal(result['foo'], foo_exp)
tm.assert_series_equal(result['bar'], bar_exp)
def test_resample_unequal_times(self):
# #1772
start = datetime(1999, 3, 1, 5)
# end hour is less than start
end = datetime(2012, 7, 31, 4)
bad_ind = date_range(start, end, freq="30min")
df = DataFrame({'close': 1}, index=bad_ind)
# it works!
df.resample('AS', 'sum')
def test_resample_consistency(self):
# GH 6418
# resample with bfill / limit / reindex consistency
        i30 = pd.date_range('2002-02-02', periods=4, freq='30T')
        s = pd.Series(np.arange(4.), index=i30)
s[2] = np.NaN
# Upsample by factor 3 with reindex() and resample() methods:
i10 = pd.date_range(i30[0], i30[-1], freq='10T')
s10 = s.reindex(index=i10, method='bfill')
s10_2 = s.reindex(index=i10, method='bfill', limit=2)
rl = s.reindex_like(s10, method='bfill', limit=2)
r10_2 = s.resample('10Min', fill_method='bfill', limit=2)
r10 = s.resample('10Min', fill_method='bfill')
# s10_2, r10, r10_2, rl should all be equal
assert_series_equal(s10_2, r10)
assert_series_equal(s10_2, r10_2)
assert_series_equal(s10_2, rl)
def test_resample_timegrouper(self):
# GH 7227
dates1 = [datetime(2014, 10, 1), datetime(2014, 9, 3),
datetime(2014, 11, 5), datetime(2014, 9, 5),
datetime(2014, 10, 8), datetime(2014, 7, 15)]
dates2 = dates1[:2] + [pd.NaT] + dates1[2:4] + [pd.NaT] + dates1[4:]
dates3 = [pd.NaT] + dates1 + [pd.NaT]
for dates in [dates1, dates2, dates3]:
df = DataFrame(dict(A=dates, B=np.arange(len(dates))))
result = df.set_index('A').resample('M', how='count')
exp_idx = pd.DatetimeIndex(['2014-07-31', '2014-08-31', '2014-09-30',
'2014-10-31', '2014-11-30'], freq='M', name='A')
expected = DataFrame({'B': [1, 0, 2, 2, 1]}, index=exp_idx)
assert_frame_equal(result, expected)
result = df.groupby(pd.Grouper(freq='M', key='A')).count()
assert_frame_equal(result, expected)
df = DataFrame(dict(A=dates, B=np.arange(len(dates)), C=np.arange(len(dates))))
result = df.set_index('A').resample('M', how='count')
expected = DataFrame({'B': [1, 0, 2, 2, 1], 'C': [1, 0, 2, 2, 1]},
index=exp_idx, columns=['B', 'C'])
assert_frame_equal(result, expected)
result = df.groupby(pd.Grouper(freq='M', key='A')).count()
assert_frame_equal(result, expected)
def test_resample_group_info(self): # GH10914
for n, k in product((10000, 100000), (10, 100, 1000)):
dr = date_range(start='2015-08-27', periods=n // 10, freq='T')
ts = Series(np.random.randint(0, n // k, n).astype('int64'),
index=np.random.choice(dr, n))
left = ts.resample('30T', how='nunique')
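            # manually reconstruct the per-bin nunique below: assign each timestamp
            # to a 30T bin, lexsort the (value, bin) pairs, and count the first
            # occurrence of each distinct value within each bin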
ix = date_range(start=ts.index.min(),
end=ts.index.max(),
freq='30T')
vals = ts.values
bins = np.searchsorted(ix.values, ts.index, side='right')
sorter = np.lexsort((vals, bins))
vals, bins = vals[sorter], bins[sorter]
mask = np.r_[True, vals[1:] != vals[:-1]]
mask |= np.r_[True, bins[1:] != bins[:-1]]
arr = np.bincount(bins[mask] - 1, minlength=len(ix)).astype('int64',copy=False)
right = Series(arr, index=ix)
assert_series_equal(left, right)
def test_resample_size(self):
n = 10000
dr = date_range('2015-09-19', periods=n, freq='T')
ts = Series(np.random.randn(n), index=np.random.choice(dr, n))
left = ts.resample('7T', how='size')
ix = date_range(start=left.index.min(), end=ts.index.max(), freq='7T')
bins = np.searchsorted(ix.values, ts.index.values, side='right')
val = np.bincount(bins, minlength=len(ix) + 1)[1:].astype('int64',copy=False)
right = Series(val, index=ix)
assert_series_equal(left, right)
    def test_resample_dst_anchor(self):
# 5172
dti = DatetimeIndex([datetime(2012, 11, 4, 23)], tz='US/Eastern')
df = DataFrame([5], index=dti)
assert_frame_equal(df.resample(rule='D', how='sum'),
DataFrame([5], index=df.index.normalize()))
df.resample(rule='MS', how='sum')
assert_frame_equal(df.resample(rule='MS', how='sum'),
DataFrame([5], index=DatetimeIndex([datetime(2012, 11, 1)],
tz='US/Eastern')))
dti = date_range('2013-09-30', '2013-11-02', freq='30Min', tz='Europe/Paris')
values = range(dti.size)
df = DataFrame({"a": values, "b": values, "c": values}, index=dti, dtype='int64')
how = {"a": "min", "b": "max", "c": "count"}
assert_frame_equal(df.resample("W-MON", how=how)[["a", "b", "c"]],
DataFrame({"a": [0, 48, 384, 720, 1056, 1394],
"b": [47, 383, 719, 1055, 1393, 1586],
"c": [48, 336, 336, 336, 338, 193]},
index=date_range('9/30/2013', '11/4/2013',
freq='W-MON', tz='Europe/Paris')),
'W-MON Frequency')
assert_frame_equal(df.resample("2W-MON", how=how)[["a", "b", "c"]],
DataFrame({"a": [0, 48, 720, 1394],
"b": [47, 719, 1393, 1586],
"c": [48, 672, 674, 193]},
index=date_range('9/30/2013', '11/11/2013',
freq='2W-MON', tz='Europe/Paris')),
'2W-MON Frequency')
assert_frame_equal(df.resample("MS", how=how)[["a", "b", "c"]],
DataFrame({"a": [0, 48, 1538],
"b": [47, 1537, 1586],
"c": [48, 1490, 49]},
index=date_range('9/1/2013', '11/1/2013',
freq='MS', tz='Europe/Paris')),
'MS Frequency')
assert_frame_equal(df.resample("2MS", how=how)[["a", "b", "c"]],
DataFrame({"a": [0, 1538],
"b": [1537, 1586],
"c": [1538, 49]},
index=date_range('9/1/2013', '11/1/2013',
freq='2MS', tz='Europe/Paris')),
'2MS Frequency')
df_daily = df['10/26/2013':'10/29/2013']
assert_frame_equal(df_daily.resample("D", how={"a": "min", "b": "max", "c": "count"})[["a", "b", "c"]],
DataFrame({"a": [1248, 1296, 1346, 1394],
"b": [1295, 1345, 1393, 1441],
"c": [48, 50, 48, 48]},
index=date_range('10/26/2013', '10/29/2013',
freq='D', tz='Europe/Paris')),
'D Frequency')
def _simple_ts(start, end, freq='D'):
rng = date_range(start, end, freq=freq)
return Series(np.random.randn(len(rng)), index=rng)
def _simple_pts(start, end, freq='D'):
rng = period_range(start, end, freq=freq)
return Series(np.random.randn(len(rng)), index=rng)
class TestResamplePeriodIndex(tm.TestCase):
_multiprocess_can_split_ = True
def test_annual_upsample_D_s_f(self):
self._check_annual_upsample_cases('D', 'start', 'ffill')
def test_annual_upsample_D_e_f(self):
self._check_annual_upsample_cases('D', 'end', 'ffill')
def test_annual_upsample_D_s_b(self):
self._check_annual_upsample_cases('D', 'start', 'bfill')
def test_annual_upsample_D_e_b(self):
self._check_annual_upsample_cases('D', 'end', 'bfill')
def test_annual_upsample_B_s_f(self):
self._check_annual_upsample_cases('B', 'start', 'ffill')
def test_annual_upsample_B_e_f(self):
self._check_annual_upsample_cases('B', 'end', 'ffill')
def test_annual_upsample_B_s_b(self):
self._check_annual_upsample_cases('B', 'start', 'bfill')
def test_annual_upsample_B_e_b(self):
self._check_annual_upsample_cases('B', 'end', 'bfill')
def test_annual_upsample_M_s_f(self):
self._check_annual_upsample_cases('M', 'start', 'ffill')
def test_annual_upsample_M_e_f(self):
self._check_annual_upsample_cases('M', 'end', 'ffill')
def test_annual_upsample_M_s_b(self):
self._check_annual_upsample_cases('M', 'start', 'bfill')
def test_annual_upsample_M_e_b(self):
self._check_annual_upsample_cases('M', 'end', 'bfill')
def _check_annual_upsample_cases(self, targ, conv, meth, end='12/31/1991'):
for month in MONTHS:
ts = _simple_pts('1/1/1990', end, freq='A-%s' % month)
result = ts.resample(targ, fill_method=meth,
convention=conv)
expected = result.to_timestamp(targ, how=conv)
expected = expected.asfreq(targ, meth).to_period()
assert_series_equal(result, expected)
def test_basic_downsample(self):
ts = _simple_pts('1/1/1990', '6/30/1995', freq='M')
result = ts.resample('a-dec')
expected = ts.groupby(ts.index.year).mean()
expected.index = period_range('1/1/1990', '6/30/1995',
freq='a-dec')
assert_series_equal(result, expected)
# this is ok
assert_series_equal(ts.resample('a-dec'), result)
assert_series_equal(ts.resample('a'), result)
def test_not_subperiod(self):
# These are incompatible period rules for resampling
ts = _simple_pts('1/1/1990', '6/30/1995', freq='w-wed')
self.assertRaises(ValueError, ts.resample, 'a-dec')
self.assertRaises(ValueError, ts.resample, 'q-mar')
self.assertRaises(ValueError, ts.resample, 'M')
self.assertRaises(ValueError, ts.resample, 'w-thu')
def test_basic_upsample(self):
ts = _simple_pts('1/1/1990', '6/30/1995', freq='M')
result = ts.resample('a-dec')
resampled = result.resample('D', fill_method='ffill', convention='end')
expected = result.to_timestamp('D', how='end')
expected = expected.asfreq('D', 'ffill').to_period()
assert_series_equal(resampled, expected)
def test_upsample_with_limit(self):
rng = period_range('1/1/2000', periods=5, freq='A')
ts = Series(np.random.randn(len(rng)), rng)
result = ts.resample('M', fill_method='ffill', limit=2,
convention='end')
expected = ts.asfreq('M').reindex(result.index, method='ffill',
limit=2)
assert_series_equal(result, expected)
def test_annual_upsample(self):
ts = _simple_pts('1/1/1990', '12/31/1995', freq='A-DEC')
df = DataFrame({'a': ts})
rdf = df.resample('D', fill_method='ffill')
exp = df['a'].resample('D', fill_method='ffill')
assert_series_equal(rdf['a'], exp)
rng = period_range('2000', '2003', freq='A-DEC')
ts = Series([1, 2, 3, 4], index=rng)
result = ts.resample('M', fill_method='ffill')
ex_index = period_range('2000-01', '2003-12', freq='M')
expected = ts.asfreq('M', how='start').reindex(ex_index,
method='ffill')
assert_series_equal(result, expected)
def test_quarterly_upsample(self):
targets = ['D', 'B', 'M']
for month in MONTHS:
ts = _simple_pts('1/1/1990', '12/31/1995', freq='Q-%s' % month)
for targ, conv in product(targets, ['start', 'end']):
result = ts.resample(targ, fill_method='ffill',
convention=conv)
expected = result.to_timestamp(targ, how=conv)
expected = expected.asfreq(targ, 'ffill').to_period()
assert_series_equal(result, expected)
def test_monthly_upsample(self):
targets = ['D', 'B']
ts = _simple_pts('1/1/1990', '12/31/1995', freq='M')
for targ, conv in product(targets, ['start', 'end']):
result = ts.resample(targ, fill_method='ffill',
convention=conv)
expected = result.to_timestamp(targ, how=conv)
expected = expected.asfreq(targ, 'ffill').to_period()
assert_series_equal(result, expected)
def test_fill_method_and_how_upsample(self):
# GH2073
s = Series(np.arange(9,dtype='int64'),
index=date_range('2010-01-01', periods=9, freq='Q'))
last = s.resample('M', fill_method='ffill')
both = s.resample('M', how='last', fill_method='ffill').astype('int64')
assert_series_equal(last, both)
def test_weekly_upsample(self):
targets = ['D', 'B']
for day in DAYS:
ts = _simple_pts('1/1/1990', '12/31/1995', freq='W-%s' % day)
for targ, conv in product(targets, ['start', 'end']):
result = ts.resample(targ, fill_method='ffill',
convention=conv)
expected = result.to_timestamp(targ, how=conv)
expected = expected.asfreq(targ, 'ffill').to_period()
assert_series_equal(result, expected)
def test_resample_to_timestamps(self):
ts = _simple_pts('1/1/1990', '12/31/1995', freq='M')
result = ts.resample('A-DEC', kind='timestamp')
expected = ts.to_timestamp(how='end').resample('A-DEC')
assert_series_equal(result, expected)
def test_resample_to_quarterly(self):
for month in MONTHS:
ts = _simple_pts('1990', '1992', freq='A-%s' % month)
quar_ts = ts.resample('Q-%s' % month, fill_method='ffill')
stamps = ts.to_timestamp('D', how='start')
qdates = period_range(ts.index[0].asfreq('D', 'start'),
ts.index[-1].asfreq('D', 'end'),
freq='Q-%s' % month)
expected = stamps.reindex(qdates.to_timestamp('D', 's'),
method='ffill')
expected.index = qdates
assert_series_equal(quar_ts, expected)
# conforms, but different month
ts = _simple_pts('1990', '1992', freq='A-JUN')
for how in ['start', 'end']:
result = ts.resample('Q-MAR', convention=how, fill_method='ffill')
expected = ts.asfreq('Q-MAR', how=how)
expected = expected.reindex(result.index, method='ffill')
# .to_timestamp('D')
# expected = expected.resample('Q-MAR', fill_method='ffill')
assert_series_equal(result, expected)
def test_resample_fill_missing(self):
rng = PeriodIndex([2000, 2005, 2007, 2009], freq='A')
s = Series(np.random.randn(4), index=rng)
stamps = s.to_timestamp()
filled = s.resample('A')
expected = stamps.resample('A').to_period('A')
assert_series_equal(filled, expected)
filled = s.resample('A', fill_method='ffill')
expected = stamps.resample('A', fill_method='ffill').to_period('A')
assert_series_equal(filled, expected)
def test_cant_fill_missing_dups(self):
rng = PeriodIndex([2000, 2005, 2005, 2007, 2007], freq='A')
s = Series(np.random.randn(5), index=rng)
self.assertRaises(Exception, s.resample, 'A')
def test_resample_5minute(self):
rng = period_range('1/1/2000', '1/5/2000', freq='T')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts.resample('5min')
expected = ts.to_timestamp().resample('5min')
assert_series_equal(result, expected)
def test_upsample_daily_business_daily(self):
ts = _simple_pts('1/1/2000', '2/1/2000', freq='B')
result = ts.resample('D')
expected = ts.asfreq('D').reindex(period_range('1/3/2000', '2/1/2000'))
assert_series_equal(result, expected)
ts = _simple_pts('1/1/2000', '2/1/2000')
result = ts.resample('H', convention='s')
exp_rng = period_range('1/1/2000', '2/1/2000 23:00', freq='H')
expected = ts.asfreq('H', how='s').reindex(exp_rng)
assert_series_equal(result, expected)
def test_resample_empty(self):
ts = _simple_pts('1/1/2000', '2/1/2000')[:0]
result = ts.resample('A')
self.assertEqual(len(result), 0)
def test_resample_irregular_sparse(self):
dr = date_range(start='1/1/2012', freq='5min', periods=1000)
s = Series(np.array(100), index=dr)
# subset the data.
subset = s[:'2012-01-04 06:55']
result = subset.resample('10min', how=len)
expected = s.resample('10min', how=len).ix[result.index]
assert_series_equal(result, expected)
def test_resample_weekly_all_na(self):
rng = date_range('1/1/2000', periods=10, freq='W-WED')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts.resample('W-THU')
self.assertTrue(result.isnull().all())
result = ts.resample('W-THU', fill_method='ffill')[:-1]
expected = ts.asfreq('W-THU', method='ffill')
assert_series_equal(result, expected)
def test_resample_tz_localized(self):
dr = date_range(start='2012-4-13', end='2012-5-1')
ts = Series(lrange(len(dr)), dr)
ts_utc = ts.tz_localize('UTC')
ts_local = ts_utc.tz_convert('America/Los_Angeles')
result = ts_local.resample('W')
ts_local_naive = ts_local.copy()
ts_local_naive.index = [x.replace(tzinfo=None)
for x in ts_local_naive.index.to_pydatetime()]
exp = ts_local_naive.resample('W').tz_localize('America/Los_Angeles')
assert_series_equal(result, exp)
# it works
result = ts_local.resample('D')
# #2245
idx = date_range('2001-09-20 15:59', '2001-09-20 16:00', freq='T',
tz='Australia/Sydney')
s = Series([1, 2], index=idx)
result = s.resample('D', closed='right', label='right')
ex_index = date_range('2001-09-21', periods=1, freq='D',
tz='Australia/Sydney')
expected = Series([1.5], index=ex_index)
assert_series_equal(result, expected)
# for good measure
result = s.resample('D', kind='period')
ex_index = period_range('2001-09-20', periods=1, freq='D')
expected = Series([1.5], index=ex_index)
assert_series_equal(result, expected)
# GH 6397
        # comparing an offset that doesn't propagate tz's
rng = date_range('1/1/2011', periods=20000, freq='H')
rng = rng.tz_localize('EST')
ts = DataFrame(index=rng)
ts['first']=np.random.randn(len(rng))
ts['second']=np.cumsum(np.random.randn(len(rng)))
expected = DataFrame({ 'first' : ts.resample('A',how=np.sum)['first'],
'second' : ts.resample('A',how=np.mean)['second'] },columns=['first','second'])
result = ts.resample('A', how={'first':np.sum, 'second':np.mean}).reindex(columns=['first','second'])
assert_frame_equal(result,expected)
def test_closed_left_corner(self):
# #1465
s = Series(np.random.randn(21),
index=date_range(start='1/1/2012 9:30',
freq='1min', periods=21))
s[0] = np.nan
result = s.resample('10min', how='mean', closed='left', label='right')
exp = s[1:].resample('10min', how='mean', closed='left', label='right')
assert_series_equal(result, exp)
result = s.resample('10min', how='mean', closed='left', label='left')
exp = s[1:].resample('10min', how='mean', closed='left', label='left')
ex_index = date_range(start='1/1/2012 9:30', freq='10min', periods=3)
self.assertTrue(result.index.equals(ex_index))
assert_series_equal(result, exp)
def test_quarterly_resampling(self):<|fim▁hole|> exp = ts.to_timestamp().resample('A').to_period()
assert_series_equal(result, exp)
def test_resample_weekly_bug_1726(self):
# 8/6/12 is a Monday
ind = DatetimeIndex(start="8/6/2012", end="8/26/2012", freq="D")
n = len(ind)
data = [[x] * 5 for x in range(n)]
df = DataFrame(data, columns=['open', 'high', 'low', 'close', 'vol'],
index=ind)
# it works!
df.resample('W-MON', how='first', closed='left', label='left')
def test_resample_bms_2752(self):
# GH2753
foo = pd.Series(index=pd.bdate_range('20000101','20000201'))
res1 = foo.resample("BMS")
res2 = foo.resample("BMS").resample("B")
self.assertEqual(res1.index[0], Timestamp('20000103'))
self.assertEqual(res1.index[0], res2.index[0])
# def test_monthly_convention_span(self):
# rng = period_range('2000-01', periods=3, freq='M')
# ts = Series(np.arange(3), index=rng)
# # hacky way to get same thing
# exp_index = period_range('2000-01-01', '2000-03-31', freq='D')
# expected = ts.asfreq('D', how='end').reindex(exp_index)
# expected = expected.fillna(method='bfill')
# result = ts.resample('D', convention='span')
# assert_series_equal(result, expected)
def test_default_right_closed_label(self):
        from_freqs = ['D', 'Q', 'M', 'D']
        to_freqs = ['M', 'A', 'Q', 'W']
        for from_freq, to_freq in zip(from_freqs, to_freqs):
idx = DatetimeIndex(start='8/15/2012', periods=100,
freq=from_freq)
df = DataFrame(np.random.randn(len(idx), 2), idx)
resampled = df.resample(to_freq)
assert_frame_equal(resampled, df.resample(to_freq, closed='right',
label='right'))
def test_default_left_closed_label(self):
        to_freqs = ['MS', 'AS', 'QS', 'D', 'H']
        from_freqs = ['D', 'Q', 'M', 'H', 'T']
        for from_freq, to_freq in zip(from_freqs, to_freqs):
idx = DatetimeIndex(start='8/15/2012', periods=100,
freq=from_freq)
df = DataFrame(np.random.randn(len(idx), 2), idx)
resampled = df.resample(to_freq)
assert_frame_equal(resampled, df.resample(to_freq, closed='left',
label='left'))
def test_all_values_single_bin(self):
# 2070
index = period_range(start="2012-01-01", end="2012-12-31", freq="M")
s = Series(np.random.randn(len(index)), index=index)
result = s.resample("A", how='mean')
tm.assert_almost_equal(result[0], s.mean())
def test_evenly_divisible_with_no_extra_bins(self):
# 4076
        # when the frequency is evenly divisible, extra bins were sometimes created
df = DataFrame(np.random.randn(9, 3), index=date_range('2000-1-1', periods=9))
result = df.resample('5D')
expected = pd.concat([df.iloc[0:5].mean(),df.iloc[5:].mean()],axis=1).T
expected.index = [Timestamp('2000-1-1'),Timestamp('2000-1-6')]
assert_frame_equal(result,expected)
index = date_range(start='2001-5-4', periods=28)
df = DataFrame(
[{'REST_KEY': 1, 'DLY_TRN_QT': 80, 'DLY_SLS_AMT': 90,
'COOP_DLY_TRN_QT': 30, 'COOP_DLY_SLS_AMT': 20}] * 28 +
[{'REST_KEY': 2, 'DLY_TRN_QT': 70, 'DLY_SLS_AMT': 10,
'COOP_DLY_TRN_QT': 50, 'COOP_DLY_SLS_AMT': 20}] * 28,
index=index.append(index)).sort_index()
index = date_range('2001-5-4',periods=4,freq='7D')
expected = DataFrame(
[{'REST_KEY': 14, 'DLY_TRN_QT': 14, 'DLY_SLS_AMT': 14,
'COOP_DLY_TRN_QT': 14, 'COOP_DLY_SLS_AMT': 14}] * 4,
index=index)
result = df.resample('7D', how='count')
assert_frame_equal(result,expected)
expected = DataFrame(
[{'REST_KEY': 21, 'DLY_TRN_QT': 1050, 'DLY_SLS_AMT': 700,
'COOP_DLY_TRN_QT': 560, 'COOP_DLY_SLS_AMT': 280}] * 4,
index=index)
result = df.resample('7D', how='sum')
assert_frame_equal(result,expected)
class TestTimeGrouper(tm.TestCase):
def setUp(self):
self.ts = Series(np.random.randn(1000),
index=date_range('1/1/2000', periods=1000))
def test_apply(self):
grouper = TimeGrouper('A', label='right', closed='right')
grouped = self.ts.groupby(grouper)
f = lambda x: x.sort_values()[-3:]
applied = grouped.apply(f)
expected = self.ts.groupby(lambda x: x.year).apply(f)
applied.index = applied.index.droplevel(0)
expected.index = expected.index.droplevel(0)
assert_series_equal(applied, expected)
def test_count(self):
self.ts[::3] = np.nan
grouper = TimeGrouper('A', label='right', closed='right')
result = self.ts.resample('A', how='count')
expected = self.ts.groupby(lambda x: x.year).count()
expected.index = result.index
assert_series_equal(result, expected)
def test_numpy_reduction(self):
result = self.ts.resample('A', how='prod', closed='right')
expected = self.ts.groupby(lambda x: x.year).agg(np.prod)
expected.index = result.index
assert_series_equal(result, expected)
def test_apply_iteration(self):
# #2300
N = 1000
ind = pd.date_range(start="2000-01-01", freq="D", periods=N)
df = DataFrame({'open': 1, 'close': 2}, index=ind)
tg = TimeGrouper('M')
_, grouper, _ = tg._get_grouper(df)
# Errors
grouped = df.groupby(grouper, group_keys=False)
f = lambda df: df['close'] / df['open']
# it works!
result = grouped.apply(f)
self.assertTrue(result.index.equals(df.index))
def test_panel_aggregation(self):
ind = pd.date_range('1/1/2000', periods=100)
data = np.random.randn(2, len(ind), 4)
wp = pd.Panel(data, items=['Item1', 'Item2'], major_axis=ind,
minor_axis=['A', 'B', 'C', 'D'])
tg = TimeGrouper('M', axis=1)
_, grouper, _ = tg._get_grouper(wp)
bingrouped = wp.groupby(grouper)
binagg = bingrouped.mean()
def f(x):
assert(isinstance(x, Panel))
return x.mean(1)
result = bingrouped.agg(f)
tm.assert_panel_equal(result, binagg)
def test_fails_on_no_datetime_index(self):
index_names = ('Int64Index', 'PeriodIndex', 'Index', 'Float64Index',
'MultiIndex')
index_funcs = (tm.makeIntIndex, tm.makePeriodIndex,
tm.makeUnicodeIndex, tm.makeFloatIndex,
lambda m: tm.makeCustomIndex(m, 2))
n = 2
for name, func in zip(index_names, index_funcs):
index = func(n)
df = DataFrame({'a': np.random.randn(n)}, index=index)
with tm.assertRaisesRegexp(TypeError,
"axis must be a DatetimeIndex, "
"but got an instance of %r" % name):
df.groupby(TimeGrouper('D'))
def test_aggregate_normal(self):
        # check that TimeGrouper's aggregation is identical to a normal groupby
n = 20
data = np.random.randn(n, 4)
normal_df = DataFrame(data, columns=['A', 'B', 'C', 'D'])
normal_df['key'] = [1, 2, 3, 4, 5] * 4
dt_df = DataFrame(data, columns=['A', 'B', 'C', 'D'])
dt_df['key'] = [datetime(2013, 1, 1), datetime(2013, 1, 2), datetime(2013, 1, 3),
datetime(2013, 1, 4), datetime(2013, 1, 5)] * 4
normal_grouped = normal_df.groupby('key')
dt_grouped = dt_df.groupby(TimeGrouper(key='key', freq='D'))
for func in ['min', 'max', 'prod', 'var', 'std', 'mean']:
expected = getattr(normal_grouped, func)()
dt_result = getattr(dt_grouped, func)()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
assert_frame_equal(expected, dt_result)
for func in ['count', 'sum']:
expected = getattr(normal_grouped, func)()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)()
assert_frame_equal(expected, dt_result)
# GH 7453
for func in ['size']:
expected = getattr(normal_grouped, func)()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)()
assert_series_equal(expected, dt_result)
"""
for func in ['first', 'last']:
expected = getattr(normal_grouped, func)()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)()
assert_frame_equal(expected, dt_result)
for func in ['nth']:
expected = getattr(normal_grouped, func)(3)
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)(3)
assert_frame_equal(expected, dt_result)
"""
        # when a TimeGrouper is used, 'first', 'last' and 'nth' don't work yet
def test_aggregate_with_nat(self):
        # check that TimeGrouper's aggregation is identical to a normal groupby
n = 20
data = np.random.randn(n, 4).astype('int64')
normal_df = DataFrame(data, columns=['A', 'B', 'C', 'D'])
normal_df['key'] = [1, 2, np.nan, 4, 5] * 4
dt_df = DataFrame(data, columns=['A', 'B', 'C', 'D'])
dt_df['key'] = [datetime(2013, 1, 1), datetime(2013, 1, 2), pd.NaT,
datetime(2013, 1, 4), datetime(2013, 1, 5)] * 4
normal_grouped = normal_df.groupby('key')
dt_grouped = dt_df.groupby(TimeGrouper(key='key', freq='D'))
for func in ['min', 'max', 'sum', 'prod']:
normal_result = getattr(normal_grouped, func)()
dt_result = getattr(dt_grouped, func)()
pad = DataFrame([[np.nan, np.nan, np.nan, np.nan]],
index=[3], columns=['A', 'B', 'C', 'D'])
expected = normal_result.append(pad)
expected = expected.sort_index()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
assert_frame_equal(expected, dt_result)
for func in ['count']:
normal_result = getattr(normal_grouped, func)()
pad = DataFrame([[0, 0, 0, 0]], index=[3], columns=['A', 'B', 'C', 'D'])
expected = normal_result.append(pad)
expected = expected.sort_index()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)()
assert_frame_equal(expected, dt_result)
for func in ['size']:
normal_result = getattr(normal_grouped, func)()
pad = Series([0], index=[3])
expected = normal_result.append(pad)
expected = expected.sort_index()
expected.index = date_range(start='2013-01-01', freq='D', periods=5, name='key')
dt_result = getattr(dt_grouped, func)()
assert_series_equal(expected, dt_result)
# GH 9925
self.assertEqual(dt_result.index.name, 'key')
        # when NaT is included, 'var', 'std', 'mean', 'first', 'last' and 'nth' don't work yet
if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)<|fim▁end|>
|
rng = period_range('2000Q1', periods=10, freq='Q-DEC')
ts = Series(np.arange(10), index=rng)
result = ts.resample('A')
|
<|file_name|>UserFormModal.tsx<|end_file_name|><|fim▁begin|>import { Trans } from "@lingui/macro";
import { i18nMark, withI18n } from "@lingui/react";
import mixin from "reactjs-mixin";
import { Hooks } from "PluginSDK";
import * as React from "react";
import StoreMixin from "#SRC/js/mixins/StoreMixin";
import AuthStore from "../../stores/AuthStore";
import FormModal from "../FormModal";
import ModalHeading from "../modals/ModalHeading";
import UserStore from "../../stores/UserStore";
class UserFormModal extends mixin(StoreMixin) {
state = {
disableNewUser: false,
errorMsg: false,
errorCode: null,
};
// prettier-ignore
store_listeners = [
{name: "user", events: ["createSuccess", "createError"], suppressUpdate: true}
];
onUserStoreCreateSuccess = () => {
this.setState({
disableNewUser: false,
errorMsg: false,
errorCode: null,
});
this.props.onClose();
};
onUserStoreCreateError(errorMsg, userID, xhr) {
this.setState({
disableNewUser: false,
errorMsg,
errorCode: xhr.status,
});
}
handleClose = () => {
this.setState({
disableNewUser: false,
errorMsg: false,
errorCode: null,
});
this.props.onClose();
};
handleNewUserSubmit = (model) => {
const { i18n } = this.props;
const passwordsMessage = i18nMark("Passwords do not match.");
    // bail out early when the two password fields do not match
    if (model.password !== model.confirmPassword) {
return this.setState({
errorMsg: i18n._(passwordsMessage),
});
}
delete model.confirmPassword; // We don't need to send this to the backend.
this.setState({ disableNewUser: true });
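    // plugins can extend or rewrite the new-user payload through the
    // "userModelObject" filter hook before it is handed to the UserStore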
const userModelObject = Hooks.applyFilter("userModelObject", {
...model,
creator_uid: AuthStore.getUser().uid,
cluster_url: `${window.location.protocol}//${window.location.hostname}`,
});
UserStore.addUser(userModelObject);
};
getButtonDefinition() {
const { props, state } = this;
return Hooks.applyFilter(
"userFormModalButtonDefinition",
[
{
text: i18nMark("Cancel"),
className: "button button-primary-link",
isClose: true,
},
{
text: state.disableNewUser
? i18nMark("Adding...")
: i18nMark("Add User"),
className: "button button-primary",
isSubmit: true,
},
],
props,
state
);
}
getNewUserFormDefinition() {
const { props, state } = this;
return Hooks.applyFilter(
"userFormModalDefinition",
[
{
fieldType: "text",
name: "uid",
placeholder: "Email",
required: true,
showError: state.errorMsg,
showLabel: false,
writeType: "input",
validation() {
return true;
},
value: "",
},
],
props,
state
);
}
<|fim▁hole|> return Hooks.applyFilter(
"userFormModalHeader",
<ModalHeading>
<Trans render="span">Add User to Cluster</Trans>
</ModalHeading>
);
}
getFooter() {
return (
<div>
{Hooks.applyFilter("userAddPolicy", null)}
{Hooks.applyFilter(
"userFormModalFooter",
<Trans
render="p"
className="form-group-without-top-label flush-bottom text-align-center"
>
<strong>Important:</strong> Because telemetry is disabled you must
manually notify users of ACL changes.
</Trans>
)}
</div>
);
}
render() {
return (
<FormModal
buttonDefinition={this.getButtonDefinition()}
definition={this.getNewUserFormDefinition()}
disabled={this.state.disableNewUser}
modalProps={{
header: this.getHeader(),
showHeader: true,
}}
onClose={this.handleClose}
onSubmit={this.handleNewUserSubmit}
open={this.props.open}
contentFooter={this.getFooter()}
/>
);
}
}
export default withI18n()(UserFormModal);<|fim▁end|>
|
getHeader() {
|
<|file_name|>Sort.java<|end_file_name|><|fim▁begin|>package models;
import java.util.List;
/**
* The model class to store the sorting information
*
* @author Sandro
*
*/
public class Sort {
private String text;
private String supplier;
private String status;
private String dateCreateStart;
private String dateCreateEnd;
private String dateUpdateStart;
private String dateUpdateEnd;
private String sort;
private List<String> recordsChecked;
public Sort() {
}
/**
* The constructor of the sort class
*
* @param text the search value of the text field
* @param supplier the search value of the supplier field
* @param status the search value of the status field
	 * @param dateCreateStart the start of the creation date range
	 * @param dateCreateEnd the end of the creation date range
	 * @param dateUpdateStart the start of the last-update date range
	 * @param dateUpdateEnd the end of the last-update date range
* @param sort the sort type value
* @param recordsChecked the UUID's of the records selected
*/
public Sort(String text, String supplier, String status, String dateCreateStart, String dateCreateEnd,
<|fim▁hole|> List<String> recordsChecked) {
this.text = text;
this.supplier = supplier;
this.status = status;
this.dateCreateStart = dateCreateStart;
this.dateCreateEnd = dateCreateEnd;
this.dateUpdateStart = dateUpdateStart;
this.dateUpdateEnd = dateUpdateEnd;
this.sort = sort;
this.recordsChecked = recordsChecked;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public String getSupplier() {
return supplier;
}
public void setSupplier(String supplier) {
this.supplier = supplier;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getDateCreateStart() {
return dateCreateStart;
}
public void setDateCreateStart(String dateCreateStart) {
this.dateCreateStart = dateCreateStart;
}
public String getDateCreateEnd() {
return dateCreateEnd;
}
public void setDateCreateEnd(String dateCreateEnd) {
this.dateCreateEnd = dateCreateEnd;
}
public String getDateUpdateStart() {
return dateUpdateStart;
}
public void setDateUpdateStart(String dateUpdateStart) {
this.dateUpdateStart = dateUpdateStart;
}
public String getDateUpdateEnd() {
return dateUpdateEnd;
}
public void setDateUpdateEnd(String dateUpdateEnd) {
this.dateUpdateEnd = dateUpdateEnd;
}
public String getSort() {
return sort;
}
public void setSort(String sort) {
this.sort = sort;
}
public List<String> getRecordsChecked() {
return recordsChecked;
}
public void setRecordsChecked(List<String> recordsChecked) {
this.recordsChecked = recordsChecked;
}
}<|fim▁end|>
|
String dateUpdateStart, String dateUpdateEnd, String sort,
|
<|file_name|>down_celebrity_images.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import ast
import os
import requests
import models
from config import config, sqla
from gevent.pool import Pool
from helpers import random_str, down
base_path = config.get('photo', 'path')
base_path = os.path.join(base_path, 'celebrity')
cookies = {
'bid': ''
}
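# 'bid' is randomized with random_str() before each download below; this
# presumably imitates Douban's browser-issued bid cookie (an assumption, not
# documented in this repo).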
def create_down(str_urls, douban_id, category):
urls = ast.literal_eval(str_urls or "[]")
path = os.path.join(base_path, category)
for url in urls:
filename = str(douban_id) + '_' + url.split('/')[-1].strip('?')
cookies['bid'] = random_str(11)
down(url, cookies, path, filename)
def create_requests_and_save_datas(douban_id):
session = sqla['session']
cookies['bid'] = random_str(11)
celebrity = session.query(models.Celebrity).filter_by(
douban_id=douban_id
).one()
cover_url = celebrity.cover
thumbnail_cover_url = celebrity.thumbnail_cover
photos_url = celebrity.photos
thumbnail_photos_url = celebrity.thumbnail_photos
down(
cover_url,
cookies,
os.path.join(base_path, 'cover'),
str(douban_id)+'_'+cover_url.split('/')[-1].strip('?')
)
down(
thumbnail_cover_url,
cookies,
os.path.join(base_path, 'thumbnail_cover'),
        str(douban_id)+'_'+thumbnail_cover_url.split('/')[-1].strip('?')
)
create_down(photos_url, douban_id, 'photos')<|fim▁hole|>
def task(douban_ids, pool_number):
pool = Pool(pool_number)
for douban_id in douban_ids:
pool.spawn(
create_requests_and_save_datas,
douban_id=douban_id
)
pool.join()<|fim▁end|>
|
create_down(thumbnail_photos_url, douban_id, 'thumbnail_photos')
|
<|file_name|>level.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate test; // required for test::Bencher with #[bench]
use byteorder::{LittleEndian as E, ReadBytesExt};
use std::{
fs::File,
io::{Read, Seek},
};
const VMC_PATH: &'static str = "/hub/gog/Vangers/game/thechain/fostral/output.vmc";
const SIZE: [usize; 2] = [1 << 11, 1 << 14];
#[bench]
fn load_level(bench: &mut test::Bencher) {
let mut file = File::open(VMC_PATH).unwrap();
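    // The .vmc file begins with an index table: one (offset, size) entry per
    // terrain row (interpretation assumed from how the entries are used below).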
let table: Vec<_> = (0..SIZE[1])
.map(|_| {
let offset = file.read_i32::<E>().unwrap();
let size = file.read_i16::<E>().unwrap();
(offset, size)
})
.collect();
let splay = splay::Splay::new(&mut file);
let mut height = vec![0u8; SIZE[0]];
let mut meta = vec![0u8; SIZE[0]];
let data_offset = file.seek(std::io::SeekFrom::Current(0)).unwrap();
let mut buffer = Vec::new();<|fim▁hole|> for &(offset, size) in table[..0x100].iter() {
let off = offset as usize - data_offset as usize;
splay.expand(&buffer[off..off + size as usize], &mut height, &mut meta);
}
});
}<|fim▁end|>
|
file.read_to_end(&mut buffer).unwrap();
bench.iter(|| {
|
<|file_name|>lower.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
import event<|fim▁hole|>import nxt.locator
import nxt.motor as motor
brick = nxt.locator.find_one_brick()
height_motor = motor.Motor(brick, motor.PORT_A)
height_motor.turn(127, 5000, brake=False)<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use n_queens_problem::{n_queens, semi_parallel_n_queens};
fn main() {<|fim▁hole|> for num in 0i32..16 {
println!("Parallel: {}: {}", num, semi_parallel_n_queens(num));
}
}<|fim▁end|>
|
for num in 0i32..16 {
println!("Sequential: {}: {}", num, n_queens(num));
}
|
<|file_name|>source_loc_macros.rs<|end_file_name|><|fim▁begin|>// This test makes sure that different expansions of the file!(), line!(),
// column!() macros get picked up by the incr. comp. hash.
// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#[rustc_clean(cfg="rpass2")]
fn line_same() {
let _ = line!();
}
#[rustc_clean(cfg="rpass2")]
fn col_same() {
let _ = column!();
}
#[rustc_clean(cfg="rpass2")]
fn file_same() {
let _ = file!();
}
#[rustc_clean(except="hir_owner_nodes,optimized_mir", cfg="rpass2")]<|fim▁hole|> let _ = line!();
}
#[cfg(rpass2)]
{
let _ = line!();
}
}
#[rustc_clean(except="hir_owner_nodes,optimized_mir", cfg="rpass2")]
fn col_different() {
#[cfg(rpass1)]
{
let _ = column!();
}
#[cfg(rpass2)]
{
let _ = column!();
}
}
fn main() {
line_same();
line_different();
col_same();
col_different();
file_same();
}<|fim▁end|>
|
fn line_different() {
#[cfg(rpass1)]
{
|
<|file_name|>async_multicast.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
@file async_multicast.py
@author Woong Gyu La a.k.a Chris. <[email protected]>
<http://github.com/juhgiyo/pyserver>
@date March 10, 2016
@brief AsyncMulticast Interface
@version 0.1
@section LICENSE
The MIT License (MIT)
Copyright (c) 2016 Woong Gyu La <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.<|fim▁hole|>THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@section DESCRIPTION
AsyncMulticast Class.
"""
import asyncio
import socket
import traceback
import threading
from .callback_interface import *
from .async_controller import AsyncController
# noinspection PyDeprecation
import copy
IP_MTU_DISCOVER = 10
IP_PMTUDISC_DONT = 0 # Never send DF frames.
IP_PMTUDISC_WANT = 1 # Use per route hints.
IP_PMTUDISC_DO = 2 # Always DF.
IP_PMTUDISC_PROBE = 3 # Ignore dst pmtu.
'''
Interfaces
variables
- callback_obj
functions
- def send(multicast_addr,port,data)
- def close() # close the socket
- def join(multicast_addr) # start receiving datagram from given multicast group
- def leave(multicast_addr) # stop receiving datagram from given multicast group
- def getgrouplist() # get group list
infos
- multicast address range: 224.0.0.0 - 239.255.255.255
- linux : route add -net 224.0.0.0 netmask 240.0.0.0 dev eth0
to enable multicast
'''
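# Minimal usage sketch (hypothetical callback class; a full IUdpCallback
# implementation also needs on_started/on_stopped/on_join/on_leave):
#
#   class EchoCallback(IUdpCallback):
#       def on_received(self, sock, addr, data):
#           sock.send(addr[0], addr[1], data)
#
#   mcast = AsyncMulticast(5005, EchoCallback())
#   mcast.join('239.1.2.3')                  # start receiving from this group
#   mcast.send('239.1.2.3', 5005, b'ping')   # send a datagram to the group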
class AsyncMulticast(asyncio.DatagramProtocol):
# enable_loopback : 1 enable loopback / 0 disable loopback
# ttl: 0 - restricted to the same host
# 1 - restricted to the same subnet
# 32 - restricted to the same site
# 64 - restricted to the same region
# 128 - restricted to the same continent
# 255 - unrestricted in scope
def __init__(self, port, callback_obj, ttl=1, enable_loopback=False, bind_addr=''):
# self.lock = threading.RLock()
self.MAX_MTU = 1500
self.callback_obj = None
self.port = port
self.multicastSet = set([])
self.lock = threading.RLock()
self.ttl = ttl
self.enable_loopback = enable_loopback
if callback_obj is not None and isinstance(callback_obj, IUdpCallback):
self.callback_obj = callback_obj
else:
raise Exception('callback_obj is None or not an instance of IUdpCallback class')
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except AttributeError:
pass # Some systems don't support SO_REUSEPORT
# for both SENDER and RECEIVER to restrict the region
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self.ttl)
# for SENDER to choose whether to use loop back
if self.enable_loopback:
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
else:
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 0)
self.bind_addr = bind_addr
if self.bind_addr is None or self.bind_addr == '':
self.bind_addr = socket.gethostbyname(socket.gethostname())
# for both SENDER and RECEIVER to bind to specific network adapter
self.sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.bind_addr))
# for RECEIVE to receive from multiple multicast groups
self.sock.bind(('', port))
except Exception as e:
print(e)
traceback.print_exc()
self.transport = None
AsyncController.instance().add(self)
if self.callback_obj is not None:
self.callback_obj.on_started(self)
self.loop = asyncio.get_event_loop()
coro = self.loop.create_datagram_endpoint(lambda: self, sock=self.sock)
AsyncController.instance().pause()
(self.transport, _) = self.loop.run_until_complete(coro)
AsyncController.instance().resume()
# Even though UDP is connectionless this is called when it binds to a port
def connection_made(self, transport):
self.transport = transport
    # This is called every time a datagram is received
    def datagram_received(self, data, addr):
try:
if data and self.callback_obj is not None:
self.callback_obj.on_received(self, addr, data)
except Exception as e:
print(e)
traceback.print_exc()
def connection_lost(self, exc):
self.close()
def close(self):
self.handle_close()
def error_received(self, exc):
self.handle_close()
def handle_close(self):
try:
delete_set = self.getgrouplist()
for multicast_addr in delete_set:
self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
if self.callback_obj is not None:
self.callback_obj.on_leave(self, multicast_addr)
with self.lock:
self.multicastSet = set([])
except Exception as e:
print(e)
print('asyncUdp close called')
self.transport.close()
AsyncController.instance().discard(self)
try:
if self.callback_obj is not None:
self.callback_obj.on_stopped(self)
except Exception as e:
print(e)
traceback.print_exc()
# noinspection PyMethodOverriding
def send(self, hostname, port, data):
if len(data) <= self.MAX_MTU:
self.transport.sendto(data, (hostname, port))
else:
raise ValueError("The data size is too large")
# for RECEIVER to receive datagram from the multicast group
def join(self, multicast_addr):
with self.lock:
if multicast_addr not in self.multicastSet:
self.sock.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton(self.bind_addr))
self.multicastSet.add(multicast_addr)
if self.callback_obj is not None:
self.callback_obj.on_join(self, multicast_addr)
# for RECEIVER to stop receiving datagram from the multicast group
def leave(self, multicast_addr):
with self.lock:
try:
if multicast_addr in self.multicastSet:
self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
self.multicastSet.discard(multicast_addr)
if self.callback_obj is not None:
self.callback_obj.on_leave(self, multicast_addr)
except Exception as e:
print(e)
def getgrouplist(self):
with self.lock:
return copy.copy(self.multicastSet)
    def gethostbyname(self, arg):
        # socket objects have no gethostbyname; use the module-level function
        return socket.gethostbyname(arg)
    def gethostname(self):
        return socket.gethostname()
# Echo udp server test
# def readHandle(sock,addr, data):
# sock.send(addr[0],addr[1],data)
# server=AsyncUDP(5005,readHandle)<|fim▁end|>
| |
<|file_name|>test_rest_stream_responses_async.py<|end_file_name|><|fim▁begin|># -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE.txt in the project root for
# license information.
# -------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError, ServiceRequestError
import pytest
from azure.core.rest import HttpRequest
from azure.core.exceptions import StreamClosedError, StreamConsumedError, ResponseNotReadError
@pytest.mark.asyncio
async def test_iter_raw(client):
request = HttpRequest("GET", "/streams/basic")
async with client.send_request(request, stream=True) as response:
raw = b""
async for part in response.iter_raw():
raw += part
assert raw == b"Hello, world!"
@pytest.mark.asyncio
async def test_iter_raw_on_iterable(client):
request = HttpRequest("GET", "/streams/iterable")
async with client.send_request(request, stream=True) as response:
raw = b""
async for part in response.iter_raw():
raw += part
assert raw == b"Hello, world!"
@pytest.mark.asyncio
async def test_iter_with_error(client):
request = HttpRequest("GET", "/errors/403")
async with client.send_request(request, stream=True) as response:
try:
response.raise_for_status()
except HttpResponseError as e:
pass
assert response.is_closed
try:<|fim▁hole|>
assert response.is_closed
request = HttpRequest("GET", "http://doesNotExist")
with pytest.raises(ServiceRequestError):
async with (await client.send_request(request, stream=True)):
raise ValueError("Should error before entering")
assert response.is_closed
@pytest.mark.asyncio
async def test_iter_bytes(client):
request = HttpRequest("GET", "/streams/basic")
async with client.send_request(request, stream=True) as response:
raw = b""
async for chunk in response.iter_bytes():
assert response.is_stream_consumed
assert not response.is_closed
raw += chunk
assert response.is_stream_consumed
assert response.is_closed
assert raw == b"Hello, world!"
@pytest.mark.skip(reason="We've gotten rid of iter_text for now")
@pytest.mark.asyncio
async def test_iter_text(client):
request = HttpRequest("GET", "/basic/string")
async with client.send_request(request, stream=True) as response:
content = ""
async for part in response.iter_text():
content += part
assert content == "Hello, world!"
@pytest.mark.skip(reason="We've gotten rid of iter_lines for now")
@pytest.mark.asyncio
async def test_iter_lines(client):
request = HttpRequest("GET", "/basic/lines")
async with client.send_request(request, stream=True) as response:
content = []
async for part in response.iter_lines():
content.append(part)
assert content == ["Hello,\n", "world!"]
@pytest.mark.asyncio
async def test_streaming_response(client):
request = HttpRequest("GET", "/streams/basic")
async with client.send_request(request, stream=True) as response:
assert response.status_code == 200
assert not response.is_closed
content = await response.read()
assert content == b"Hello, world!"
assert response.content == b"Hello, world!"
assert response.is_closed
@pytest.mark.asyncio
async def test_cannot_read_after_stream_consumed(port, client):
request = HttpRequest("GET", "/streams/basic")
async with client.send_request(request, stream=True) as response:
content = b""
async for chunk in response.iter_bytes():
content += chunk
with pytest.raises(StreamConsumedError) as ex:
await response.read()
assert "<HttpRequest [GET], url: 'http://localhost:{}/streams/basic'>".format(port) in str(ex.value)
assert "You have likely already consumed this stream, so it can not be accessed anymore" in str(ex.value)
@pytest.mark.asyncio
async def test_cannot_read_after_response_closed(port, client):
request = HttpRequest("GET", "/streams/basic")
async with client.send_request(request, stream=True) as response:
pass
with pytest.raises(StreamClosedError) as ex:
await response.read()
assert "<HttpRequest [GET], url: 'http://localhost:{}/streams/basic'>".format(port) in str(ex.value)
assert "can no longer be read or streamed, since the response has already been closed" in str(ex.value)
@pytest.mark.asyncio
async def test_decompress_plain_no_header(client):
# thanks to Xiang Yan for this test!
account_name = "coretests"
url = "https://{}.blob.core.windows.net/tests/test.txt".format(account_name)
request = HttpRequest("GET", url)
async with client:
response = await client.send_request(request, stream=True)
with pytest.raises(ResponseNotReadError):
response.content
await response.read()
assert response.content == b"test"
@pytest.mark.asyncio
async def test_compress_plain_no_header(client):
# thanks to Xiang Yan for this test!
account_name = "coretests"
url = "https://{}.blob.core.windows.net/tests/test.txt".format(account_name)
request = HttpRequest("GET", url)
async with client:
response = await client.send_request(request, stream=True)
iter = response.iter_raw()
data = b""
async for d in iter:
data += d
assert data == b"test"
@pytest.mark.asyncio
async def test_iter_read_back_and_forth(client):
# thanks to McCoy Patiño for this test!
# while this test may look like it's exposing buggy behavior, this is httpx's behavior
# the reason why the code flow is like this, is because the 'iter_x' functions don't
# actually read the contents into the response, the output them. Once they're yielded,
# the stream is closed, so you have to catch the output when you iterate through it
request = HttpRequest("GET", "/basic/string")
async with client.send_request(request, stream=True) as response:
async for part in response.iter_bytes():
assert part
with pytest.raises(ResponseNotReadError):
response.text()
with pytest.raises(StreamConsumedError):
await response.read()
with pytest.raises(ResponseNotReadError):
response.text()
@pytest.mark.asyncio
async def test_stream_with_return_pipeline_response(client):
request = HttpRequest("GET", "/basic/string")
pipeline_response = await client.send_request(request, stream=True, _return_pipeline_response=True)
assert hasattr(pipeline_response, "http_request")
assert hasattr(pipeline_response.http_request, "content")
assert hasattr(pipeline_response, "http_response")
assert hasattr(pipeline_response, "context")
parts = []
async for part in pipeline_response.http_response.iter_bytes():
parts.append(part)
assert parts == [b'Hello, world!']
await client.close()
@pytest.mark.asyncio
async def test_error_reading(client):
request = HttpRequest("GET", "/errors/403")
async with client.send_request(request, stream=True) as response:
await response.read()
assert response.content == b""
response.content
response = await client.send_request(request, stream=True)
with pytest.raises(HttpResponseError):
response.raise_for_status()
await response.read()
assert response.content == b""
await client.close()
@pytest.mark.asyncio
async def test_pass_kwarg_to_iter_bytes(client):
request = HttpRequest("GET", "/basic/string")
response = await client.send_request(request, stream=True)
async for part in response.iter_bytes(chunk_size=5):
assert part
@pytest.mark.asyncio
async def test_pass_kwarg_to_iter_raw(client):
request = HttpRequest("GET", "/basic/string")
response = await client.send_request(request, stream=True)
async for part in response.iter_raw(chunk_size=5):
assert part
@pytest.mark.asyncio
async def test_decompress_compressed_header(client):
# expect plain text
request = HttpRequest("GET", "/encoding/gzip")
response = await client.send_request(request)
content = await response.read()
assert content == b"hello world"
assert response.content == content
assert response.text() == "hello world"
@pytest.mark.asyncio
async def test_decompress_compressed_header_stream(client):
# expect plain text
request = HttpRequest("GET", "/encoding/gzip")
response = await client.send_request(request, stream=True)
content = await response.read()
assert content == b"hello world"
assert response.content == content
assert response.text() == "hello world"
@pytest.mark.asyncio
async def test_decompress_compressed_header_stream_body_content(client):
# expect plain text
request = HttpRequest("GET", "/encoding/gzip")
response = await client.send_request(request, stream=True)
await response.read()
content = response.content
assert content == response.body()<|fim▁end|>
|
async with client.send_request(request, stream=True) as response:
response.raise_for_status()
except HttpResponseError as e:
pass
|
<|file_name|>PredictorRegressionMetrics.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import { ValueLine } from '@framework/Lines'
import { TypeContext } from '@framework/TypeContext'
import { PredictorRegressionMetricsEmbedded, PredictorEntity } from '../Signum.Entities.MachineLearning'
export default class PredictorRegressionMetrics extends React.Component<{ ctx: TypeContext<PredictorEntity> }> {
render() {
const ctx = this.props.ctx.subCtx({ formGroupStyle: "SrOnly" });
return (
<fieldset>
<legend>Regression</legend>
<table className="table table-sm" style={{ width: "initial" }}>
<thead>
<tr>
<th></th>
<th>Training</th>
<th>Validation</th>
</tr>
</thead>
<tbody>
{this.renderRow(ctx, a => a.meanError)}
{this.renderRow(ctx, a => a.meanAbsoluteError)}
{this.renderRow(ctx, a => a.meanSquaredError)}
{this.renderRow(ctx, a => a.rootMeanSquareError)}
{this.renderRow(ctx, a => a.meanPercentageError)}
{this.renderRow(ctx, a => a.meanAbsolutePercentageError)}
</tbody>
</table>
</fieldset>
);
}
<|fim▁hole|> var unit = ctxT.subCtx(property).propertyRoute.member!.unit;
return (
<tr>
<th>{ctxT.niceName(property)}{unit && " (" + unit + ")"}</th>
<td><ValueLine ctx={ctxT.subCtx(property)} unitText="" /></td>
<td><ValueLine ctx={ctxV.subCtx(property)} unitText="" /></td>
</tr>
);
}
}<|fim▁end|>
|
renderRow(ctx: TypeContext<PredictorEntity>, property: (val: PredictorRegressionMetricsEmbedded) => number | null | undefined) {
const ctxT = ctx.subCtx(a => a.regressionTraining!);
const ctxV = ctx.subCtx(a => a.regressionValidation!);
|
<|file_name|>sm.py<|end_file_name|><|fim▁begin|>import os
from pyjs import linker
from pyjs import translator
from pyjs import util
from optparse import OptionParser
import pyjs
PLATFORM='spidermonkey'
APP_TEMPLATE = """
var $wnd = new Object();
$wnd.document = new Object();
var $doc = $wnd.document;
var $moduleName = "%(app_name)s";
var $pyjs = new Object();
$pyjs.__modules__ = {};
$pyjs.modules = {};
$pyjs.modules_hash = {};
$pyjs.available_modules = %(available_modules)s;
$pyjs.loaded_modules = {};
$pyjs.options = new Object();
$pyjs.options.set_all = function (v) {
$pyjs.options.arg_ignore = v;
$pyjs.options.arg_count = v;
$pyjs.options.arg_is_instance = v;
$pyjs.options.arg_instance_type = v;
$pyjs.options.arg_kwarg_dup = v;
$pyjs.options.arg_kwarg_unexpected_keyword = v;
$pyjs.options.arg_kwarg_multiple_values = v;
}
$pyjs.options.set_all(true);
$pyjs.trackstack = [];
$pyjs.track = {module:'__main__', lineno: 1};
$pyjs.trackstack.push($pyjs.track);
$pyjs.__last_exception_stack__ = null;
$pyjs.__last_exception__ = null;
/*<|fim▁hole|>$pyjs.loadpath = './';
load(%(module_files)s);
load(%(js_lib_files)s);
/* late static js libs */
%(late_static_js_libs)s
try {
$pyjs.loaded_modules['pyjslib']('pyjslib');
$pyjs.loaded_modules['pyjslib'].___import___('%(app_name)s', '%(app_name)s', '__main__');
} catch(exception)
{
var fullMessage = exception.name + ': ' + exception.message;
var uri = exception.fileName;
//var stack = exception.stack;
var line = exception.lineNumber;
fullMessage += "\\n at " + uri + ": " + line;
print (fullMessage );
//print (stack.toString() );
}
"""
class SpidermonkeyLinker(linker.BaseLinker):
"""Spidermonkey linker, which links together files by using the
load function of the spidermonkey shell."""
# we derive from mozilla
platform_parents = {
PLATFORM:['mozilla', 'array_extras']
}
def __init__(self, *args, **kwargs):
kwargs['platforms'] = [PLATFORM]
super(SpidermonkeyLinker, self).__init__(*args, **kwargs)
def visit_start(self):
super(SpidermonkeyLinker, self).visit_start()
self.js_libs.append('_pyjs.js')
self.merged_public = set()
def merge_resources(self, dir_name):
"""find the absolute paths of js includes"""
if not self.js_libs or dir_name in self.merged_public:
return
public_folder = os.path.join(dir_name, 'public')
if not os.path.isdir(public_folder):
return
for i, js_lib in enumerate(self.js_libs):
p = os.path.join(public_folder, js_lib)
if os.path.isfile(p):
self.js_libs[i] = p
def visit_end(self):
def static_code(libs, msg = None):
code = []
for lib in libs:
fname = lib
if not os.path.isfile(fname):
fname = os.path.join(self.output, lib)
if not os.path.isfile(fname):
raise RuntimeError('File not found %r' % lib)
                # strip the output-directory prefix (and its path separator) if present
                if fname.startswith(self.output):
                    name = fname[len(self.output) + 1:]
                else:
                    name = os.path.basename(lib)
if not msg is None:
code.append("/* start %s: %s */" % (msg, name))
f = file(fname)
code.append(f.read())
if not msg is None:
code.append("/* end %s */" % (name,))
self.remove_files[fname] = True
fname = fname.split('.')
if fname[-2] == '__%s__' % platform_name:
del fname[-2]
fname = '.'.join(fname)
if os.path.isfile(fname):
self.remove_files[fname] = True
return "\n".join(code)
done = self.done[PLATFORM]
# locals - go into template via locals()
module_files=str(done)[1:-1]
js_lib_files=str(self.js_libs)[1:-1]
early_static_js_libs=str(self.js_libs)[1:-1]
late_static_js_libs = [] + self.late_static_js_libs
late_static_js_libs = static_code(late_static_js_libs, "javascript lib")
app_name = self.top_module
available_modules = self.visited_modules[PLATFORM]
out_file = open(
os.path.join(self.output, self.top_module + '.js'), 'w')
out_file.write(APP_TEMPLATE % locals())
out_file.close()
def build_script():
usage = """
usage: %prog [options] module_name
"""
parser = OptionParser(usage = usage)
translator.add_compile_options(parser)
# override the default because we want print
parser.set_defaults(print_statements=True)
linker.add_linker_options(parser)
options, args = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
top_module = args[0]
for d in options.library_dirs:
pyjs.path.append(os.path.abspath(d))
translator_arguments=dict(
debug=options.debug,
print_statements = options.print_statements,
function_argument_checking=options.function_argument_checking,
attribute_checking=options.attribute_checking,
source_tracking=options.source_tracking,
line_tracking=options.line_tracking,
store_source=options.store_source
)
l = SpidermonkeyLinker(top_module,
output=options.output,
platforms=[PLATFORM],
path=pyjs.path,
translator_arguments=translator_arguments)
l()<|fim▁end|>
|
* prepare app system vars
*/
$pyjs.platform = 'spidermonkey';
$pyjs.appname = '%(app_name)s';
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate docopt;
extern crate futures;
extern crate http;
extern crate hyper;
#[macro_use]
extern crate serde_derive;
use docopt::Docopt;
use std::env;
mod server;
const USAGE: &'static str = "
HTTP server for theremins.club
Usage:
theremins-http-server [--port <port>] [--ws-url <url>]
theremins-http-server --help
Options:
-h --help Show this screen
--port <port> HTTP address to listen to [env: THEREMINS_HTTP_PORT]
--ws-url <url> Web Socket URL to point to [env: THEREMINS_WS_URL].
";
#[derive(Debug, Deserialize)]
struct Args {
flag_port: Option<u16>,
flag_ws_url: Option<String>,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
let port = args.flag_port
.or_else(|| {
env::var("THEREMINS_HTTP_PORT")
.ok()
.and_then(|foo| foo.parse().ok())
})
.unwrap_or(8000);
let ws_url = args.flag_ws_url
.or_else(|| env::var("THEREMINS_WS_URL").ok())
.unwrap_or("ws://localhost:8001".into());<|fim▁hole|><|fim▁end|>
|
server::serve(port, ws_url);
}
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Fast R-CNN config system.
This file specifies default config options for Fast R-CNN. You should not
change values in this file. Instead, you should write a config file (in yaml)
and use cfg_from_file(yaml_file) to load it and override the default options.
Most tools in $ROOT/tools take a --cfg option to specify an override file.
- See tools/{train,test}_net.py for example code that uses cfg_from_file()
- See experiments/cfgs/*.yml for example YAML config override files
"""
import os
import os.path as osp
import numpy as np
import math
# `pip install easydict` if you don't have it
from easydict import EasyDict as edict
__C = edict()
# Consumers can get config by:
# from fast_rcnn_config import cfg
cfg = __C
# region proposal network (RPN) or not
__C.IS_RPN = False
__C.FLIP_X = False
__C.INPUT = 'COLOR'
# multiscale training and testing
__C.IS_MULTISCALE = True
__C.IS_EXTRAPOLATING = True
#
__C.REGION_PROPOSAL = 'RPN'
__C.NET_NAME = 'CaffeNet'
__C.SUBCLS_NAME = 'voxel_exemplars'
#
# Training options
#
__C.TRAIN = edict()
__C.TRAIN.VISUALIZE = False
__C.TRAIN.VERTEX_REG = False
__C.TRAIN.GRID_SIZE = 256
__C.TRAIN.CHROMATIC = False
# Scales to compute real features
__C.TRAIN.SCALES_BASE = (0.25, 0.5, 1.0, 2.0, 3.0)
# The number of scales per octave in the image pyramid
# An octave is the set of scales up to half of the initial scale
__C.TRAIN.NUM_PER_OCTAVE = 4
# parameters for ROI generating
__C.TRAIN.SPATIAL_SCALE = 0.0625
__C.TRAIN.KERNEL_SIZE = 5
# Aspect ratio to use during training
__C.TRAIN.ASPECTS = (1, 0.75, 0.5, 0.25)
# Images to use per minibatch
__C.TRAIN.IMS_PER_BATCH = 2
# Minibatch size (number of regions of interest [ROIs])
__C.TRAIN.BATCH_SIZE = 128
# Fraction of minibatch that is labeled foreground (i.e. class > 0)
__C.TRAIN.FG_FRACTION = 0.25
# Overlap threshold for a ROI to be considered foreground (if >= FG_THRESH)
__C.TRAIN.FG_THRESH = (0.5,)
# Overlap threshold for a ROI to be considered background (class = 0 if
# overlap in [LO, HI))
__C.TRAIN.BG_THRESH_HI = (0.5,)
__C.TRAIN.BG_THRESH_LO = (0.1,)
# Use horizontally-flipped images during training?
__C.TRAIN.USE_FLIPPED = True
# Train bounding-box regressors
__C.TRAIN.BBOX_REG = True
# Overlap required between a ROI and ground-truth box in order for that ROI to
# be used as a bounding-box regression training example
__C.TRAIN.BBOX_THRESH = (0.5,)
# Iterations between snapshots
__C.TRAIN.SNAPSHOT_ITERS = 10000
# solver.prototxt specifies the snapshot path prefix, this adds an optional
# infix to yield the path: <prefix>[_<infix>]_iters_XYZ.caffemodel
__C.TRAIN.SNAPSHOT_INFIX = ''
# Use a prefetch thread in roi_data_layer.layer
# So far I haven't found this useful; likely more engineering work is required
__C.TRAIN.USE_PREFETCH = False
# Train using subclasses
__C.TRAIN.SUBCLS = True
# Train using viewpoint
__C.TRAIN.VIEWPOINT = False
# Threshold of ROIs in training RCNN
__C.TRAIN.ROI_THRESHOLD = 0.1
# IOU >= thresh: positive example
__C.TRAIN.RPN_POSITIVE_OVERLAP = 0.7
# IOU < thresh: negative example
__C.TRAIN.RPN_NEGATIVE_OVERLAP = 0.3
# If an anchor statisfied by positive and negative conditions set to negative
__C.TRAIN.RPN_CLOBBER_POSITIVES = False
# Max number of foreground examples
__C.TRAIN.RPN_FG_FRACTION = 0.5
# Total number of examples
__C.TRAIN.RPN_BATCHSIZE = 256
# NMS threshold used on RPN proposals
__C.TRAIN.RPN_NMS_THRESH = 0.7
# Number of top scoring boxes to keep before apply NMS to RPN proposals
__C.TRAIN.RPN_PRE_NMS_TOP_N = 12000
# Number of top scoring boxes to keep after applying NMS to RPN proposals
__C.TRAIN.RPN_POST_NMS_TOP_N = 2000
# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale)
__C.TRAIN.RPN_MIN_SIZE = 16
# Deprecated (outside weights)
__C.TRAIN.RPN_BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
# Give the positive RPN examples weight of p * 1 / {num positives}
# and give negatives a weight of (1 - p)
# Set to -1.0 to use uniform example weighting
__C.TRAIN.RPN_POSITIVE_WEIGHT = -1.0
__C.TRAIN.RPN_BASE_SIZE = 16
__C.TRAIN.RPN_ASPECTS = [0.25, 0.5, 0.75, 1, 1.5, 2, 3] # 7 aspects
__C.TRAIN.RPN_SCALES = [2, 2.82842712, 4, 5.65685425, 8, 11.3137085, 16, 22.627417, 32, 45.254834] # 2**np.arange(1, 6, 0.5), 10 scales
#
# Testing options
#
__C.TEST = edict()
__C.TEST.IS_PATCH = False;
__C.TEST.VERTEX_REG = False
__C.TEST.VISUALIZE = False
# Scales to compute real features
__C.TEST.SCALES_BASE = (0.25, 0.5, 1.0, 2.0, 3.0)
# The number of scales per octave in the image pyramid
# An octave is the set of scales up to half of the initial scale
__C.TEST.NUM_PER_OCTAVE = 4
# Aspect ratio to use during testing
__C.TEST.ASPECTS = (1, 0.75, 0.5, 0.25)
# parameters for ROI generating
__C.TEST.SPATIAL_SCALE = 0.0625
__C.TEST.KERNEL_SIZE = 5
# Overlap threshold used for non-maximum suppression (suppress boxes with
# IoU >= this threshold)
__C.TEST.NMS = 0.5
# Experimental: treat the (K+1) units in the cls_score layer as linear
# predictors (trained, eg, with one-vs-rest SVMs).
__C.TEST.SVM = False
# Test using bounding-box regressors
__C.TEST.BBOX_REG = True
# Test using subclass
__C.TEST.SUBCLS = True
# Train using viewpoint
__C.TEST.VIEWPOINT = False
# Threshold of ROIs in testing
__C.TEST.ROI_THRESHOLD = 0.1
__C.TEST.ROI_THRESHOLD_NUM = 80000
__C.TEST.ROI_NUM = 2000
__C.TEST.DET_THRESHOLD = 0.0001
## NMS threshold used on RPN proposals
__C.TEST.RPN_NMS_THRESH = 0.7
## Number of top scoring boxes to keep before apply NMS to RPN proposals
__C.TEST.RPN_PRE_NMS_TOP_N = 6000
## Number of top scoring boxes to keep after applying NMS to RPN proposals
__C.TEST.RPN_POST_NMS_TOP_N = 300
# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale)
__C.TEST.RPN_MIN_SIZE = 16
#
# MISC
#
# The mapping from image coordinates to feature map coordinates might cause
# some boxes that are distinct in image space to become identical in feature
# coordinates. If DEDUP_BOXES > 0, then DEDUP_BOXES is used as the scale factor
# for identifying duplicate boxes.
# 1/16 is correct for {Alex,Caffe}Net, VGG_CNN_M_1024, and VGG16
__C.DEDUP_BOXES = 1./16.
# Pixel mean values (BGR order) as a (1, 1, 3) array
# These are the values originally used for training VGG16
__C.PIXEL_MEANS = np.array([[[102.9801, 115.9465, 122.7717]]])
# For reproducibility
__C.RNG_SEED = 3
# A small number that's used many times
__C.EPS = 1e-14
# Root directory of project
__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
# Place outputs under an experiments directory
__C.EXP_DIR = 'default'
# Use GPU implementation of non-maximum suppression
__C.USE_GPU_NMS = True
# Default GPU device id
__C.GPU_ID = 0
<|fim▁hole|> A canonical path is built using the name from an imdb and a network
(if not None).
"""
path = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name))
if net is None:
return path
else:
return osp.join(path, net.name)
def _add_more_info(is_train):
# compute all the scales
if is_train:
scales_base = __C.TRAIN.SCALES_BASE
num_per_octave = __C.TRAIN.NUM_PER_OCTAVE
else:
scales_base = __C.TEST.SCALES_BASE
num_per_octave = __C.TEST.NUM_PER_OCTAVE
num_scale_base = len(scales_base)
num = (num_scale_base - 1) * num_per_octave + 1
scales = []
for i in xrange(num):
index_scale_base = i / num_per_octave
sbase = scales_base[index_scale_base]
j = i % num_per_octave
if j == 0:
scales.append(sbase)
else:
sbase_next = scales_base[index_scale_base+1]
step = (sbase_next - sbase) / num_per_octave
scales.append(sbase + j * step)
if is_train:
__C.TRAIN.SCALES = scales
else:
__C.TEST.SCALES = scales
print scales
# map the scales to scales for RoI pooling of classification
if is_train:
kernel_size = __C.TRAIN.KERNEL_SIZE / __C.TRAIN.SPATIAL_SCALE
else:
kernel_size = __C.TEST.KERNEL_SIZE / __C.TEST.SPATIAL_SCALE
area = kernel_size * kernel_size
scales = np.array(scales)
areas = np.repeat(area, num) / (scales ** 2)
scaled_areas = areas[:, np.newaxis] * (scales[np.newaxis, :] ** 2)
diff_areas = np.abs(scaled_areas - 224 * 224)
levels = diff_areas.argmin(axis=1)
if is_train:
__C.TRAIN.SCALE_MAPPING = levels
else:
__C.TEST.SCALE_MAPPING = levels
# compute width and height of grid box
if is_train:
area = __C.TRAIN.KERNEL_SIZE * __C.TRAIN.KERNEL_SIZE
aspect = __C.TRAIN.ASPECTS # height / width
else:
area = __C.TEST.KERNEL_SIZE * __C.TEST.KERNEL_SIZE
aspect = __C.TEST.ASPECTS # height / width
num_aspect = len(aspect)
widths = np.zeros((num_aspect), dtype=np.float32)
heights = np.zeros((num_aspect), dtype=np.float32)
for i in xrange(num_aspect):
widths[i] = math.sqrt(area / aspect[i])
heights[i] = widths[i] * aspect[i]
if is_train:
__C.TRAIN.ASPECT_WIDTHS = widths
__C.TRAIN.ASPECT_HEIGHTS = heights
__C.TRAIN.RPN_SCALES = np.array(__C.TRAIN.RPN_SCALES)
else:
__C.TEST.ASPECT_WIDTHS = widths
__C.TEST.ASPECT_HEIGHTS = heights
def _merge_a_into_b(a, b):
"""Merge config dictionary a into config dictionary b, clobbering the
options in b whenever they are also specified in a.
"""
if type(a) is not edict:
return
for k, v in a.iteritems():
# a must specify keys that are in b
if not b.has_key(k):
raise KeyError('{} is not a valid config key'.format(k))
# the types must match, too
if type(b[k]) is not type(v):
raise ValueError(('Type mismatch ({} vs. {}) '
'for config key: {}').format(type(b[k]),
type(v), k))
# recursively merge dicts
if type(v) is edict:
try:
_merge_a_into_b(a[k], b[k])
except:
print('Error under config key: {}'.format(k))
raise
else:
b[k] = v
def cfg_from_file(filename):
"""Load a config file and merge it into the default options."""
import yaml
with open(filename, 'r') as f:
yaml_cfg = edict(yaml.load(f))
_merge_a_into_b(yaml_cfg, __C)
_add_more_info(1)
_add_more_info(0)<|fim▁end|>
|
def get_output_dir(imdb, net):
"""Return the directory where experimental artifacts are placed.
|