| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>UserProfile.test.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { shallow } from 'enzyme';
import UserProfile from '../UserProfile';<|fim▁hole|> it('should render <Wrapper />', () => {
const wrapper = shallow(<UserProfile />);
expect(wrapper.find(Wrapper).length).toEqual(1);
});
});<|fim▁end|> | import Wrapper from '../Wrapper';
describe('<UserProfile />', () => { |
<|file_name|>UserClient.java<|end_file_name|><|fim▁begin|>// ***************************************************************************
// * Copyright 2014 Joseph Molnar
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// ***************************************************************************
package com.talvish.tales.samples.userclient;
import java.time.LocalDate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.talvish.tales.businessobjects.ObjectId;
import com.talvish.tales.client.http.ResourceClient;
import com.talvish.tales.client.http.ResourceConfiguration;
import com.talvish.tales.client.http.ResourceMethod;
import com.talvish.tales.client.http.ResourceResult;
import com.talvish.tales.communication.HttpVerb;
import com.talvish.tales.parts.ArgumentParser;
import com.talvish.tales.system.configuration.ConfigurationManager;
import com.talvish.tales.system.configuration.MapSource;
import com.talvish.tales.system.configuration.PropertyFileSource;
/**
* The client for talking to the UserService.
* @author jmolnar
*
*/
public class UserClient extends ResourceClient {
private static final Logger logger = LoggerFactory.getLogger( UserClient.class );
/**
* This main is really just to demonstrate calling and would not exist in an actual client.
*/
public static void main( String[ ] theArgs ) throws Exception {
// get the configuration system up and running
ConfigurationManager configurationManager = new ConfigurationManager( );
// we prepare two sources for configurations
// first the command line source
configurationManager.addSource( new MapSource( "command-line", ArgumentParser.parse( theArgs ) ) );
// second the file source, if the command-line indicates a file is to be used
String filename = configurationManager.getStringValue( "settings.file", null ); // we will store config in a file ideally
if( !Strings.isNullOrEmpty( filename ) ) {
configurationManager.addSource( new PropertyFileSource( filename ) );
}
UserClient client = new UserClient( configurationManager.getValues( "user_service", ResourceConfiguration.class ), "sample_user_client/1.0" );
// client.setHeaderOverride( "Authorization", "random" ); //<= for testing, perhaps want to override this value, assuming server allows overrides
// next we see what mode we are in, setup or not setup
String operation = configurationManager.getStringValue( "operation", "update_user" );
ResourceResult<User> result;
switch( operation ) {
case "update_user":
result = client.getUser( new ObjectId( 1, 1, 100 ) );
if( result.getResult() != null ) {
logger.debug( "Found user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
result.getResult().setFirstName( "Bilbo" );
result.getResult().getAliases( ).add( "billy" );
result.getResult().getSettings().put( "favourite_category", "games" );
result = client.updateUser( result.getResult() );
logger.debug( "Updated user: '{}'", result.getResult().getFirstName( ) );
} else {
logger.debug( "Did not find user." );
}
break;
case "create_user":
//for( int i = 0; i < 1; i += 1 ) {
User user = new User( );
user.setFirstName( "Jimmy" );
user.setMiddleName( "Scott" );
user.setLastName( "McWhalter" );
user.setBirthdate( LocalDate.of( 1992, 1, 31 ) );
user.getAliases().add( "alias1" );
result = client.createUser( user );
if( result.getResult() != null ) {
logger.debug( "Created user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
} else {
logger.debug( "Did not create user." );
}
//}
break;
default:
break;
}
// TODO: this doesn't exit at the end of main; need to understand why
// (which is why I added the System.exit(0) call below)
// TODO: one time when this ran it threw some form of SSL EOF-related error that
// I need to track down (this happened on the server too)
System.console().writer().print( "Press <Enter> to quit ..." );
System.console().writer().flush();
System.console().readLine();
System.exit( 0 );
}
private String authToken = "Sample key=\"42349840984\"";
/**
* The constructor used to create the client.
* @param theConfiguration the configuration needed to talk to the service
* @param theUserAgent the user agent to use while talking to the service
*/
public UserClient( ResourceConfiguration theConfiguration, String theUserAgent ) {
super( theConfiguration, "/user", "20140124", theUserAgent );
// we now define the methods that we are going to expose for calling
this.methods = new ResourceMethod[ 3 ];<|fim▁hole|> .defineHeaderParameter( "Authorization", String.class );
this.methods[ 1 ] = this.defineMethod( "update_user", User.class, HttpVerb.POST, "users/{id}/update" )
.definePathParameter( "id", ObjectId.class )
.defineBodyParameter( "user", User.class )
.defineHeaderParameter( "Authorization", String.class );
this.methods[ 2 ] = this.defineMethod( "create_user", User.class, HttpVerb.POST, "users/create" )
.defineBodyParameter( "user", User.class )
.defineHeaderParameter( "Authorization", String.class );
}
/**
* Requests a particular user.
* @param theUserId the id of the user being requested
* @return the requested user if found, null otherwise
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> getUser( ObjectId theUserId ) throws InterruptedException {
Preconditions.checkNotNull( theUserId, "need a user id to retrieve a user" );
return this.createRequest( this.methods[ 0 ], theUserId )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
/**
* A call to save the values of a user on the server.
* @param theUser the user to save
* @return the server returned version of the saved user
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> updateUser( User theUser ) throws InterruptedException {
Preconditions.checkNotNull( theUser, "need a user to be able to update" );
return this.createRequest( this.methods[ 1 ], theUser.getId() )
.setBodyParameter( "user", theUser )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
/**
* A call to create a new user.
* @param theUser the user to create; its id must be null and its first name must be set
* @return the freshly created user
* @throws InterruptedException thrown if the calling thread is interrupted
*/
public ResourceResult<User> createUser( User theUser ) throws InterruptedException {
Preconditions.checkNotNull( theUser, "need a user" );
Preconditions.checkArgument( theUser.getId( ) == null, "user's id must be null" );
Preconditions.checkArgument( !Strings.isNullOrEmpty( theUser.getFirstName() ), "to create a user you need a first name" );
return this.createRequest( this.methods[ 2 ] )
.setBodyParameter( "user", theUser )
.setHeaderParameter( "Authorization", this.authToken )
.call();
}
}<|fim▁end|> |
this.methods[ 0 ] = this.defineMethod( "get_user", User.class, HttpVerb.GET, "users/{id}" )
.definePathParameter("id", ObjectId.class ) |
<|file_name|>GuiSchematicMaterialsSlot.java<|end_file_name|><|fim▁begin|>package com.github.lunatrius.schematica.client.gui;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiSlot;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.resources.I18n;
import net.minecraft.item.ItemStack;
class GuiSchematicMaterialsSlot extends GuiSlot {
private final Minecraft minecraft = Minecraft.getMinecraft();
private final GuiSchematicMaterials guiSchematicMaterials;
protected int selectedIndex = -1;
private final String strUnknownBlock = I18n.format("schematica.gui.unknownblock");
public GuiSchematicMaterialsSlot(GuiSchematicMaterials par1) {
super(Minecraft.getMinecraft(), par1.width, par1.height, 16, par1.height - 34, 24);
this.guiSchematicMaterials = par1;
this.selectedIndex = -1;
}
@Override
protected int getSize() {
return this.guiSchematicMaterials.blockList.size();
}
@Override
protected void elementClicked(int index, boolean par2, int par3, int par4) {
this.selectedIndex = index;
}
@Override
protected boolean isSelected(int index) {
return index == this.selectedIndex;
}
@Override
protected void drawBackground() {
}
@Override
protected void drawContainerBackground(Tessellator tessellator) {
}
@Override
protected void drawSlot(int index, int x, int y, int par4, Tessellator tessellator, int par6, int par7) {
ItemStack itemStack = this.guiSchematicMaterials.blockList.get(index);
String itemName;
String amount = Integer.toString(itemStack.stackSize);
if (itemStack.getItem() != null) {
itemName = itemStack.getItem().getItemStackDisplayName(itemStack);
} else {
itemName = this.strUnknownBlock;
}
GuiHelper.drawItemStack(this.minecraft.renderEngine, this.minecraft.fontRenderer, x, y, itemStack);
this.guiSchematicMaterials.drawString(this.minecraft.fontRenderer, itemName, x + 24, y + 6, 0xFFFFFF);<|fim▁hole|><|fim▁end|> | this.guiSchematicMaterials.drawString(this.minecraft.fontRenderer, amount, x + 215 - this.minecraft.fontRenderer.getStringWidth(amount), y + 6, 0xFFFFFF);
}
} |
<|file_name|>ascribe_user_type.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::Fallible;
use hir::def_id::DefId;
use mir::ProjectionKind;
use ty::{self, ParamEnvAnd, Ty, TyCtxt};
use ty::subst::UserSubsts;
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct AscribeUserType<'tcx> {
pub mir_ty: Ty<'tcx>,
pub variance: ty::Variance,
pub def_id: DefId,
pub user_substs: UserSubsts<'tcx>,
pub projs: &'tcx ty::List<ProjectionKind<'tcx>>,
}
impl<'tcx> AscribeUserType<'tcx> {
pub fn new(
mir_ty: Ty<'tcx>,
variance: ty::Variance,
def_id: DefId,
user_substs: UserSubsts<'tcx>,
projs: &'tcx ty::List<ProjectionKind<'tcx>>,
) -> Self {
AscribeUserType { mir_ty, variance, def_id, user_substs, projs }
}
}
impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for AscribeUserType<'tcx> {
type QueryResponse = ();
fn try_fast_path(
_tcx: TyCtxt<'_, 'gcx, 'tcx>,
_key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse> {
None
}
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
tcx.type_op_ascribe_user_type(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'gcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for AscribeUserType<'tcx> {
mir_ty, variance, def_id, user_substs, projs
}
}
BraceStructLiftImpl! {
impl<'a, 'tcx> Lift<'tcx> for AscribeUserType<'a> {
type Lifted = AscribeUserType<'tcx>;
mir_ty, variance, def_id, user_substs, projs
}
}
impl_stable_hash_for! {
struct AscribeUserType<'tcx> {
mir_ty, variance, def_id, user_substs, projs
}
}<|fim▁end|> | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at |
<|file_name|>mock-heroes.ts<|end_file_name|><|fim▁begin|>import {Hero} from './hero';
export const HEROES: Hero[] = [
{id: 11, name: 'Mr. Nice'},
{id: 12, name: 'Narco'},
{id: 13, name: 'Bombasto'},
{id: 14, name: 'Celeritas'},
{id: 15, name: 'Magneta'},
{id: 16, name: 'RubberMan'},
{id: 17, name: 'Dynama'},<|fim▁hole|> {id: 20, name: 'Tornado'}
];<|fim▁end|> | {id: 18, name: 'Dr IQ'},
{id: 19, name: 'Magma'}, |
<|file_name|>index1.js<|end_file_name|><|fim▁begin|>$(document).ready(function() {
var text = $("#hSleep").text();
var results = [];
var data = [];
results = text.split(",");
// alert(results);
console.log("results: " + text);
for(var i = results.length-1; i >= 0; i--) {
data.push([new Date(results[i-1]).getTime(), results[i]]);
i--;
}
var barOptions = {
series: {
bars: {
show: true,
barWidth: 43200000
}
},
xaxis: {
mode: "time",
timeformat: "%m/%d",
minTickSize: [1, "day"]
},
grid: {
hoverable: true
},
legend: {
show: false
},
tooltip: true,
tooltipOpts: {
content: "Date: %x, Minutes: %y"
}
};
var barData = {
label: "bar",
data: data
};
$.plot($("#flot-line-chart"), [barData], barOptions);
text = $("#hSteps").text();
results = text.split(",");
data = [];
for(var i = results.length-1; i >= 0; i--) {
data.push([new Date(results[i-1]).getTime(), results[i]]);
i--;
}
var options = {
series: {
lines: {
show: true
},
points: {
show: true
}
},
grid: {
hoverable: true //IMPORTANT! this is needed for tooltip to work
},
xaxis: {
mode: "time",
timeformat: "%m/%d",
minTickSize: [1, "day"]
},
tooltip: true,
tooltipOpts: {
content: "'Date: %x.1, Steps: %y",
shifts: {
x: -60,
y: 25
}
}
};
var plotObj = $.plot($("#flot-bar-chart"), [{
data: data,
label: "Steps"
}],
options);
text = $("#hDistance").text();
results = text.split(",");
data = [];
//alert(text);
for(var i = results.length-1; i >= 0; i--) {
data.push([new Date(results[i-1]).getTime(), results[i]]);
i--;
}
var options = {
series: {
lines: {
show: true
},
points: {
show: true
}
},
grid: {<|fim▁hole|> timeformat: "%m/%d",
minTickSize: [1, "day"]
},
tooltip: true,
tooltipOpts: {
content: "'Date: %x.1, Heart Rates: %y",
shifts: {
x: -60,
y: 25
}
}
};
var plotObj = $.plot($("#flot-moving-line-chart"), [{
data: data,
label: "Heart Rates"
}],
options);
text = $("#hCalories").text();
results = text.split(",");
data = [];
for(var i = results.length-1; i >= 0; i--) {
data.push([new Date(results[i-1]).getTime(), results[i]]);
i--;
}
var options = {
series: {
lines: {
show: true
},
points: {
show: true
}
},
grid: {
hoverable: true //IMPORTANT! this is needed for tooltip to work
},
xaxis: {
mode: "time",
timeformat: "%m/%d",
minTickSize: [1, "day"]
},
tooltip: true,
tooltipOpts: {
content: "'Date: %x.1, Steps: %y",
shifts: {
x: -60,
y: 25
}
}
};
var plotObj = $.plot($("#flot-multiple-axes-chart"), [{
data: data,
label: "Calories"
}],
options);
});<|fim▁end|> | hoverable: true //IMPORTANT! this is needed for tooltip to work
},
xaxis: {
mode: "time", |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Provides dedicated `system` pipelines inside OrbTk.
//!
//! System pipelines are modules, that handle specific tasks when
//! iteratively walking the widget tree. Because each widget
//! implements the `state` trait, all system modules are accessible.
//! Pipelines are connected in a logical order. E.g. the `InitSystem`
//! is triggered **before** the `LayoutSystem`. The `LayoutSystem` is
//! triggerd **before** the `RenderSystem`. Handling of widget objects
//! inside the pipelines rely on the Entity Component System
//! ([`DCES`]).
//!
//! [`DCES`]: https://gitlab.redox-os.org/redox-os/dces-rust
pub use self::cleanup_system::*;
pub use self::event_state_system::*;
pub use self::init_system::*;
pub use self::layout_system::*;
pub use self::post_layout_state_system::*;
pub use self::render_system::*;
mod cleanup_system;
mod event_state_system;
mod init_system;
mod layout_system;
mod post_layout_state_system;<|fim▁hole|>mod render_system;<|fim▁end|> | |
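The module doc above pins down an ordering contract: init runs before layout, and layout before render, with every system walking the same widget tree. As a rough, self-contained sketch of that kind of ordered system dispatch — the `System` trait, `WidgetTree`, and the concrete system types here are hypothetical stand-ins, not OrbTk's or DCES's actual API — it could look like this:

```rust
// Hedged sketch of ordered "system pipeline" dispatch over a shared tree.
// `System`, `WidgetTree`, and the concrete systems are all hypothetical.
trait System {
    fn run(&self, tree: &mut WidgetTree);
}

struct WidgetTree; // stand-in for the real widget tree / entity store

struct InitSystem;
struct LayoutSystem;
struct RenderSystem;

impl System for InitSystem {
    fn run(&self, _tree: &mut WidgetTree) { println!("init widgets"); }
}
impl System for LayoutSystem {
    fn run(&self, _tree: &mut WidgetTree) { println!("measure and arrange"); }
}
impl System for RenderSystem {
    fn run(&self, _tree: &mut WidgetTree) { println!("draw frame"); }
}

fn run_frame(tree: &mut WidgetTree) {
    // The registration order encodes the pipeline contract:
    // InitSystem before LayoutSystem before RenderSystem.
    let pipeline: Vec<Box<dyn System>> = vec![
        Box::new(InitSystem),
        Box::new(LayoutSystem),
        Box::new(RenderSystem),
    ];
    for system in &pipeline {
        system.run(tree);
    }
}

fn main() {
    run_frame(&mut WidgetTree);
}
```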
<|file_name|>irrigation.ts<|end_file_name|><|fim▁begin|>/// <reference path="../defs/metahub.d.ts" />
/// <reference path="bloom.d.ts" />
import MetaHub = require('metahub')
export interface IRequest {
id?
action?
parameters?
path?
path_string?
}
export class Irrigation extends MetaHub.Meta_Object {
app_path:string = ''
page_path:string = ''
trellis_plots = {}
channels:any [] = []
parameters
constructor() {
super()
this.parameters = {
trellis: 'trellis',
id: 'int',
action: 'string'
}
}
// vineyard:Vineyard null
// initialize_vineyard (vineyard) {
// this.vineyard = vineyard;
// this.add_channel(['%trellis', '%id', '%action?']);
// this.add_channel(['%trellis', '%action?']);
// }
add_channel(pattern, action) {
pattern = Irrigation.convert_path_to_array(pattern);
var priority = this.calculate_priority(pattern);
var i, channel, result = {
pattern: pattern,
action: action,
priority: priority
};
// Find the right slot based on priority.
// This is faster than resorting the array
// every time a channel is added.
for (i = 0; i < this.channels.length; ++i) {
channel = this.channels[i];
if (channel.priority < priority)
break;
}
this.channels.splice(i, 0, result);
return result;
}
apply_pattern(pattern, path) {
if (typeof path !== 'object')
throw new Error('path must be an array');
if (typeof pattern !== 'object')
throw new Error('channel must be an array');
// if (path.length !== channel.length)
// throw new Error('Irrigation.apply_pattern() requires a path and channel with the same length. (path.length = ' +
// path.length + '. channel.length = ' + channel.length + '.)');
var processed_pattern = this.compare(pattern, path);
if (!processed_pattern)
throw new Error('Pattern/path mismatch: ' + pattern.join('/') + ' != ' + path.join('/'));
// console.log('pattern', processed_pattern)
var result = {};
for (var i = 0; i < path.length; ++i) {
var part = processed_pattern[i];
if (part[0] == '%') {
var type = part.substring(1);
result[type] = this.convert_value(path[i], type);
}
}
return result;
}
calculate_priority(path) {
var bindings = 0;
for (var i = 0; i < path.length; ++i) {
if (path[i][0] == '%') {
bindings += 2;
var type = this.parameters[path[i].substring(1)];
if (type && type != 'string')
++bindings;
}
}
return bindings + (path.length * 2);
}
compare(primary, secondary) {
var a = Irrigation.convert_path_to_array(primary);
var b = Irrigation.convert_path_to_array(secondary);
var result = [];
// Optional parameters can only be in the primary path,
// so the secondary path may be shorter but
// never longer than the primary path.
if (a.length < b.length)
return false;
var length = Math.max(a.length, b.length);
var x = -1, y = -1, ax, by, ax_pure;
for (var i = 0; i < length; i++) {
if (++x >= a.length)
return false;
ax = a[x];
if (++y >= b.length) {
if (ax[ax.length - 1] == '?') {
--y;
continue;
}
else
return false;
}
by = b[y];
ax_pure = ax.replace(/\?$/, '');
if (ax_pure == by
|| ax == '*'
|| (ax[0] == '%' && this.compare_parts(ax_pure, by))) {
result.push(ax_pure);
continue;
}
// Handle optional parameters
if (ax[ax.length - 1] == '?') {
--y;
continue;
}
return false;
}
return result;
}
compare_parts(name, value) {
var type = this.parameters[name.substring(1)];
if (this.convert_value(value, type) === null)
return false;
else
return true;
}
convert_value(value, type) {
switch (type) {
case 'trellis':
return this.get_trellis(value);
case 'int':
if (!value.toString().match(/\d+/))
return null;
return parseInt(value);
case 'string':
return value.toString();
}
return value;
}
determine_action(request) {
// if (request.trellis && this.vineyard.trellises[request.trellis]) {
//// if (request.action == 'create') {
//// return 'create';
//// }
// if (request.action) {
// return request.action;
// }
// else if (request.id) {
// return 'view';
// }
// else {
// return 'index';
// }
// }
if (request.action)
return request.action;
return 'other';
}
find_channel(path) {
for (var i = 0; i < this.channels.length; i++) {
var channel = this.channels[i];
if (this.compare(channel.pattern, path)) {
return channel;
}
}
return null;
}
// Eventually parameters will be passed to this, but right now it's very simple.
get_channel(type) {
if (type == 'seed')
return 'vineyard';
if (type == 'page')
return this.page_path;
throw new Error(type + ' is not a valid channel type.');
}
get_destination(request) {
var id = request.parameters.id || request.id;
if (request.trellis) {
if (request.action == 'create') {
return 'create';
}
else if (id) {
return 'view';
}
else {
return 'index';
}
}
else {
return 'other';
}
return null
}
get_plant_url() {
var channel = this.get_channel('seed');
return Bloom.join(this.app_path, channel, 'update');
}
get_plot(trellis) {
if (this.trellis_plots[trellis])
return this.trellis_plots[trellis];
return null;
}
url(trellis_or_seed, id, action, args) {
var trellis;
if (!trellis_or_seed)
throw new Error('Invalid first argument');
if (typeof trellis_or_seed == 'string') {
trellis = trellis_or_seed;
}
else {
var seed = trellis_or_seed;
var trellis = (seed.trellis && seed.trellis.name) || seed.type;
if (!trellis)
throw new Error('Invalid seed.');
args = action;
action = id;
if (seed.trellis && seed.trellis.primary_key)
id = seed[seed.trellis.primary_key];
<|fim▁hole|> }
// Allow hooks to override the arguments.
var data = {
trellis: trellis,
id: id,
pre: null,
post: null,
action: action,
args: args
};
if (trellis) {
this.invoke('url.' + trellis, data, seed);
}
return Bloom.join(this.app_path, data.pre, data.trellis, data.id, data.action, data.post) + Bloom.render_query(data.args);
}
get_request() {
return this.get_request_from_string(window.location.pathname);
}
get_request_from_string(path_string, ignore_browser_args = false) {
var args, path;
var query_index = path_string.indexOf('?');
if (ignore_browser_args)
args = '';
if (query_index > -1) {
args = path_string.substring(query_index);
path_string = path_string.substring(0, query_index);
}
path = Irrigation.get_path_array(path_string, Bloom.join(this.app_path, this.page_path));
var request:IRequest = {
parameters: Bloom.get_url_properties(args),
path: path
// trellis: path[0]
};
var channel = this.find_channel(path);
if (channel) {
MetaHub.extend(request, this.apply_pattern(channel.pattern, path));
if (typeof channel.action === 'function')
MetaHub.extend(request, channel.action(path));
}
else {
if (path.length > 1) {
if (path.length > 2) {
request.id = path[1];
request.action = path[2];
}
else {
if (path[1].match(/\d+/))
request.id = path[1];
else
request.action = path[1];
}
}
if (request.id === undefined && request.parameters.id !== undefined)
request.id = request.parameters.id;
}
request.path_string = request.path.join('/');
return request;
}
get_trellis(name) {
throw new Error('not implemented.')
// if (this.vineyard.trellises[name])
// return name;
//// return this.vineyard.trellises[name];
// return null;
}
static convert_path_to_array(path) {
if (typeof path == 'object')
return path;
if (!path || path.length == 0)
return [];
if (path[0] == '/')
path = path.substring(1);
if (path[path.length - 1] == '/')
path = path.substring(0, path.length - 1);
return path.split('/');
}
static get_path_array(path, base) {
path = Irrigation.convert_path_to_array(path);
base = Irrigation.convert_path_to_array(base);
for (var i = 0; i < base.length; i++) {
if (i >= path.length)
break;
if (path[i] == base[i]) {
path.splice(i, 1);
base.splice(i, 1);
--i;
}
}
return path;
}
}<|fim▁end|> | |
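`add_channel` above keeps the channel list permanently sorted by scanning for the first lower-priority entry and splicing the new channel in, which — as its comment notes — is cheaper than re-sorting the array on every insert. The same idea in a standalone Rust sketch (hypothetical data shapes; `partition_point` replaces the linear scan):

```rust
// Keep (priority, pattern) entries sorted by descending priority by
// inserting each new channel at its slot instead of re-sorting the list.
fn add_channel(channels: &mut Vec<(u32, String)>, priority: u32, pattern: &str) {
    // First index whose priority is strictly lower than the new one, so
    // equal-priority channels keep their insertion order (matching the
    // TypeScript loop, which breaks on `channel.priority < priority`).
    let idx = channels.partition_point(|(p, _)| *p >= priority);
    channels.insert(idx, (priority, pattern.to_string()));
}

fn main() {
    let mut channels = Vec::new();
    add_channel(&mut channels, 8, "%trellis/%id");
    add_channel(&mut channels, 11, "%trellis/%id/%action?");
    add_channel(&mut channels, 4, "*");
    // Highest priority first: 11, 8, 4.
    println!("{:?}", channels);
}
```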
<|file_name|>3f289637f530_remove_unused_models.py<|end_file_name|><|fim▁begin|>"""Remove unused models
Revision ID: 3f289637f530
Revises: 4ba1dd8c3080
Create Date: 2014-04-17 11:08:50.963964
"""
# revision identifiers, used by Alembic.
revision = '3f289637f530'<|fim▁hole|>
def upgrade():
op.drop_table('aggtestgroup')
op.drop_table('testgroup_test')
op.drop_table('testgroup')
op.drop_table('aggtestsuite')
def downgrade():
raise NotImplementedError<|fim▁end|> | down_revision = '4ba1dd8c3080'
from alembic import op
|
<|file_name|>jsdocBindingInUnreachableCode.ts<|end_file_name|><|fim▁begin|>// @allowJs: true
// @noEmit: true
// @checkJs: true<|fim▁hole|> /**
* @param {string} s
*/
const x = function (s) {
};
}<|fim▁end|> | // @Filename: bug27341.js
if (false) { |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2012-2016 Therp BV <http://therp.nl>
# © 2013 Agile Business Group sagl <http://www.agilebg.com>
# <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{<|fim▁hole|> "name": "Accounting Financial Report Horizontal",
"version": "10.0.1.0.0",
"author": "Therp BV,Agile Business Group,Odoo Community Association (OCA)",
"category": 'Accounting & Finance',
'website': 'https://github.com/OCA/account-financial-reporting',
'license': 'AGPL-3',
"depends": ["account"],
'data': [
"data/report_paperformat.xml",
"data/ir_actions_report_xml.xml",
"report/report_financial.xml",
],
}<|fim▁end|> | |
<|file_name|>OgreGLDepthBuffer.cpp<|end_file_name|><|fim▁begin|>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2011 Torus Knot Software Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
<|fim▁hole|>all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreGLDepthBuffer.h"
#include "OgreGLHardwarePixelBuffer.h"
#include "OgreGLRenderSystem.h"
#include "OgreGLFrameBufferObject.h"
namespace Ogre
{
GLDepthBuffer::GLDepthBuffer( uint16 poolId, GLRenderSystem *renderSystem, GLContext *creatorContext,
GLRenderBuffer *depth, GLRenderBuffer *stencil,
uint32 width, uint32 height, uint32 fsaa, uint32 multiSampleQuality,
bool manual ) :
DepthBuffer( poolId, 0, width, height, fsaa, "", manual ),
mMultiSampleQuality( multiSampleQuality ),
mCreatorContext( creatorContext ),
mDepthBuffer( depth ),
mStencilBuffer( stencil ),
mRenderSystem( renderSystem )
{
if( mDepthBuffer )
{
switch( mDepthBuffer->getGLFormat() )
{
case GL_DEPTH_COMPONENT16:
mBitDepth = 16;
break;
case GL_DEPTH_COMPONENT24:
case GL_DEPTH_COMPONENT32:
case GL_DEPTH24_STENCIL8_EXT:
mBitDepth = 32;
break;
}
}
}
GLDepthBuffer::~GLDepthBuffer()
{
if( mStencilBuffer && mStencilBuffer != mDepthBuffer )
{
delete mStencilBuffer;
mStencilBuffer = 0;
}
if( mDepthBuffer )
{
delete mDepthBuffer;
mDepthBuffer = 0;
}
}
//---------------------------------------------------------------------
bool GLDepthBuffer::isCompatible( RenderTarget *renderTarget ) const
{
bool retVal = false;
//Check standard stuff first.
if( mRenderSystem->getCapabilities()->hasCapability( RSC_RTT_DEPTHBUFFER_RESOLUTION_LESSEQUAL ) )
{
if( !DepthBuffer::isCompatible( renderTarget ) )
return false;
}
else
{
if( this->getWidth() != renderTarget->getWidth() ||
this->getHeight() != renderTarget->getHeight() ||
this->getFsaa() != renderTarget->getFSAA() )
return false;
}
//Now check this is the appropriate format
GLFrameBufferObject *fbo = 0;
renderTarget->getCustomAttribute(GLRenderTexture::CustomAttributeString_FBO, &fbo);
if( !fbo )
{
GLContext *windowContext;
renderTarget->getCustomAttribute( GLRenderTexture::CustomAttributeString_GLCONTEXT, &windowContext );
//Non-FBO targets and FBO depth surfaces don't play along, only dummies which match the same
//context
if( !mDepthBuffer && !mStencilBuffer && mCreatorContext == windowContext )
retVal = true;
}
else
{
//Check this isn't a dummy non-FBO depth buffer with an FBO target, don't mix them.
//If you don't want depth buffer, use a Null Depth Buffer, not a dummy one.
if( mDepthBuffer || mStencilBuffer )
{
GLenum internalFormat = fbo->getFormat();
GLenum depthFormat, stencilFormat;
mRenderSystem->_getDepthStencilFormatFor( internalFormat, &depthFormat, &stencilFormat );
bool bSameDepth = false;
if( mDepthBuffer )
bSameDepth |= mDepthBuffer->getGLFormat() == depthFormat;
bool bSameStencil = false;
if( !mStencilBuffer || mStencilBuffer == mDepthBuffer )
bSameStencil = stencilFormat == GL_NONE;
else
{
if( mStencilBuffer )
bSameStencil = stencilFormat == mStencilBuffer->getGLFormat();
}
retVal = bSameDepth && bSameStencil;
}
}
return retVal;
}
}<|fim▁end|> | The above copyright notice and this permission notice shall be included in |
<|file_name|>UpdateUserController.java<|end_file_name|><|fim▁begin|>package ru.job4j.servlet.services;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.job4j.servlet.model.User;
import ru.job4j.servlet.repository.RepositoryException;
import ru.job4j.servlet.repository.UserStore;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* UpdateUserController.
*
* @author Stanislav ([email protected])
* @since 11.01.2018
*/
public class UpdateUserController extends HttpServlet {
private static final Logger LOG = LoggerFactory.getLogger(UpdateUserController.class);
private static final long serialVersionUID = 6328444530140780881L;
private UserStore userStore = UserStore.getInstance();
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
try {
User user = userStore.findByID(Integer.valueOf(req.getParameter("id")));
if (user != null) {
String name = req.getParameter("name");
String login = req.getParameter("login");
String email = req.getParameter("email");<|fim▁hole|>
if (name != null && !name.trim().isEmpty()) {
user.setName(name);
}
if (login != null && !login.trim().isEmpty()) {
user.setLogin(login);
}
if (email != null && !email.trim().isEmpty()) {
user.setEmail(email);
}
userStore.update(user);
}
} catch (NumberFormatException e) {
LOG.error("Not the correct format id. ", e);
} catch (RepositoryException e) {
LOG.error("Error adding user. ", e);
}
resp.sendRedirect(req.getContextPath().length() == 0 ? "/" : req.getContextPath());
}
}<|fim▁end|> | |
<|file_name|>log.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package connmgr
import "github.com/abcsuite/abclog"
// log is a logger that is initialized with no output filters. This
// means the package will not perform any logging by default until the caller
// requests it.
var log abclog.Logger
// The default amount of logging is none.
func init() {
DisableLog()
}
// DisableLog disables all library log output. Logging output is disabled
// by default until either UseLogger or SetLogWriter are called.
func DisableLog() {
log = abclog.Disabled
}
<|fim▁hole|>// UseLogger uses a specified Logger to output package logging info.
// This should be used in preference to SetLogWriter if the caller is also
// using abclog.
func UseLogger(logger abclog.Logger) {
log = logger
}<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// =================================================================<|fim▁hole|>// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(
html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png"
)]
//! <p>AWS Transfer Family is a fully managed service that enables the transfer of files over the File Transfer Protocol (FTP), File Transfer Protocol over SSL (FTPS), or Secure Shell (SSH) File Transfer Protocol (SFTP) directly into and out of Amazon Simple Storage Service (Amazon S3). AWS helps you seamlessly migrate your file transfer workflows to AWS Transfer Family by integrating with existing authentication systems, and providing DNS routing with Amazon Route 53 so nothing changes for your customers and partners, or their applications. With your data in Amazon S3, you can use it with AWS services for processing, analytics, machine learning, and archiving. Getting started with AWS Transfer Family is easy since there is no infrastructure to buy and set up.</p>
//!
//! If you're using the service, you're probably looking for [TransferClient](struct.TransferClient.html) and [Transfer](trait.Transfer.html).
mod custom;
mod generated;
pub use custom::*;
pub use generated::*;<|fim▁end|> | // |
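Per the crate doc above, consumers reach for `TransferClient` and the `Transfer` trait. A minimal usage sketch, assuming the standard generated rusoto client shape (async operation methods plus a default-constructed request struct; exact request fields vary by rusoto release):

```rust
// Hedged sketch of calling the generated client; not taken from the crate docs.
use rusoto_core::Region;
use rusoto_transfer::{ListServersRequest, Transfer, TransferClient};

#[tokio::main]
async fn main() {
    let client = TransferClient::new(Region::UsEast1);
    match client.list_servers(ListServersRequest::default()).await {
        Ok(output) => println!("servers: {:?}", output.servers),
        Err(err) => eprintln!("list_servers failed: {}", err),
    }
}
```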
<|file_name|>Bus.d.ts<|end_file_name|><|fim▁begin|>import * as Promise from 'bluebird';
export declare class RabbitHutch {
static CreateBus(config: IBusConfig): IBus;
static CreateExtendedBus(config: IBusConfig): IExtendedBus;
}
export declare class Bus implements IBus {
config: IBusConfig;
private static rpcExchange;
private static rpcQueueBase;
private static defaultErrorQueue;
private static defaultDeferredAckTimeout;
private Connection;
private rpcQueue;
private rpcConsumerTag;
private rpcResponseHandlers;
protected Channels: {
publishChannel: any;
rpcChannel: any;
};
private pubChanUp;
private rpcConsumerUp;
private static remove$type;
SendToErrorQueue(msg: any, err?: string, stack?: string): any;
constructor(config: IBusConfig);
Publish(msg: {
TypeID: string;
}, withTopic?: string): Promise<boolean>;
Subscribe(type: {
TypeID: string;
}, subscriberName: string, handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
defer: () => void;
}) => void, withTopic?: string): Promise<IConsumerDispose>;
Send(queue: string, msg: {
TypeID: string;
}): Promise<boolean>;
Receive(rxType: {
TypeID: string;
}, queue: string, handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
defer: () => void;
}) => void): Promise<IConsumerDispose>;
ReceiveTypes(queue: string, handlers: {
rxType: {
TypeID: string;
};
handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
defer: () => void;
}) => void;
}[]): Promise<IConsumerDispose>;
Request(request: {
TypeID: string;
}): Promise<any>;
Respond(rqType: {
TypeID: string;
}, rsType: {
TypeID: string;
}, responder: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => {
TypeID: string;
}): Promise<IConsumerDispose>;
RespondAsync(rqType: {
TypeID: string;
}, rsType: {
TypeID: string;
}, responder: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => Promise<{
TypeID: string;
}>): Promise<IConsumerDispose>;
private static ToBuffer;
private static FromSubscription;
}
export declare class ExtendedBus extends Bus implements IExtendedBus {
constructor(config: IBusConfig);
CancelConsumer(consumerTag: string): Promise<IQueueConsumeReply>;
DeleteExchange(exchange: string, ifUnused?: boolean): void;
DeleteQueue(queue: string, ifUnused?: boolean, ifEmpty?: boolean): Promise<{
messageCount: number;
}>;
DeleteQueueUnconditional(queue: string): Promise<{
messageCount: number;
}>;
QueueStatus(queue: string): Promise<{
queue: string;
messageCount: number;
consumerCount: number;
}>;
PurgeQueue(queue: string): Promise<IPurgeQueueResponse>;
}
export interface IBus {
Publish(msg: {
TypeID: string;
}, withTopic?: string): Promise<boolean>;
Subscribe(type: {
TypeID: string;
}, subscriberName: string, handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => void, withTopic?: string): Promise<IConsumerDispose>;
Send(queue: string, msg: {
TypeID: string;
}): Promise<boolean>;
Receive(rxType: {
TypeID: string;
}, queue: string, handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => void): Promise<IConsumerDispose>;
ReceiveTypes(queue: string, handlers: {
rxType: {
TypeID: string;
};
handler: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => void;
}[]): Promise<IConsumerDispose>;
Request(request: {
TypeID: string;
}): Promise<{
TypeID: string;
}>;
Respond(rqType: {
TypeID: string;
}, rsType: {
TypeID: string;
}, responder: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => {
TypeID: string;
}): Promise<IConsumerDispose>;
RespondAsync(rqType: {
TypeID: string;
}, rsType: {
TypeID: string;
}, responder: (msg: {
TypeID: string;
}, ackFns?: {
ack: () => void;
nack: () => void;
}) => Promise<{
TypeID: string;
}>): Promise<IConsumerDispose>;
SendToErrorQueue(msg: any, err?: string, stack?: string): void;
}
export interface IBusConfig {
heartbeat: number;
prefetch: number;
rpcTimeout: number;
url: string;
vhost: string;
}
export interface IExtendedBus extends IBus {<|fim▁hole|> DeleteQueue(queue: string, ifUnused: boolean, ifEmpty: boolean): Promise<{
messageCount: number;
}>;
DeleteQueueUnconditional(queue: string): Promise<{
messageCount: number;
}>;
QueueStatus(queue: string): Promise<{
queue: string;
messageCount: number;
consumerCount: number;
}>;
PurgeQueue(queue: string): Promise<IPurgeQueueResponse>;
}
export interface IQueueConsumeReply {
consumerTag: string;
}
export interface IConsumerDispose {
cancelConsumer: () => Promise<boolean>;
deleteQueue: () => Promise<boolean>;
purgeQueue: () => Promise<boolean>;
}
export interface IPurgeQueueResponse {
messageCount: number;
}<|fim▁end|> | CancelConsumer(consumerTag: string): Promise<IQueueConsumeReply>;
DeleteExchange(exchange: string, ifUnused: boolean): void; |
<|file_name|>route.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="angular" />
/// <reference types="angular-route" /><|fim▁hole|><|fim▁end|> | export default function ($locationProvider: angular.ILocationProvider, $routeProvider: angular.route.IRouteProvider): void; |
<|file_name|>reportcommon.py<|end_file_name|><|fim▁begin|>#!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)<|fim▁hole|> finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
"""Call display detail msg handler."""
display.display_detail("%s" % msg, *args)
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error != None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response != None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
sec = int(re.sub(".*sec = (\d+),.*", "\\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# = response.read()
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def runExternalScriptWithTimeout(
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool skip the permissions check of executable.
args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
if not os.path.exists(script):
raise ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk)
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon<|fim▁end|> | |
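`runExternalScriptWithTimeout` above is a poll-and-kill loop: it checks `proc.poll()` every 0.1 s and kills the child once the timeout budget runs out. The same pattern sketched in Rust with `std::process` (a simplification for illustration, not a port of the munkireport code):

```rust
use std::process::{Command, Stdio};
use std::thread::sleep;
use std::time::{Duration, Instant};

// Run `cmd args..` and wait up to `timeout`; returns the exit code,
// or None if the child had to be killed for running over budget.
fn run_with_timeout(cmd: &str, args: &[&str], timeout: Duration) -> std::io::Result<Option<i32>> {
    let mut child = Command::new(cmd)
        .args(args)
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;
    let deadline = Instant::now() + timeout;
    loop {
        // Non-blocking status check, like proc.poll() in the Python version.
        if let Some(status) = child.try_wait()? {
            return Ok(status.code());
        }
        if Instant::now() >= deadline {
            child.kill()?; // timed out: terminate the script
            child.wait()?; // reap the killed process
            return Ok(None);
        }
        sleep(Duration::from_millis(100));
    }
}

fn main() -> std::io::Result<()> {
    match run_with_timeout("sleep", &["5"], Duration::from_secs(1))? {
        Some(code) => println!("exited with status {}", code),
        None => println!("timed out and was killed"),
    }
    Ok(())
}
```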
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate wapc_guest as guest;
use guest::prelude::*;
#[no_mangle]<|fim▁hole|>pub extern "C" fn wapc_init() {
register_function("SayHello", do_hello);
}
fn do_hello(msg: &[u8]) -> CallResult {
let name = std::str::from_utf8(msg)?;
let res =
host_call("default",
"sample",
"GetGreeting",
&vec![])?;
let greeting = std::str::from_utf8(&res)?;
let output = format!("{}, {}!", greeting, name);
Ok(output.as_bytes().to_vec())
}<|fim▁end|> | |
<|file_name|>A.cpp<|end_file_name|><|fim▁begin|>/* ========================================
ID: mathema6
TASK:
LANG: C++14
* File Name : A.cpp
* Creation Date : 10-04-2021
* Last Modified : Tue 13 Apr 2021 11:12:59 PM CEST
* Created By : Karel Ha <[email protected]>
* URL : https://codingcompetitions.withgoogle.com/codejam/round/000000000043585d/00000000007549e5
* Points/Time :
* ~2h20m
* 2h30m -> practice mode already :-( :-( :-(
*
* upsolve:
* + 7m20s = 7m20s
* + 1m20s = 8m40s
* + 2m30s = 11m10s
* +~80m10s = 1h31m20s
*
* Total/ETA :<|fim▁hole|> * S WA - :-(
* S AC WA :-/
* S AC TLE :-O
* S AC TLE :-/
* S AC RE (probed while-loop with exit code -> RE)
* S AC AC YESSSSSSSSSSSSSSSSS!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
*
==========================================*/
#include <string>
#define PROBLEMNAME "TASK_PLACEHOLDER_FOR_VIM"
#include <bits/stdc++.h>
using namespace std;
#define endl "\n"
#define REP(i,n) for(int i=0;i<(n);i++)
#define FOR(i,a,b) for(int i=(a);i<=(b);i++)
#define FORD(i,a,b) for(int i=(a);i>=(b);i--)
#define ALL(A) (A).begin(), (A).end()
#define REVALL(A) (A).rbegin(), (A).rend()
#define F first
#define S second
#define PB push_back
#define MP make_pair
#define MTP make_tuple
#define MINUPDATE(A,B) A = min((A), (B));
#define MAXUPDATE(A,B) A = max((A), (B));
#define SGN(X) ((X) ? ( (X)>0?1:-1 ) : 0)
#define CONTAINS(S,E) ((S).find(E) != (S).end())
#define SZ(x) ((int) (x).size())
using ll = long long;
using ul = unsigned long long;
using llll = pair<ll, ll>;
using ulul = pair<ul, ul>;
#ifdef ONLINE_JUDGE
#undef MATHEMAGE_DEBUG
#endif
#ifdef MATHEMAGE_DEBUG
#define MSG(a) cerr << "> " << (#a) << ": " << (a) << endl;
#define MSG_VEC_VEC(v) cerr << "> " << (#v) << ":\n" << (v) << endl;
#define MSG_VEC_PAIRS(v) print_vector_pairs((v), (#v));
#define LINESEP1 cerr << "----------------------------------------------- " << endl;
#define LINESEP2 cerr << "_________________________________________________________________" << endl;
#else
#define MSG(a)
#define MSG_VEC_VEC(v)
#define MSG_VEC_PAIRS(v)
#define LINESEP1
#define LINESEP2
#endif
ostream& operator<<(ostream& os, const vector<string> & vec) {
os << endl;
for (const auto & s: vec) os << s << endl;
return os;
}
template<typename T>
ostream& operator<<(ostream& os, const vector<T> & vec) {
for (const auto & x: vec) os << x << " ";
return os;
}
template<typename T>
ostream& operator<<(ostream& os, const vector<vector<T>> & vec) {
for (const auto & v: vec) os << v << endl;
return os;
}
template<typename T>
inline ostream& operator<<(ostream& os, const vector<vector<vector<T>>> & vec) {
for (const auto & row: vec) {
for (const auto & col: row) {
os << "[ " << col << "] ";
}
os << endl;
}
return os;
}
template<typename T>
ostream& operator<<(ostream& os, const set<T>& vec) {
os << "{ | ";
for (const auto & x: vec) os << x << "| ";
os << "}";
return os;
}
template<typename T1, typename T2>
void print_vector_pairs(const vector<pair<T1, T2>> & vec, const string & name) {
cerr << "> " << name << ": ";
for (const auto & x: vec) cerr << "(" << x.F << ", " << x.S << ")\t";
cerr << endl;
}
template<typename T>
inline bool bounded(const T & x, const T & u, const T & l=0) {
return min(l,u)<=x && x<max(l,u);
}
const int CLEAN = -1;
const int UNDEF = -42;
const long long MOD = 1000000007;
const double EPS = 1e-8;
const int INF = INT_MAX;
const long long INF_LL = LLONG_MAX;
const long long INF_ULL = ULLONG_MAX;
const vector<int> DX4 = {-1, 0, 1, 0};
const vector<int> DY4 = { 0, 1, 0, -1};
const vector<pair<int,int>> DXY4 = { {-1,0}, {0,1}, {1,0}, {0,-1} };
const vector<int> DX8 = {-1, -1, -1, 0, 0, 1, 1, 1};
const vector<int> DY8 = {-1, 0, 1, -1, 1, -1, 0, 1};
const vector<pair<int,int>> DXY8 = {
{-1,-1}, {-1,0}, {-1,1},
{ 0,-1}, { 0,1},
{ 1,-1}, { 1,0}, { 1,1}
};
string atLeast, newNum;
void solve() {
int N;
cin >> N;
MSG(N);
vector<string> X(N);
REP(i,N) {
MSG(i);
cin >> X[i];
}
MSG(X); cerr.flush();
int result = 0;
FOR(i,1,N-1) {
MSG(i); cerr.flush();
if (SZ(X[i-1])<SZ(X[i])) { continue; }
if (SZ(X[i-1])==SZ(X[i]) && X[i-1]<X[i]) { continue; }
atLeast=X[i-1];
bool only9s=true;
FORD(pos,SZ(atLeast)-1,0) {
if (atLeast[pos]=='9') {
atLeast[pos]='0';
} else {
atLeast[pos]++;
only9s=false;
break;
}
}
// if (count(ALL(atLeast), '0')==SZ(atLeast)) {
if (only9s) {
atLeast='1'+atLeast;
}
MSG(X[i-1]); MSG(atLeast); cerr.flush();
newNum=X[i];
if (atLeast.substr(0,SZ(X[i])) == X[i]) { // it's a prefix
newNum = atLeast;
} else {
while (SZ(newNum)<SZ(atLeast)) {
newNum+='0';
}
if (newNum<atLeast) { newNum+='0'; }
}
result+=SZ(newNum) - SZ(X[i]);
MSG(X[i]); MSG(newNum); MSG(result); cerr.flush();
X[i]=newNum;
newNum.clear();
atLeast.clear();
LINESEP1;
}
MSG(X); cerr.flush();
cout << result << endl;
X.clear();
MSG(X); cerr.flush();
}
int main() {
ios_base::sync_with_stdio(0);
cin.tie(0);
int cases = 1;
cin >> cases;
FOR(tt,1,cases) {
cout << "Case #" << tt << ": ";
cout.flush(); cerr.flush();
solve();
LINESEP2; cout.flush(); cerr.flush();
}
return 0;
}<|fim▁end|> | * 15m :-/ :-(
* 15m (upsolve)
*
* Status : |
<|file_name|>feature_flag_tests.py<|end_file_name|><|fim▁begin|>import unittest
import helper.config
import mock
from vetoes import config
<|fim▁hole|>class FeatureFlagMixinTests(unittest.TestCase):
def test_that_flags_are_processed_during_initialize(self):
settings = helper.config.Data({
'features': {'on': 'on', 'off': 'false'}
})
consumer = config.FeatureFlagMixin(settings, mock.Mock())
self.assertTrue(consumer.feature_flags['on'])
self.assertFalse(consumer.feature_flags['off'])
    def test_that_invalid_flags_are_ignored(self):
settings = helper.config.Data({
'features': {'one': 'not valid', 'two': None}
})
consumer = config.FeatureFlagMixin(settings, mock.Mock())
self.assertEqual(consumer.feature_flags, {})<|fim▁end|> | |
<|file_name|>bookmarkfiltermodel.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the Qt Assistant of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "bookmarkfiltermodel.h"
#include "bookmarkitem.h"
#include "bookmarkmodel.h"
BookmarkFilterModel::BookmarkFilterModel(QObject *parent)
: QAbstractProxyModel(parent)
, hideBookmarks(true)
, sourceModel(0)
{
}
void BookmarkFilterModel::setSourceModel(QAbstractItemModel *_sourceModel)
{
beginResetModel();
if (sourceModel) {
disconnect(sourceModel, SIGNAL(dataChanged(QModelIndex, QModelIndex)),
this, SLOT(changed(QModelIndex, QModelIndex)));
disconnect(sourceModel, SIGNAL(rowsInserted(QModelIndex, int, int)),
this, SLOT(rowsInserted(QModelIndex, int, int)));
disconnect(sourceModel,
SIGNAL(rowsAboutToBeRemoved(QModelIndex, int, int)), this,
SLOT(rowsAboutToBeRemoved(QModelIndex, int, int)));
disconnect(sourceModel, SIGNAL(rowsRemoved(QModelIndex, int, int)),
this, SLOT(rowsRemoved(QModelIndex, int, int)));
disconnect(sourceModel, SIGNAL(layoutAboutToBeChanged()), this,
SLOT(layoutAboutToBeChanged()));
disconnect(sourceModel, SIGNAL(layoutChanged()), this,
SLOT(layoutChanged()));
disconnect(sourceModel, SIGNAL(modelAboutToBeReset()), this,
SLOT(modelAboutToBeReset()));
disconnect(sourceModel, SIGNAL(modelReset()), this, SLOT(modelReset()));
}
    sourceModel = qobject_cast<BookmarkModel*> (_sourceModel);
    QAbstractProxyModel::setSourceModel(sourceModel);
connect(sourceModel, SIGNAL(dataChanged(QModelIndex, QModelIndex)), this,
SLOT(changed(QModelIndex, QModelIndex)));
connect(sourceModel, SIGNAL(rowsInserted(QModelIndex, int, int)),
this, SLOT(rowsInserted(QModelIndex, int, int)));
connect(sourceModel, SIGNAL(rowsAboutToBeRemoved(QModelIndex, int, int)),
this, SLOT(rowsAboutToBeRemoved(QModelIndex, int, int)));
connect(sourceModel, SIGNAL(rowsRemoved(QModelIndex, int, int)), this,
SLOT(rowsRemoved(QModelIndex, int, int)));
connect(sourceModel, SIGNAL(layoutAboutToBeChanged()), this,
SLOT(layoutAboutToBeChanged()));
connect(sourceModel, SIGNAL(layoutChanged()), this,
SLOT(layoutChanged()));
connect(sourceModel, SIGNAL(modelAboutToBeReset()), this,
SLOT(modelAboutToBeReset()));
connect(sourceModel, SIGNAL(modelReset()), this, SLOT(modelReset()));
if (sourceModel)
setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
endResetModel();
}
int BookmarkFilterModel::rowCount(const QModelIndex &index) const<|fim▁hole|> Q_UNUSED(index)
return cache.count();
}
int BookmarkFilterModel::columnCount(const QModelIndex &index) const
{
Q_UNUSED(index)
if (sourceModel)
return sourceModel->columnCount();
return 0;
}
QModelIndex BookmarkFilterModel::mapToSource(const QModelIndex &proxyIndex) const
{
const int row = proxyIndex.row();
if (proxyIndex.isValid() && row >= 0 && row < cache.count())
return cache[row];
return QModelIndex();
}
QModelIndex BookmarkFilterModel::mapFromSource(const QModelIndex &sourceIndex) const
{
return index(cache.indexOf(sourceIndex), 0, QModelIndex());
}
QModelIndex BookmarkFilterModel::parent(const QModelIndex &child) const
{
Q_UNUSED(child)
return QModelIndex();
}
QModelIndex BookmarkFilterModel::index(int row, int column,
const QModelIndex &index) const
{
Q_UNUSED(index)
if (row < 0 || column < 0 || cache.count() <= row
|| !sourceModel || sourceModel->columnCount() <= column) {
return QModelIndex();
}
return createIndex(row, 0);
}
Qt::DropActions BookmarkFilterModel::supportedDropActions () const
{
if (sourceModel)
return sourceModel->supportedDropActions();
return Qt::IgnoreAction;
}
Qt::ItemFlags BookmarkFilterModel::flags(const QModelIndex &index) const
{
if (sourceModel)
return sourceModel->flags(index);
return Qt::NoItemFlags;
}
QVariant BookmarkFilterModel::data(const QModelIndex &index, int role) const
{
if (sourceModel)
return sourceModel->data(mapToSource(index), role);
return QVariant();
}
bool BookmarkFilterModel::setData(const QModelIndex &index, const QVariant &value,
int role)
{
if (sourceModel)
return sourceModel->setData(mapToSource(index), value, role);
return false;
}
void BookmarkFilterModel::filterBookmarks()
{
if (sourceModel) {
beginResetModel();
hideBookmarks = true;
setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
endResetModel();
}
}
void BookmarkFilterModel::filterBookmarkFolders()
{
if (sourceModel) {
beginResetModel();
hideBookmarks = false;
setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
endResetModel();
}
}
void BookmarkFilterModel::changed(const QModelIndex &topLeft,
const QModelIndex &bottomRight)
{
emit dataChanged(mapFromSource(topLeft), mapFromSource(bottomRight));
}
void BookmarkFilterModel::rowsInserted(const QModelIndex &parent, int start,
int end)
{
if (!sourceModel)
return;
QModelIndex cachePrevious = parent;
if (BookmarkItem *parentItem = sourceModel->itemFromIndex(parent)) {
BookmarkItem *newItem = parentItem->child(start);
        // iterate over the tree hierarchy to find the previous folder
for (int i = 0; i < parentItem->childCount(); ++i) {
if (BookmarkItem *child = parentItem->child(i)) {
const QModelIndex &tmp = sourceModel->indexFromItem(child);
if (tmp.data(UserRoleFolder).toBool() && child != newItem)
cachePrevious = tmp;
}
}
const QModelIndex &newIndex = sourceModel->indexFromItem(newItem);
const bool isFolder = newIndex.data(UserRoleFolder).toBool();
if ((isFolder && hideBookmarks) || (!isFolder && !hideBookmarks)) {
beginInsertRows(mapFromSource(parent), start, end);
const int index = cache.indexOf(cachePrevious) + 1;
if (cache.value(index, QPersistentModelIndex()) != newIndex)
cache.insert(index, newIndex);
endInsertRows();
}
}
}
void BookmarkFilterModel::rowsAboutToBeRemoved(const QModelIndex &parent,
int start, int end)
{
if (!sourceModel)
return;
if (BookmarkItem *parentItem = sourceModel->itemFromIndex(parent)) {
if (BookmarkItem *child = parentItem->child(start)) {
indexToRemove = sourceModel->indexFromItem(child);
if (cache.contains(indexToRemove))
beginRemoveRows(mapFromSource(parent), start, end);
}
}
}
void BookmarkFilterModel::rowsRemoved(const QModelIndex &/*parent*/, int, int)
{
if (cache.contains(indexToRemove)) {
cache.removeAll(indexToRemove);
endRemoveRows();
}
}
void BookmarkFilterModel::layoutAboutToBeChanged()
{
// TODO: ???
}
void BookmarkFilterModel::layoutChanged()
{
// TODO: ???
}
void BookmarkFilterModel::modelAboutToBeReset()
{
beginResetModel();
}
void BookmarkFilterModel::modelReset()
{
if (sourceModel)
setupCache(sourceModel->index(0, 0, QModelIndex()).parent());
endResetModel();
}
void BookmarkFilterModel::setupCache(const QModelIndex &parent)
{
cache.clear();
for (int i = 0; i < sourceModel->rowCount(parent); ++i)
collectItems(sourceModel->index(i, 0, parent));
}
void BookmarkFilterModel::collectItems(const QModelIndex &parent)
{
if (parent.isValid()) {
bool isFolder = sourceModel->data(parent, UserRoleFolder).toBool();
if ((isFolder && hideBookmarks) || (!isFolder && !hideBookmarks))
cache.append(parent);
if (sourceModel->hasChildren(parent)) {
for (int i = 0; i < sourceModel->rowCount(parent); ++i)
collectItems(sourceModel->index(i, 0, parent));
}
}
}
// -- BookmarkTreeModel
BookmarkTreeModel::BookmarkTreeModel(QObject *parent)
: QSortFilterProxyModel(parent)
{
}
int BookmarkTreeModel::columnCount(const QModelIndex &parent) const
{
return qMin(1, QSortFilterProxyModel::columnCount(parent));
}
bool BookmarkTreeModel::filterAcceptsRow(int row, const QModelIndex &parent) const
{
Q_UNUSED(row)
BookmarkModel *model = qobject_cast<BookmarkModel*> (sourceModel());
if (model->rowCount(parent) > 0
&& model->data(model->index(row, 0, parent), UserRoleFolder).toBool())
return true;
return false;
}<|fim▁end|> | { |
<|file_name|>pyText2Pdf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*- coding: UTF-8 -*-
# File: pyText2Pdf.py
#
# Derived from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/189858
__docformat__ = 'plaintext'
__date__ = '04/10/2013'
'''
pyText2Pdf - Python script to convert plain text files into Adobe
Acrobat PDF files.
Version 1.2
Author: Anand B Pillai <abpillai at lycos dot com>
Keywords: python, tools, converter, pdf, text2pdf, adobe, acrobat,
processing.
Copyright (C) 2003-2004 Free Software Foundation, Inc.
This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
This file is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Emacs; see the file COPYING. If not, write to
the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.'''
import sys, os
import string
import time
import optparse
import re
LF_EXTRA=0
LINE_END='\015'
# form feed character (^L)
FF=chr(12)
ENCODING_STR = """\
/Encoding <<
/Differences [ 0 /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /space /exclam
/quotedbl /numbersign /dollar /percent /ampersand
/quoteright /parenleft /parenright /asterisk /plus /comma
/hyphen /period /slash /zero /one /two /three /four /five
/six /seven /eight /nine /colon /semicolon /less /equal
/greater /question /at /A /B /C /D /E /F /G /H /I /J /K /L
/M /N /O /P /Q /R /S /T /U /V /W /X /Y /Z /bracketleft
/backslash /bracketright /asciicircum /underscore
/quoteleft /a /b /c /d /e /f /g /h /i /j /k /l /m /n /o /p
/q /r /s /t /u /v /w /x /y /z /braceleft /bar /braceright
/asciitilde /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/dotlessi /grave /acute /circumflex /tilde /macron /breve
/dotaccent /dieresis /.notdef /ring /cedilla /.notdef
/hungarumlaut /ogonek /caron /space /exclamdown /cent
/sterling /currency /yen /brokenbar /section /dieresis
/copyright /ordfeminine /guillemotleft /logicalnot /hyphen
/registered /macron /degree /plusminus /twosuperior
/threesuperior /acute /mu /paragraph /periodcentered
/cedilla /onesuperior /ordmasculine /guillemotright
/onequarter /onehalf /threequarters /questiondown /Agrave
/Aacute /Acircumflex /Atilde /Adieresis /Aring /AE
/Ccedilla /Egrave /Eacute /Ecircumflex /Edieresis /Igrave
/Iacute /Icircumflex /Idieresis /Eth /Ntilde /Ograve
/Oacute /Ocircumflex /Otilde /Odieresis /multiply /Oslash
/Ugrave /Uacute /Ucircumflex /Udieresis /Yacute /Thorn
/germandbls /agrave /aacute /acircumflex /atilde /adieresis
/aring /ae /ccedilla /egrave /eacute /ecircumflex
/edieresis /igrave /iacute /icircumflex /idieresis /eth
/ntilde /ograve /oacute /ocircumflex /otilde /odieresis
/divide /oslash /ugrave /uacute /ucircumflex /udieresis
/yacute /thorn /ydieresis ]
>>
"""
INTRO="""\
%prog [options] filename
PyText2Pdf makes a 7-bit clean PDF file from any input file.
It reads from a named file, and writes the PDF file to a file specified by
the user, otherwise to a file with '.pdf' appended to the input file.
Author: Anand B Pillai."""
class PyText2Pdf(object):
""" Text2pdf converter in pure Python """
def __init__(self, **kwargs):
# version number
self._version="1.3"
# iso encoding flag
self._IsoEnc=False
# formfeeds flag
self._doFFs=False
self._progname="PyText2Pdf"
self._appname = " ".join((self._progname,str(self._version)))
# default font
self._font="/Courier"
# default font size
self._ptSize=10
# default vert space
self._vertSpace=12
self._lines=0
# number of characters in a row
self._cols=80
self._columns=1
# page ht
self._pageHt=792
# page wd
self._pageWd=612
# input file
self._ifile=""
if 'ifilename' in kwargs:
self._ifile = kwargs['ifilename']
# output file
self._ofile=""
if 'ofile' in kwargs:
self._ofile = kwargs['ofile']
# default tab width
self._tab=4
# input file descriptor
self._ifs=None
# output file descriptor
self._ofs=None
self.buffers = False
if 'buffers' in kwargs and kwargs['buffers']:
self._ifs=kwargs['ifile']
self._ofs=kwargs['ofile']
self.buffers = True
# landscape flag
self._landscape=False
# Subject
self._subject = ''
# Author
self._author = ''
# Keywords
self._keywords = []
# Custom regexp for page breaks
self._pagebreakre = None
# marker objects
self._curobj = 5
self._pageObs = [0]
self._locations = [0,0,0,0,0,0]
self._pageNo=0
# file position marker
self._fpos=0
def parse_args(self):
""" Callback function called by argument parser.
Helps to remove duplicate code """
if len(sys.argv)<2:
sys.argv.append('-h')
parser = optparse.OptionParser(usage=INTRO)
parser.add_option('-o','--output',dest='outfile',help='Direct output to file OUTFILE',metavar='OUTFILE')
parser.add_option('-f','--font',dest='font',help='Use Postscript font FONT (must be in standard 14, default: Courier)',
default='Courier')
parser.add_option('-I','--isolatin',dest='isolatin',help='Use ISO latin-1 encoding',default=False,action='store_true')
parser.add_option('-s','--size',dest='fontsize',help='Use font at PTSIZE points (default=>10)',metavar='PTSIZE',default=10)
        parser.add_option('-v','--linespace',dest='linespace',help='Use line spacing LINESPACE (default 12)',metavar='LINESPACE',default=12)
parser.add_option('-l','--lines',dest='lines',help='Lines per page (default 60, determined automatically if unspecified)',default=60, metavar=None)
parser.add_option('-c','--chars',dest='chars',help='Maximum characters per line (default 80)',default=80,metavar=None)
parser.add_option('-t','--tab',dest='tabspace',help='Spaces per tab character (default 4)',default=4,metavar=None)
parser.add_option('-F','--ignoreff',dest='formfeed',help='Ignore formfeed character ^L (i.e, accept formfeed characters as pagebreaks)',default=False,action='store_true')
parser.add_option('-P','--papersize',dest='papersize',help='Set paper size (default is letter, accepted values are "A4" or "A3")')
parser.add_option('-W','--width',dest='width',help='Independent paper width in points',metavar=None,default=612)
parser.add_option('-H','--height',dest='height',help='Independent paper height in points',metavar=None,default=792)
parser.add_option('-2','--twocolumns',dest='twocolumns',help='Format as two columns',metavar=None,default=False,action='store_true')
parser.add_option('-L','--landscape',dest='landscape',help='Format in landscape mode',metavar=None,default=False,action='store_true')
parser.add_option('-R','--regexp',dest='pageregexp',help='Regular expression string to determine page breaks (if supplied, this will be used to split text into pages, instead of using line count)',metavar=None)
parser.add_option('-S','--subject',dest='subject',help='Optional subject for the document',metavar=None)
parser.add_option('-A','--author',dest='author',help='Optional author for the document',metavar=None)
parser.add_option('-K','--keywords',dest='keywords',help='Optional list of keywords for the document (separated by commas)',metavar=None)
optlist, args = parser.parse_args()
# print optlist.__dict__, args
if len(args)==0:
sys.exit('Error: input file argument missing')
elif len(args)>1:
sys.exit('Error: Too many arguments')
self._ifile = args[0]<|fim▁hole|> if d.get('twocolumns'): self._columns = 2
if d.get('landscape'): self._landscape = True
self._font = '/' + d.get('font')
psize = d.get('papersize')
if psize=='A4':
self._pageWd=595
self._pageHt=842
elif psize=='A3':
self._pageWd=842
self._pageHt=1190
fsize = int(d.get('fontsize'))
if fsize < 1: fsize = 1
self._ptSize = fsize
lspace = int(d.get('linespace'))
if lspace<1: lspace = 1
self._vertSpace = lspace
lines = int(d.get('lines'))
if lines<1: lines = 1
self._lines = int(lines)
chars = int(d.get('chars'))
if chars<4: chars = 4
self._cols = chars
tab = int(d.get('tabspace'))
if tab<1: tab = 1
self._tab = tab
w = int(d.get('width'))
if w<72: w=72
self._pageWd = w
h = int(d.get('height'))
if h<72: h=72
self._pageHt = h
# Very optional args
author = d.get('author')
if author: self._author = author
subject = d.get('subject')
if subject: self._subject = subject
keywords = d.get('keywords')
if keywords:
self._keywords = keywords.split(',')
pagebreak = d.get('pageregexp')
if pagebreak:
self._pagebreakre = re.compile(pagebreak, re.UNICODE|re.IGNORECASE)
outfile = d.get('outfile')
if outfile: self._ofile = outfile
if self._landscape:
print 'Landscape option on...'
if self._columns==2:
print 'Printing in two columns...'
if self._doFFs:
print 'Ignoring form feed character...'
if self._IsoEnc:
print 'Using ISO Latin Encoding...'
print 'Using font',self._font[1:],'size =', self._ptSize
def writestr(self, str):
""" Write string to output file descriptor.
All output operations go through this function.
We keep the current file position also here"""
# update current file position
self._fpos += len(str)
for x in range(0, len(str)):
if str[x] == '\n':
self._fpos += LF_EXTRA
try:
self._ofs.write(str)
except IOError, e:
print e
return -1
return 0
def convert(self, buff=False):
""" Perform the actual conversion """
if self._landscape:
# swap page width & height
tmp = self._pageHt
self._pageHt = self._pageWd
self._pageWd = tmp
if self._lines==0:
self._lines = (self._pageHt - 72)/self._vertSpace
if self._lines < 1:
self._lines=1
if not self.buffers:
try:
self._ifs=open(self._ifile)
            except IOError, (errno, strerror):
print 'Error: Could not open file to read --->', self._ifile
sys.exit(3)
if self._ofile=="":
self._ofile = os.path.splitext(self._ifile)[0] + '.pdf'
try:
self._ofs = open(self._ofile, 'wb')
            except IOError, (errno, strerror):
print 'Error: Could not open file to write --->', self._ofile
sys.exit(3)
#print 'Input file=>',self._ifile
#print 'Writing pdf file',self._ofile, '...'
self.writeheader()
self.writepages()
self.writerest()
if not self.buffers:
#print 'Wrote file', self._ofile
self._ifs.close()
self._ofs.close()
return 0
def writeheader(self):
"""Write the PDF header"""
ws = self.writestr
title = self._ifile
t=time.localtime()
timestr=str(time.strftime("D:%Y%m%d%H%M%S", t))
ws("%PDF-1.4\n")
self._locations[1] = self._fpos
ws("1 0 obj\n")
ws("<<\n")
buf = "".join(("/Creator (", self._appname, " By Anand B Pillai )\n"))
ws(buf)
buf = "".join(("/CreationDate (", timestr, ")\n"))
ws(buf)
buf = "".join(("/Producer (", self._appname, "(\\251 Anand B Pillai))\n"))
ws(buf)
if self._subject:
title = self._subject
buf = "".join(("/Subject (",self._subject,")\n"))
ws(buf)
if self._author:
buf = "".join(("/Author (",self._author,")\n"))
ws(buf)
if self._keywords:
buf = "".join(("/Keywords (",' '.join(self._keywords),")\n"))
ws(buf)
if title:
buf = "".join(("/Title (", title, ")\n"))
ws(buf)
ws(">>\n")
ws("endobj\n")
self._locations[2] = self._fpos
ws("2 0 obj\n")
ws("<<\n")
ws("/Type /Catalog\n")
ws("/Pages 3 0 R\n")
ws(">>\n")
ws("endobj\n")
self._locations[4] = self._fpos
ws("4 0 obj\n")
ws("<<\n")
buf = "".join(("/BaseFont ", str(self._font), " /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font >>\n"))
ws(buf)
if self._IsoEnc:
ws(ENCODING_STR)
ws(">>\n")
ws("endobj\n")
self._locations[5] = self._fpos
ws("5 0 obj\n")
ws("<<\n")
ws(" /Font << /F1 4 0 R >>\n")
ws(" /ProcSet [ /PDF /Text ]\n")
ws(">>\n")
ws("endobj\n")
def startpage(self):
""" Start a page of data """
ws = self.writestr
self._pageNo += 1
self._curobj += 1
self._locations.append(self._fpos)
self._locations[self._curobj]=self._fpos
self._pageObs.append(self._curobj)
self._pageObs[self._pageNo] = self._curobj
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
ws("<<\n")
ws("/Type /Page\n")
ws("/Parent 3 0 R\n")
ws("/Resources 5 0 R\n")
self._curobj += 1
buf = "".join(("/Contents ", str(self._curobj), " 0 R\n"))
ws(buf)
ws(">>\n")
ws("endobj\n")
self._locations.append(self._fpos)
self._locations[self._curobj] = self._fpos
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
ws("<<\n")
buf = "".join(("/Length ", str(self._curobj + 1), " 0 R\n"))
ws(buf)
ws(">>\n")
ws("stream\n")
strmPos = self._fpos
ws("BT\n");
buf = "".join(("/F1 ", str(self._ptSize), " Tf\n"))
ws(buf)
buf = "".join(("1 0 0 1 50 ", str(self._pageHt - 40), " Tm\n"))
ws(buf)
buf = "".join((str(self._vertSpace), " TL\n"))
ws(buf)
return strmPos
def endpage(self, streamStart):
"""End a page of data """
ws = self.writestr
ws("ET\n")
streamEnd = self._fpos
ws("endstream\n")
ws("endobj\n")
self._curobj += 1
self._locations.append(self._fpos)
self._locations[self._curobj] = self._fpos
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
buf = "".join((str(streamEnd - streamStart), '\n'))
ws(buf)
ws('endobj\n')
def writepages(self):
"""Write pages as PDF"""
ws = self.writestr
beginstream=0
lineNo, charNo=0,0
ch, column=0,0
padding,i=0,0
atEOF=0
linebuf = ''
while not atEOF:
beginstream = self.startpage()
column=1
while column <= self._columns:
column += 1
atFF=0
atBOP=0
lineNo=0
# Special flag for regexp page break
pagebreak = False
while lineNo < self._lines and not atFF and not atEOF and not pagebreak:
linebuf = ''
lineNo += 1
ws("(")
charNo=0
while charNo < self._cols:
charNo += 1
ch = self._ifs.read(1)
cond = ((ch != '\n') and not(ch==FF and self._doFFs) and (ch != ''))
if not cond:
# See if this dude matches the pagebreak regexp
if self._pagebreakre and self._pagebreakre.search(linebuf.strip()):
pagebreak = True
linebuf = ''
break
else:
linebuf = linebuf + ch
if ord(ch) >= 32 and ord(ch) <= 127:
if ch == '(' or ch == ')' or ch == '\\':
ws("\\")
ws(ch)
else:
if ord(ch) == 9:
padding =self._tab - ((charNo - 1) % self._tab)
for i in range(padding):
ws(" ")
charNo += (padding -1)
else:
if ch != FF:
# write \xxx form for dodgy character
buf = "".join(('\\', ch))
ws(buf)
else:
# dont print anything for a FF
charNo -= 1
ws(")'\n")
if ch == FF:
atFF=1
if lineNo == self._lines:
atBOP=1
if atBOP:
pos=0
ch = self._ifs.read(1)
pos= self._ifs.tell()
if ch == FF:
ch = self._ifs.read(1)
pos=self._ifs.tell()
# python's EOF signature
if ch == '':
atEOF=1
else:
# push position back by one char
self._ifs.seek(pos-1)
elif atFF:
ch = self._ifs.read(1)
pos=self._ifs.tell()
if ch == '':
atEOF=1
else:
self._ifs.seek(pos-1)
if column < self._columns:
buf = "".join(("1 0 0 1 ",
str((self._pageWd/2 + 25)),
" ",
str(self._pageHt - 40),
" Tm\n"))
ws(buf)
self.endpage(beginstream)
def writerest(self):
"""Finish the file"""
ws = self.writestr
self._locations[3] = self._fpos
ws("3 0 obj\n")
ws("<<\n")
ws("/Type /Pages\n")
buf = "".join(("/Count ", str(self._pageNo), "\n"))
ws(buf)
buf = "".join(("/MediaBox [ 0 0 ", str(self._pageWd), " ", str(self._pageHt), " ]\n"))
ws(buf)
ws("/Kids [ ")
for i in range(1, self._pageNo+1):
buf = "".join((str(self._pageObs[i]), " 0 R "))
ws(buf)
ws("]\n")
ws(">>\n")
ws("endobj\n")
xref = self._fpos
ws("xref\n")
buf = "".join(("0 ", str((self._curobj) + 1), "\n"))
ws(buf)
buf = "".join(("0000000000 65535 f ", str(LINE_END)))
ws(buf)
for i in range(1, self._curobj + 1):
val = self._locations[i]
buf = "".join((string.zfill(str(val), 10), " 00000 n ", str(LINE_END)))
ws(buf)
ws("trailer\n")
ws("<<\n")
buf = "".join(("/Size ", str(self._curobj + 1), "\n"))
ws(buf)
ws("/Root 2 0 R\n")
ws("/Info 1 0 R\n")
ws(">>\n")
ws("startxref\n")
buf = "".join((str(xref), "\n"))
ws(buf)
ws("%%EOF\n")
'''def main():
# reads from argv the input and output files
# usual for command line
pdfclass=PyText2Pdf()
pdfclass.parse_args()
pdfclass.convert()
# uses input and output file descriptors
# usual for importing from other Python modules
from pyText2Pdf import PyText2Pdf
from StringIO import StringIO
#input_fp = StringIO() # or open('<file>', 'rb')
input_fp = open('test.txt', 'rb')
output_fp = StringIO() # or open('<file>', 'wb')
t2p = PyText2Pdf(ifile=input_fp, ofile=output_fp, ifilename='test.txt', buffers=True)
t2p.convert()
f = open('somefile.pdf', 'wb')
f.write(output_fp.getvalue())
f.close()
t= PyText2Pdf( ifilename='test.txt', buffers=False)
t.convert()
#if __name__ == "__main__":
# main()'''<|fim▁end|> |
d = optlist.__dict__
if d.get('isolatin'): self._IsoEnc=True
if d.get('formfeed'): self._doFFs = True |
<|file_name|>FormEndpoint.java<|end_file_name|><|fim▁begin|>package com.hcentive.webservice.soap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ws.server.endpoint.annotation.Endpoint;
import org.springframework.ws.server.endpoint.annotation.PayloadRoot;
import org.springframework.ws.server.endpoint.annotation.RequestPayload;
import org.springframework.ws.server.endpoint.annotation.ResponsePayload;
import com.hcentive.service.FormResponse;
import com.hcentive.webservice.exception.HcentiveSOAPException;
import org.apache.log4j.Logger;
/**
 * Endpoint class.
 *
 * @author Mebin.Jacob
 */
@Endpoint
public final class FormEndpoint {
private static final String NAMESPACE_URI = "http://hcentive.com/service";
Logger logger = Logger.getLogger(FormEndpoint.class);
@Autowired
FormRepository formRepo;
@PayloadRoot(namespace = NAMESPACE_URI, localPart = "FormResponse")
@ResponsePayload
public FormResponse submitForm(@RequestPayload FormResponse request) throws HcentiveSOAPException {<|fim▁hole|> FormResponse response = null;
        logger.debug("Request arrived");
try{
response = new FormResponse();
response.setForm1(formRepo.findForm("1"));
//make API call
}catch(Exception exception){
throw new HcentiveSOAPException("Something went wrong!!! The exception is --- " + exception);
}
return response;
// return null;
}
}<|fim▁end|> | // GetCountryResponse response = new GetCountryResponse();
// response.setCountry(countryRepository.findCountry(request.getName())); |
<|file_name|>chart.js<|end_file_name|><|fim▁begin|>(function(window, document){
var chart;
var pressure_chart;
var lasttime;
function drawCurrent(data)
{
var tempDHT, tempBMP;
if (data.count_dht022 > 0)
{
lasttime = data.humidity[data.count_dht022-1][0];
tempDHT = data.temperature_dht022[data.count_dht022-1][1];
document.querySelector("span#humidityDHT022").innerHTML = data.humidity[data.count_dht022-1][1];
var date = new Date(lasttime);
document.querySelector("span#time").innerHTML = date.toTimeString();
}
if (data.count_bmp180 > 0)
{
lasttime = data.temperature_bmp180[data.count_bmp180-1][0];
tempBMP = data.temperature_bmp180[data.count_bmp180-1][1];
document.querySelector("span#pressureBMP180").innerHTML = data.pressure[data.count_bmp180-1][1] + 'mm hg (' + (data.pressure[data.count_bmp180-1][1] / 7.50061561303).toFixed(2) + ' kPa)' ;
var date = new Date(lasttime);
document.querySelector("span#time").innerHTML = date.toTimeString();
}
document.querySelector("span#temperature").innerHTML = '<abbr title="BMP180 ' + tempBMP + ', DHT022 ' + tempDHT + '">' + ((tempDHT + tempBMP)/2).toFixed(1) + '</abbr>';
document.querySelector("span#lastupdate").innerHTML = new Date().toTimeString();
}
function requestDelta()
{
$.ajax({
url: 'weather_new.php?mode=delta&delta='+lasttime,<|fim▁hole|> success: function(data)
{
var i;
if (data.count > 0) {
for(i=0; i<data.count_dht022;i++)
chart.series[0].addPoint(data.temperature_dht022[i], false, true);
for(i=0; i<data.count_dht022;i++)
chart.series[1].addPoint(data.humidity[i], false, true);
for(i=0; i<data.count_bmp180;i++)
chart.series[0].addPoint(data.temperature_bmp180[i], false, true);
for(i=0; i<data.count_bmp180;i++)
pressure_chart.series[0].addPoint(data.pressure[i], false, true);
chart.redraw();
pressure_chart.redraw();
}
drawCurrent(data);
}
});
}
function requestData()
{
var daterange = document.querySelector("select#daterange").value;
if (!daterange)
daterange = "today";
$.ajax({
url: 'weather_new.php?mode='+daterange,
            dataType: "json",
success: function(data)
{
chart.series[0].setData(data.temperature_dht022);
chart.series[1].setData(data.humidity);
chart.series[2].setData(data.temperature_bmp180);
pressure_chart.series[0].setData(data.pressure);
drawCurrent(data);
setInterval(requestDelta, 5 * 60 * 1000);
}
});
}
$(document).ready(function() {
Highcharts.setOptions({
global: {
useUTC: false
}
});
chart = new Highcharts.Chart({
chart: {
renderTo: 'graph',
type: 'spline',
events: {
load: requestData
}
},
title: {
text: 'Monitoring'
},
tooltip: {
shared: true
},
xAxis: {
type: 'datetime',
maxZoom: 20 * 1000
},
yAxis: {
min: 10,
minPadding: 0.2,
maxPadding: 0.2,
title: {
text: 'Temperature/Humidity',
margin: 80
}
},
series: [{
name: 'Temperature DHT022',
data: []
},
{
name: 'Humidity',
data: []
},
{
name: 'Temperature BMP180',
data: []
}]
});
pressure_chart = new Highcharts.Chart({
chart: {
renderTo: 'pressure_graph',
type: 'spline',
events: {
load: requestData
}
},
title: {
text: 'Pressure monitoring'
},
tooltip: {
shared: true
},
xAxis: {
type: 'datetime',
maxZoom: 20 * 1000
},
yAxis: {
min: 700,
minPadding: 0.2,
maxPadding: 0.2,
title: {
text: 'Pressure',
margin: 80
}
},
series: [{
name: 'Pressure',
data: []
}]
});
$('select#daterange').change(function() {requestData();});
});
})(window, document)<|fim▁end|> | dataType: "json",
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// this example showcases speed's metric inference from string properties
package main
import (
"flag"
"time"
"github.com/performancecopilot/speed"
)
var timelimit = flag.Int("time", 60, "number of seconds to run for")
func main() {
flag.Parse()
c, err := speed.NewPCPClient("strings")
if err != nil {
panic(err)
}
m, err := c.RegisterString(
"this.is.a.simple.counter.metric.to.demonstrate.the.RegisterString.function",
10, speed.Int32Type, speed.CounterSemantics, speed.OneUnit)
if err != nil {
panic(err)
}<|fim▁hole|>
metric := m.(speed.SingletonMetric)
for i := 0; i < *timelimit; i++ {
val := metric.Val().(int32)
val++
metric.MustSet(val)
time.Sleep(time.Second)
}
}<|fim▁end|> |
c.MustStart()
defer c.MustStop() |
<|file_name|>topology_layer2_testcase.py<|end_file_name|><|fim▁begin|>from mock import patch
import networkx as nx
from nav.models.manage import SwPortVlan, Vlan
from nav.netmap import topology<|fim▁hole|>class TopologyLayer2TestCase(TopologyTestCase):
def setUp(self):
super(TopologyLayer2TestCase, self).setUp()
self.model_id = 1
self.nav_graph = nx.MultiDiGraph()
self.a = a = self._netbox_factory('a')
self.b = b = self._netbox_factory('b')
self.c = c = self._netbox_factory('c')
self.d = d = self._netbox_factory('d')
self.a1 = a1 = self._interface_factory('a1', a)
self.a2 = a2 = self._interface_factory('a2', a)
self.a3 = a3 = self._interface_factory('a3', a)
self.b1 = b1 = self._interface_factory('b1', b)
self.b2 = b2 = self._interface_factory('b2', b)
self.c3 = c3 = self._interface_factory('c3', c)
self.c4 = c4 = self._interface_factory('c4', c)
self.d4 = d4 = self._interface_factory('d4', d)
self._add_edge(self.nav_graph, a1.netbox, a1, b1.netbox, b1)
self._add_edge(self.nav_graph, b1.netbox, b1, a1.netbox, a1)
self._add_edge(self.nav_graph, a2.netbox, a2, b2.netbox, b2)
self._add_edge(self.nav_graph, b2.netbox, b2, a2.netbox, a2)
self._add_edge(self.nav_graph, a3.netbox, a3, c3.netbox, c3)
self._add_edge(self.nav_graph, d4.netbox, d4, c4.netbox, c4)
self.vlan__a1_b1 = a_vlan_between_a1_and_b1 = SwPortVlan(
id=self._next_id(), interface=self.a1, vlan=Vlan(id=201, vlan=2))
self.vlans = patch.object(topology, '_get_vlans_map_layer2',
return_value=(
{
self.a1: [a_vlan_between_a1_and_b1],
self.b1: [a_vlan_between_a1_and_b1],
self.a2: [],
self.b2: [],
self.a3: [],
self.c3: []
},
{
self.a: {201: a_vlan_between_a1_and_b1},
self.b: {201: a_vlan_between_a1_and_b1},
self.c: {}
}))
self.vlans.start()
self.build_l2 = patch.object(vlan, 'build_layer2_graph', return_value=self.nav_graph)
self.build_l2.start()
bar = vlan.build_layer2_graph()
#foo = topology._get_vlans_map_layer2(bar)
vlan_by_interfaces, vlan_by_netbox = topology._get_vlans_map_layer2(self.nav_graph)
self.netmap_graph = topology.build_netmap_layer2_graph(
vlan.build_layer2_graph(),
vlan_by_interfaces,
vlan_by_netbox,
None)
def tearDown(self):
self.vlans.stop()
self.build_l2.stop()
def test_noop_layer2_testcase_setup(self):
self.assertTrue(True)
def _add_edge(self, g, node_a, interface_a, node_b, interface_b):
interface_a.to_interface = interface_b
g.add_edge(node_a, node_b, key=interface_a)<|fim▁end|> | from nav.topology import vlan
from .topology_testcase import TopologyTestCase
|
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | import { TextFill32 } from "../../";
export = TextFill32; |
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { Router, Route, IndexRoute } from 'react-router';
import createBrowserHistory from 'history/lib/createBrowserHistory';
import * as views from 'views';
import * as admin from 'views/admin';
import { RouteConstants } from 'constants';
const {
HomeView,
LayoutView
} = views;
const {
AdminHomeView,
AdminLayoutView
} = admin;<|fim▁hole|>
export default class App extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<Router history={createBrowserHistory()}>
<Route path='/' component={LayoutView}>
<IndexRoute component={HomeView} />
<Route path={RouteConstants.ADMIN} component={AdminLayoutView}>
<IndexRoute component={AdminHomeView} />
</Route>
</Route>
</Router>
);
}
}<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import shutil
from io import BytesIO, TextIOWrapper
import logging
import os
from os.path import expanduser, exists
import struct
from configobj import ConfigObj
from Crypto.Cipher import AES
logger = logging.getLogger(__name__)
def load_config(usr_cfg, def_cfg=None):
cfg = ConfigObj()
cfg.merge(ConfigObj(def_cfg, interpolation=False))
cfg.merge(ConfigObj(expanduser(usr_cfg), interpolation=False))
cfg.filename = expanduser(usr_cfg)
return cfg
def write_default_config(source, destination, overwrite=False):
destination = expanduser(destination)
if not overwrite and exists(destination):
return
shutil.copyfile(source, destination)
def get_mylogin_cnf_path():
"""Return the path to the .mylogin.cnf file or None if doesn't exist."""
app_data = os.getenv('APPDATA')
if app_data is None:
mylogin_cnf_dir = os.path.expanduser('~')
else:
mylogin_cnf_dir = os.path.join(app_data, 'MySQL')
mylogin_cnf_dir = os.path.abspath(mylogin_cnf_dir)
mylogin_cnf_path = os.path.join(mylogin_cnf_dir, '.mylogin.cnf')
if exists(mylogin_cnf_path):
logger.debug("Found login path file at '{0}'".format(mylogin_cnf_path))
return mylogin_cnf_path
return None
def open_mylogin_cnf(name):
"""Open a readable version of .mylogin.cnf.
Returns the file contents as a TextIOWrapper object.
:param str name: The pathname of the file to be opened.
:return: the login path file or None
"""
try:
with open(name, 'rb') as f:
plaintext = read_and_decrypt_mylogin_cnf(f)
except (OSError, IOError):
logger.error('Unable to open login path file.')
return None
if not isinstance(plaintext, BytesIO):
logger.error('Unable to read login path file.')
return None
return TextIOWrapper(plaintext)
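# Example usage (a minimal sketch; assumes mysql_config_editor has already
# written a .mylogin.cnf for the current user):
#
#     path = get_mylogin_cnf_path()
#     if path is not None:
#         mylogin_cnf = open_mylogin_cnf(path)
#         if mylogin_cnf is not None:
#             print(mylogin_cnf.read())  # decrypted INI-style contents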
def read_and_decrypt_mylogin_cnf(f):
"""Read and decrypt the contents of .mylogin.cnf.
This decryption algorithm mimics the code in MySQL's
mysql_config_editor.cc.
The login key is 20-bytes of random non-printable ASCII.
It is written to the actual login path file. It is used
to generate the real key used in the AES cipher.
:param f: an I/O object opened in binary mode
:return: the decrypted login path file
:rtype: io.BytesIO or None
"""
# Number of bytes used to store the length of ciphertext.
MAX_CIPHER_STORE_LEN = 4
LOGIN_KEY_LEN = 20
# Move past the unused buffer.
buf = f.read(4)
if not buf or len(buf) != 4:
logger.error('Login path file is blank or incomplete.')
return None
<|fim▁hole|> key = f.read(LOGIN_KEY_LEN)
# Generate the real key.
rkey = [0] * 16
for i in range(LOGIN_KEY_LEN):
try:
rkey[i % 16] ^= ord(key[i:i+1])
except TypeError:
# ord() was unable to get the value of the byte.
logger.error('Unable to generate login path AES key.')
return None
rkey = struct.pack('16B', *rkey)
# Create a cipher object using the key.
aes_cipher = AES.new(rkey, AES.MODE_ECB)
# Create a bytes buffer to hold the plaintext.
plaintext = BytesIO()
while True:
# Read the length of the ciphertext.
len_buf = f.read(MAX_CIPHER_STORE_LEN)
if len(len_buf) < MAX_CIPHER_STORE_LEN:
break
cipher_len, = struct.unpack("<i", len_buf)
# Read cipher_len bytes from the file and decrypt.
cipher = f.read(cipher_len)
pplain = aes_cipher.decrypt(cipher)
try:
# Determine pad length.
pad_len = ord(pplain[-1:])
except TypeError:
# ord() was unable to get the value of the byte.
logger.warning('Unable to remove pad.')
continue
if pad_len > len(pplain) or len(set(pplain[-pad_len:])) != 1:
# Pad length should be less than or equal to the length of the
            # plaintext. The pad should have a single unique byte.
logger.warning('Invalid pad found in login path file.')
continue
# Get rid of pad.
plain = pplain[:-pad_len]
plaintext.write(plain)
if plaintext.tell() == 0:
logger.error('No data successfully decrypted from login path file.')
return None
plaintext.seek(0)
return plaintext<|fim▁end|> | # Read the login key. |
<|file_name|>calc.py<|end_file_name|><|fim▁begin|>"""
To start UNO for both Calc and Writer:
(Note that if you use the current_document command, it will open the Calc's current document since it's the first switch passed)
libreoffice "--accept=socket,host=localhost,port=18100;urp;StarOffice.ServiceManager" --norestore --nofirststartwizard --nologo --calc --writer
To start UNO without opening a libreoffice instance, use the --headless switch:
(Note that this doesn't allow to use the current_document command)
libreoffice --headless "--accept=socket,host=localhost,port=18100;urp;StarOffice.ServiceManager" --norestore --nofirststartwizard --nologo --calc --writer
"""
from uno import getComponentContext<|fim▁hole|># For saving the file
from com.sun.star.beans import PropertyValue
from uno import systemPathToFileUrl
class Message(object):
connection_setup_exception = "Error: Please start the uno bridge first."
# Connect to libreoffice using UNO
UNO_PORT = 18100
try:
localContext = getComponentContext()
resolver = localContext.ServiceManager.createInstanceWithContext(
"com.sun.star.bridge.UnoUrlResolver", localContext)
context = resolver.resolve(
"uno:socket,host=localhost,port=%d;urp;StarOffice.ComponentContext" % UNO_PORT)
except ConnectionSetupException:
print("%s\n" % Message.connection_setup_exception)
sys.exit(1)
# Get the desktop service
desktop = context.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", context)
class Interface(object):
variables = {}
@staticmethod
def current_document():
"""current_document()"""
return desktop.getCurrentComponent()
@staticmethod
def load_document(path):
"""load_document(['path'])"""
url = systemPathToFileUrl(path)
return desktop.loadComponentFromURL(url ,"_blank", 0, ())
@staticmethod
def new_document():
"""new_document()"""
return desktop.loadComponentFromURL("private:factory/scalc","_blank", 0, ())
@staticmethod
def current_sheet(document):
"""[document].current_sheet()"""
return document.getCurrentController().getActiveSheet()
@staticmethod
def save_as(document, path):
"""[document].save_as(['path'])"""
url = systemPathToFileUrl(path)
# Set file to overwrite
property_value = PropertyValue()
property_value.Name = 'Overwrite'
property_value.Value = 'overwrite'
properties = (property_value,)
# Save to file
document.storeAsURL(url, properties)
return True
@staticmethod
def fetch_cell(sheet, cell_range):
"""[sheet].fetch_cell(['A1'])"""
return sheet.getCellRangeByName(cell_range)
@staticmethod
def set_text(cell, string):
"""[cell].set_text(['string'])"""
if (string.startswith('"') and string.endswith('"')) or \
(string.startswith("'") and string.endswith("'")):
string = string[1:-1]
cell.setString(string)
return True
@staticmethod
def get_text(cell):
"""[cell].get_text()"""
return cell.getString()
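    # Example usage (a minimal sketch; assumes a LibreOffice instance is
    # already listening on UNO_PORT, as described in the module docstring):
    #
    #     doc = Interface.new_document()
    #     sheet = Interface.current_sheet(doc)
    #     cell = Interface.fetch_cell(sheet, 'A1')
    #     Interface.set_text(cell, 'hello')
    #     Interface.save_as(doc, '/tmp/out.ods')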
@staticmethod
def weight(cell, bold):
"""[cell].weight(['bold'])"""
if bold.strip("'").strip('"') == "bold":
cell.CharWeight = BOLD
return True
else:
return False<|fim▁end|> | from com.sun.star.connection import ConnectionSetupException
from com.sun.star.awt.FontWeight import BOLD
import sys
|
<|file_name|>ArithmeticModuleNode.java<|end_file_name|><|fim▁begin|>package com.mauriciotogneri.apply.compiler.syntactic.nodes.arithmetic;
import com.mauriciotogneri.apply.compiler.lexical.Token;
import com.mauriciotogneri.apply.compiler.syntactic.TreeNode;
import com.mauriciotogneri.apply.compiler.syntactic.nodes.ExpressionBinaryNode;
public class ArithmeticModuleNode extends ExpressionBinaryNode
{
public ArithmeticModuleNode(Token token, TreeNode left, TreeNode right)
{<|fim▁hole|> public String sourceCode()
{
return String.format("mod(%s, %s)", left.sourceCode(), right.sourceCode());
}
}<|fim▁end|> | super(token, left, right);
}
@Override |
<|file_name|>nvd3-test-discreteBarChart.ts<|end_file_name|><|fim▁begin|>
namespace nvd3_test_discreteBarChart {
var historicalBarChart = [
{
key: "Cumulative Return",
values: [
{
"label": "A",
"value": 29.765957771107
},
{
"label": "B",
"value": 0
},
{
"label": "C",
"value": 32.807804682612
},
{
"label": "D",
"value": 196.45946739256
},
{
"label": "E",
"value": 0.19434030906893
},
{
"label": "F",
"value": 98.079782601442
},
{
"label": "G",<|fim▁hole|> },
{
"label": "H",
"value": 5.1387322875705
}
]
}
];
nv.addGraph(function () {
var chart = nv.models.discreteBarChart()
.x(function (d) { return d.label })
.y(function (d) { return d.value })
.staggerLabels(true)
//.staggerLabels(historicalBarChart[0].values.length > 8)
.showValues(true)
.duration(250)
;
d3.select('#chart1 svg')
.datum(historicalBarChart)
.call(chart);
nv.utils.windowResize(chart.update);
return chart;
});
}<|fim▁end|> | "value": 13.925743130903 |
<|file_name|>C.rs<|end_file_name|><|fim▁begin|>fn main(){
let mut s = String::new();
std::io::stdin().read_line(&mut s).unwrap();
let n: u32 = s.trim().parse().unwrap();<|fim▁hole|> for i in 0..n {
let mut s = String::new();
std::io::stdin().read_line(&mut s).unwrap();
let vec: Vec<&str> = s.trim().split(' ').collect();
t.push(vec[0].parse::<u32>().unwrap());
x.push(vec[1].parse::<u32>().unwrap());
y.push(vec[2].parse::<u32>().unwrap());
}
}<|fim▁end|> | let mut t: Vec<u32> = Vec::new();
let mut x: Vec<u32> = Vec::new();
let mut y: Vec<u32> = Vec::new(); |
<|file_name|>queues.py<|end_file_name|><|fim▁begin|>#
# Module implementing queues
#
# multiprocessing/queues.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import sys
import os
import threading
import collections
import weakref
import errno
from . import connection
from . import context
from ._ext import _billiard
from .compat import get_errno
from .five import monotonic, Empty, Full
from .util import (
debug, error, info, Finalize, register_after_fork, is_exiting,
)
from .reduction import ForkingPickler
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
class Queue(object):
'''
Queue type using a pipe, buffer and thread
'''
def __init__(self, maxsize=0, *args, **kwargs):
try:
ctx = kwargs['ctx']
except KeyError:
raise TypeError('missing 1 required keyword-only argument: ctx')
if maxsize <= 0:
maxsize = _billiard.SemLock.SEM_VALUE_MAX
self._maxsize = maxsize
self._reader, self._writer = connection.Pipe(duplex=False)
self._rlock = ctx.Lock()
self._opid = os.getpid()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = ctx.Lock()
self._sem = ctx.BoundedSemaphore(maxsize)
# For use by concurrent.futures
self._ignore_epipe = False
self._after_fork()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
def __getstate__(self):
context.assert_spawning(self)
return (self._ignore_epipe, self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
(self._ignore_epipe, self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
def _after_fork(self):
debug('Queue._after_fork()')
self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
self._joincancelled = False
self._closed = False
self._close = None
self._send = self._writer.send
self._recv = self._reader.recv
self._send_bytes = self._writer.send_bytes
self._recv_bytes = self._reader.recv_bytes
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
with self._notempty:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._notempty.notify()
def get(self, block=True, timeout=None):
if block and timeout is None:
with self._rlock:
res = self._recv_bytes()
self._sem.release()
else:
if block:
deadline = monotonic() + timeout
if not self._rlock.acquire(block, timeout):
raise Empty
try:
if block:
timeout = deadline - monotonic()
if timeout < 0 or not self._poll(timeout):
raise Empty
elif not self._poll():
raise Empty
res = self._recv_bytes()
self._sem.release()
finally:
self._rlock.release()
# unserialize the data after having released the lock
return ForkingPickler.loads(res)
def qsize(self):
# Raises NotImplementedError on Mac OSX because
# of broken sem_getvalue()
return self._maxsize - self._sem._semlock._get_value()
def empty(self):
return not self._poll()
def full(self):
return self._sem._semlock._is_zero()
def get_nowait(self):
return self.get(False)
def put_nowait(self, obj):
return self.put(obj, False)
def close(self):
self._closed = True
self._reader.close()
if self._close:
self._close()
def join_thread(self):
debug('Queue.join_thread()')
assert self._closed
if self._jointhread:
self._jointhread()
def cancel_join_thread(self):
debug('Queue.cancel_join_thread()')
self._joincancelled = True
try:
self._jointhread.cancel()
except AttributeError:
pass
def _start_thread(self):
debug('Queue._start_thread()')
# Start thread which transfers data from buffer to pipe
self._buffer.clear()
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send_bytes,
self._wlock, self._writer.close, self._ignore_epipe),
name='QueueFeederThread'
)
self._thread.daemon = True
debug('doing self._thread.start()')
self._thread.start()
debug('... done self._thread.start()')
# On process exit we will wait for data to be flushed to pipe.
#
# However, if this process created the queue then all
# processes which use the queue will be descendants of this
# process. Therefore waiting for the queue to be flushed
# is pointless once all the child processes have been joined.
created_by_this_process = (self._opid == os.getpid())
if not self._joincancelled and not created_by_this_process:
self._jointhread = Finalize(
self._thread, Queue._finalize_join,
[weakref.ref(self._thread)],
exitpriority=-5
)
# Send sentinel to the thread queue object when garbage collected<|fim▁hole|> self._close = Finalize(
self, Queue._finalize_close,
[self._buffer, self._notempty],
exitpriority=10
)
@staticmethod
def _finalize_join(twr):
debug('joining queue thread')
thread = twr()
if thread is not None:
thread.join()
debug('... queue thread joined')
else:
debug('... queue thread already dead')
@staticmethod
def _finalize_close(buffer, notempty):
debug('telling queue thread to quit')
with notempty:
buffer.append(_sentinel)
notempty.notify()
@staticmethod
def _feed(buffer, notempty, send_bytes, writelock, close, ignore_epipe):
debug('starting thread to feed data to pipe')
nacquire = notempty.acquire
nrelease = notempty.release
nwait = notempty.wait
bpopleft = buffer.popleft
sentinel = _sentinel
if sys.platform != 'win32':
wacquire = writelock.acquire
wrelease = writelock.release
else:
wacquire = None
try:
while 1:
nacquire()
try:
if not buffer:
nwait()
finally:
nrelease()
try:
while 1:
obj = bpopleft()
if obj is sentinel:
debug('feeder thread got sentinel -- exiting')
close()
return
# serialize the data before acquiring the lock
obj = ForkingPickler.dumps(obj)
if wacquire is None:
send_bytes(obj)
else:
wacquire()
try:
send_bytes(obj)
finally:
wrelease()
except IndexError:
pass
except Exception as exc:
if ignore_epipe and get_errno(exc) == errno.EPIPE:
return
# Since this runs in a daemon thread the resources it uses
            # may become unusable while the process is cleaning up.
# We ignore errors which happen after the process has
# started to cleanup.
try:
if is_exiting():
info('error in queue thread: %r', exc, exc_info=True)
else:
if not error('error in queue thread: %r', exc,
exc_info=True):
import traceback
traceback.print_exc()
except Exception:
pass
_sentinel = object()
class JoinableQueue(Queue):
'''
A queue type which also supports join() and task_done() methods
Note that if you do not call task_done() for each finished task then
eventually the counter's semaphore may overflow causing Bad Things
to happen.
'''
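    # Example usage (a minimal sketch; each get() must be matched by a
    # task_done() call so that join() can eventually return):
    #
    #     q = JoinableQueue(ctx=ctx)
    #     q.put('work')      # producer
    #     item = q.get()     # consumer
    #     ...                # process the item
    #     q.task_done()      # acknowledge completion
    #     q.join()           # blocks until every item is acknowledged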
def __init__(self, maxsize=0, *args, **kwargs):
try:
ctx = kwargs['ctx']
except KeyError:
raise TypeError('missing 1 required keyword argument: ctx')
Queue.__init__(self, maxsize, ctx=ctx)
self._unfinished_tasks = ctx.Semaphore(0)
self._cond = ctx.Condition()
def __getstate__(self):
return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
def __setstate__(self, state):
Queue.__setstate__(self, state[:-2])
self._cond, self._unfinished_tasks = state[-2:]
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
with self._notempty:
with self._cond:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._unfinished_tasks.release()
self._notempty.notify()
def task_done(self):
with self._cond:
if not self._unfinished_tasks.acquire(False):
raise ValueError('task_done() called too many times')
if self._unfinished_tasks._semlock._is_zero():
self._cond.notify_all()
def join(self):
with self._cond:
if not self._unfinished_tasks._semlock._is_zero():
self._cond.wait()
class _SimpleQueue(object):
'''
Simplified Queue type -- really just a locked pipe
'''
def __init__(self, rnonblock=False, wnonblock=False, ctx=None):
self._reader, self._writer = connection.Pipe(
duplex=False, rnonblock=rnonblock, wnonblock=wnonblock,
)
self._poll = self._reader.poll
self._rlock = self._wlock = None
def empty(self):
return not self._poll()
def __getstate__(self):
context.assert_spawning(self)
return (self._reader, self._writer, self._rlock, self._wlock)
def __setstate__(self, state):
(self._reader, self._writer, self._rlock, self._wlock) = state
def get_payload(self):
return self._reader.recv_bytes()
def send_payload(self, value):
self._writer.send_bytes(value)
def get(self):
# unserialize the data after having released the lock
return ForkingPickler.loads(self.get_payload())
def put(self, obj):
# serialize the data before acquiring the lock
self.send_payload(ForkingPickler.dumps(obj))
class SimpleQueue(_SimpleQueue):
def __init__(self, *args, **kwargs):
try:
ctx = kwargs['ctx']
except KeyError:
raise TypeError('missing required keyword argument: ctx')
self._reader, self._writer = connection.Pipe(duplex=False)
self._rlock = ctx.Lock()
self._wlock = ctx.Lock() if sys.platform != 'win32' else None
def get_payload(self):
with self._rlock:
return self._reader.recv_bytes()
def send_payload(self, value):
if self._wlock is None:
# writes to a message oriented win32 pipe are atomic
self._writer.send_bytes(value)
else:
with self._wlock:
self._writer.send_bytes(value)<|fim▁end|> | |
<|file_name|>node.py<|end_file_name|><|fim▁begin|>""" EPYNET Classes """
from . import epanet2
from .objectcollection import ObjectCollection
from .baseobject import BaseObject, lazy_property
from .pattern import Pattern
class Node(BaseObject):
""" Base EPANET Node class """
static_properties = {'elevation': epanet2.EN_ELEVATION}
properties = {'head': epanet2.EN_HEAD, 'pressure': epanet2.EN_PRESSURE}
def __init__(self, uid, network):
super(Node, self).__init__(uid, network)
self.links = ObjectCollection()
def get_index(self, uid):
if not self._index:
self._index = self.network().ep.ENgetnodeindex(uid)
return self._index
def set_object_value(self, code, value):
return self.network().ep.ENsetnodevalue(self.index, code, value)
def get_object_value(self, code):
return self.network().ep.ENgetnodevalue(self.index, code)
@property
def index(self):
return self.get_index(self.uid)
@lazy_property
def coordinates(self):
return self.network().ep.ENgetcoord(self.index)
# extra functionality
@lazy_property
def upstream_links(self):
""" return a list of upstream links """
if self.results != {}:
raise ValueError("This method is only supported for steady state simulations")
links = ObjectCollection()
for link in self.links:
if (link.to_node == self and link.flow >= 1e-3) or (link.from_node == self and link.flow < -1e-3):
links[link.uid] = link
return links
@lazy_property
def downstream_links(self):
""" return a list of downstream nodes """
if self.results != {}:
raise ValueError("This method is only supported for steady state simulations")
links = ObjectCollection()
for link in self.links:
            if (link.from_node == self and link.flow >= 1e-3) or (link.to_node == self and link.flow < -1e-3):
links[link.uid] = link
return links
@lazy_property
    def inflow(self):
        """ calculates all the water flowing into the node """
        inflow = 0
        for link in self.upstream_links:
            inflow += abs(link.flow)
        return inflow
@lazy_property
    def outflow(self):
        """ calculates all the water flowing out of the node """
        outflow = 0
        for link in self.downstream_links:
            outflow += abs(link.flow)
        return outflow
class Reservoir(Node):
""" EPANET Reservoir Class """
node_type = "Reservoir"
class Junction(Node):
""" EPANET Junction Class """
static_properties = {'elevation': epanet2.EN_ELEVATION, 'basedemand': epanet2.EN_BASEDEMAND, 'emitter': epanet2.EN_EMITTER}
properties = {'head': epanet2.EN_HEAD, 'pressure': epanet2.EN_PRESSURE, 'demand': epanet2.EN_DEMAND}
node_type = "Junction"
@property
def pattern(self):
pattern_index = int(self.get_property(epanet2.EN_PATTERN))
uid = self.network().ep.ENgetpatternid(pattern_index)
return Pattern(uid, self.network())
@pattern.setter
def pattern(self, value):
if isinstance(value, int):<|fim▁hole|> pattern_index = value
elif isinstance(value, str):
pattern_index = self.network().ep.ENgetpatternindex(value)
else:
pattern_index = value.index
self.network().solved = False
self.set_object_value(epanet2.EN_PATTERN, pattern_index)
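    # Setter usage sketch (assumes an epynet network `net` that defines a
    # pattern with uid '1'):
    #
    #   junction.pattern = 1                   # raw EPANET pattern index
    #   junction.pattern = '1'                 # pattern uid
    #   junction.pattern = net.patterns['1']   # Pattern instance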
class Tank(Node):
""" EPANET Tank Class """
node_type = "Tank"
static_properties = {'elevation': epanet2.EN_ELEVATION, 'basedemand': epanet2.EN_BASEDEMAND,
'initvolume': epanet2.EN_INITVOLUME, 'diameter': epanet2.EN_TANKDIAM,
'minvolume': epanet2.EN_MINVOLUME, 'minlevel': epanet2.EN_MINLEVEL,
'maxlevel': epanet2.EN_MAXLEVEL, 'maxvolume': 25, 'tanklevel': epanet2.EN_TANKLEVEL}
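    # note: the raw codes 25 ('maxvolume') and 24 ('volume') stand in for
    # EN_MAXVOLUME and EN_TANKVOLUME, which this epanet2 wrapper does not
    # define by name.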
properties = {'head': epanet2.EN_HEAD, 'pressure': epanet2.EN_PRESSURE,
'demand': epanet2.EN_DEMAND, 'volume': 24, 'level': epanet2.EN_TANKLEVEL}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// This file is part of zinc64.
// Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved.
// Licensed under the GPLv3. See LICENSE file in the project root for full license text.
mod help;
mod load;
mod ls;
mod parser;
mod reset;
use std::io::Write;
use std::result::Result;
use zinc64_emu::system::C64;
<|fim▁hole|>use self::reset::ResetCommand;
pub enum Cmd {
Help(Option<String>),
Load(String),
Ls(Option<String>),
Reset(bool),
}
trait Handler {
fn run(&mut self, out: &mut dyn Write) -> Result<(), String>;
}
pub struct Executor {
parser: Parser,
}
impl Executor {
pub fn new() -> Self {
Executor {
parser: Parser::new(),
}
}
pub fn execute(&self, input: &str, c64: &mut C64, out: &mut dyn Write) -> Result<(), String> {
let command = self.parser.parse(input)?;
let mut handler: Box<dyn Handler> = match command {
Cmd::Load(path) => Box::new(LoadCommand::new(c64, path)),
Cmd::Ls(path) => Box::new(LsCommand::new(path)),
Cmd::Reset(hard) => Box::new(ResetCommand::new(c64, hard)),
Cmd::Help(command) => Box::new(HelpCommand::new(command)),
};
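        // Dispatch sketch (hypothetical input string; the accepted grammar
        // lives in the parser module):
        //
        //   let executor = Executor::new();
        //   executor.execute("reset", &mut c64, &mut std::io::stdout())?;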
handler.run(out)
}
}<|fim▁end|> | use self::help::HelpCommand;
use self::load::LoadCommand;
use self::ls::LsCommand;
use self::parser::Parser; |
<|file_name|>ObjJasmineSpec.js<|end_file_name|><|fim▁begin|>'use strict';
describe("Divhide.Obj", function () {
beforeEach(function () {
jasmine.addMatchers(window.JasmineCustomMatchers);
});
it("Divhide.Specs.ObjExample", function () {
Divhide.Specs.ObjExample();
});
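    // Call shape exercised throughout this spec (the option names below are
    // exactly the ones the tests pass in):
    //
    //   Divhide.Obj.stringify(value, { space: 2, annotate: annotateFn });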
/**
*
* Tests for Obj.stringify()
*
*/
describe("stringify", function () {
/**
*
* Test toString() with a literal as argument
*
*/
describe("stringify(literal)", function(){
it("String should return a valid string", function () {
var val = Divhide.Obj.stringify("Oscar");
expect("\"Oscar\"").toBe(val);
});
it("Number should return a valid string", function () {
var val = Divhide.Obj.stringify(1);
expect(val).toBe("1");
});
it("null should return a valid string", function () {
var val = Divhide.Obj.stringify(null);
expect(val).toBe("null");
});
it("undefined should return a valid string", function () {
var val = Divhide.Obj.stringify(undefined);
expect(val).toBe("undefined");
});
});
/**
*
* Test toString() with an object as the argument
*
*/
describe("stringify(obj)", function(){
it("empty object should return a valid string", function () {
var val = Divhide.Obj.stringify({}, { space: 0 });
expect(val).toBe("{}");
});
it("one level object should return a valid string", function () {
var val = Divhide.Obj.stringify({
"firstName": "Oscar",
"lastName": "Brito"
}, { space: 0 });
expect(val)
.toBe("{\"firstName\":\"Oscar\",\"lastName\":\"Brito\"}");
});
it("two level object should return a valid string", function () {
var val = Divhide.Obj.stringify({
"firstName": {
"value": "Oscar"
},
"lastName": {
"value": "Brito"
}
}, { space: 0 });
expect(val)
.toBe("{\"firstName\":{\"value\":\"Oscar\"},\"lastName\":{\"value\":\"Brito\"}}");
});
it("with identation should return a valid string", function () {
var val = Divhide.Obj.stringify({
"other": {},
"firstName": {
"value": "Oscar"
}
}, { space: 2 });
expect(val).toBe("{\n" +
" \"other\": {},\n" +
" \"firstName\": {\n" +
" \"value\": \"Oscar\"\n" +
" }\n" +
"}");
});
});
/**
*
* Test toString() with an array as argument
*
*/
describe("stringify(array)", function(){
it("empty array should return a valid string", function () {
var val = Divhide.Obj.stringify([], { space: 0 });
expect(val).toBe("[]");
});
it("one level array should return a valid string", function () {
var val = Divhide.Obj.stringify([
"one",
"two",
3
], { space: 0 });
expect(val).toBe("[\"one\",\"two\",3]");
});
it("complex array should return a valid string", function () {
var val = Divhide.Obj.stringify(
["one", ["two"]],
{ space: 0 });
expect(val).toBe("[\"one\",[\"two\"]]");
});
it("with identation should return a valid string", function () {
var val = Divhide.Obj.stringify([
1,
[],
[ 2, 3 ],
], { space: 2 });
expect(val).toBe(
"[\n" +
" 1,\n" +
" [],\n" +
" [\n" +
" 2,\n" +
" 3\n" +
" ]\n" +
"]");
});
});
/**
*
* Test toString() with different combination of
* arguments.
*
*/
describe("stringify(*)", function(){
it("array with complex combination should return a valid string", function () {
var val = Divhide.Obj.stringify([
"one",
{
"2": "two",
"3": "three",
4: [4]
},
[ 5 ]
], { space: 0 });
expect(val)
.toBe("[\"one\",{\"2\":\"two\",\"3\":\"three\",\"4\":[4]},[5]]");
});
it("object with complex combination should return a valid string", function () {
var val = Divhide.Obj.stringify({
1: 1,
2: [ 2 ],
3: {
"value": "3"
}
}, { space: 0 });
expect(val)
.toBe("{\"1\":1,\"2\":[2],\"3\":{\"value\":\"3\"}}");
});<|fim▁hole|> var val = Divhide.Obj.stringify([
{
name: "Oscar",
age: 30,
tags: [ "tag1", "tag2" ]
},
{
name: "Filipe",
age: 31
},
], { space: 2 });
expect(val).toBe(
"[\n" +
" {\n" +
" \"name\": \"Oscar\",\n" +
" \"age\": 30,\n" +
" \"tags\": [\n" +
" \"tag1\",\n" +
" \"tag2\"\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"name\": \"Filipe\",\n" +
" \"age\": 31\n" +
" }\n" +
"]");
});
});
/**
* Test stringify() annotations
*/
describe("annotate", function(){
it("literal should return a valid string", function(){
var val = Divhide.Obj.stringify(1, {
space: 2,
annotate: function(value, info){
return {
before: "-> ",
after: " /* A value */"
};
}
});
expect(val).toBe("-> 1 /* A value */");
});
it("object should return a valid string", function(){
var val = Divhide.Obj.stringify({
name: "Oscar",
age: 30
}, {
space: 2,
annotate: function(value, info){
return {
after: (value instanceof Object) ? " /* The one! */" : null
};
}
});
expect(val).toBe(
"{\n" +
" \"name\": \"Oscar\",\n" +
" \"age\": 30\n" +
"} /* The one! */");
});
it("object keys should return a valid string", function(){
var val = Divhide.Obj.stringify({
name: "Oscar",
age: 30
}, {
space: 2,
annotate: function(value, info){
return {
before: (value == "Oscar") ? "/* The name */ " : null,
after: (value == "Oscar") ? " /* is so cool */" : null
};
}
});
expect(val).toBe(
"{\n" +
" \"name\": /* The name */ \"Oscar\", /* is so cool */\n" +
" \"age\": 30\n" +
"}");
});
it("array should return a valid string", function(){
var val = Divhide.Obj.stringify([{
name: "Oscar",
age: 30
}], {
space: 2,
annotate: function(value, info){
return {
after: (value instanceof Array) ? " /* The one! */" : null
};
}
});
expect(val).toBe(
"[\n" +
" {\n" +
" \"name\": \"Oscar\",\n" +
" \"age\": 30\n" +
" }\n" +
"] /* The one! */");
});
it("array item should return a valid string", function(){
var val = Divhide.Obj.stringify([{
name: "Oscar",
age: 30
}], {
space: 2,
annotate: function(value, info){
return {
after: (Divhide.Type.isObject(value)) ? " /* The one! */" : null
};
}
});
expect(val).toBe(
"[\n" +
" {\n" +
" \"name\": \"Oscar\",\n" +
" \"age\": 30\n" +
" } /* The one! */\n" +
"]");
});
});
});
});<|fim▁end|> |
it("array with identation should return a valid string", function () {
|
<|file_name|>signal_handlers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import connection
from django.conf import settings
from django.utils import timezone
from taiga.projects.history import services as history_service
from taiga.projects.history.choices import HistoryType
from . import tasks
def _get_project_webhooks(project):
webhooks = []
for webhook in project.webhooks.all():
webhooks.append({
"id": webhook.pk,
"url": webhook.url,
"key": webhook.key,
})
return webhooks
def on_new_history_entry(sender, instance, created, **kwargs):
if not settings.WEBHOOKS_ENABLED:
return None
if instance.is_hidden:
return None
model = history_service.get_model_from_key(instance.key)
pk = history_service.get_pk_from_key(instance.key)<|fim▁hole|> # Catch simultaneous DELETE request
return None
webhooks = _get_project_webhooks(obj.project)
if instance.type == HistoryType.create:
task = tasks.create_webhook
extra_args = []
elif instance.type == HistoryType.change:
task = tasks.change_webhook
extra_args = [instance]
elif instance.type == HistoryType.delete:
task = tasks.delete_webhook
extra_args = []
by = instance.owner
date = timezone.now()
webhooks_args = []
for webhook in webhooks:
args = [webhook["id"], webhook["url"], webhook["key"], by, date, obj] + extra_args
webhooks_args.append(args)
connection.on_commit(lambda: _execute_task(task, webhooks_args))
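# on_commit (above) defers delivery until the surrounding transaction commits,
# so webhooks never fire for rolled-back changes. A synchronous sketch of what
# eventually runs (argument values are hypothetical):
#
#   _execute_task(tasks.change_webhook,
#                 [[webhook_id, url, key, by, date, obj, history_entry]])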
def _execute_task(task, webhooks_args):
for webhook_args in webhooks_args:
if settings.CELERY_ENABLED:
task.delay(*webhook_args)
else:
task(*webhook_args)<|fim▁end|> | try:
obj = model.objects.get(pk=pk)
except model.DoesNotExist: |
<|file_name|>viewsets.py<|end_file_name|><|fim▁begin|>from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet<|fim▁hole|>from rdmo.core.permissions import HasModelPermission
from rdmo.core.views import ChoicesViewSet
from rdmo.core.viewsets import CopyModelMixin
from .models import Condition
from .renderers import ConditionRenderer
from .serializers.export import ConditionExportSerializer
from .serializers.v1 import ConditionIndexSerializer, ConditionSerializer
class ConditionViewSet(CopyModelMixin, ModelViewSet):
permission_classes = (HasModelPermission, )
queryset = Condition.objects.select_related('source', 'target_option') \
.prefetch_related('optionsets', 'questionsets', 'questions', 'tasks')
serializer_class = ConditionSerializer
filter_backends = (DjangoFilterBackend,)
filterset_fields = (
'uri',
'key',
'source',
'relation',
'target_text',
'target_option'
)
@action(detail=False)
def index(self, request):
queryset = Condition.objects.select_related('source', 'target_option')
serializer = ConditionIndexSerializer(queryset, many=True)
return Response(serializer.data)
@action(detail=False, permission_classes=[HasModelPermission])
def export(self, request):
serializer = ConditionExportSerializer(self.get_queryset(), many=True)
xml = ConditionRenderer().render(serializer.data)
return XMLResponse(xml, name='conditions')
@action(detail=True, url_path='export', permission_classes=[HasModelPermission])
def detail_export(self, request, pk=None):
serializer = ConditionExportSerializer(self.get_object())
xml = ConditionRenderer().render([serializer.data])
return XMLResponse(xml, name=self.get_object().key)
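    # Route sketch: registered on a DRF router as e.g.
    # router.register(r'conditions', ConditionViewSet) (hypothetical prefix),
    # the actions above resolve to:
    #
    #   GET conditions/export/        -> all conditions as one XML document
    #   GET conditions/<pk>/export/   -> one condition as XML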
class RelationViewSet(ChoicesViewSet):
permission_classes = (IsAuthenticated, )
queryset = Condition.RELATION_CHOICES<|fim▁end|> |
from rdmo.core.exports import XMLResponse |
<|file_name|>presets.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for parser and parser plugin presets."""
from __future__ import unicode_literals
import unittest
from plaso.containers import artifacts
from plaso.parsers import presets
from tests import test_lib as shared_test_lib
<|fim▁hole|> def testInitialize(self):
"""Tests the __init__ function."""
test_definition = presets.ParserPreset('test', ['parser1', 'parser2'])
self.assertIsNotNone(test_definition)
class ParserPresetsManagerTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin presets manager."""
_LINUX_PARSERS = [
'bash_history',
'bencode',
'czip/oxml',
'dockerjson',
'dpkg',
'filestat',
'gdrive_synclog',
'olecf',
'pls_recall',
'popularity_contest',
'selinux',
'sqlite/google_drive',
'sqlite/skype',
'sqlite/zeitgeist',
'syslog',
'systemd_journal',
'utmp',
'vsftpd',
'webhist',
'xchatlog',
'xchatscrollback',
'zsh_extended_history']
_MACOS_PARSERS = [
'asl_log',
'bash_history',
'bencode',
'bsm_log',
'cups_ipp',
'czip/oxml',
'filestat',
'fseventsd',
'gdrive_synclog',
'mac_appfirewall_log',
'mac_keychain',
'mac_securityd',
'macwifi',
'olecf',
'plist',
'sqlite/appusage',
'sqlite/google_drive',
'sqlite/imessage',
'sqlite/ls_quarantine',
'sqlite/mac_document_versions',
'sqlite/mackeeper_cache',
'sqlite/skype',
'syslog',
'utmpx',
'webhist',
'zsh_extended_history']
# TODO add tests for _ReadPresetDefinitionValues
# TODO add tests for _ReadPresetsFromFileObject
def testGetNames(self):
"""Tests the GetNames function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_names = list(test_manager.GetNames())
self.assertEqual(len(test_names), 7)
expected_names = sorted([
'android', 'linux', 'macos', 'webhist', 'win7', 'win_gen', 'winxp'])
self.assertEqual(test_names, expected_names)
def testGetParsersByPreset(self):
"""Tests the GetParsersByPreset function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_names = test_manager.GetParsersByPreset('linux')
self.assertEqual(parser_names, self._LINUX_PARSERS)
with self.assertRaises(KeyError):
test_manager.GetParsersByPreset('bogus')
def testGetPresetByName(self):
"""Tests the GetPresetByName function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_preset = test_manager.GetPresetByName('linux')
self.assertIsNotNone(test_preset)
self.assertEqual(test_preset.name, 'linux')
self.assertEqual(test_preset.parsers, self._LINUX_PARSERS)
test_preset = test_manager.GetPresetByName('bogus')
self.assertIsNone(test_preset)
def testGetPresetsByOperatingSystem(self):
"""Tests the GetPresetsByOperatingSystem function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
operating_system = artifacts.OperatingSystemArtifact(family='MacOS')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 1)
self.assertEqual(test_presets[0].name, 'macos')
self.assertEqual(test_presets[0].parsers, self._MACOS_PARSERS)
operating_system = artifacts.OperatingSystemArtifact(family='bogus')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 0)
def testGetPresetsInformation(self):
"""Tests the GetPresetsInformation function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_presets_information = test_manager.GetPresetsInformation()
self.assertGreaterEqual(len(parser_presets_information), 1)
available_parser_names = [name for name, _ in parser_presets_information]
self.assertIn('linux', available_parser_names)
# TODO add tests for ReadFromFile
if __name__ == '__main__':
unittest.main()<|fim▁end|> | class ParserPresetTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin preset."""
|
<|file_name|>test_task_manager.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from unittest import mock
from configman.dotdict import DotDict
from socorro.lib.task_manager import TaskManager, default_task_func
class TestTaskManager:
    def test_constructor1(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config)
assert tm.config == config
assert tm.task_func == default_task_func
assert tm.quit is False
def test_get_iterator(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config, job_source_iterator=range(1))
assert list(tm._get_iterator()) == [0]
def an_iter(self):
yield from range(5)
tm = TaskManager(config, job_source_iterator=an_iter)
assert list(tm._get_iterator()) == [0, 1, 2, 3, 4]
class X:
def __init__(self, config):
self.config = config
def __iter__(self):
yield from self.config
tm = TaskManager(config, job_source_iterator=X(config))
assert list(tm._get_iterator()) == list(config.keys())
def test_blocking_start(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = False
class MyTaskManager(TaskManager):
def _responsive_sleep(self, seconds, wait_log_interval=0, wait_reason=""):
try:
if self.count >= 2:
raise KeyboardInterrupt
self.count += 1
except AttributeError:
self.count = 0
<|fim▁hole|>
waiting_func = mock.Mock()
tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
assert waiting_func.call_count == 0
def test_blocking_start_with_quit_on_empty(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = True
tm = TaskManager(config, task_func=mock.Mock())
waiting_func = mock.Mock()
tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
assert waiting_func.call_count == 0<|fim▁end|> | tm = MyTaskManager(config, task_func=mock.Mock()) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# pynag - Python Nagios plug-in and configuration environment
# Copyright (C) 2010 Drew Stinnet
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""This module contains low-level Parsers for nagios configuration and status objects.
Hint: If you are looking to parse some nagios configuration data, you probably
want pynag.Model module instead.
The highlights of this module are:
class Config: For parsing local nagios configuration files
class Livestatus: To connect to MK-Livestatus
class StatusDat: To read info from status.dat (not used a lot, migrate to mk-livestatus)
class LogFiles: To read nagios log-files
class MultiSite: To talk with multiple Livestatus instances
"""
import os
import re
import time
import sys
import socket # for mk_livestatus
import stat
import pynag.Plugins
import pynag.Utils
import StringIO
import tarfile
_sentinel = object()
class Config(object):
""" Parse and write nagios config files """
# Regex for beginning of object definition
# We want everything that matches:
# define <object_type> {
    __beginning_of_object = re.compile(r"^\s*define\s+(\w+)\s*\{?(.*)$")
def __init__(self, cfg_file=None, strict=False):
""" Constructor for :py:class:`pynag.Parsers.config` class
Args:
cfg_file (str): Full path to nagios.cfg. If None, try to
auto-discover location
strict (bool): if True, use stricter parsing which is more prone to
raising exceptions
"""
self.cfg_file = cfg_file # Main configuration file
self.strict = strict # Use strict parsing or not
        # If nagios.cfg is not set, let's do some minor autodiscovery.
if self.cfg_file is None:
self.cfg_file = self.guess_cfg_file()
self.data = {}
self.maincfg_values = []
self._is_dirty = False
        self.reset()  # Initialize misc member variables
def guess_nagios_directory(self):
""" Returns a path to the nagios configuration directory on your system
Use this function for determining the nagios config directory in your
code
Returns:
str. directory containing the nagios.cfg file
Raises:
:py:class:`pynag.Parsers.ConfigFileNotFound` if cannot guess config
file location.
"""
cfg_file = self.guess_cfg_file()
if not cfg_file:
raise ConfigFileNotFound("Could not find nagios.cfg")
return os.path.dirname(cfg_file)
def guess_nagios_binary(self):
""" Returns a path to any nagios binary found on your system
Use this function if you don't want specify path to the nagios binary
in your code and you are confident that it is located in a common
location
Checked locations are as follows:
* /usr/bin/nagios
* /usr/sbin/nagios
* /usr/local/nagios/bin/nagios
* /nagios/bin/nagios
* /usr/bin/icinga
* /usr/sbin/icinga
* /usr/bin/naemon
* /usr/sbin/naemon
* /usr/local/naemon/bin/naemon.cfg
* /usr/bin/shinken
* /usr/sbin/shinken
Returns:
str. Path to the nagios binary
None if could not find a binary in any of those locations
"""
possible_files = ('/usr/bin/nagios',
'/usr/sbin/nagios',
'/usr/local/nagios/bin/nagios',
'/nagios/bin/nagios',
'/usr/bin/icinga',
'/usr/sbin/icinga',
'/usr/bin/naemon',
'/usr/sbin/naemon',
'/usr/local/naemon/bin/naemon.cfg',
'/usr/bin/shinken',
'/usr/sbin/shinken')
possible_binaries = ('nagios', 'nagios3', 'naemon', 'icinga', 'shinken')
for i in possible_binaries:
command = ['which', i]
code, stdout, stderr = pynag.Utils.runCommand(command=command, shell=False)
if code == 0:
return stdout.splitlines()[0].strip()
return None
def guess_cfg_file(self):
""" Returns a path to any nagios.cfg found on your system
Use this function if you don't want specify path to nagios.cfg in your
code and you are confident that it is located in a common location
Checked locations are as follows:
* /etc/nagios/nagios.cfg
* /etc/nagios3/nagios.cfg
* /usr/local/nagios/etc/nagios.cfg
* /nagios/etc/nagios/nagios.cfg
* ./nagios.cfg
* ./nagios/nagios.cfg
* /etc/icinga/icinga.cfg
* /usr/local/icinga/etc/icinga.cfg
* ./icinga.cfg
* ./icinga/icinga.cfg
* /etc/naemon/naemon.cfg
* /usr/local/naemon/etc/naemon.cfg
* ./naemon.cfg
* ./naemon/naemon.cfg
* /etc/shinken/shinken.cfg
Returns:
str. Path to the nagios.cfg or equivalent file
None if couldn't find a file in any of these locations.
"""
possible_files = ('/etc/nagios/nagios.cfg',
'/etc/nagios3/nagios.cfg',
'/usr/local/nagios/etc/nagios.cfg',
'/nagios/etc/nagios/nagios.cfg',
'./nagios.cfg',
'./nagios/nagios.cfg',
'/etc/icinga/icinga.cfg',
'/usr/local/icinga/etc/icinga.cfg',
'./icinga.cfg',
'./icinga/icinga.cfg',
'/etc/naemon/naemon.cfg',
'/usr/local/naemon/etc/naemon.cfg',
'./naemon.cfg',
'./naemon/naemon.cfg',
'/etc/shinken/shinken.cfg',
)
for file_path in possible_files:
if self.isfile(file_path):
return file_path
return None
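        # Illustrative only -- the result depends entirely on the host system:
        #
        #   Config().guess_cfg_file()   # -> '/etc/nagios/nagios.cfg'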
def reset(self):
""" Reinitializes the data of a parser instance to its default values.
"""
self.cfg_files = [] # List of other configuration files
self.data = {} # dict of every known object definition
self.errors = [] # List of ParserErrors
self.item_list = None
self.item_cache = None
self.maincfg_values = [] # The contents of main nagios.cfg
self._resource_values = [] # The contents of any resource_files
        self.item_apply_cache = {}  # This is a performance tweak used by _apply_template
        # This is a pure list of all the key/values in the config files. It
        # isn't useful until the items in it are parsed through with the proper
        # 'use' relationships
self.pre_object_list = []
self.post_object_list = []
self.object_type_keys = {
'hostgroup': 'hostgroup_name',
'hostextinfo': 'host_name',
'host': 'host_name',
'service': 'name',
'servicegroup': 'servicegroup_name',
'contact': 'contact_name',
'contactgroup': 'contactgroup_name',
'timeperiod': 'timeperiod_name',
'command': 'command_name',
#'service':['host_name','description'],
}
def _has_template(self, target):
""" Determine if an item has a template associated with it
Args:
            target (dict): Parsed item as parsed by :py:class:`pynag.Parsers.Config`
"""
return 'use' in target
def _get_pid(self):
""" Checks the lock_file var in nagios.cfg and returns the pid from the file
If the pid file does not exist, returns None.
"""
try:
return self.open(self.get_cfg_value('lock_file'), "r").readline().strip()
except Exception:
return None
def _get_hostgroup(self, hostgroup_name):
""" Returns the hostgroup that matches the queried name.
Args:
hostgroup_name: Name of the hostgroup to be returned (string)
Returns:
Hostgroup item with hostgroup_name that matches the queried name.
"""
return self.data['all_hostgroup'].get(hostgroup_name, None)
def _get_key(self, object_type, user_key=None):
""" Return the correct 'key' for an item.
This is mainly a helper method for other methods in this class. It is
used to shorten code repetition.
Args:
object_type: Object type from which to obtain the 'key' (string)
user_key: User defined key. Default None. (string)
Returns:
Correct 'key' for the object type. (string)
"""
if not user_key and not object_type in self.object_type_keys:
raise ParserError("Unknown key for object type: %s\n" % object_type)
# Use a default key
if not user_key:
user_key = self.object_type_keys[object_type]
return user_key
def _get_item(self, item_name, item_type):
""" Return an item from a list
Creates a cache of items in self.pre_object_list and returns an element
from this cache. Looks for an item with corresponding name and type.
Args:
item_name: Name of the item to be returned (string)
item_type: Type of the item to be returned (string)
Returns:
Item with matching name and type from
:py:attr:`pynag.Parsers.config.item_cache`
"""
# create local cache for performance optimizations. TODO: Rewrite functions that call this function
if not self.item_list:
self.item_list = self.pre_object_list
self.item_cache = {}
for item in self.item_list:
if not "name" in item:
continue
name = item['name']
tmp_item_type = (item['meta']['object_type'])
if not tmp_item_type in self.item_cache:
self.item_cache[tmp_item_type] = {}
self.item_cache[tmp_item_type][name] = item
my_cache = self.item_cache.get(item_type, None)
if not my_cache:
return None
return my_cache.get(item_name, None)
def _apply_template(self, original_item):
""" Apply all attributes of item named parent_name to "original_item".
Applies all of the attributes of parents (from the 'use' field) to item.
Args:
original_item: Item 'use'-ing a parent item. The parent's attributes
will be concretely added to this item.
Returns:
original_item to which have been added all the attributes defined
in parent items.
"""
# TODO: There is space for more performance tweaks here
# If item does not inherit from anyone else, lets just return item as is.
if 'use' not in original_item:
return original_item
object_type = original_item['meta']['object_type']
raw_definition = original_item['meta']['raw_definition']
my_cache = self.item_apply_cache.get(object_type, {})
# Performance tweak, if item has been parsed. Lets not do it again
if raw_definition in my_cache:
return my_cache[raw_definition]
parent_names = original_item['use'].split(',')
parent_items = []
for parent_name in parent_names:
parent_item = self._get_item(parent_name, object_type)
if parent_item is None:
error_string = "Can not find any %s named %s\n" % (object_type, parent_name)
self.errors.append(ParserError(error_string, item=original_item))
continue
try:
# Parent item probably has use flags on its own. So lets apply to parent first
parent_item = self._apply_template(parent_item)
except RuntimeError:
t, e = sys.exc_info()[:2]
self.errors.append(ParserError("Error while parsing item: %s (it might have circular use=)" % str(e),
item=original_item))
parent_items.append(parent_item)
inherited_attributes = original_item['meta']['inherited_attributes']
template_fields = original_item['meta']['template_fields']
for parent_item in parent_items:
for k, v in parent_item.iteritems():
if k in ('use', 'register', 'meta', 'name'):
continue
if k not in inherited_attributes:
inherited_attributes[k] = v
if k not in original_item:
original_item[k] = v
template_fields.append(k)
if 'name' in original_item:
my_cache[raw_definition] = original_item
return original_item
def _get_items_in_file(self, filename):
""" Return all items in the given file
        Iterates through all elements in self.data and gathers all the items
defined in the queried filename.
Args:
filename: file from which are defined the items that will be
returned.
Returns:
A list containing all the items in self.data that were defined in
filename
"""
return_list = []
for k in self.data.keys():
for item in self[k]:
if item['meta']['filename'] == filename:
return_list.append(item)
return return_list
def get_new_item(self, object_type, filename):
""" Returns an empty item with all necessary metadata
Creates a new item dict and fills it with usual metadata:
* object_type : object_type (arg)
* filename : filename (arg)
* template_fields = []
* needs_commit = None
* delete_me = None
* defined_attributes = {}
* inherited_attributes = {}
* raw_definition = "define %s {\\n\\n} % object_type"
Args:
object_type: type of the object to be created (string)
filename: Path to which the item will be saved (string)
Returns:
A new item with default metadata
"""
meta = {
'object_type': object_type,
'filename': filename,
'template_fields': [],
'needs_commit': None,
'delete_me': None,
'defined_attributes': {},
'inherited_attributes': {},
'raw_definition': "define %s {\n\n}" % object_type,
}
return {'meta': meta}
def _load_file(self, filename):
""" Parses filename with self.parse_filename and append results in self._pre_object_list
This function is mostly here for backwards compatibility
Args:
filename: the file to be parsed. This is supposed to a nagios object definition file
"""
for i in self.parse_file(filename):
self.pre_object_list.append(i)
def parse_file(self, filename):
""" Parses a nagios object configuration file and returns lists of dictionaries.
This is more or less a wrapper around :py:meth:`config.parse_string`,
so reading documentation there is useful.
Args:
filename: Path to the file to parse (string)
Returns:
A list containing elements parsed by :py:meth:`parse_string`
"""
try:
raw_string = self.open(filename, 'rb').read()
return self.parse_string(raw_string, filename=filename)
except IOError:
t, e = sys.exc_info()[:2]
parser_error = ParserError(e.strerror)
parser_error.filename = e.filename
self.errors.append(parser_error)
return []
def parse_string(self, string, filename='None'):
""" Parses a string, and returns all object definitions in that string
Args:
string: A string containing one or more object definitions
filename (optional): If filename is provided, it will be referenced
when raising exceptions
Examples:
>>> test_string = "define host {\\nhost_name examplehost\\n}\\n"
>>> test_string += "define service {\\nhost_name examplehost\\nservice_description example service\\n}\\n"
            >>> c = Config()
>>> result = c.parse_string(test_string)
>>> for i in result: print i.get('host_name'), i.get('service_description', None)
examplehost None
examplehost example service
Returns:
A list of dictionaries, that look like self.data
Raises:
:py:class:`ParserError`
"""
append = ""
current = None
in_definition = {}
tmp_buffer = []
result = []
for sequence_no, line in enumerate(string.splitlines(False)):
line_num = sequence_no + 1
# If previous line ended with backslash, treat this line as a
# continuation of previous line
if append:
line = append + line
append = None
# Cleanup and line skips
line = line.strip()
if line == "":
continue
if line[0] == "#" or line[0] == ';':
continue
# If this line ends with a backslash, continue directly to next line
if line.endswith('\\'):
append = line.strip('\\')
continue
if line.startswith('}'): # end of object definition
if not in_definition:
p = ParserError("Unexpected '}' found outside object definition in line %s" % line_num)
p.filename = filename
p.line_start = line_num
raise p
in_definition = None
current['meta']['line_end'] = line_num
# Looks to me like nagios ignores everything after the } so why shouldn't we ?
rest = line.split("}", 1)[1]
tmp_buffer.append(line)
try:
current['meta']['raw_definition'] = '\n'.join(tmp_buffer)
except Exception:
raise ParserError("Encountered Unexpected end of object definition in file '%s'." % filename)
result.append(current)
# Destroy the Nagios Object
current = None
continue
elif line.startswith('define'): # beginning of object definition
if in_definition:
msg = "Unexpected 'define' in {filename} on line {line_num}. was expecting '}}'."
msg = msg.format(**locals())
self.errors.append(ParserError(msg, item=current))
m = self.__beginning_of_object.search(line)
tmp_buffer = [line]
object_type = m.groups()[0]
if self.strict and object_type not in self.object_type_keys.keys():
raise ParserError(
"Don't know any object definition of type '%s'. it is not in a list of known object definitions." % object_type)
current = self.get_new_item(object_type, filename)
current['meta']['line_start'] = line_num
# Start off an object
in_definition = True
# Looks to me like nagios ignores everything after the {, so why shouldn't we ?
rest = m.groups()[1]
continue
else: # In the middle of an object definition
tmp_buffer.append(' ' + line)
# save whatever's left in the buffer for the next iteration
if not in_definition:
append = line
continue
# this is an attribute inside an object definition
if in_definition:
#(key, value) = line.split(None, 1)
tmp = line.split(None, 1)
if len(tmp) > 1:
(key, value) = tmp
else:
key = tmp[0]
value = ""
# Strip out in-line comments
if value.find(";") != -1:
value = value.split(";", 1)[0]
# Clean info
key = key.strip()
value = value.strip()
# Rename some old values that may be in the configuration
# This can probably be removed in the future to increase performance
if (current['meta']['object_type'] == 'service') and key == 'description':
key = 'service_description'
# Special hack for timeperiods as they are not consistent with other objects
# We will treat whole line as a key with an empty value
if (current['meta']['object_type'] == 'timeperiod') and key not in ('timeperiod_name', 'alias'):
key = line
value = ''
current[key] = value
current['meta']['defined_attributes'][key] = value
# Something is wrong in the config
else:
raise ParserError("Error: Unexpected token in file '%s'" % filename)
# Something is wrong in the config
if in_definition:
raise ParserError("Error: Unexpected EOF in file '%s'" % filename)
return result
def _locate_item(self, item):
""" This is a helper function for anyone who wishes to modify objects.
It takes "item", locates the file which is configured in, and locates
exactly the lines which contain that definition.
Returns: (tuple)
(everything_before, object_definition, everything_after, filename):
            * everything_before (list of lines): Every line in filename before object was defined
            * object_definition (list of lines): Every line used to define our item in "filename"
            * everything_after (list of lines): Every line in "filename" after object was defined
            * filename (string): the file in which the object is defined
Raises:
:py:class:`ValueError` if object was not found in "filename"
"""
if "filename" in item['meta']:
filename = item['meta']['filename']
else:
raise ValueError("item does not have a filename")
# Look for our item, store it as my_item
for i in self.parse_file(filename):
if self.compareObjects(item, i):
my_item = i
break
else:
raise ValueError("We could not find object in %s\n%s" % (filename, item))
# Caller of this method expects to be returned
# several lists that describe the lines in our file.
# The splitting logic starts here.
my_file = self.open(filename)
all_lines = my_file.readlines()
my_file.close()
start = my_item['meta']['line_start'] - 1
end = my_item['meta']['line_end']
everything_before = all_lines[:start]
object_definition = all_lines[start:end]
everything_after = all_lines[end:]
# If there happen to be line continuations in the object we will edit
# We will remove them from object_definition
object_definition = self._clean_backslashes(object_definition)
return everything_before, object_definition, everything_after, filename
def _clean_backslashes(self, list_of_strings):
""" Returns list_of_strings with all all strings joined that ended with backslashes
Args:
list_of_strings: List of strings to join
Returns:
Another list of strings, which lines ending with \ joined together.
"""
tmp_buffer = ''
result = []
for i in list_of_strings:
if i.endswith('\\\n'):
tmp_buffer += i.strip('\\\n')
else:
result.append(tmp_buffer + i)
tmp_buffer = ''
return result
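        # Example sketch:
        #
        #   _clean_backslashes(["define host {\\\n", "  host_name x\n"])
        #   # -> ["define host {  host_name x\n"]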
def _modify_object(self, item, field_name=None, new_value=None, new_field_name=None, new_item=None,
make_comments=False):
""" Locates "item" and changes the line which contains field_name.
Helper function for object_* functions. Locates "item" and changes the
line which contains field_name. If new_value and new_field_name are both
None, the attribute is removed.
Args:
item(dict): The item to be modified
field_name(str): The field_name to modify (if any)
new_field_name(str): If set, field_name will be renamed
new_value(str): If set the value of field_name will be changed
new_item(str): If set, whole object will be replaced with this
string
make_comments: If set, put pynag-branded comments where changes
have been made
Returns:
True on success
Raises:
:py:class:`ValueError` if object or field_name is not found
:py:class:`IOError` is save is unsuccessful.
"""
if item is None:
return
if field_name is None and new_item is None:
raise ValueError("either field_name or new_item must be set")
if '\n' in str(new_value):
raise ValueError("Invalid character \\n used as an attribute value.")
everything_before, object_definition, everything_after, filename = self._locate_item(item)
if new_item is not None:
# We have instruction on how to write new object, so we dont need to parse it
object_definition = [new_item]
else:
change = None
value = None
i = 0
for i in range(len(object_definition)):
tmp = object_definition[i].split(None, 1)
if len(tmp) == 0:
continue
# Hack for timeperiods, they dont work like other objects
elif item['meta']['object_type'] == 'timeperiod' and field_name not in ('alias', 'timeperiod_name'):
tmp = [object_definition[i]]
# we can't change timeperiod, so we fake a field rename
if new_value is not None:
new_field_name = new_value
new_value = None
value = ''
elif len(tmp) == 1:
value = ''
else:
value = tmp[1]
k = tmp[0].strip()
if k == field_name:
# Attribute was found, lets change this line
if new_field_name is None and new_value is None:
# We take it that we are supposed to remove this attribute
change = object_definition.pop(i)
break
elif new_field_name:
# Field name has changed
k = new_field_name
if new_value is not None:
# value has changed
value = new_value
# Here we do the actual change
change = "\t%-30s%s\n" % (k, value)
if item['meta']['object_type'] == 'timeperiod' and field_name not in ('alias', 'timeperiod_name'):
change = "\t%s\n" % new_field_name
object_definition[i] = change
break
if not change and new_value is not None:
# Attribute was not found. Lets add it
change = "\t%-30s%s\n" % (field_name, new_value)
object_definition.insert(i, change)
# Lets put a banner in front of our item
if make_comments:
comment = '# Edited by PyNag on %s\n' % time.ctime()
if len(everything_before) > 0:
last_line_before = everything_before[-1]
if last_line_before.startswith('# Edited by PyNag on'):
everything_before.pop() # remove this line
object_definition.insert(0, comment)
# Here we overwrite the config-file, hoping not to ruin anything
str_buffer = "%s%s%s" % (''.join(everything_before), ''.join(object_definition), ''.join(everything_after))
self.write(filename, str_buffer)
return True
def open(self, filename, *args, **kwargs):
""" Wrapper around global open()
Simply calls global open(filename, *args, **kwargs) and passes all arguments
as they are received. See global open() function for more details.
"""
return open(filename, *args, **kwargs)
@pynag.Utils.synchronized(pynag.Utils.rlock)
def write(self, filename, string):
""" Wrapper around open(filename).write()
Writes string to filename and closes the file handler. File handler is
openned in `'w'` mode.
Args:
filename: File where *string* will be written. This is the path to
the file. (string)
string: String to be written to file. (string)
Returns:
Return code as returned by :py:meth:`os.write`
"""
fh = self.open(filename, 'w')
return_code = fh.write(string)
fh.flush()
# os.fsync(fh)
fh.close()
self._is_dirty = True
return return_code
def item_rewrite(self, item, str_new_item):
""" Completely rewrites item with string provided.
Args:
item: Item that is to be rewritten
str_new_item: str representation of the new item
..
In the following line, every "\\n" is actually a simple line break
This is only a little patch for the generated documentation.
Examples::
item_rewrite( item, "define service {\\n name example-service \\n register 0 \\n }\\n" )
Returns:
True on success
Raises:
:py:class:`ValueError` if object is not found
:py:class:`IOError` if save fails
"""
return self._modify_object(item=item, new_item=str_new_item)
def item_remove(self, item):
""" Delete one specific item from its configuration files
Args:
            item: Item that is to be removed
        Examples::
            item_remove( item )
Returns:
True on success
Raises:
:py:class:`ValueError` if object is not found
:py:class:`IOError` if save fails
"""
return self._modify_object(item=item, new_item="")
def item_edit_field(self, item, field_name, new_value):
""" Modifies one field of a (currently existing) object.
Changes are immediate (i.e. there is no commit)
Args:
item: Item to be modified. Its field `field_name` will be set to
`new_value`.
field_name: Name of the field that will be modified. (str)
new_value: Value to which will be set the field `field_name`. (str)
Example usage::
edit_object( item, field_name="host_name", new_value="examplehost.example.com") # doctest: +SKIP
Returns:
True on success
Raises:
:py:class:`ValueError` if object is not found
:py:class:`IOError` if save fails
"""
return self._modify_object(item, field_name=field_name, new_value=new_value)
def item_remove_field(self, item, field_name):
""" Removes one field of a (currently existing) object.
Changes are immediate (i.e. there is no commit)
Args:
item: Item to remove field from.
field_name: Field to remove. (string)
Example usage::
item_remove_field( item, field_name="contactgroups" )
Returns:
True on success
Raises:
:py:class:`ValueError` if object is not found
:py:class:`IOError` if save fails
"""
return self._modify_object(item=item, field_name=field_name, new_value=None, new_field_name=None)
def item_rename_field(self, item, old_field_name, new_field_name):
""" Renames a field of a (currently existing) item.
Changes are immediate (i.e. there is no commit).
Args:
item: Item to modify.
old_field_name: Name of the field that will have its name changed. (string)
new_field_name: New name given to `old_field_name` (string)
Example usage::
item_rename_field(item, old_field_name="normal_check_interval", new_field_name="check_interval")
Returns:
True on success
Raises:
:py:class:`ValueError` if object is not found
:py:class:`IOError` if save fails
"""
return self._modify_object(item=item, field_name=old_field_name, new_field_name=new_field_name)
def item_add(self, item, filename):
""" Adds a new object to a specified config file.
Args:
item: Item to be created
filename: Filename that we are supposed to write the new item to.
This is the path to the file. (string)
Returns:
True on success
Raises:
:py:class:`IOError` on failed save
"""
if not 'meta' in item:
item['meta'] = {}
item['meta']['filename'] = filename
# Create directory if it does not already exist
dirname = os.path.dirname(filename)
if not self.isdir(dirname):
os.makedirs(dirname)
str_buffer = self.print_conf(item)
fh = self.open(filename, 'a')
fh.write(str_buffer)
fh.close()
return True
def edit_object(self, item, field_name, new_value):
""" Modifies a (currently existing) item.
Changes are immediate (i.e. there is no commit)
Args:
item: Item to modify.
field_name: Field that will be updated.
new_value: Updated value of field `field_name`
Example Usage:
edit_object( item, field_name="host_name", new_value="examplehost.example.com")
Returns:
True on success
.. WARNING::
THIS FUNCTION IS DEPRECATED. USE item_edit_field() instead
"""
return self.item_edit_field(item=item, field_name=field_name, new_value=new_value)
def compareObjects(self, item1, item2):
""" Compares two items. Returns true if they are equal
Compares every key: value pair for both items. If anything is different,
the items will not be considered equal.
Args:
item1, item2: Items to be compared.
Returns:
True -- Items are equal
False -- Items are not equal
"""
keys1 = item1['meta']['defined_attributes'].keys()
keys2 = item2['meta']['defined_attributes'].keys()
keys1.sort()
keys2.sort()
result = True
if keys1 != keys2:
return False
for key in keys1:
if key == 'meta':
continue
key1 = item1[key]
key2 = item2[key]
# For our purpose, 30 is equal to 30.000
if key == 'check_interval':
key1 = int(float(key1))
key2 = int(float(key2))
if str(key1) != str(key2):
result = False
if result is False:
return False
return True
def edit_service(self, target_host, service_description, field_name, new_value):
""" Edit a service's attributes
Takes a host, service_description pair to identify the service to modify
and sets its field `field_name` to `new_value`.
Args:
target_host: name of the host to which the service is attached to. (string)
service_description: Service description of the service to modify. (string)
field_name: Field to modify. (string)
new_value: Value to which the `field_name` field will be updated (string)
Returns:
True on success
Raises:
:py:class:`ParserError` if the service is not found
"""
original_object = self.get_service(target_host, service_description)
if original_object is None:
raise ParserError("Service not found")
return self.edit_object(original_object, field_name, new_value)
def _get_list(self, item, key):
""" Return a comma list from an item
Args:
            item: Item from which to select value. (dict)
key: Field name of the value to select and return as a list. (string)
Example::
_get_list(Foo_object, host_name)
define service {
service_description Foo
host_name larry,curly,moe
}
returns
['larry','curly','moe']
Returns:
A list of the item's values of `key`
Raises:
:py:class:`ParserError` if item is not a dict
"""
if not isinstance(item, dict):
raise ParserError("%s is not a dictionary\n" % item)
# return []
if not key in item:
return []
return_list = []
if item[key].find(",") != -1:
for name in item[key].split(","):
return_list.append(name)
else:
return_list.append(item[key])
# Alphabetize
return_list.sort()
return return_list
def delete_object(self, object_type, object_name, user_key=None):
""" Delete object from configuration files
Args:
object_type: Type of the object to delete from configuration files.
object_name: Name of the object to delete from configuration files.
user_key: user_key to pass to :py:meth:`get_object`
Returns:
True on success.
"""
item = self.get_object(object_type=object_type, object_name=object_name, user_key=user_key)
return self.item_remove(item)
def delete_service(self, service_description, host_name):
""" Delete service from configuration files
Args:
service_description: service_description field value of the object
to delete from configuration files.
host_name: host_name field value of the object to delete from
configuration files.
Returns:
True on success.
"""
item = self.get_service(host_name, service_description)
return self.item_remove(item)
def delete_host(self, object_name, user_key=None):
""" Delete a host from its configuration files
Args:
object_name: object_name field value of the object to delete from
configuration files.
user_key: user_key to pass to :py:meth:`get_object`
Returns:
True on success.
"""
return self.delete_object('host', object_name, user_key=user_key)
def delete_hostgroup(self, object_name, user_key=None):
""" Delete a hostgroup from its configuration files
Args:
object_name: object_name field value of the object to delete from
configuration files.
user_key: user_key to pass to :py:meth:`get_object`
Returns:
True on success.
"""
return self.delete_object('hostgroup', object_name, user_key=user_key)
def get_object(self, object_type, object_name, user_key=None):
""" Return a complete object dictionary
Args:
            object_type: type of the object to look up (string)
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: User defined key. Default None. (string)
        Returns:
            The item found to match all the criteria.
None if object is not found
"""
object_key = self._get_key(object_type, user_key)
for item in self.data['all_%s' % object_type]:
if item.get(object_key, None) == object_name:
return item
return None
def get_host(self, object_name, user_key=None):
""" Return a host object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('host', object_name, user_key=user_key)
def get_servicegroup(self, object_name, user_key=None):
""" Return a Servicegroup object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('servicegroup', object_name, user_key=user_key)
def get_contact(self, object_name, user_key=None):
""" Return a Contact object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('contact', object_name, user_key=user_key)
def get_contactgroup(self, object_name, user_key=None):
""" Return a Contactgroup object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('contactgroup', object_name, user_key=user_key)
def get_timeperiod(self, object_name, user_key=None):
""" Return a Timeperiod object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('timeperiod', object_name, user_key=user_key)
def get_command(self, object_name, user_key=None):
""" Return a Command object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('command', object_name, user_key=user_key)
def get_hostgroup(self, object_name, user_key=None):
""" Return a hostgroup object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('hostgroup', object_name, user_key=user_key)
def get_servicedependency(self, object_name, user_key=None):
""" Return a servicedependency object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('servicedependency', object_name, user_key=user_key)
def get_hostdependency(self, object_name, user_key=None):
""" Return a hostdependency object
Args:
            object_name: object_name field value of the object to look up in the
                configuration files.
            user_key: user_key to pass to :py:meth:`get_object`
        Returns:
            The item found to match all the criteria.
"""
return self.get_object('hostdependency', object_name, user_key=user_key)
def get_service(self, target_host, service_description):
""" Return a service object
Args:
target_host: host_name field of the service to be returned. This is
the host to which is attached the service.
service_description: service_description field of the service to be
returned.
Returns:
            The item found to match all the criteria.
"""
for item in self.data['all_service']:
if item.get('service_description') == service_description and item.get('host_name') == target_host:
return item
return None
def _append_use(self, source_item, name):
""" Append attributes to source_item that are inherited via 'use' attribute'
Args:
source_item: item (dict) to apply the inheritance upon
name: obsolete (discovered automatically via source_item['use'].
Here for compatibility.
Returns:
Source Item with appended attributes.
Raises:
:py:class:`ParserError` on recursion errors
"""
# Remove the 'use' key
if "use" in source_item:
del source_item['use']
for possible_item in self.pre_object_list:
if "name" in possible_item:
# Start appending to the item
for k, v in possible_item.iteritems():
try:
if k == 'use':
source_item = self._append_use(source_item, v)
except Exception:
raise ParserError("Recursion error on %s %s" % (source_item, v))
# Only add the item if it doesn't already exist
if not k in source_item:
source_item[k] = v
return source_item
def _post_parse(self):
""" Creates a few optimization tweaks and easy access lists in self.data
Creates :py:attr:`config.item_apply_cache` and fills the all_object
item lists in self.data.
"""
self.item_list = None
        self.item_apply_cache = {}  # This is a performance tweak used by _apply_template
for raw_item in self.pre_object_list:
# Performance tweak, make sure hashmap exists for this object_type
object_type = raw_item['meta']['object_type']
if not object_type in self.item_apply_cache:
self.item_apply_cache[object_type] = {}
# Tweak ends
if "use" in raw_item:
raw_item = self._apply_template(raw_item)
self.post_object_list.append(raw_item)
# Add the items to the class lists.
for list_item in self.post_object_list:
type_list_name = "all_%s" % list_item['meta']['object_type']
if not type_list_name in self.data:
self.data[type_list_name] = []
self.data[type_list_name].append(list_item)
def commit(self):
""" Write any changes that have been made to it's appropriate file """
# Loops through ALL items
for k in self.data.keys():
for item in self[k]:
# If the object needs committing, commit it!
if item['meta']['needs_commit']:
# Create file contents as an empty string
file_contents = ""
# find any other items that may share this config file
extra_items = self._get_items_in_file(item['meta']['filename'])
if len(extra_items) > 0:
for commit_item in extra_items:
                            # Ignore items that are already set to be deleted
if commit_item['meta']['delete_me']:
continue
# Make sure we aren't adding this thing twice
if item != commit_item:
file_contents += self.print_conf(commit_item)
                    # This is the actual item that needs committing
if not item['meta']['delete_me']:
file_contents += self.print_conf(item)
# Write the file
filename = item['meta']['filename']
self.write(filename, file_contents)
# Recreate the item entry without the commit flag
self.data[k].remove(item)
item['meta']['needs_commit'] = None
self.data[k].append(item)
def flag_all_commit(self):
""" Flag every item in the configuration to be committed
This should probably only be used for debugging purposes
"""
for object_type in self.data.keys():
for item in self.data[object_type]:
item['meta']['needs_commit'] = True
def print_conf(self, item):
""" Return a string that can be used in a configuration file
Args:
item: Item to be dumped as a string.
Returns:
String representation of item.
"""
output = ""
# Header, to go on all files
output += "# Configuration file %s\n" % item['meta']['filename']
output += "# Edited by PyNag on %s\n" % time.ctime()
# Some hostgroup information
if "hostgroup_list" in item['meta']:
output += "# Hostgroups: %s\n" % ",".join(item['meta']['hostgroup_list'])
        # Some service information
if "service_list" in item['meta']:
output += "# Services: %s\n" % ",".join(item['meta']['service_list'])
        # Some service member information
if "service_members" in item['meta']:
output += "# Service Members: %s\n" % ",".join(item['meta']['service_members'])
if len(item['meta']['template_fields']) != 0:
output += "# Values from templates:\n"
for k in item['meta']['template_fields']:
output += "#\t %-30s %-30s\n" % (k, item[k])
output += "\n"
output += "define %s {\n" % item['meta']['object_type']
for k, v in item.iteritems():
if v is None:
# Skip entries with No value
continue
if k != 'meta':
if k not in item['meta']['template_fields']:
output += "\t %-30s %-30s\n" % (k, v)
output += "}\n\n"
return output
def _load_static_file(self, filename=None):
""" Load a general config file (like nagios.cfg) that has key=value config file format. Ignore comments
Arguments:
            filename: name of the file to parse, if None then nagios.cfg will be used
Returns:
a [ (key,value), (key,value) ] list
"""
result = []
if not filename:
filename = self.cfg_file
for line in self.open(filename).readlines():
# Strip out new line characters
line = line.strip()
# Skip blank lines
if line == "":
continue
# Skip comments
if line[0] == "#" or line[0] == ';':
continue
tmp = line.split("=", 1)
if len(tmp) < 2:
continue
key, value = tmp
key = key.strip()
value = value.strip()
result.append((key, value))
return result
def _edit_static_file(self, attribute, new_value, filename=None, old_value=None, append=False):
""" Modify a general config file (like nagios.cfg) that has a key=value config file format.
Arguments:
            filename: Name of config file that will be edited (e.g. nagios.cfg)
            attribute: name of attribute to edit (e.g. check_external_commands)
            new_value: new value for the said attribute (e.g. "1"). None deletes
                the line.
            old_value: Useful if multiple attributes exist (e.g. cfg_dir) and
you want to replace a specific one.
append: If true, do not overwrite current setting. Instead append
this at the end. Use this with settings that are repeated like
cfg_file.
Examples::
_edit_static_file(filename='/etc/nagios/nagios.cfg', attribute='check_external_commands', new_value='1')
_edit_static_file(filename='/etc/nagios/nagios.cfg', attribute='cfg_dir', new_value='/etc/nagios/okconfig', append=True)
"""
if filename is None:
filename = self.cfg_file
# For some specific attributes, append should be implied
if attribute in ('cfg_file', 'cfg_dir', 'broker_module'):
append = True
# If/when we make a change, new_line is what will be written
new_line = '%s=%s\n' % (attribute, new_value)
# new_value=None means line should be removed
if new_value is None:
new_line = ''
write_buffer = self.open(filename).readlines()
is_dirty = False # dirty if we make any changes
for i, line in enumerate(write_buffer):
# Strip out new line characters
line = line.strip()
# Skip blank lines
if line == "":
continue
# Skip comments
if line[0] == "#" or line[0] == ';':
continue
key, value = line.split("=", 1)
key = key.strip()
value = value.strip()
# If key does not match, we are not interested in this line
if key != attribute:
continue
            # If old_value was specified and it matches, we don't have to look any further
elif value == old_value:
write_buffer[i] = new_line
is_dirty = True
break
# if current value is the same as new_value, no need to make changes
elif value == new_value:
return False
# Special so cfg_dir matches despite double-slashes, etc
elif attribute == 'cfg_dir' and new_value and os.path.normpath(value) == os.path.normpath(new_value):
return False
# We are not appending, and no old value was specified:
elif append is False and not old_value:
write_buffer[i] = new_line
is_dirty = True
break
if is_dirty is False and new_value is not None:
            # If we get here, it means we read the whole file
            # and have not yet made any changes, so we assume
            # we should append to the file
write_buffer.append(new_line)
is_dirty = True
# When we get down here, it is time to write changes to file
if is_dirty is True:
str_buffer = ''.join(write_buffer)
self.write(filename, str_buffer)
return True
else:
return False
def needs_reload(self):
""" Checks if the Nagios service needs a reload.
Returns:
True if Nagios service needs reload of cfg files
False if reload not needed or Nagios is not running
"""
if not self.maincfg_values:
self.reset()
self.parse_maincfg()
new_timestamps = self.get_timestamps()
object_cache_file = self.get_cfg_value('object_cache_file')
if self._get_pid() is None:
return False
if not object_cache_file:
return True
if not self.isfile(object_cache_file):
return True
        object_cache_timestamp = new_timestamps.get(object_cache_file, 0)
for k, v in new_timestamps.items():
if not v or int(v) > object_cache_timestamp:
return True
return False
def needs_reparse(self):
""" Checks if the Nagios configuration needs to be reparsed.
Returns:
True if any Nagios configuration file has changed since last parse()
"""
# If Parse has never been run:
if self.data == {}:
return True
# If previous save operation has forced a reparse
if self._is_dirty is True:
return True
# If we get here, we check the timestamps of the configs
new_timestamps = self.get_timestamps()
if len(new_timestamps) != len(self.timestamps):
return True
for k, v in new_timestamps.items():
if self.timestamps.get(k, None) != v:
return True
return False
@pynag.Utils.synchronized(pynag.Utils.rlock)
def parse_maincfg(self):
""" Parses your main configuration (nagios.cfg) and stores it as key/value pairs in self.maincfg_values
This function is mainly used by config.parse() which also parses your
whole configuration set.
Raises:
py:class:`ConfigFileNotFound`
"""
        # If nagios.cfg is not set there is nothing we can do.
if self.cfg_file is None:
raise ConfigFileNotFound('Could not find nagios.cfg')
self.maincfg_values = self._load_static_file(self.cfg_file)
@pynag.Utils.synchronized(pynag.Utils.rlock)
def parse(self):
""" Parse all objects in your nagios configuration
        This function starts by loading up your nagios.cfg ( parse_maincfg() )
        then moving on to your object configuration files (as defined via
        cfg_file and cfg_dir) and your resource_file as well.
Returns:
None
Raises:
:py:class:`IOError` if unable to read any file due to permission
problems
"""
# reset
self.reset()
self.parse_maincfg()
self.cfg_files = self.get_cfg_files()
        # When parsing the config we will fail softly if permission is denied
        # on resource files. If someone later tries to fetch them via
        # get_resource, we will fail hard
try:
self._resource_values = self.get_resources()
except IOError:
t, e = sys.exc_info()[:2]
self.errors.append(str(e))
self.timestamps = self.get_timestamps()
        # This loads every configuration file into the parser
for cfg_file in self.cfg_files:
self._load_file(cfg_file)
self._post_parse()
self._is_dirty = False
def get_resource(self, resource_name):
""" Get a single resource value which can be located in any resource.cfg file
Arguments:
            resource_name: Name as it appears in resource file (e.g. $USER1$)
Returns:
String value of the resource value.
Raises:
:py:class:`KeyError` if resource is not found
:py:class:`ParserError` if resource is not found and you do not have
permissions
"""
resources = self.get_resources()
        for k, v in resources:
            if k == resource_name:
                return v
        raise KeyError(resource_name)
def get_timestamps(self):
""" Returns hash map of all nagios related files and their timestamps"""
files = {}
files[self.cfg_file] = None
for k, v in self.maincfg_values:
if k in ('resource_file', 'lock_file', 'object_cache_file'):
files[v] = None
for i in self.get_cfg_files():
files[i] = None
        # Now let's get the timestamp of every file
for k, v in files.items():
if not self.isfile(k):
continue
files[k] = self.stat(k).st_mtime
return files
def isfile(self, *args, **kwargs):
""" Wrapper around os.path.isfile """
return os.path.isfile(*args, **kwargs)
def isdir(self, *args, **kwargs):
""" Wrapper around os.path.isdir """
return os.path.isdir(*args, **kwargs)
def islink(self, *args, **kwargs):
""" Wrapper around os.path.islink """
return os.path.islink(*args, **kwargs)
    def readlink(self, *args, **kwargs):
""" Wrapper around os.readlink """
return os.readlink(*args, **kwargs)
    def stat(self, *args, **kwargs):
        """ Wrapper around os.stat """
return os.stat(*args, **kwargs)
def remove(self, *args, **kwargs):
""" Wrapper around os.remove """
return os.remove(*args, **kwargs)
def access(self, *args, **kwargs):
""" Wrapper around os.access """
return os.access(*args, **kwargs)
def listdir(self, *args, **kwargs):
""" Wrapper around os.listdir """
return os.listdir(*args, **kwargs)
def exists(self, *args, **kwargs):
""" Wrapper around os.path.exists """
return os.path.exists(*args, **kwargs)
def get_resources(self):
"""Returns a list of every private resources from nagios.cfg"""
resources = []
for config_object, config_value in self.maincfg_values:
if config_object == 'resource_file' and self.isfile(config_value):
resources += self._load_static_file(config_value)
return resources
def extended_parse(self):
""" This parse is used after the initial parse() command is run.
It is only needed if you want extended meta information about hosts or other objects
"""
# Do the initial parsing
self.parse()
# First, cycle through the hosts, and append hostgroup information
index = 0
for host in self.data['all_host']:
if host.get("register", None) == "0":
continue
if not "host_name" in host:
continue
if not "hostgroup_list" in self.data['all_host'][index]['meta']:
self.data['all_host'][index]['meta']['hostgroup_list'] = []
# Append any hostgroups that are directly listed in the host definition
if "hostgroups" in host:
for hostgroup_name in self._get_list(host, 'hostgroups'):
if not "hostgroup_list" in self.data['all_host'][index]['meta']:
self.data['all_host'][index]['meta']['hostgroup_list'] = []
if hostgroup_name not in self.data['all_host'][index]['meta']['hostgroup_list']:
self.data['all_host'][index]['meta']['hostgroup_list'].append(hostgroup_name)
# Append any services which reference this host
service_list = []
for service in self.data['all_service']:
if service.get("register", None) == "0":
continue
if not "service_description" in service:
continue
if host['host_name'] in self._get_active_hosts(service):
service_list.append(service['service_description'])
self.data['all_host'][index]['meta']['service_list'] = service_list
# Increment count
index += 1
# Loop through all hostgroups, appending them to their respective hosts
for hostgroup in self.data['all_hostgroup']:
for member in self._get_list(hostgroup, 'members'):
index = 0
for host in self.data['all_host']:
if not "host_name" in host:
continue
# Skip members that do not match
if host['host_name'] == member:
                        # Create the meta var if it doesn't exist
if not "hostgroup_list" in self.data['all_host'][index]['meta']:
self.data['all_host'][index]['meta']['hostgroup_list'] = []
if hostgroup['hostgroup_name'] not in self.data['all_host'][index]['meta']['hostgroup_list']:
self.data['all_host'][index]['meta']['hostgroup_list'].append(hostgroup['hostgroup_name'])
# Increment count
index += 1
# Expand service membership
index = 0
for service in self.data['all_service']:
# Find a list of hosts to negate from the final list
self.data['all_service'][index]['meta']['service_members'] = self._get_active_hosts(service)
# Increment count
index += 1
def _get_active_hosts(self, item):
""" Given an object, return a list of active hosts.
This will exclude hosts that are negated with a "!"
Args:
item: Item to obtain active hosts from.
Returns:
List of all the active hosts for `item`
"""
# First, generate the negation list
negate_hosts = []
# Hostgroups
if "hostgroup_name" in item:
for hostgroup_name in self._get_list(item, 'hostgroup_name'):
if hostgroup_name[0] == "!":
hostgroup_obj = self.get_hostgroup(hostgroup_name[1:])
negate_hosts.extend(self._get_list(hostgroup_obj, 'members'))
# Host Names
if "host_name" in item:
for host_name in self._get_list(item, 'host_name'):
if host_name[0] == "!":
negate_hosts.append(host_name[1:])
# Now get hosts that are actually listed
active_hosts = []
# Hostgroups
if "hostgroup_name" in item:
for hostgroup_name in self._get_list(item, 'hostgroup_name'):
if hostgroup_name[0] != "!":
active_hosts.extend(self._get_list(self.get_hostgroup(hostgroup_name), 'members'))
# Host Names
if "host_name" in item:
for host_name in self._get_list(item, 'host_name'):
if host_name[0] != "!":
active_hosts.append(host_name)
# Combine the lists
return_hosts = []
for active_host in active_hosts:
if active_host not in negate_hosts:
return_hosts.append(active_host)
return return_hosts
def get_cfg_dirs(self):
""" Parses the main config file for configuration directories
Returns:
List of all cfg directories used in this configuration
Example::
print(get_cfg_dirs())
['/etc/nagios/hosts','/etc/nagios/objects',...]
"""
cfg_dirs = []
for config_object, config_value in self.maincfg_values:
if config_object == "cfg_dir":
cfg_dirs.append(config_value)
return cfg_dirs
def get_cfg_files(self):
""" Return a list of all cfg files used in this configuration
        Filenames are normalised so that if nagios.cfg specifies relative
        filenames we will convert them to fully qualified filenames before returning.
Returns:
List of all configurations files used in the configuration.
Example:
print(get_cfg_files())
['/etc/nagios/hosts/host1.cfg','/etc/nagios/hosts/host2.cfg',...]
"""
cfg_files = []
for config_object, config_value in self.maincfg_values:
# Add cfg_file objects to cfg file list
if config_object == "cfg_file":
config_value = self.abspath(config_value)
if self.isfile(config_value):
cfg_files.append(config_value)
# Parse all files in a cfg directory
if config_object == "cfg_dir":
config_value = self.abspath(config_value)
directories = []
raw_file_list = []
directories.append(config_value)
# Walk through every subdirectory and add to our list
while directories:
current_directory = directories.pop(0)
                    # Nagios doesn't care if cfg_dir exists or not, so why should we?
if not self.isdir(current_directory):
continue
for item in self.listdir(current_directory):
# Append full path to file
item = "%s" % (os.path.join(current_directory, item.strip()))
if self.islink(item):
item = os.readlink(item)
if self.isdir(item):
directories.append(item)
if raw_file_list.count(item) < 1:
raw_file_list.append(item)
for raw_file in raw_file_list:
if raw_file.endswith('.cfg'):
if self.exists(raw_file) and not self.isdir(raw_file):
                            # Nagios doesn't care if cfg_file exists or not, so we will not throw errors
cfg_files.append(raw_file)
return cfg_files
def abspath(self, path):
""" Return the absolute path of a given relative path.
The current working directory is assumed to be the dirname of nagios.cfg
Args:
path: relative path to be transformed into absolute path. (string)
Returns:
Absolute path of given relative path.
Example:
>>> c = config(cfg_file="/etc/nagios/nagios.cfg")
>>> c.abspath('nagios.cfg')
'/etc/nagios/nagios.cfg'
>>> c.abspath('/etc/nagios/nagios.cfg')
'/etc/nagios/nagios.cfg'
"""
        if not isinstance(path, str):
            raise ValueError("Path must be a string, got %s instead" % type(path))
if path.startswith('/'):
return path
nagiosdir = os.path.dirname(self.cfg_file)
normpath = os.path.abspath(os.path.join(nagiosdir, path))
return normpath
def get_cfg_value(self, key):
""" Returns one specific value from your nagios.cfg file,
None if value is not found.
Arguments:
key: what attribute to fetch from nagios.cfg (example: "command_file" )
Returns:
            String of the first value found for the given key
Example:
>>> c = Config() # doctest: +SKIP
>>> log_file = c.get_cfg_value('log_file') # doctest: +SKIP
# Should return something like "/var/log/nagios/nagios.log"
"""
if not self.maincfg_values:
self.parse_maincfg()
for k, v in self.maincfg_values:
if k == key:
return v
return None
def get_object_types(self):
""" Returns a list of all discovered object types """
return map(lambda x: re.sub("all_", "", x), self.data.keys())
def cleanup(self):
""" Remove configuration files that have no configuration items """
for filename in self.cfg_files:
if not self.parse_file(filename): # parse_file returns empty list on empty files
self.remove(filename)
# If nagios.cfg specifies this file directly via cfg_file directive then...
for k, v in self.maincfg_values:
if k == 'cfg_file' and v == filename:
self._edit_static_file(k, old_value=v, new_value=None)
def __setitem__(self, key, item):
self.data[key] = item
def __getitem__(self, key):
return self.data[key]
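# A minimal usage sketch for the Config class above: parse the configuration,
# look a service up and render it back as cfg syntax. The path and the
# host/service names are hypothetical examples, not part of pynag itself.
def _example_config_usage():
    c = Config(cfg_file='/etc/nagios/nagios.cfg')  # hypothetical path
    c.parse()
    service = c.get_service('web01', 'HTTP')  # hypothetical host/service pair
    if service is not None:
        print(c.print_conf(service))
    # Re-parse if any configuration file changed on disk in the meantime
    if c.needs_reparse():
        c.parse()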
class Livestatus(object):
""" Wrapper around MK-Livestatus
Example usage::
s = Livestatus()
        for hostgroup in s.get_hostgroups():
print(hostgroup['name'], hostgroup['num_hosts'])
"""
def __init__(self, livestatus_socket_path=None, nagios_cfg_file=None, authuser=None):
""" Initilize a new instance of Livestatus
Args:
livestatus_socket_path: Path to livestatus socket (if none specified,
use one specified in nagios.cfg)
nagios_cfg_file: Path to your nagios.cfg. If None then try to
auto-detect
            authuser: If specified, all data is pulled with the access rights
                of that contact.
"""
self.nagios_cfg_file = nagios_cfg_file
self.error = None
if not livestatus_socket_path:
c = config(cfg_file=nagios_cfg_file)
c.parse_maincfg()
self.nagios_cfg_file = c.cfg_file
# Look for a broker_module line in the main config and parse its arguments
# One of the arguments is path to the file socket created
for k, v in c.maincfg_values:
if k == 'broker_module' and "livestatus.o" in v:
for arg in v.split()[1:]:
if arg.startswith('/') or '=' not in arg:
livestatus_socket_path = arg
break
else:
# If we get here, then we could not locate a broker_module argument
# that looked like a filename
msg = "No Livestatus socket defined. Make sure livestatus broker module is loaded."
raise ParserError(msg)
self.livestatus_socket_path = livestatus_socket_path
self.authuser = authuser
def test(self, raise_error=True):
""" Test if connection to livestatus socket is working
Args:
            raise_error: If set to True, raise exception if test fails, otherwise return False
Raises:
ParserError if raise_error == True and connection fails
Returns:
True -- Connection is OK
False -- there are problems and raise_error==False
"""
try:
self.query("GET hosts")
except Exception:
t, e = sys.exc_info()[:2]
self.error = e
if raise_error:
raise ParserError("got '%s' when testing livestatus socket. error was: '%s'" % (type(e), e))
else:
return False
return True
def _get_socket(self):
""" Returns a socket.socket() instance to communicate with livestatus
        Socket might be either a unix filesocket or a tcp socket depending on
        the content of :py:attr:`livestatus_socket_path`
Returns:
Socket to livestatus instance (socket.socket)
Raises:
:py:class:`LivestatusNotConfiguredException` on failed connection.
            :py:class:`ParserError` if the configured TCP address could not
                be parsed correctly.
"""
if not self.livestatus_socket_path:
msg = "We could not find path to MK livestatus socket file. Make sure MK livestatus is installed and configured"
raise LivestatusNotConfiguredException(msg)
try:
# If livestatus_socket_path contains a colon, then we assume that it is tcp socket instead of a local filesocket
if self.livestatus_socket_path.find(':') > 0:
address, tcp_port = self.livestatus_socket_path.split(':', 1)
if not tcp_port.isdigit():
                    msg = 'Could not parse host:port "%s". "%s" is not a valid tcp port.'
raise ParserError(msg % (self.livestatus_socket_path, tcp_port))
tcp_port = int(tcp_port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((address, tcp_port))
else:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.livestatus_socket_path)
return s
except IOError:
t, e = sys.exc_info()[:2]
msg = "%s while connecting to '%s'. Make sure nagios is running and mk_livestatus loaded."
raise ParserError(msg % (e, self.livestatus_socket_path))
def query(self, query, *args, **kwargs):
""" Performs LQL queries the livestatus socket
Queries are corrected and convienient default data are added to the
query before sending it to the socket.
Args:
query: Query to be passed to the livestatus socket (string)
            args, kwargs: Additional parameters that will be sent to
:py:meth:`pynag.Utils.grep_to_livestatus`. The result will be
appended to the query.
Returns:
Answer from livestatus. It will be in python format unless specified
otherwise.
Raises:
:py:class:`ParserError` if problems connecting to livestatus.
"""
# columns parameter is here for backwards compatibility only
kwargs.pop('columns', None)
        # We break the query up into a list of commands; before sending it to the socket
        # we will write it out one line per item in the list
query = query.split('\n')
query += pynag.Utils.grep_to_livestatus(*args, **kwargs)
# If no response header was specified, we add fixed16
response_header = None
if not filter(lambda x: x.startswith('ResponseHeader:'), query):
query.append("ResponseHeader: fixed16")
response_header = "fixed16"
# If no specific outputformat is requested, we will return in python format
python_format = False
if not filter(lambda x: x.startswith('OutputFormat:'), query):
query.append("OutputFormat: python")
python_format = True
# There is a bug in livestatus where if requesting Stats, then no column headers are sent from livestatus
        # In later versions, the headers are sent, but the output is corrupted.
#
# We maintain consistency by clinging on to the old bug, and if there are Stats in the output
# we will not ask for column headers
doing_stats = len(filter(lambda x: x.startswith('Stats:'), query)) > 0
if not filter(lambda x: x.startswith('Stats:'), query) and not filter(
lambda x: x.startswith('ColumnHeaders: on'), query):
query.append("ColumnHeaders: on")
# Check if we need to add authuser to the query
if not filter(lambda x: x.startswith('AuthUser:'), query) and self.authuser not in (None, ''):
query.append("AuthUser: %s" % self.authuser)
# When we reach here, we are done adding options to the query, so we convert to the string that will
# be sent to the livestatus socket
query = '\n'.join(query) + '\n'
self.last_query = query
#
# Lets create a socket and see if we can write to it
#
s = self._get_socket()
try:
s.send(query)
except IOError:
msg = "Could not write to socket '%s'. Make sure you have the right permissions"
raise ParserError(msg % self.livestatus_socket_path)
s.shutdown(socket.SHUT_WR)
tmp = s.makefile()
# Read the response header from livestatus
if response_header == "fixed16":
response_data = tmp.readline()
if len(response_data) == 0:
return []
return_code = response_data.split()[0]
if not return_code.startswith('2'):
error_message = tmp.readline().strip()
raise ParserError("Error '%s' from livestatus: %s" % (return_code, error_message))
answer = tmp.read()
        # We are done with the livestatus socket, let's close it
s.close()
if answer == '':
return []
# If something other than python format was requested, we return the answer as is
if python_format is False:
return answer
# If we reach down here, it means we are supposed to parse the output before returning it
try:
answer = eval(answer)
except Exception:
raise ParserError("Error, could not parse response from livestatus.\n%s" % answer)
# Workaround for livestatus bug, where column headers are not provided even if we asked for them
if doing_stats is True and len(answer) == 1:
return answer[0]
columns = answer.pop(0)
# Lets throw everything into a hashmap before we return
result = []
for line in answer:
tmp = {}
for i, column in enumerate(line):
column_name = columns[i]
tmp[column_name] = column
result.append(tmp)
return result
def get(self, table, *args, **kwargs):
""" Same as self.query('GET %s' % (table,))
Extra arguments will be appended to the query.
Args:
table: Table from which the data will be retrieved
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Example::
get('contacts', 'Columns: name alias')
Returns:
Answer from livestatus in python format.
"""
return self.query('GET %s' % (table,), *args, **kwargs)
def get_host(self, host_name):
""" Performs a GET query for a particular host
This performs::
'''GET hosts
Filter: host_name = %s''' % host_name
Args:
host_name: name of the host to obtain livestatus data from
Returns:
Answer from livestatus in python format.
"""
return self.query('GET hosts', 'Filter: host_name = %s' % host_name)[0]
def get_service(self, host_name, service_description):
""" Performs a GET query for a particular service
This performs::
'''GET services
Filter: host_name = %s
Filter: service_description = %s''' % (host_name, service_description)
Args:
host_name: name of the host the target service is attached to.
service_description: Description of the service to obtain livestatus
data from.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET services', 'Filter: host_name = %s' % host_name,
'Filter: description = %s' % service_description)[0]
def get_hosts(self, *args, **kwargs):
""" Performs a GET query for all hosts
This performs::
'''GET hosts %s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET hosts', *args, **kwargs)
def get_services(self, *args, **kwargs):
""" Performs a GET query for all services
This performs::
'''GET services
%s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET services', *args, **kwargs)
def get_hostgroups(self, *args, **kwargs):
""" Performs a GET query for all hostgroups
This performs::
'''GET hostgroups
%s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET hostgroups', *args, **kwargs)
def get_servicegroups(self, *args, **kwargs):
""" Performs a GET query for all servicegroups
This performs::
'''GET servicegroups
%s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET servicegroups', *args, **kwargs)
def get_contactgroups(self, *args, **kwargs):
""" Performs a GET query for all contactgroups
This performs::
'''GET contactgroups
%s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET contactgroups', *args, **kwargs)
def get_contacts(self, *args, **kwargs):
""" Performs a GET query for all contacts
This performs::
'''GET contacts
%s %s''' % (*args, **kwargs)
Args:
            args, kwargs: These will be appended to the end of the query to
                perform additional instructions.
Returns:
Answer from livestatus in python format.
"""
return self.query('GET contacts', *args, **kwargs)
def get_contact(self, contact_name):
""" Performs a GET query for a particular contact
This performs::
'''GET contacts
Filter: contact_name = %s''' % contact_name
Args:
contact_name: name of the contact to obtain livestatus data from
Returns:
Answer from livestatus in python format.
"""
return self.query('GET contacts', 'Filter: contact_name = %s' % contact_name)[0]
def get_servicegroup(self, name):
""" Performs a GET query for a particular servicegroup
This performs::
'''GET servicegroups
            Filter: name = %s''' % name
        Args:
            name: name of the servicegroup to obtain livestatus data from
Returns:
Answer from livestatus in python format.
"""
return self.query('GET servicegroups', 'Filter: name = %s' % name)[0]
def get_hostgroup(self, name):
""" Performs a GET query for a particular hostgroup
This performs::
'''GET hostgroups
            Filter: name = %s''' % name
        Args:
            name: name of the hostgroup to obtain livestatus data from
Returns:
Answer from livestatus in python format.
"""
return self.query('GET hostgroups', 'Filter: name = %s' % name)[0]
def get_contactgroup(self, name):
""" Performs a GET query for a particular contactgroup
This performs::
'''GET contactgroups
            Filter: name = %s''' % name
        Args:
            name: name of the contactgroup to obtain livestatus data from
Returns:
Answer from livestatus in python format.
"""
return self.query('GET contactgroups', 'Filter: name = %s' % name)[0]
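# A minimal usage sketch for the Livestatus wrapper above. The socket path is
# autodiscovered from nagios.cfg; the host and service names are hypothetical.
def _example_livestatus_usage():
    livestatus = Livestatus()
    # Raw LQL; every extra argument becomes one line of the query
    for host in livestatus.query('GET hosts', 'Filter: state = 1'):
        print(host.get('name'))
    # Convenience accessors built on top of query()
    print(livestatus.get_host('web01'))
    print(livestatus.get_service('web01', 'HTTP'))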
class RetentionDat(object):
""" Easy way to parse the content of retention.dat
After calling parse() contents of retention.dat are kept in self.data
Example Usage::
r = retention()
r.parse()
        print(r)
        print(r.data['info'])
"""
def __init__(self, filename=None, cfg_file=None):
""" Initilize a new instance of retention.dat
Args (you only need to provide one of these):
filename: path to your retention.dat file
cfg_file: path to your nagios.cfg file, path to retention.dat will
be looked up in this file
"""
# If filename is not provided, lets try to discover it from
# nagios.cfg
if filename is None:
c = config(cfg_file=cfg_file)
for key, value in c._load_static_file():
if key == "state_retention_file":
filename = value
self.filename = filename
self.data = None
def parse(self):
""" Parses your status.dat file and stores in a dictionary under self.data
Returns:
None
        Raises:
            :py:class:`ParserError`: if a problem arises while reading the file
            :py:class:`ParserError`: if the file is not found
            :py:class:`IOError`: if the file cannot be read
"""
self.data = {}
status = {} # Holds all attributes of a single item
key = None # if within definition, store everything before =
value = None # if within definition, store everything after =
if not self.filename:
raise ParserError("status.dat file not found")
lines = open(self.filename, 'rb').readlines()
for sequence_no, line in enumerate(lines):
line_num = sequence_no + 1
# Cleanup and line skips
line = line.strip()
if line == "":
pass
elif line[0] == "#" or line[0] == ';':
pass
elif line.find("{") != -1:
status = {}
status['meta'] = {}
status['meta']['type'] = line.split("{")[0].strip()
elif line.find("}") != -1:
# Status definition has finished, lets add it to
# self.data
if status['meta']['type'] not in self.data:
self.data[status['meta']['type']] = []
self.data[status['meta']['type']].append(status)
else:
tmp = line.split("=", 1)
if len(tmp) == 2:
(key, value) = line.split("=", 1)
status[key] = value
elif key == "long_plugin_output":
# special hack for long_output support. We get here if:
# * line does not contain {
# * line does not contain }
# * line does not contain =
# * last line parsed started with long_plugin_output=
status[key] += "\n" + line
else:
raise ParserError("Error on %s:%s: Could not parse line: %s" % (self.filename, line_num, line))
def __setitem__(self, key, item):
self.data[key] = item
def __getitem__(self, key):
return self.data[key]
def __str__(self):
if not self.data:
self.parse()
        str_buffer = "# Generated by pynag\n"
for datatype, datalist in self.data.items():
for item in datalist:
str_buffer += "%s {\n" % datatype
for attr, value in item.items():
str_buffer += "%s=%s\n" % (attr, value)
str_buffer += "}\n"
return str_buffer
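# A minimal usage sketch for RetentionDat. With no filename given, the
# state_retention_file value is looked up in nagios.cfg.
def _example_retentiondat_usage():
    r = RetentionDat()
    r.parse()
    for info in r.data.get('info', []):
        print(info)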
class StatusDat(RetentionDat):
""" Easy way to parse status.dat file from nagios
After calling parse() contents of status.dat are kept in status.data
Example usage::
>>> s = status()
>>> s.parse()
>>> keys = s.data.keys()
>>> 'info' in keys
True
>>> 'programstatus' in keys
True
>>> for service in s.data.get('servicestatus',[]):
... host_name=service.get('host_name', None)
... description=service.get('service_description',None)
"""
def __init__(self, filename=None, cfg_file=None):
""" Initilize a new instance of status
Args (you only need to provide one of these):
filename: path to your status.dat file
cfg_file: path to your nagios.cfg file, path to status.dat will be
looked up in this file
"""
# If filename is not provided, lets try to discover it from
# nagios.cfg
if filename is None:
c = config(cfg_file=cfg_file)
for key, value in c._load_static_file():
if key == "status_file":
filename = value
self.filename = filename
self.data = None
def get_contactstatus(self, contact_name):
""" Returns a dictionary derived from status.dat for one particular contact
Args:
contact_name: `contact_name` field of the contact's status.dat data
to parse and return as a dict.
Returns:
dict derived from status.dat for the contact.
Raises:
ValueError if object is not found
Example:
>>> s = status()
            >>> s.get_contactstatus(contact_name='invalid_contact')
            Traceback (most recent call last):
            ...
            ValueError: invalid_contact
>>> first_contact = s.data['contactstatus'][0]['contact_name']
>>> s.get_contactstatus(first_contact)['contact_name'] == first_contact
True
"""
if self.data is None:
self.parse()
for i in self.data['contactstatus']:
if i.get('contact_name') == contact_name:
return i
        raise ValueError(contact_name)
def get_hoststatus(self, host_name):
""" Returns a dictionary derived from status.dat for one particular contact
Args:
host_name: `host_name` field of the host's status.dat data
to parse and return as a dict.
Returns:
dict derived from status.dat for the host.
Raises:
ValueError if object is not found
"""
if self.data is None:
self.parse()
for i in self.data['hoststatus']:
if i.get('host_name') == host_name:
return i
raise ValueError(host_name)
def get_servicestatus(self, host_name, service_description):
""" Returns a dictionary derived from status.dat for one particular service
        Args:
            host_name: `host_name` field of the service's status.dat data
            service_description: `service_description` field of the service's
                status.dat data to parse and return as a dict.
Returns:
dict derived from status.dat for the service.
Raises:
ValueError if object is not found
"""
if self.data is None:
self.parse()
for i in self.data['servicestatus']:
if i.get('host_name') == host_name:
if i.get('service_description') == service_description:
return i
raise ValueError(host_name, service_description)
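# A minimal usage sketch for StatusDat; the host and service names are
# hypothetical, and both lookups raise ValueError when nothing matches.
def _example_statusdat_usage():
    s = StatusDat()
    s.parse()
    print(s.get_hoststatus('web01').get('current_state'))
    print(s.get_servicestatus('web01', 'HTTP').get('plugin_output'))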
class ObjectCache(Config):
""" Loads the configuration as it appears in objects.cache file """
def get_cfg_files(self):
for k, v in self.maincfg_values:
if k == 'object_cache_file':
return [v]
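# A minimal usage sketch for ObjectCache: load the pre-expanded objects.cache
# file instead of the raw configuration; its path is taken from nagios.cfg.
def _example_objectcache_usage():
    oc = ObjectCache()
    oc.parse()
    print(oc.get_object_types())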
class ParserError(Exception):
""" ParserError is used for errors that the Parser has when parsing config.
Typical usecase when there is a critical error while trying to read configuration.
"""
filename = None
line_start = None
message = None
def __init__(self, message, item=None):
""" Creates an instance of ParserError
Args:
message: Message to be printed by the error
item: Pynag item who caused the error
"""
self.message = message
if item is None:
return
self.item = item
self.filename = item['meta']['filename']
self.line_start = item['meta'].get('line_start')
def __str__(self):
message = self.message
if self.filename and self.line_start:
message = '%s in %s, line %s' % (message, self.filename, self.line_start)
return repr(message)
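# A minimal sketch of how ParserError renders when constructed with the item
# that caused the failure; filename and line_start come from the item's meta.
def _example_parsererror_usage():
    fake_item = {'meta': {'filename': 'hosts.cfg', 'line_start': 12}}  # hypothetical item
    error = ParserError("Unexpected '}'", item=fake_item)
    print(error)  # "Unexpected '}' in hosts.cfg, line 12"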
class ConfigFileNotFound(ParserError):
""" This exception is thrown if we cannot locate any nagios.cfg-style config file. """
pass
class LivestatusNotConfiguredException(ParserError):
""" This exception is raised if we tried to autodiscover path to livestatus and failed """
class LogFiles(object):
""" Parses Logfiles defined in nagios.cfg and allows easy access to its content
Content is stored in python-friendly arrays of dicts. Output should be more
or less compatible with mk_livestatus log output
"""
def __init__(self, maincfg=None):
self.config = config(maincfg)
self.log_file = self.config.get_cfg_value('log_file')
self.log_archive_path = self.config.get_cfg_value('log_archive_path')
def get_log_entries(self, start_time=None, end_time=None, strict=True, search=None, **kwargs):
""" Get Parsed log entries for given timeperiod.
Args:
start_time: unix timestamp. if None, return all entries from today
end_time: If specified, only fetch log entries older than this (unix
timestamp)
strict: If True, only return entries between start_time and
                end_time, if False, then return entries that belong to the same
                log files as the given timespan
search: If provided, only return log entries that contain this
string (case insensitive)
kwargs: All extra arguments are provided as filter on the log
                entries, e.g. host_name="localhost"
Returns:
List of dicts
"""
now = time.time()
if end_time is None:
end_time = now
if start_time is None:
if 'filename' in kwargs:
start_time = 1
else:
seconds_in_a_day = 60 * 60 * 24
seconds_today = end_time % seconds_in_a_day # midnight of today
start_time = end_time - seconds_today
start_time = int(start_time)
end_time = int(end_time)
logfiles = self.get_logfiles()
if 'filename' in kwargs:
logfiles = filter(lambda x: x == kwargs.get('filename'), logfiles)
        # If start time was provided, skip all files that were last modified
        # before start_time
if start_time:
logfiles = filter(lambda x: start_time <= os.stat(x).st_mtime, logfiles)
# Log entries are returned in ascending order, which is the opposite of
# what get_logfiles returns.
logfiles.reverse()
result = []
for log_file in logfiles:
entries = self._parse_log_file(filename=log_file)
if len(entries) == 0:
continue
first_entry = entries[0]
last_entry = entries[-1]
if first_entry['time'] > end_time:
continue
# If strict, filter entries to only include the ones in the timespan
if strict is True:
entries = [x for x in entries if x['time'] >= start_time and x['time'] <= end_time]
# If search string provided, filter the string
if search is not None:
entries = [x for x in entries if x['message'].lower().find(search.lower()) > -1]
for k, v in kwargs.items():
entries = [x for x in entries if x.get(k) == v]
result += entries
if start_time is None or int(start_time) >= int(first_entry.get('time')):
continue
        # Now, logfiles should in MOST cases come sorted for us.
        # However we rely on the modification time of files, and if it is off
        # we want to make sure log entries are coming in the correct order.
# The following sort should not impact performance in the typical use case.
result.sort(key=lambda x: x.get('time'))
return result
def get_logfiles(self):
""" Returns a list with the fullpath to every log file used by nagios.
Lists are sorted by modification times. Newest logfile is at the front
of the list so usually nagios.log comes first, followed by archivelogs
Returns:
List of strings
"""
logfiles = []
for filename in os.listdir(self.log_archive_path):
full_path = "%s/%s" % (self.log_archive_path, filename)
logfiles.append(full_path)
logfiles.append(self.log_file)
        # Sort the logfiles by modification time, newest file at the front
        logfiles.sort(key=lambda x: int(os.stat(x).st_mtime))
# Newest logfiles go to the front of the list
logfiles.reverse()
return logfiles
def get_flap_alerts(self, **kwargs):
""" Same as :py:meth:`get_log_entries`, except return timeperiod transitions.
Takes same parameters.
"""
return self.get_log_entries(class_name="timeperiod transition", **kwargs)
def get_notifications(self, **kwargs):
""" Same as :py:meth:`get_log_entries`, except return only notifications.
Takes same parameters.
"""
return self.get_log_entries(class_name="notification", **kwargs)
def get_state_history(self, start_time=None, end_time=None, host_name=None, strict=True, service_description=None):
""" Returns a list of dicts, with the state history of hosts and services.
Args:
start_time: unix timestamp. if None, return all entries from today
end_time: If specified, only fetch log entries older than this (unix
timestamp)
            host_name: If provided, only return log entries for this host
            service_description: If provided, only return log entries for this
                service
Returns:
List of dicts with state history of hosts and services
"""
log_entries = self.get_log_entries(start_time=start_time, end_time=end_time, strict=strict, class_name='alerts')
result = []
last_state = {}
now = time.time()
for line in log_entries:
if 'state' not in line:
continue
line['duration'] = now - int(line.get('time'))
if host_name is not None and host_name != line.get('host_name'):
continue
if service_description is not None and service_description != line.get('service_description'):
continue
if start_time is None:
start_time = int(line.get('time'))
short_name = "%s/%s" % (line['host_name'], line['service_description'])
if short_name in last_state:
last = last_state[short_name]
last['end_time'] = line['time']
last['duration'] = last['end_time'] - last['time']
line['previous_state'] = last['state']
last_state[short_name] = line
if strict is True:
if start_time is not None and int(start_time) > int(line.get('time')):
continue
if end_time is not None and int(end_time) < int(line.get('time')):
continue
result.append(line)
return result
def _parse_log_file(self, filename=None):
""" Parses one particular nagios logfile into arrays of dicts.
Args:
            filename: Log file to be parsed. If None, then the log_file from
                nagios.cfg is used.
Returns:
A list of dicts containing all data from the log file
"""
if filename is None:
filename = self.log_file
result = []
for line in open(filename).readlines():
parsed_entry = self._parse_log_line(line)
if parsed_entry != {}:
parsed_entry['filename'] = filename
result.append(parsed_entry)
return result
def _parse_log_line(self, line):
""" Parse one particular line in nagios logfile and return a dict.
Args:
line: Line of the log file to be parsed.
Returns:
dict containing the information from the log file line.
"""
host = None
service_description = None
state = None
check_attempt = None
plugin_output = None
contact = None
m = re.search('^\[(.*?)\] (.*?): (.*)', line)
if m is None:
return {}
line = line.strip()
timestamp, logtype, options = m.groups()
result = {}
try:
timestamp = int(timestamp)
except ValueError:
timestamp = 0
result['time'] = int(timestamp)
result['type'] = logtype
result['options'] = options
result['message'] = line
result['class'] = 0 # unknown
result['class_name'] = 'unclassified'
if logtype in ('CURRENT HOST STATE', 'CURRENT SERVICE STATE', 'SERVICE ALERT', 'HOST ALERT'):
result['class'] = 1
result['class_name'] = 'alerts'
if logtype.find('HOST') > -1:
# This matches host current state:
m = re.search('(.*?);(.*?);(.*);(.*?);(.*)', options)
if m is None:
return result
host, state, hard, check_attempt, plugin_output = m.groups()
service_description = None
if logtype.find('SERVICE') > -1:
m = re.search('(.*?);(.*?);(.*?);(.*?);(.*?);(.*)', options)
if m is None:
return result
host, service_description, state, hard, check_attempt, plugin_output = m.groups()
result['host_name'] = host
result['service_description'] = service_description
result['state'] = int(pynag.Plugins.state[state])
result['check_attempt'] = check_attempt
result['plugin_output'] = plugin_output
result['text'] = plugin_output
elif "NOTIFICATION" in logtype:
result['class'] = 3
result['class_name'] = 'notification'
if logtype == 'SERVICE NOTIFICATION':
m = re.search('(.*?);(.*?);(.*?);(.*?);(.*?);(.*)', options)
if m is None:
return result
contact, host, service_description, state, command, plugin_output = m.groups()
elif logtype == 'HOST NOTIFICATION':
m = re.search('(.*?);(.*?);(.*?);(.*?);(.*)', options)
if m is None:
return result
contact, host, state, command, plugin_output = m.groups()
service_description = None
result['contact_name'] = contact
result['host_name'] = host
result['service_description'] = service_description
try:
result['state'] = int(pynag.Plugins.state[state])
except Exception:
result['state'] = -1
result['plugin_output'] = plugin_output
result['text'] = plugin_output
elif logtype == "EXTERNAL COMMAND":
result['class'] = 5
result['class_name'] = 'command'
m = re.search('(.*?);(.*)', options)
if m is None:
return result
command_name, text = m.groups()
result['command_name'] = command_name
result['text'] = text
elif logtype in ('PASSIVE SERVICE CHECK', 'PASSIVE HOST CHECK'):
result['class'] = 4
result['class_name'] = 'passive'
if logtype.find('HOST') > -1:
# This matches host current state:
m = re.search('(.*?);(.*?);(.*)', options)
if m is None:
return result
host, state, plugin_output = m.groups()
service_description = None
if logtype.find('SERVICE') > -1:
m = re.search('(.*?);(.*?);(.*?);(.*)', options)
if m is None:
return result
host, service_description, state, plugin_output = m.groups()
result['host_name'] = host
result['service_description'] = service_description
result['state'] = state
result['plugin_output'] = plugin_output
result['text'] = plugin_output
elif logtype in ('SERVICE FLAPPING ALERT', 'HOST FLAPPING ALERT'):
result['class_name'] = 'flapping'
elif logtype == 'TIMEPERIOD TRANSITION':
result['class_name'] = 'timeperiod_transition'
elif logtype == 'Warning':
result['class_name'] = 'warning'
result['state'] = "1"
result['text'] = options
if 'text' not in result:
result['text'] = result['options']
result['log_class'] = result['class'] # since class is a python keyword
return result
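# A minimal usage sketch for LogFiles: today's alert entries for one
# (hypothetical) host, plus its computed state history.
def _example_logfiles_usage():
    log = LogFiles()
    for entry in log.get_log_entries(class_name='alerts', host_name='web01'):
        print("%s %s" % (entry['time'], entry['message']))
    history = log.get_state_history(host_name='web01')
    print(len(history))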
class ExtraOptsParser(object):
""" Get Nagios Extra-Opts from a config file as specified by http://nagiosplugins.org/extra-opts
    We could ALMOST use python's ConfigParser, but the nagios plugin team thought it would be a
    good idea to support multiple values per key, so a plain dict datatype no longer works.
    It's a shame, because we have to make our own "ini" parser as a result
Usage::
# cat /etc/nagios/plugins.ini
[main]
host_name = localhost
[other section]
host_name = example.com
# EOF
e = ExtraOptsParser(section_name='main', config_file='/etc/nagios/plugins.ini')
e.get('host_name') # returns "localhost"
e.get_values() # Returns a dict of all the extra opts
e.getlist('host_name') # returns all values of host_name (if more than one were specified) in a list
"""
standard_locations = [
"/etc/nagios/plugins.ini",
"/usr/local/nagios/etc/plugins.ini",
"/usr/local/etc/nagios/plugins.ini",
"/etc/opt/nagios/plugins.ini",
"/etc/nagios-plugins.ini",
"/usr/local/etc/nagios-plugins.ini",
"/etc/opt/nagios-plugins.ini",
]
def __init__(self, section_name=None, config_file=None):
if not section_name:
section_name = self.get_default_section_name()
if not config_file:
config_file = self.get_default_config_file()
self.section_name = section_name
self.config_file = config_file
self._all_options = self.parse_file(filename=config_file) or {}
def get_values(self):
""" Returns a dict with all extra-options with the granted section_name and config_file
Results are in the form of::
{
'key': ["possible","values"]
}
"""
return self._all_options.get(self.section_name, {})
def get_default_section_name(self):
""" According to extra-opts standard, the default should be filename of check script being run """
return os.path.basename(sys.argv[0])
def get_default_config_file(self):
""" Return path to first readable extra-opt config-file found
According to the nagiosplugins extra-opts spec the search method is as follows:
            1. Search for plugins.ini or nagios-plugins.ini in the colon-separated NAGIOS_CONFIG_PATH environment variable
2. Search in a predefined list of files
3. Return None if no config file is found
The method works as follows:
To quote the spec on NAGIOS_CONFIG_PATH:
*"To use a custom location, set a NAGIOS_CONFIG_PATH environment
variable to the set of directories that should be checked (this is a
colon-separated list just like PATH). The first plugins.ini or
nagios-plugins.ini file found in these directories will be used."*
"""
search_path = []
nagios_config_path = os.environ.get('NAGIOS_CONFIG_PATH', '')
for path in nagios_config_path.split(':'):
search_path.append(os.path.join(path, 'plugins.ini'))
search_path.append(os.path.join(path, 'nagios-plugins.ini'))
search_path += self.standard_locations
self.search_path = search_path
for path in search_path:
if os.path.isfile(path):
return path
return None
def get(self, option_name, default=_sentinel):
""" Return the value of one specific option
        Args:
            option_name: The value set to this option will be returned
            default: Value to return if the option was not found
Returns:
The value of `option_name`
Raises:
:py:class:`ValueError` when `option_name` cannot be found in options
"""
result = self.getlist(option_name, default)
# If option was not found, raise error
if result == _sentinel:
raise ValueError("Option named %s was not found" % (option_name))
elif result == default:
return result
elif not result:
# empty list
return result
else:
return result[0]
def getlist(self, option_name, default=_sentinel):
""" Return a list of all values for option_name
        Args:
            option_name: All the values set to this option will be returned
            default: Value to return if the option was not found
Returns:
List containing all the options set to `option_name`
Raises:
:py:class:`ValueError` when `option_name` cannot be found in options
"""
result = self.get_values().get(option_name, default)
if result == _sentinel:
raise ValueError("Option named %s was not found" % (option_name))
return result
def parse_file(self, filename):
""" Parses an ini-file and returns a dict of the ini values.
The datatype returned is a list of sections where each section is a
dict of values.
Args:
filename: Full path to the ini-file to be parsed.
            Example: the following file::
[main]
name = this is a name
key = value
key = value2
            Would return::
                {'main':
                    {
                        'name': ['this is a name'],
                        'key': ['value', 'value2']
                    }
                }
"""
if filename is None:
return {}
f = open(filename)
try:
data = f.read()
return self.parse_string(data)
finally:
f.close()
def parse_string(self, string):
""" Parses a string that is supposed to be ini-style format.
See :py:meth:`parse_file` for more info
Args:
string: String to be parsed. Should be in ini-file format.
Returns:
            Dictionary containing all the sections of the ini-file and their
respective data.
Raises:
:py:class:`ParserError` when line does not follow the ini format.
"""
sections = {}
        # While parsing inside a section, its name is stored here.
        section_name = None
        current_section = pynag.Utils.defaultdict(list)
        for line_no, line in enumerate(string.splitlines()):
line = line.strip()
# skip empty lines
if not line or line[0] in ('#', ';'):
continue
# Check if this is a new section
if line.startswith('[') and line.endswith(']'):
section_name = line.strip('[').strip(']').strip()
current_section = pynag.Utils.defaultdict(list)
sections[section_name] = current_section
continue
# All entries should have key=value format
if not '=' in line:
                error = "Line %s should be in key=value format (got '%s' instead)" % (line_no, line)
raise ParserError(error)
# If we reach here, we parse current line into key and a value section
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
sections[section_name][key].append(value)
return sections
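# A minimal usage sketch for ExtraOptsParser fed from a string via
# parse_string(), so no ini-file on disk is needed; the section name and
# keys are hypothetical.
def _example_extraopts_usage():
    e = ExtraOptsParser(section_name='main', config_file=None)
    e._all_options = e.parse_string("[main]\nhost_name = localhost\nhost_name = example.com")
    print(e.get('host_name'))      # first value only: 'localhost'
    print(e.getlist('host_name'))  # every value: ['localhost', 'example.com']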
class SshConfig(Config):
""" Parse object configuration files from remote host via ssh
Uses python-paramiko for ssh connections.
"""
def __init__(self, host, username, password=None, cfg_file=None):
""" Creates a SshConfig instance
Args:
host: Host to connect to
username: User to connect with
password: Password for `username`
cfg_file: Nagios main cfg file
"""
import paramiko
self.ssh = paramiko.SSHClient()
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh.connect(host, username=username, password=password)
self.ftp = self.ssh.open_sftp()
import cStringIO
c = cStringIO.StringIO()
self.tar = tarfile.open(mode='w', fileobj=c)
self.cached_stats = {}
super(SshConfig, self).__init__(cfg_file=cfg_file)
def open(self, filename, *args, **kwargs):
""" Behaves like file.open only, via ssh connection """
        tarinfo = self._get_file(filename)
        return self.tar.extractfile(tarinfo)
def add_to_tar(self, path):
"""
"""
print "Taring ", path
command = "find '{path}' -type f | tar -c -T - --to-stdout --absolute-names"
command = command.format(path=path)
print command
stdin, stdout, stderr = self.ssh.exec_command(command, bufsize=50000)
tar = tarfile.open(fileobj=stdout, mode='r|')
if not self.tar:
self.tar = tar
else:
for i in tar:
self.tar.addfile(i)
def is_cached(self, filename):
if not self.tar:
return False
return filename in self.tar.getnames()
def _get_file(self, filename):
""" Download filename and return the TarInfo object """
if filename not in self.tar.getnames():
self.add_to_tar(filename)
return self.tar.getmember(filename)
def get_cfg_files(self):
cfg_files = []
for config_object, config_value in self.maincfg_values:
# Add cfg_file objects to cfg file list
if config_object == "cfg_file":
config_value = self.abspath(config_value)
if self.isfile(config_value):
cfg_files.append(config_value)
elif config_object == "cfg_dir":
                absolute_path = self.abspath(config_value)
                command = "find '%s' -type f -iname \*cfg" % (absolute_path,)
stdin, stdout, stderr = self.ssh.exec_command(command)
raw_filelist = stdout.read().splitlines()
cfg_files += raw_filelist
else:
continue
if not self.is_cached(config_value):
self.add_to_tar(config_value)
return cfg_files
def isfile(self, path):
""" Behaves like os.path.isfile only, via ssh connection """
try:
copy = self._get_file(path)
return copy.isfile()
except IOError:
return False
def isdir(self, path):
""" Behaves like os.path.isdir only, via ssh connection """
try:
file_stat = self.stat(path)
return stat.S_ISDIR(file_stat.st_mode)
except IOError:
return False
def islink(self, path):
""" Behaves like os.path.islink only, via ssh connection """
try:
file_stat = self.stat(path)
return stat.S_ISLNK(file_stat.st_mode)
except IOError:
return False
def readlink(self, path):
""" Behaves like os.readlink only, via ssh connection """
return self.ftp.readlink(path)
def stat(self, *args, **kwargs):
""" Wrapper around os.stat only, via ssh connection """
path = args[0]
if not self.is_cached(path):
self.add_to_tar(path)
if path not in self.tar.getnames():
raise IOError("No such file or directory %s" % path)
member = self.tar.getmember(path)
member.st_mode = member.mode
member.st_mtime = member.mtime
return member
def access(self, *args, **kwargs):
""" Wrapper around os.access only, via ssh connection """
return os.access(*args, **kwargs)
def exists(self, path):
""" Wrapper around os.path.exists only, via ssh connection """
try:
self.ftp.stat(path)
return True
except IOError:
return False
def listdir(self, *args, **kwargs):
""" Wrapper around os.listdir but via ssh connection """
stats = self.ftp.listdir_attr(*args, **kwargs)
for i in stats:
self.cached_stats[args[0] + "/" + i.filename] = i
files = map(lambda x: x.filename, stats)
return files
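# A minimal usage sketch for SshConfig: parse a remote nagios configuration
# over ssh. Host, credentials and path are hypothetical; paramiko is required.
def _example_sshconfig_usage():
    remote = SshConfig(host='nagios.example.com', username='nagios',
                       password='secret', cfg_file='/etc/nagios/nagios.cfg')
    remote.parse()
    print(len(remote.get_cfg_files()))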
class MultiSite(Livestatus):
""" Wrapps around multiple Livesatus instances and aggregates the results
of queries.
Example:
>>> m = MultiSite()
>>> m.add_backend(path='/var/spool/nagios/livestatus.socket', name='local')
>>> m.add_backend(path='127.0.0.1:5992', name='remote')
"""
def __init__(self, *args, **kwargs):
super(MultiSite, self).__init__(*args, **kwargs)
self.backends = {}
def add_backend(self, path, name):
""" Add a new livestatus backend to this instance.
Arguments:
path (str): Path to file socket or remote address
name (str): Friendly shortname for this backend
"""
backend = Livestatus(
livestatus_socket_path=path,
nagios_cfg_file=self.nagios_cfg_file,
authuser=self.authuser
)
self.backends[name] = backend
def get_backends(self):
""" Returns a list of mk_livestatus instances
Returns:
list. List of mk_livestatus instances
"""
return self.backends
def get_backend(self, backend_name):
""" Return one specific backend that has previously been added
"""
if not backend_name:
return self.backends.values()[0]
try:
return self.backends[backend_name]
except KeyError:
raise ParserError("No backend found with name='%s'" % backend_name)
def query(self, query, *args, **kwargs):
""" Behaves like mk_livestatus.query() except results are aggregated from multiple backends
Arguments:
backend (str): If specified, fetch only data from this backend (see add_backend())
*args: Passed directly to mk_livestatus.query()
**kwargs: Passed directly to mk_livestatus.query()
"""
result = []
backend = kwargs.pop('backend', None)
        # Special hack: if a 'Stats' argument was provided to livestatus,
        # we have to maintain compatibility with old versions of livestatus
        # and return a single list with all results instead of a list of dicts
doing_stats = any(map(lambda x: x.startswith('Stats:'), args + (query,)))
        # Iterate through all backends and run the query
# TODO: Make this multithreaded
for name, backend_instance in self.backends.items():
# Skip if a specific backend was requested and this is not it
if backend and backend != name:
continue
query_result = backend_instance.query(query, *args, **kwargs)
if doing_stats:
result = self._merge_statistics(result, query_result)
else:
for row in query_result:
row['backend'] = name
result.append(row)
return result
def _merge_statistics(self, list1, list2):
""" Merges multiple livestatus results into one result
Arguments:
list1 (list): List of integers
list2 (list): List of integers
Returns:
list. Aggregated results of list1 + list2
Example:
>>> result1 = [1,1,1,1]
>>> result2 = [2,2,2,2]
>>> MultiSite()._merge_statistics(result1, result2)
[3, 3, 3, 3]
"""
if not list1:
return list2
if not list2:
return list1
number_of_columns = len(list1)
result = [0] * number_of_columns
for row in (list1, list2):
for i, column in enumerate(row):
result[i] += column
return result
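    # _merge_statistics() assumes purely numeric Stats columns; results are
    # folded in pairwise, which is what lets query() reduce any number of
    # backends into a single aggregated row.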
def get_host(self, host_name, backend=None):
""" Same as Livestatus.get_host() """
backend = self.get_backend(backend)
return backend.get_host(host_name)
def get_service(self, host_name, service_description, backend=None):
""" Same as Livestatus.get_service() """
backend = self.get_backend(backend)
return backend.get_service(host_name, service_description)
def get_contact(self, contact_name, backend=None):
""" Same as Livestatus.get_contact() """
backend = self.get_backend(backend)
return backend.get_contact(contact_name)
def get_contactgroup(self, contactgroup_name, backend=None):
""" Same as Livestatus.get_contact() """
backend = self.get_backend(backend)
return backend.get_contactgroup(contactgroup_name)
def get_servicegroup(self, servicegroup_name, backend=None):
""" Same as Livestatus.get_servicegroup() """
backend = self.get_backend(backend)
return backend.get_servicegroup(servicegroup_name)
def get_hostgroup(self, hostgroup_name, backend=None):
""" Same as Livestatus.get_hostgroup() """
backend = self.get_backend(backend)
return backend.get_hostgroup(hostgroup_name)
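# Rough sketch of aggregated queries (socket paths and names are illustrative):
#
#   m = MultiSite()
#   m.add_backend(path='/var/spool/nagios/livestatus.socket', name='local')
#   m.add_backend(path='127.0.0.1:5992', name='remote')
#   rows = m.query('GET hosts')                       # rows from every backend
#   local_rows = m.query('GET hosts', backend='local')
#
# Each non-Stats row is tagged with a 'backend' key; Stats results are summed
# column-wise via _merge_statistics().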
class config(Config):
""" This class is here only for backwards compatibility. Use Config instead. """
class mk_livestatus(Livestatus):
""" This class is here only for backwards compatibility. Use Livestatus instead. """
class object_cache(ObjectCache):
""" This class is here only for backwards compatibility. Use ObjectCache instead. """
class status(StatusDat):
""" This class is here only for backwards compatibility. Use StatusDat instead. """
class retention(RetentionDat):
""" This class is here only for backwards compatibility. Use RetentionDat instead. """
if __name__ == '__main__':
import time
start = time.time()
ssh = SshConfig(host='status.adagios.org', username='palli')
ssh.ssh.get_transport().window_size = 3 * 1024 * 1024
ssh.ssh.get_transport().use_compression()
# ssh.add_to_tar('/etc/nagios')
# sys.exit()
# ssh.ssh.exec_command("/bin/ls")
print "before reset"
ssh.parse()
end = time.time()
print "duration=", end - start
bland = ssh.tar.getmember('/etc/nagios/okconfig/hosts/web-servers/bland.is-http.cfg')
print bland.tobuf()
sys.exit(0)
print "ssh up"
ssh_conn = FastTransport(('status.adagios.org', 22))
ssh_conn.connect(username='palli')
ftp = paramiko.SFTPClient.from_transport(ssh_conn)
print "connected" \
""
ssh.ssh = ssh_conn
ssh.ftp = ftp
print "starting parse"
print "done parsing"<|fim▁end|> | def stat(self, *args, **kwargs): |
<|file_name|>transfer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
""" Product transfer management """
# pylint: enable=E1101
from decimal import Decimal
from kiwi.currency import currency
from storm.expr import Join, LeftJoin, Sum, Cast, Coalesce, And
from storm.info import ClassAlias
from storm.references import Reference
from zope.interface import implementer
from stoqlib.database.expr import NullIf
from stoqlib.database.properties import (DateTimeCol, IdCol, IdentifierCol,
IntCol, PriceCol, QuantityCol,
UnicodeCol, EnumCol)
from stoqlib.database.viewable import Viewable
from stoqlib.domain.base import Domain
from stoqlib.domain.fiscal import Invoice
from stoqlib.domain.product import ProductHistory, StockTransactionHistory
from stoqlib.domain.person import Person, Branch, Company
from stoqlib.domain.interfaces import IContainer, IInvoice, IInvoiceItem
from stoqlib.domain.sellable import Sellable
from stoqlib.domain.taxes import InvoiceItemIcms, InvoiceItemIpi
from stoqlib.lib.dateutils import localnow
from stoqlib.lib.translation import stoqlib_gettext
_ = stoqlib_gettext
@implementer(IInvoiceItem)
class TransferOrderItem(Domain):
"""Transfer order item
"""
<|fim▁hole|>
sellable_id = IdCol()
    # FIXME: This should be a product, since it does not make sense to transfer
    # services
#: The |sellable| to transfer
sellable = Reference(sellable_id, 'Sellable.id')
batch_id = IdCol()
#: If the sellable is a storable, the |batch| that was transfered
batch = Reference(batch_id, 'StorableBatch.id')
transfer_order_id = IdCol()
#: The |transfer| this item belongs to
transfer_order = Reference(transfer_order_id, 'TransferOrder.id')
#: The quantity to transfer
quantity = QuantityCol()
#: Average cost of the item in the source branch at the time of transfer.
stock_cost = PriceCol(default=0)
icms_info_id = IdCol()
#: the :class:`stoqlib.domain.taxes.InvoiceItemIcms` tax for *self*
icms_info = Reference(icms_info_id, 'InvoiceItemIcms.id')
ipi_info_id = IdCol()
#: the :class:`stoqlib.domain.taxes.InvoiceItemIpi` tax for *self*
ipi_info = Reference(ipi_info_id, 'InvoiceItemIpi.id')
item_discount = Decimal('0')
def __init__(self, store=None, **kwargs):
        if 'sellable' not in kwargs:
raise TypeError('You must provide a sellable argument')
kwargs['ipi_info'] = InvoiceItemIpi(store=store)
kwargs['icms_info'] = InvoiceItemIcms(store=store)
super(TransferOrderItem, self).__init__(store=store, **kwargs)
product = self.sellable.product
if product:
self.ipi_info.set_item_tax(self)
self.icms_info.set_item_tax(self)
#
# IInvoiceItem implementation
#
@property
def parent(self):
return self.transfer_order
@property
def base_price(self):
return self.stock_cost
@property
def price(self):
return self.stock_cost
@property
def nfe_cfop_code(self):
source_branch = self.transfer_order.source_branch
source_address = source_branch.person.get_main_address()
destination_branch = self.transfer_order.destination_branch
destination_address = destination_branch.person.get_main_address()
        same_state = (source_address.city_location.state ==
                      destination_address.city_location.state)
if same_state:
return u'5152'
else:
return u'6152'
#
# Public API
#
def get_total(self):
"""Returns the total cost of a transfer item eg quantity * cost"""
return self.quantity * self.sellable.cost
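    # Note that get_total() uses the sellable's *current* cost, while the
    # invoice subtotal on TransferOrder is computed from the stock_cost frozen
    # when the item was added.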
def send(self):
"""Sends this item to it's destination |branch|.
This method should never be used directly, and to send a transfer you
should use TransferOrder.send().
"""
product = self.sellable.product
if product.manage_stock:
storable = product.storable
storable.decrease_stock(self.quantity,
self.transfer_order.source_branch,
StockTransactionHistory.TYPE_TRANSFER_TO,
self.id, batch=self.batch)
ProductHistory.add_transfered_item(self.store,
self.transfer_order.source_branch,
self)
def receive(self):
"""Receives this item, increasing the quantity in the stock.
This method should never be used directly, and to receive a transfer
you should use TransferOrder.receive().
"""
product = self.sellable.product
if product.manage_stock:
storable = product.storable
storable.increase_stock(self.quantity,
self.transfer_order.destination_branch,
StockTransactionHistory.TYPE_TRANSFER_FROM,
self.id, unit_cost=self.stock_cost,
batch=self.batch)
@implementer(IContainer)
@implementer(IInvoice)
class TransferOrder(Domain):
""" Transfer Order class
"""
__storm_table__ = 'transfer_order'
STATUS_PENDING = u'pending'
STATUS_SENT = u'sent'
STATUS_RECEIVED = u'received'
statuses = {STATUS_PENDING: _(u'Pending'),
STATUS_SENT: _(u'Sent'),
STATUS_RECEIVED: _(u'Received')}
status = EnumCol(default=STATUS_PENDING)
#: A numeric identifier for this object. This value should be used instead
#: of :obj:`Domain.id` when displaying a numerical representation of this
#: object to the user, in dialogs, lists, reports and such.
identifier = IdentifierCol()
#: The date the order was created
open_date = DateTimeCol(default_factory=localnow)
#: The date the order was received
receival_date = DateTimeCol()
#: The invoice number of the transfer
invoice_number = IntCol()
#: Comments of a transfer
comments = UnicodeCol()
source_branch_id = IdCol()
#: The |branch| sending the stock
source_branch = Reference(source_branch_id, 'Branch.id')
destination_branch_id = IdCol()
#: The |branch| receiving the stock
destination_branch = Reference(destination_branch_id, 'Branch.id')
source_responsible_id = IdCol()
#: The |employee| responsible for the |transfer| at source |branch|
source_responsible = Reference(source_responsible_id, 'Employee.id')
destination_responsible_id = IdCol()
#: The |employee| responsible for the |transfer| at destination |branch|
destination_responsible = Reference(destination_responsible_id,
'Employee.id')
#: |payments| generated by this transfer
payments = None
#: |transporter| used in transfer
transporter = None
invoice_id = IdCol()
#: The |invoice| generated by the transfer
invoice = Reference(invoice_id, 'Invoice.id')
def __init__(self, store=None, **kwargs):
kwargs['invoice'] = Invoice(store=store, invoice_type=Invoice.TYPE_OUT)
super(TransferOrder, self).__init__(store=store, **kwargs)
#
# IContainer implementation
#
def get_items(self):
return self.store.find(TransferOrderItem, transfer_order=self)
def add_item(self, item):
assert self.status == self.STATUS_PENDING
item.transfer_order = self
def remove_item(self, item):
if item.transfer_order is not self:
raise ValueError(_('The item does not belong to this '
'transfer order'))
item.transfer_order = None
self.store.maybe_remove(item)
#
# IInvoice implementation
#
@property
def discount_value(self):
return currency(0)
@property
def invoice_subtotal(self):
subtotal = self.get_items().sum(TransferOrderItem.quantity *
TransferOrderItem.stock_cost)
return currency(subtotal)
@property
def invoice_total(self):
return self.invoice_subtotal
@property
def recipient(self):
return self.destination_branch.person
@property
def operation_nature(self):
# TODO: Save the operation nature in new transfer_order table field
return _(u"Transfer")
#
# Public API
#
@property
def branch(self):
return self.source_branch
@property
def status_str(self):
        return self.statuses[self.status]
def add_sellable(self, sellable, batch, quantity=1, cost=None):
"""Add the given |sellable| to this |transfer|.
        :param sellable: The |sellable| we are transferring
        :param batch: What |batch| of the storable (represented by sellable) we
            are transferring.
        :param quantity: The quantity of this product that is being transferred.
        :param cost: Optional unit cost; when not given, the stock cost at the
            source branch (or the sellable's cost) is used.
        """
assert self.status == self.STATUS_PENDING
self.validate_batch(batch, sellable=sellable)
product = sellable.product
if product.manage_stock:
stock_item = product.storable.get_stock_item(
self.source_branch, batch)
stock_cost = stock_item.stock_cost
else:
stock_cost = sellable.cost
return TransferOrderItem(store=self.store,
transfer_order=self,
sellable=sellable,
batch=batch,
quantity=quantity,
stock_cost=cost or stock_cost)
def can_send(self):
return (self.status == self.STATUS_PENDING and
self.get_items().count() > 0)
def can_receive(self):
return self.status == self.STATUS_SENT
def send(self):
"""Sends a transfer order to the destination branch.
"""
assert self.can_send()
for item in self.get_items():
item.send()
# Save invoice number, operation_nature and branch in Invoice table.
self.invoice.invoice_number = self.invoice_number
self.invoice.operation_nature = self.operation_nature
self.invoice.branch = self.branch
self.status = self.STATUS_SENT
def receive(self, responsible, receival_date=None):
"""Confirms the receiving of the transfer order.
"""
assert self.can_receive()
for item in self.get_items():
item.receive()
self.receival_date = receival_date or localnow()
self.destination_responsible = responsible
self.status = self.STATUS_RECEIVED
@classmethod
def get_pending_transfers(cls, store, branch):
"""Get all the transfers that need to be recieved
Get all transfers that have STATUS_SENT and the current branch as the destination
This is useful if you want to list all the items that need to be
recieved in a certain branch
"""
return store.find(cls, And(cls.status == cls.STATUS_SENT,
cls.destination_branch == branch))
def get_source_branch_name(self):
"""Returns the source |branch| name"""
return self.source_branch.get_description()
def get_destination_branch_name(self):
"""Returns the destination |branch| name"""
return self.destination_branch.get_description()
def get_source_responsible_name(self):
"""Returns the name of the |employee| responsible for the transfer
at source |branch|
"""
return self.source_responsible.person.name
def get_destination_responsible_name(self):
"""Returns the name of the |employee| responsible for the transfer
at destination |branch|
"""
if not self.destination_responsible:
return u''
return self.destination_responsible.person.name
def get_total_items_transfer(self):
"""Retuns the |transferitems| quantity
"""
return sum([item.quantity for item in self.get_items()], 0)
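# A rough lifecycle sketch (store, branches, employees and sellable are assumed
# to already exist; this is illustrative, not part of the Stoq API surface):
#
#   order = TransferOrder(store=store,
#                         source_branch=source_branch,
#                         destination_branch=destination_branch,
#                         source_responsible=employee)
#   order.add_sellable(sellable, batch=None, quantity=5)
#   if order.can_send():
#       order.send()                    # decreases stock at the source branch
#   if order.can_receive():
#       order.receive(responsible=destination_employee)  # increases stock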
class BaseTransferView(Viewable):
BranchDest = ClassAlias(Branch, 'branch_dest')
PersonDest = ClassAlias(Person, 'person_dest')
CompanyDest = ClassAlias(Company, 'company_dest')
transfer_order = TransferOrder
identifier = TransferOrder.identifier
identifier_str = Cast(TransferOrder.identifier, 'text')
status = TransferOrder.status
open_date = TransferOrder.open_date
receival_date = TransferOrder.receival_date
source_branch_id = TransferOrder.source_branch_id
destination_branch_id = TransferOrder.destination_branch_id
source_branch_name = Coalesce(NullIf(Company.fancy_name, u''), Person.name)
destination_branch_name = Coalesce(NullIf(CompanyDest.fancy_name, u''),
PersonDest.name)
group_by = [TransferOrder, source_branch_name, destination_branch_name]
tables = [
TransferOrder,
Join(TransferOrderItem,
TransferOrder.id == TransferOrderItem.transfer_order_id),
# Source
LeftJoin(Branch, TransferOrder.source_branch_id == Branch.id),
LeftJoin(Person, Branch.person_id == Person.id),
LeftJoin(Company, Company.person_id == Person.id),
# Destination
LeftJoin(BranchDest, TransferOrder.destination_branch_id == BranchDest.id),
LeftJoin(PersonDest, BranchDest.person_id == PersonDest.id),
LeftJoin(CompanyDest, CompanyDest.person_id == PersonDest.id),
]
@property
def branch(self):
# We need this property for the acronym to appear in the identifier
return self.store.get(Branch, self.source_branch_id)
class TransferOrderView(BaseTransferView):
id = TransferOrder.id
# Aggregates
total_items = Sum(TransferOrderItem.quantity)
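# These viewables are meant to be queried through a store, e.g. (sketch):
#
#   for view in store.find(TransferOrderView):
#       print view.identifier_str, view.source_branch_name, view.total_items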
class TransferItemView(BaseTransferView):
id = TransferOrderItem.id
item_quantity = TransferOrderItem.quantity
item_description = Sellable.description
group_by = BaseTransferView.group_by[:]
group_by.extend([TransferOrderItem, Sellable])
tables = BaseTransferView.tables[:]
tables.append(Join(Sellable, Sellable.id == TransferOrderItem.sellable_id))<|fim▁end|> | __storm_table__ = 'transfer_order_item' |
<|file_name|>addons.py<|end_file_name|><|fim▁begin|>from base import IfbyphoneApiBase
class Addons(IfbyphoneApiBase):
def list(self):
"""List all purchased Addons for an account
"""
self.options['action'] = 'addons.list'
return self.call(self.options)
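    # e.g. (assuming credentials were already configured on IfbyphoneApiBase):
    #   addons = Addons()
    #   response = addons.list()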
def purchase(self, **kwargs):
"""Purchase an addon for an account
keyword arguments:<|fim▁hole|> """
self.options.update(kwargs)
self.options['action'] = 'addons.purchase'
return self.call(self.options)<|fim▁end|> | item_id -- ID number of desired addon
qty -- the quantity of the addon
send_receipt -- set to 1 to send a receipt to account email
|
<|file_name|>clear_cache.py<|end_file_name|><|fim▁begin|>"""Clears the Cache"""
from django.core.cache import cache
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Clears the Cache
"""
help = "Clears the Cache"
def handle(self, **options):
"""Clears the Cache"""
cache.clear()<|fim▁hole|><|fim▁end|> | self.stdout.write('Cleared cache\n') |
<|file_name|>Solution.java<|end_file_name|><|fim▁begin|>import java.util.Scanner;
/**
* @author Oleg Cherednik
* @since 13.07.2018
*/
public class Solution {
static int palindromeIndex(String s) {
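        // On the first mismatch (i, j), check whether s[i..j-1] (dropping the
        // right character) reads as a palindrome: if it does, j is the
        // removable index; otherwise removing i must work. Returns -1 when s
        // is already a palindrome.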
for (int i = 0, j = s.length() - 1; i < j; i++, j--) {
if (s.charAt(i) == s.charAt(j))
continue;
for (int k = i, m = j - 1; k < m; k++, m--)
if (s.charAt(k) != s.charAt(m))
return i;
return j;
}
return -1;
}
private static final Scanner scanner = new Scanner(System.in);
<|fim▁hole|> scanner.skip("(\r\n|[\n\r\u2028\u2029\u0085])?");
for (int qItr = 0; qItr < q; qItr++) {
String s = scanner.nextLine();
int result = palindromeIndex(s);
System.out.println(String.valueOf(result));
}
scanner.close();
}
}<|fim▁end|> | public static void main(String[] args) {
int q = scanner.nextInt(); |
<|file_name|>shapepack.py<|end_file_name|><|fim▁begin|>import woo.core, woo.dem
from woo.dem import *
import woo.utils
from minieigen import *
from math import *<|fim▁hole|>for p in [woo.utils.sphere((0,0,0),1,mat=m),woo.utils.ellipsoid((0,0,0),semiAxes=(.8,1,1.2),mat=m),woo.utils.ellipsoid((0,0,0),semiAxes=(1.,1.,1.),mat=m),woo.utils.capsule((0,0,0),radius=.8,shaft=.6,mat=m)]:
print 100*'#'
print p.shape
#S=woo.core.Scene(fields=[DemField()])
#S.dem.par.add(p)
sp=woo.dem.ShapePack()
sp.add([p.shape,zeroSphere.shape])
r=sp.raws[0]
if isinstance(r,SphereClumpGeom):
for i in range(len(r.radii)): print r.centers[i],r.radii[i]
else:
for rr in r.rawShapes: print rr,rr.className,rr.center,rr.radius,rr.raw
# print [i for i in r.rawShapes]
r.recompute(div=10)
print 'equivRad',r.equivRad,p.shape.equivRadius
print 'volume',r.volume,p.mass/m.density
print 'inertia',r.inertia,p.inertia/m.density
print 'pos',r.pos,p.pos
print 'ori',r.ori,p.ori
print 50*'='
ee=p.shape
print ee
print 'volume',ee.volume
print 'equivRadius',ee.equivRadius
rr=(ee.volume/((4/3.)*pi))**(1/3.)
print 'sphere radius of the same volume',rr
print 'sphere volume',(4/3.)*pi*rr**3<|fim▁end|> | from woo import utils
m=woo.utils.defaultMaterial()
zeroSphere=woo.utils.sphere((0,0,0),.4) # sphere which is entirely inside the thing |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># follow/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from datetime import datetime, timedelta
from django.db import models
from election.models import ElectionManager
from exception.models import handle_exception, handle_record_found_more_than_one_exception,\
handle_record_not_found_exception, handle_record_not_saved_exception, print_to_log
from issue.models import IssueManager
from organization.models import OrganizationManager
import pytz
import wevote_functions.admin
from wevote_functions.functions import positive_value_exists
from voter.models import VoterManager
FOLLOWING = 'FOLLOWING'
STOP_FOLLOWING = 'STOP_FOLLOWING'
FOLLOW_IGNORE = 'FOLLOW_IGNORE'
STOP_IGNORING = 'STOP_IGNORING'
FOLLOWING_CHOICES = (
(FOLLOWING, 'Following'),
(STOP_FOLLOWING, 'Not Following'),
(FOLLOW_IGNORE, 'Ignoring'),
(STOP_IGNORING, 'Not Ignoring'),
)
# Kinds of lists of suggested organization
UPDATE_SUGGESTIONS_FROM_TWITTER_IDS_I_FOLLOW = 'UPDATE_SUGGESTIONS_FROM_TWITTER_IDS_I_FOLLOW'
UPDATE_SUGGESTIONS_FROM_WHAT_FRIENDS_FOLLOW = 'UPDATE_SUGGESTIONS_FROM_WHAT_FRIENDS_FOLLOW'
UPDATE_SUGGESTIONS_FROM_WHAT_FRIENDS_FOLLOW_ON_TWITTER = \
'UPDATE_SUGGESTIONS_FROM_WHAT_FRIENDS_FOLLOW_ON_TWITTER'
UPDATE_SUGGESTIONS_FROM_WHAT_FRIEND_FOLLOWS = 'UPDATE_SUGGESTIONS_FROM_WHAT_FRIEND_FOLLOWS'
UPDATE_SUGGESTIONS_FROM_WHAT_FRIEND_FOLLOWS_ON_TWITTER = \
'UPDATE_SUGGESTIONS_FROM_WHAT_FRIEND_FOLLOWS_ON_TWITTER'
UPDATE_SUGGESTIONS_ALL = 'UPDATE_SUGGESTIONS_ALL'
FOLLOW_SUGGESTIONS_FROM_TWITTER_IDS_I_FOLLOW = 'FOLLOW_SUGGESTIONS_FROM_TWITTER_IDS_I_FOLLOW'
FOLLOW_SUGGESTIONS_FROM_FRIENDS = 'FOLLOW_SUGGESTIONS_FROM_FRIENDS'
FOLLOW_SUGGESTIONS_FROM_FRIENDS_ON_TWITTER = 'FOLLOW_SUGGESTIONS_FROM_FRIENDS_ON_TWITTER'
logger = wevote_functions.admin.get_logger(__name__)
class FollowCampaignX(models.Model):
voter_we_vote_id = models.CharField(max_length=255, null=True, blank=True, unique=False, db_index=True)
organization_we_vote_id = models.CharField(max_length=255, null=True, blank=True, unique=False)
campaignx_id = models.PositiveIntegerField(null=True, blank=True)
campaignx_we_vote_id = models.CharField(max_length=255, null=True, blank=True, unique=False)
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=True, auto_now=True, db_index=True)
class FollowCampaignXManager(models.Manager):
def __unicode__(self):
return "FollowCampaignXManager"
def toggle_on_follow_campaignx(self, voter_we_vote_id, issue_id, issue_we_vote_id, following_status):
follow_campaignx_on_stage_found = False
follow_campaignx_changed = False
follow_campaignx_on_stage_id = 0
follow_campaignx_on_stage = FollowIssue()
status = ''
issue_identifier_exists = positive_value_exists(issue_we_vote_id) or positive_value_exists(issue_id)
if not positive_value_exists(voter_we_vote_id) and not issue_identifier_exists:
results = {
'success': True if follow_campaignx_on_stage_found else False,
'status': 'Insufficient inputs to toggle issue link, try passing ids for voter and issue ',
'follow_campaignx_found': follow_campaignx_on_stage_found,
'follow_campaignx_id': follow_campaignx_on_stage_id,
'follow_campaignx': follow_campaignx_on_stage,
}
return results
# Does a follow_campaignx entry exist from this voter already exist?
        follow_campaignx_manager = FollowCampaignXManager()
follow_campaignx_id = 0
results = follow_campaignx_manager.retrieve_follow_campaignx(follow_campaignx_id, voter_we_vote_id, issue_id,
issue_we_vote_id)
if results['MultipleObjectsReturned']:
status += 'TOGGLE_FOLLOWING_ISSUE MultipleObjectsReturned ' + following_status
delete_results = follow_campaignx_manager.delete_follow_campaignx(
follow_campaignx_id, voter_we_vote_id, issue_id, issue_we_vote_id)
status += delete_results['status']
results = follow_campaignx_manager.retrieve_follow_campaignx(follow_campaignx_id, voter_we_vote_id, issue_id,
issue_we_vote_id)
if results['follow_campaignx_found']:
follow_campaignx_on_stage = results['follow_campaignx']
# Update this follow_campaignx entry with new values - we do not delete because we might be able to use
try:
follow_campaignx_on_stage.following_status = following_status
                # We don't need to update here because we set auto_now=True on the field
# follow_campaignx_on_stage.date_last_changed =
follow_campaignx_on_stage.save()
follow_campaignx_changed = True
follow_campaignx_on_stage_id = follow_campaignx_on_stage.id
follow_campaignx_on_stage_found = True
status += 'FOLLOW_STATUS_UPDATED_AS ' + following_status
except Exception as e:
status += 'FAILED_TO_UPDATE ' + following_status
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
elif results['DoesNotExist']:
try:
# Create new follow_campaignx entry
# First make sure that issue_id is for a valid issue
issue_manager = IssueManager()
if positive_value_exists(issue_id):
results = issue_manager.retrieve_issue(issue_id)
else:
results = issue_manager.retrieve_issue(0, issue_we_vote_id)
if results['issue_found']:
issue = results['issue']
follow_campaignx_on_stage = FollowIssue(
voter_we_vote_id=voter_we_vote_id,
issue_id=issue.id,
issue_we_vote_id=issue.we_vote_id,
following_status=following_status,
)
# if auto_followed_from_twitter_suggestion:
# follow_campaignx_on_stage.auto_followed_from_twitter_suggestion = True
follow_campaignx_on_stage.save()
follow_campaignx_changed = True
follow_campaignx_on_stage_id = follow_campaignx_on_stage.id
follow_campaignx_on_stage_found = True
status += 'CREATE ' + following_status
else:
status = 'ISSUE_NOT_FOUND_ON_CREATE ' + following_status
except Exception as e:
status += 'FAILED_TO_UPDATE ' + following_status
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
else:
status += results['status']
results = {
'success': True if follow_campaignx_on_stage_found else False,
'status': status,
'follow_campaignx_found': follow_campaignx_on_stage_found,
'follow_campaignx_id': follow_campaignx_on_stage_id,
'follow_campaignx': follow_campaignx_on_stage,
}
return results
def retrieve_follow_campaignx(self, follow_campaignx_id, voter_we_vote_id, issue_id, issue_we_vote_id):
"""
follow_campaignx_id is the identifier for records stored in this table (it is NOT the issue_id)
"""
error_result = False
exception_does_not_exist = False
exception_multiple_object_returned = False
follow_campaignx_on_stage = FollowIssue()
follow_campaignx_on_stage_id = 0
try:
if positive_value_exists(follow_campaignx_id):
follow_campaignx_on_stage = FollowIssue.objects.get(id=follow_campaignx_id)
                follow_campaignx_on_stage_id = follow_campaignx_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_ID'
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_id):
follow_campaignx_on_stage = FollowIssue.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_id=issue_id)
follow_campaignx_on_stage_id = follow_campaignx_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_VOTER_WE_VOTE_ID_AND_ISSUE_ID'
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_we_vote_id):
follow_campaignx_on_stage = FollowIssue.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_we_vote_id__iexact=issue_we_vote_id)
follow_campaignx_on_stage_id = follow_campaignx_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_VOTER_WE_VOTE_ID_AND_ISSUE_WE_VOTE_ID'
else:
success = False
status = 'FOLLOW_ISSUE_MISSING_REQUIRED_VARIABLES'
except FollowIssue.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
error_result = True
exception_multiple_object_returned = True
success = False
status = 'FOLLOW_ISSUE_NOT_FOUND_MultipleObjectsReturned'
except FollowIssue.DoesNotExist:
error_result = False
exception_does_not_exist = True
success = True
status = 'FOLLOW_ISSUE_NOT_FOUND_DoesNotExist'
if positive_value_exists(follow_campaignx_on_stage_id):
follow_campaignx_on_stage_found = True
is_following = follow_campaignx_on_stage.is_following()
is_not_following = follow_campaignx_on_stage.is_not_following()
is_ignoring = follow_campaignx_on_stage.is_ignoring()
else:
follow_campaignx_on_stage_found = False
is_following = False
is_not_following = True
is_ignoring = False
results = {
'status': status,
'success': success,
'follow_campaignx_found': follow_campaignx_on_stage_found,
'follow_campaignx_id': follow_campaignx_on_stage_id,
'follow_campaignx': follow_campaignx_on_stage,
'is_following': is_following,
'is_not_following': is_not_following,
'is_ignoring': is_ignoring,
'error_result': error_result,
'DoesNotExist': exception_does_not_exist,
'MultipleObjectsReturned': exception_multiple_object_returned,
}
return results
def delete_follow_campaignx(self, follow_campaignx_id, voter_we_vote_id, issue_id, issue_we_vote_id):
"""
Remove any follow issue entries (we may have duplicate entries)
"""
follow_campaignx_deleted = False
status = ''
try:
if positive_value_exists(follow_campaignx_id):
follow_campaignx_on_stage = FollowIssue.objects.get(id=follow_campaignx_id)
follow_campaignx_on_stage.delete()
follow_campaignx_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETED_BY_ID '
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_id):
follow_campaignx_query = FollowIssue.objects.filter(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_id=issue_id)
follow_campaignx_list = list(follow_campaignx_query)
for one_follow_campaignx in follow_campaignx_list:
one_follow_campaignx.delete()
follow_campaignx_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETED_BY_VOTER_WE_VOTE_ID_AND_ISSUE_ID '
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_we_vote_id):
follow_campaignx_query = FollowIssue.objects.filter(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_we_vote_id__iexact=issue_we_vote_id)
follow_campaignx_list = list(follow_campaignx_query)
for one_follow_campaignx in follow_campaignx_list:
one_follow_campaignx.delete()
follow_campaignx_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETE_BY_VOTER_WE_VOTE_ID_AND_ISSUE_WE_VOTE_ID '
else:
success = False
status += 'FOLLOW_ISSUE_DELETE_MISSING_REQUIRED_VARIABLES '
except FollowIssue.DoesNotExist:
success = True
status = 'FOLLOW_ISSUE_DELETE_NOT_FOUND_DoesNotExist '
results = {
'status': status,
'success': success,
'follow_campaignx_deleted': follow_campaignx_deleted,
}
return results
class FollowIssue(models.Model):
# We are relying on built-in Python id field
# The voter following the issue
voter_we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, null=True, blank=True, unique=False, db_index=True)
organization_we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, null=True, blank=True, unique=False)
# The issue being followed
issue_id = models.PositiveIntegerField(null=True, blank=True)
# This is used when we want to export the issues that are being following
issue_we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, null=True, blank=True, unique=False)
# Is this person following, not following, or ignoring this issue?
following_status = models.CharField(max_length=15, choices=FOLLOWING_CHOICES, default=FOLLOWING, db_index=True)
# Is the fact that this issue is being followed visible to the public (if linked to organization)?
is_follow_visible_publicly = models.BooleanField(verbose_name='', default=False)
# The date the voter followed or stopped following this issue
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=True, auto_now=True, db_index=True)
def __unicode__(self):
return self.issue_we_vote_id
def is_following(self):
if self.following_status == FOLLOWING:
return True
return False
def is_not_following(self):
if self.following_status == STOP_FOLLOWING:
return True
return False
def is_ignoring(self):
if self.following_status == FOLLOW_IGNORE:
return True
return False
class FollowIssueManager(models.Manager):
def __unicode__(self):
return "FollowIssueManager"
def toggle_on_voter_following_issue(self, voter_we_vote_id, issue_id, issue_we_vote_id):
following_status = FOLLOWING
follow_issue_manager = FollowIssueManager()
return follow_issue_manager.toggle_following_issue(voter_we_vote_id, issue_id, issue_we_vote_id,
following_status)
def toggle_off_voter_following_issue(self, voter_we_vote_id, issue_id, issue_we_vote_id):
following_status = STOP_FOLLOWING
follow_issue_manager = FollowIssueManager()
return follow_issue_manager.toggle_following_issue(voter_we_vote_id, issue_id, issue_we_vote_id,
following_status)
def toggle_ignore_voter_following_issue(self, voter_we_vote_id, issue_id, issue_we_vote_id):
following_status = FOLLOW_IGNORE
follow_issue_manager = FollowIssueManager()
return follow_issue_manager.toggle_following_issue(voter_we_vote_id, issue_id, issue_we_vote_id,
following_status)
def toggle_following_issue(self, voter_we_vote_id, issue_id, issue_we_vote_id, following_status):
follow_issue_on_stage_found = False
follow_issue_changed = False
follow_issue_on_stage_id = 0
follow_issue_on_stage = FollowIssue()
status = ''
issue_identifier_exists = positive_value_exists(issue_we_vote_id) or positive_value_exists(issue_id)
if not positive_value_exists(voter_we_vote_id) and not issue_identifier_exists:
results = {
'success': True if follow_issue_on_stage_found else False,
'status': 'Insufficient inputs to toggle issue link, try passing ids for voter and issue ',
'follow_issue_found': follow_issue_on_stage_found,
'follow_issue_id': follow_issue_on_stage_id,
'follow_issue': follow_issue_on_stage,
}
return results
# Does a follow_issue entry exist from this voter already exist?
follow_issue_manager = FollowIssueManager()
follow_issue_id = 0
results = follow_issue_manager.retrieve_follow_issue(follow_issue_id, voter_we_vote_id, issue_id,
issue_we_vote_id)
if results['MultipleObjectsReturned']:
status += 'TOGGLE_FOLLOWING_ISSUE MultipleObjectsReturned ' + following_status
delete_results = follow_issue_manager.delete_follow_issue(
follow_issue_id, voter_we_vote_id, issue_id, issue_we_vote_id)
status += delete_results['status']
results = follow_issue_manager.retrieve_follow_issue(follow_issue_id, voter_we_vote_id, issue_id,
issue_we_vote_id)
if results['follow_issue_found']:
follow_issue_on_stage = results['follow_issue']
# Update this follow_issue entry with new values - we do not delete because we might be able to use
try:
follow_issue_on_stage.following_status = following_status
                # We don't need to update here because we set auto_now=True on the field
# follow_issue_on_stage.date_last_changed =
follow_issue_on_stage.save()
follow_issue_changed = True
follow_issue_on_stage_id = follow_issue_on_stage.id
follow_issue_on_stage_found = True
status += 'FOLLOW_STATUS_UPDATED_AS ' + following_status
except Exception as e:
status += 'FAILED_TO_UPDATE ' + following_status
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
elif results['DoesNotExist']:
try:
# Create new follow_issue entry
# First make sure that issue_id is for a valid issue
issue_manager = IssueManager()
if positive_value_exists(issue_id):
results = issue_manager.retrieve_issue(issue_id)
else:
results = issue_manager.retrieve_issue(0, issue_we_vote_id)
if results['issue_found']:
issue = results['issue']
follow_issue_on_stage = FollowIssue(
voter_we_vote_id=voter_we_vote_id,
issue_id=issue.id,
issue_we_vote_id=issue.we_vote_id,
following_status=following_status,
)
# if auto_followed_from_twitter_suggestion:
# follow_issue_on_stage.auto_followed_from_twitter_suggestion = True
follow_issue_on_stage.save()
follow_issue_changed = True
follow_issue_on_stage_id = follow_issue_on_stage.id
follow_issue_on_stage_found = True
status += 'CREATE ' + following_status
else:
status = 'ISSUE_NOT_FOUND_ON_CREATE ' + following_status
except Exception as e:
status += 'FAILED_TO_UPDATE ' + following_status
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
else:
status += results['status']
results = {
'success': True if follow_issue_on_stage_found else False,
'status': status,
'follow_issue_found': follow_issue_on_stage_found,
'follow_issue_id': follow_issue_on_stage_id,
'follow_issue': follow_issue_on_stage,
}
return results
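    # Typical call pattern (the ids below are illustrative placeholders):
    #
    #   follow_issue_manager = FollowIssueManager()
    #   results = follow_issue_manager.toggle_on_voter_following_issue(
    #       voter_we_vote_id, issue_id, issue_we_vote_id)
    #   if results['follow_issue_found']:
    #       follow_issue = results['follow_issue']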
def retrieve_follow_issue(self, follow_issue_id, voter_we_vote_id, issue_id, issue_we_vote_id):
"""
follow_issue_id is the identifier for records stored in this table (it is NOT the issue_id)
"""
error_result = False
exception_does_not_exist = False
exception_multiple_object_returned = False
follow_issue_on_stage = FollowIssue()
follow_issue_on_stage_id = 0
try:
if positive_value_exists(follow_issue_id):
follow_issue_on_stage = FollowIssue.objects.get(id=follow_issue_id)
                follow_issue_on_stage_id = follow_issue_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_ID'
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_id):
follow_issue_on_stage = FollowIssue.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_id=issue_id)
follow_issue_on_stage_id = follow_issue_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_VOTER_WE_VOTE_ID_AND_ISSUE_ID'
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_we_vote_id):
follow_issue_on_stage = FollowIssue.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_we_vote_id__iexact=issue_we_vote_id)
follow_issue_on_stage_id = follow_issue_on_stage.id
success = True
status = 'FOLLOW_ISSUE_FOUND_WITH_VOTER_WE_VOTE_ID_AND_ISSUE_WE_VOTE_ID'
else:
success = False
status = 'FOLLOW_ISSUE_MISSING_REQUIRED_VARIABLES'
except FollowIssue.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
error_result = True
exception_multiple_object_returned = True
success = False
status = 'FOLLOW_ISSUE_NOT_FOUND_MultipleObjectsReturned'
except FollowIssue.DoesNotExist:
error_result = False
exception_does_not_exist = True
success = True
status = 'FOLLOW_ISSUE_NOT_FOUND_DoesNotExist'
if positive_value_exists(follow_issue_on_stage_id):
follow_issue_on_stage_found = True
is_following = follow_issue_on_stage.is_following()
is_not_following = follow_issue_on_stage.is_not_following()
is_ignoring = follow_issue_on_stage.is_ignoring()
else:
follow_issue_on_stage_found = False
is_following = False
is_not_following = True
is_ignoring = False
results = {
'status': status,
'success': success,
'follow_issue_found': follow_issue_on_stage_found,
'follow_issue_id': follow_issue_on_stage_id,
'follow_issue': follow_issue_on_stage,
'is_following': is_following,
'is_not_following': is_not_following,
'is_ignoring': is_ignoring,
'error_result': error_result,
'DoesNotExist': exception_does_not_exist,
'MultipleObjectsReturned': exception_multiple_object_returned,
}
return results
def delete_follow_issue(self, follow_issue_id, voter_we_vote_id, issue_id, issue_we_vote_id):
"""
Remove any follow issue entries (we may have duplicate entries)
"""
follow_issue_deleted = False
status = ''
try:
if positive_value_exists(follow_issue_id):
follow_issue_on_stage = FollowIssue.objects.get(id=follow_issue_id)
follow_issue_on_stage.delete()
follow_issue_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETED_BY_ID '
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_id):
follow_issue_query = FollowIssue.objects.filter(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_id=issue_id)
follow_issue_list = list(follow_issue_query)
for one_follow_issue in follow_issue_list:
one_follow_issue.delete()
follow_issue_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETED_BY_VOTER_WE_VOTE_ID_AND_ISSUE_ID '
elif positive_value_exists(voter_we_vote_id) and positive_value_exists(issue_we_vote_id):
follow_issue_query = FollowIssue.objects.filter(
voter_we_vote_id__iexact=voter_we_vote_id,
issue_we_vote_id__iexact=issue_we_vote_id)
follow_issue_list = list(follow_issue_query)
for one_follow_issue in follow_issue_list:
one_follow_issue.delete()
follow_issue_deleted = True
success = True
status += 'FOLLOW_ISSUE_DELETE_BY_VOTER_WE_VOTE_ID_AND_ISSUE_WE_VOTE_ID '
else:
success = False
status += 'FOLLOW_ISSUE_DELETE_MISSING_REQUIRED_VARIABLES '
except FollowIssue.DoesNotExist:
success = True
status = 'FOLLOW_ISSUE_DELETE_NOT_FOUND_DoesNotExist '
results = {
'status': status,
'success': success,
'follow_issue_deleted': follow_issue_deleted,
}
return results
def update_or_create_suggested_issue_to_follow(self, viewer_voter_we_vote_id, issue_we_vote_id,
from_twitter=False):
"""
        Create or update the SuggestedIssueToFollow table with suggested issues from Twitter ids I follow
        or issues that my friends follow.
:param viewer_voter_we_vote_id:
:param issue_we_vote_id:
:param from_twitter:
:return:
"""
status = ''
try:
suggested_issue_to_follow, created = SuggestedIssueToFollow.objects.update_or_create(
viewer_voter_we_vote_id=viewer_voter_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
defaults={
'viewer_voter_we_vote_id': viewer_voter_we_vote_id,
'issue_we_vote_id': issue_we_vote_id,
'from_twitter': from_twitter
}
)
suggested_issue_to_follow_saved = True
success = True
status += "SUGGESTED_ISSUE_TO_FOLLOW_UPDATED "
except Exception as e:
suggested_issue_to_follow_saved = False
suggested_issue_to_follow = SuggestedIssueToFollow()
success = False
status += "SUGGESTED_ISSUE_TO_FOLLOW_NOT_UPDATED " + str(e) + ' '
results = {
'success': success,
'status': status,
'suggested_issue_to_follow_saved': suggested_issue_to_follow_saved,
'suggested_issue_to_follow': suggested_issue_to_follow,
}
return results
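    # Sketch (ids are placeholders): records that an issue should be suggested
    # to a viewer, flagged with its origin:
    #
    #   results = follow_issue_manager.update_or_create_suggested_issue_to_follow(
    #       viewer_voter_we_vote_id, issue_we_vote_id, from_twitter=True)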
def retrieve_suggested_issue_to_follow_list(self, viewer_voter_we_vote_id, from_twitter=False):
"""
        Retrieve suggested issues I follow from the SuggestedIssueToFollow table.
:param viewer_voter_we_vote_id:
:param from_twitter:
:return:
"""
suggested_issue_to_follow_list = []
status = ''
try:
suggested_issue_to_follow_queryset = SuggestedIssueToFollow.objects.all()
suggested_issue_to_follow_list = suggested_issue_to_follow_queryset.filter(
viewer_voter_we_vote_id__iexact=viewer_voter_we_vote_id,
from_twitter=from_twitter)
if len(suggested_issue_to_follow_list):
success = True
suggested_issue_to_follow_list_found = True
status += "SUGGESTED_ISSUE_TO_FOLLOW_RETRIEVED "
else:
success = True
suggested_issue_to_follow_list_found = False
status += "NO_SUGGESTED_ISSUE_TO_FOLLOW_LIST_RETRIEVED "
except SuggestedIssueToFollow.DoesNotExist:
# No data found. Try again below
success = True
suggested_issue_to_follow_list_found = False
status = 'NO_SUGGESTED_ISSUE_TO_FOLLOW_LIST_RETRIEVED_DoesNotExist '
except Exception as e:
success = False
suggested_issue_to_follow_list_found = False
status += "SUGGESTED_ISSUE_TO_FOLLOW_LIST_NOT_RETRIEVED " + str(e) + ' '
results = {
'success': success,
'status': status,
'suggested_issue_to_follow_list_found': suggested_issue_to_follow_list_found,
'suggested_issue_to_follow_list': suggested_issue_to_follow_list,
}
return results
class FollowMetricsManager(models.Manager):
def __unicode__(self):
return "FollowMetricsManager"
def fetch_organization_followers(self, organization_we_vote_id, google_civic_election_id=0):
count_result = None
try:
count_query = FollowOrganization.objects.using('readonly').all()
count_query = count_query.filter(organization_we_vote_id__iexact=organization_we_vote_id)
count_query = count_query.filter(following_status=FOLLOWING)
count_query = count_query.values("voter_id").distinct()
if positive_value_exists(google_civic_election_id):
election_manager = ElectionManager()
election_result = election_manager.retrieve_election(google_civic_election_id)
if election_result['election_found']:
election = election_result['election']
if positive_value_exists(election.election_day_text):
timezone = pytz.timezone("America/Los_Angeles")
date_of_election = timezone.localize(datetime.strptime(election.election_day_text, "%Y-%m-%d"))
date_of_election += timedelta(days=1) # Add one day, to catch the entire election day
# Find all of the follow entries before or on the day of the election
count_query = count_query.filter(date_last_changed__lte=date_of_election)
else:
# Failed retrieving date, so we return 0
return 0
count_result = count_query.count()
except Exception as e:
pass
return count_result
def fetch_issues_followed(self, voter_we_vote_id='',
limit_to_one_date_as_integer=0, count_through_this_date_as_integer=0):
timezone = pytz.timezone("America/Los_Angeles")
if positive_value_exists(limit_to_one_date_as_integer):
one_date_string = str(limit_to_one_date_as_integer)
limit_to_one_date = timezone.localize(datetime.strptime(one_date_string, "%Y%m%d"))
if positive_value_exists(count_through_this_date_as_integer):
count_through_date_string = str(count_through_this_date_as_integer)
count_through_this_date = timezone.localize(datetime.strptime(count_through_date_string, "%Y%m%d"))<|fim▁hole|> if positive_value_exists(voter_we_vote_id):
count_query = count_query.filter(voter_we_vote_id__iexact=voter_we_vote_id)
count_query = count_query.filter(following_status=FOLLOWING)
if positive_value_exists(limit_to_one_date_as_integer):
# TODO DALE THIS NEEDS WORK TO FIND ALL ENTRIES ON ONE DAY
count_query = count_query.filter(date_last_changed=limit_to_one_date)
elif positive_value_exists(count_through_this_date_as_integer):
count_query = count_query.filter(date_last_changed__lte=count_through_this_date)
count_result = count_query.count()
except Exception as e:
pass
return count_result
def fetch_voter_organizations_followed(self, voter_id):
count_result = None
try:
count_query = FollowOrganization.objects.using('readonly').all()
count_query = count_query.filter(voter_id=voter_id)
count_query = count_query.filter(following_status=FOLLOWING)
count_result = count_query.count()
except Exception as e:
pass
return count_result
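# Example metrics call (the election id is a placeholder): passing a
# google_civic_election_id restricts the follower count to follow entries
# recorded on or before election day.
#
#   metrics_manager = FollowMetricsManager()
#   follower_count = metrics_manager.fetch_organization_followers(
#       organization_we_vote_id, google_civic_election_id=4162)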
class FollowIssueList(models.Model):
"""
A way to retrieve all of the follow_issue information
"""
def fetch_follow_issue_count_by_issue_we_vote_id(self, issue_we_vote_id):
follow_issue_list_length = 0
try:
follow_issue_list_query = FollowIssue.objects.using('readonly').all()
follow_issue_list_query = follow_issue_list_query.filter(issue_we_vote_id__iexact=issue_we_vote_id)
follow_issue_list_query = follow_issue_list_query.filter(following_status=FOLLOWING)
follow_issue_list_length = follow_issue_list_query.count()
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
return follow_issue_list_length
def fetch_follow_issue_count_by_voter_we_vote_id(self, voter_we_vote_id, following_status=None):
if following_status is None:
following_status = FOLLOWING
follow_issue_list_length = 0
try:
follow_issue_list_query = FollowIssue.objects.using('readonly').all()
follow_issue_list_query = follow_issue_list_query.filter(voter_we_vote_id__iexact=voter_we_vote_id)
follow_issue_list_query = follow_issue_list_query.filter(following_status=following_status)
follow_issue_list_length = follow_issue_list_query.count()
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
return follow_issue_list_length
def retrieve_follow_issue_list_by_voter_we_vote_id(self, voter_we_vote_id, following_status=None, read_only=True):
"""
Retrieve a list of follow_issue entries for this voter
:param voter_we_vote_id:
:param following_status:
:param read_only:
:return: a list of follow_issue objects for the voter_we_vote_id
"""
follow_issue_list_found = False
if following_status is None:
following_status = FOLLOWING
follow_issue_list = {}
try:
if positive_value_exists(read_only):
follow_issue_list_query = FollowIssue.objects.using('readonly').all()
else:
follow_issue_list_query = FollowIssue.objects.all()
follow_issue_list_query = follow_issue_list_query.filter(voter_we_vote_id__iexact=voter_we_vote_id)
if positive_value_exists(following_status):
follow_issue_list = follow_issue_list_query.filter(following_status=following_status)
if len(follow_issue_list):
follow_issue_list_found = True
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
if follow_issue_list_found:
return follow_issue_list
else:
follow_issue_list = {}
return follow_issue_list
def retrieve_follow_issue_we_vote_id_list_by_voter_we_vote_id(self, voter_we_vote_id, following_status=None):
follow_issue_we_vote_id_list = []
follow_issue_we_vote_id_list_result = []
if following_status is None:
following_status = FOLLOWING
try:
follow_issue_list_query = FollowIssue.objects.using('readonly').all()
follow_issue_list_query = follow_issue_list_query.filter(voter_we_vote_id__iexact=voter_we_vote_id)
if positive_value_exists(following_status):
follow_issue_list_query = follow_issue_list_query.filter(following_status=following_status)
follow_issue_list_query = follow_issue_list_query.values("issue_we_vote_id").distinct()
follow_issue_we_vote_id_list_result = list(follow_issue_list_query)
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
for query in follow_issue_we_vote_id_list_result:
follow_issue_we_vote_id_list.append(query["issue_we_vote_id"])
return follow_issue_we_vote_id_list
def fetch_follow_issue_following_count_by_voter_we_vote_id(self, voter_we_vote_id):
following_status = FOLLOWING
return self.fetch_follow_issue_count_by_voter_we_vote_id(voter_we_vote_id, following_status)
def fetch_follow_issue_ignore_count_by_voter_we_vote_id(self, voter_we_vote_id):
following_status = FOLLOW_IGNORE
return self.fetch_follow_issue_count_by_voter_we_vote_id(voter_we_vote_id, following_status)
def retrieve_follow_issue_ignore_list_by_voter_we_vote_id(self, voter_we_vote_id):
following_status = FOLLOW_IGNORE
return self.retrieve_follow_issue_list_by_voter_we_vote_id(voter_we_vote_id, following_status)
def retrieve_follow_issue_following_we_vote_id_list_by_voter_we_vote_id(self, voter_we_vote_id):
following_status = FOLLOWING
return self.retrieve_follow_issue_we_vote_id_list_by_voter_we_vote_id(voter_we_vote_id, following_status)
def retrieve_follow_issue_ignore_we_vote_id_list_by_voter_we_vote_id(self, voter_we_vote_id):
following_status = FOLLOW_IGNORE
return self.retrieve_follow_issue_we_vote_id_list_by_voter_we_vote_id(voter_we_vote_id, following_status)
def retrieve_follow_issue_list_by_issue_id(self, issue_id):
issue_we_vote_id = None
following_status = FOLLOWING
return self.retrieve_follow_issue_list(issue_id, issue_we_vote_id, following_status)
def retrieve_follow_issue_following_list_by_issue_we_vote_id(self, issue_we_vote_id):
issue_id = None
following_status = FOLLOWING
return self.retrieve_follow_issue_list(issue_id, issue_we_vote_id, following_status)
def retrieve_follow_issue_list(self, issue_id, issue_we_vote_id, following_status):
follow_issue_list_found = False
follow_issue_list = {}
try:
follow_issue_list = FollowIssue.objects.using('readonly').all()
if positive_value_exists(issue_id):
follow_issue_list = follow_issue_list.filter(issue_id=issue_id)
else:
follow_issue_list = follow_issue_list.filter(issue_we_vote_id__iexact=issue_we_vote_id)
if positive_value_exists(following_status):
follow_issue_list = follow_issue_list.filter(following_status=following_status)
if len(follow_issue_list):
follow_issue_list_found = True
except Exception as e:
pass
if follow_issue_list_found:
return follow_issue_list
else:
follow_issue_list = {}
return follow_issue_list
class FollowOrganization(models.Model):
# We are relying on built-in Python id field
# The voter following the organization
voter_id = models.BigIntegerField(null=True, blank=True, db_index=True)
# The organization being followed
organization_id = models.BigIntegerField(null=True, blank=True, db_index=True)
voter_linked_organization_we_vote_id = models.CharField(
verbose_name="organization we vote permanent id",
max_length=255, null=True, blank=True, unique=False, db_index=True)
# This is used when we want to export the organizations that a voter is following
organization_we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, null=True, blank=True, unique=False, db_index=True)
# Is this person following or ignoring this organization?
following_status = models.CharField(max_length=15, choices=FOLLOWING_CHOICES, default=FOLLOWING, db_index=True)
# Is this person automatically following the suggested twitter organization?
auto_followed_from_twitter_suggestion = models.BooleanField(verbose_name='', default=False)
# Is the fact that this organization is being followed by voter visible to the public?
is_follow_visible_publicly = models.BooleanField(verbose_name='', default=False)
# The date the voter followed or stopped following this organization
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=True, auto_now=True)
# This is used when we want to export the organizations that a voter is following
def voter_we_vote_id(self):
voter_manager = VoterManager()
return voter_manager.fetch_we_vote_id_from_local_id(self.voter_id)
def __unicode__(self):
return self.organization_id
def is_following(self):
if self.following_status == FOLLOWING:
return True
return False
def is_not_following(self):
if self.following_status == STOP_FOLLOWING:
return True
return False
def is_ignoring(self):
if self.following_status == FOLLOW_IGNORE:
return True
return False
class FollowOrganizationManager(models.Manager):
def __unicode__(self):
return "FollowOrganizationManager"
def fetch_number_of_organizations_followed(self, voter_id):
number_of_organizations_followed = 0
try:
if positive_value_exists(voter_id):
follow_organization_query = FollowOrganization.objects.filter(
voter_id=voter_id,
following_status=FOLLOWING
)
number_of_organizations_followed = follow_organization_query.count()
except Exception as e:
pass
return number_of_organizations_followed
def toggle_on_voter_following_organization(self, voter_id, organization_id, organization_we_vote_id,
voter_linked_organization_we_vote_id,
auto_followed_from_twitter_suggestion=False):
following_status = FOLLOWING
follow_organization_manager = FollowOrganizationManager()
return follow_organization_manager.toggle_voter_following_organization(
voter_id, organization_id, organization_we_vote_id, voter_linked_organization_we_vote_id, following_status,
auto_followed_from_twitter_suggestion)
def toggle_off_voter_following_organization(self, voter_id, organization_id, organization_we_vote_id,
voter_linked_organization_we_vote_id):
following_status = STOP_FOLLOWING
follow_organization_manager = FollowOrganizationManager()
return follow_organization_manager.toggle_voter_following_organization(
voter_id, organization_id, organization_we_vote_id, voter_linked_organization_we_vote_id, following_status)
def toggle_ignore_voter_following_organization(self, voter_id, organization_id, organization_we_vote_id,
voter_linked_organization_we_vote_id):
following_status = FOLLOW_IGNORE
follow_organization_manager = FollowOrganizationManager()
return follow_organization_manager.toggle_voter_following_organization(
voter_id, organization_id, organization_we_vote_id, voter_linked_organization_we_vote_id, following_status)
def toggle_off_voter_ignoring_organization(self, voter_id, organization_id, organization_we_vote_id,
voter_linked_organization_we_vote_id):
        following_status = STOP_FOLLOWING  # STOP_IGNORING (we don't actually store STOP_IGNORING in the database)
follow_organization_manager = FollowOrganizationManager()
return follow_organization_manager.toggle_voter_following_organization(
voter_id, organization_id, organization_we_vote_id, voter_linked_organization_we_vote_id, following_status)
def toggle_voter_following_organization(self, voter_id, organization_id, organization_we_vote_id,
voter_linked_organization_we_vote_id, following_status,
auto_followed_from_twitter_suggestion=False):
status = ""
# Does a follow_organization entry exist from this voter already exist?
follow_organization_manager = FollowOrganizationManager()
results = follow_organization_manager.retrieve_follow_organization(0, voter_id,
organization_id, organization_we_vote_id)
follow_organization_on_stage_found = False
follow_organization_on_stage_id = 0
follow_organization_on_stage = FollowOrganization()
if results['follow_organization_found']:
follow_organization_on_stage = results['follow_organization']
# Update this follow_organization entry with new values - we do not delete because we might be able to use
try:
if auto_followed_from_twitter_suggestion:
# If here we are auto-following because the voter follows this organization on Twitter
if follow_organization_on_stage.following_status == "STOP_FOLLOWING" or \
follow_organization_on_stage.following_status == "FOLLOW_IGNORE":
# Do not follow again
pass
else:
follow_organization_on_stage.following_status = following_status
else:
follow_organization_on_stage.following_status = following_status
follow_organization_on_stage.auto_followed_from_twitter_suggestion = False
follow_organization_on_stage.voter_linked_organization_we_vote_id = voter_linked_organization_we_vote_id
# We don't need to update date_last_changed here because we set auto_now=True on the field
# follow_organization_on_stage.date_last_changed =
follow_organization_on_stage.save()
follow_organization_on_stage_id = follow_organization_on_stage.id
follow_organization_on_stage_found = True
status += 'UPDATE ' + following_status
except Exception as e:
status += 'FAILED_TO_UPDATE ' + following_status + ' '
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
elif results['MultipleObjectsReturned']:
logger.warning("follow_organization: delete all but one and take it over?")
status += 'TOGGLE_FOLLOWING_ORGANIZATION MultipleObjectsReturned ' + following_status + ' '
elif results['DoesNotExist']:
try:
# Create new follow_organization entry
# First make sure that organization_id is for a valid organization
organization_manager = OrganizationManager()
if positive_value_exists(organization_id):
results = organization_manager.retrieve_organization(organization_id)
else:
results = organization_manager.retrieve_organization(0, organization_we_vote_id)
if results['organization_found']:
organization = results['organization']
follow_organization_on_stage = FollowOrganization(
voter_id=voter_id,
organization_id=organization.id,
organization_we_vote_id=organization.we_vote_id,
voter_linked_organization_we_vote_id=voter_linked_organization_we_vote_id,
following_status=following_status,
)
if auto_followed_from_twitter_suggestion:
follow_organization_on_stage.auto_followed_from_twitter_suggestion = True
follow_organization_on_stage.save()
follow_organization_on_stage_id = follow_organization_on_stage.id
follow_organization_on_stage_found = True
status += 'CREATE ' + following_status + ' '
else:
status += 'ORGANIZATION_NOT_FOUND_ON_CREATE ' + following_status + ' '
except Exception as e:
status += 'FAILED_TO_CREATE ' + following_status + ' '
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
else:
status += results['status']
results = {
'success': follow_organization_on_stage_found,
'status': status,
'follow_organization_found': follow_organization_on_stage_found,
'follow_organization_id': follow_organization_on_stage_id,
'follow_organization': follow_organization_on_stage,
'voter_linked_organization_we_vote_id': voter_linked_organization_we_vote_id,
}
return results
def retrieve_follow_organization(self, follow_organization_id, voter_id, organization_id, organization_we_vote_id,
read_only=False):
"""
follow_organization_id is the identifier for records stored in this table (it is NOT the organization_id)
"""
error_result = False
exception_does_not_exist = False
exception_multiple_object_returned = False
follow_organization_on_stage = FollowOrganization()
follow_organization_on_stage_id = 0
status = ""
try:
if positive_value_exists(follow_organization_id):
if read_only:
follow_organization_on_stage = FollowOrganization.objects.using('readonly').get(
id=follow_organization_id)
else:
follow_organization_on_stage = FollowOrganization.objects.get(id=follow_organization_id)
follow_organization_on_stage_id = follow_organization_on_stage.id
success = True
status += 'FOLLOW_ORGANIZATION_FOUND_WITH_ID '
elif positive_value_exists(voter_id) and positive_value_exists(organization_id):
if read_only:
follow_organization_on_stage = FollowOrganization.objects.using('readonly').get(
voter_id=voter_id, organization_id=organization_id)
else:
follow_organization_on_stage = FollowOrganization.objects.get(
voter_id=voter_id, organization_id=organization_id)
follow_organization_on_stage_id = follow_organization_on_stage.id
success = True
status += 'FOLLOW_ORGANIZATION_FOUND_WITH_VOTER_ID_AND_ORGANIZATION_ID '
elif positive_value_exists(voter_id) and positive_value_exists(organization_we_vote_id):
if read_only:
follow_organization_on_stage = FollowOrganization.objects.using('readonly').get(
voter_id=voter_id, organization_we_vote_id=organization_we_vote_id)
else:
follow_organization_on_stage = FollowOrganization.objects.get(
voter_id=voter_id, organization_we_vote_id=organization_we_vote_id)
follow_organization_on_stage_id = follow_organization_on_stage.id
success = True
status += 'FOLLOW_ORGANIZATION_FOUND_WITH_VOTER_ID_AND_ORGANIZATION_WE_VOTE_ID '
else:
success = False
status += 'FOLLOW_ORGANIZATION_MISSING_REQUIRED_VARIABLES '
except FollowOrganization.MultipleObjectsReturned as e:
handle_record_found_more_than_one_exception(e, logger=logger)
error_result = True
exception_multiple_object_returned = True
success = False
status += 'FOLLOW_ORGANIZATION_NOT_FOUND_MultipleObjectsReturned '
follow_organization_list_found = False
follow_organization_list = []
# Delete the oldest values and retrieve the correct one
try:
if positive_value_exists(voter_id) and positive_value_exists(organization_id):
follow_organization_query = FollowOrganization.objects.all()
follow_organization_query = follow_organization_query.filter(
voter_id=voter_id, organization_id=organization_id)
follow_organization_query = follow_organization_query.order_by('id')
follow_organization_list = list(follow_organization_query)
follow_organization_list_found = positive_value_exists(len(follow_organization_list))
success = True
status += 'FOLLOW_ORGANIZATION_FOUND_WITH_VOTER_ID_AND_ORGANIZATION_ID '
elif positive_value_exists(voter_id) and positive_value_exists(organization_we_vote_id):
follow_organization_query = FollowOrganization.objects.all()
follow_organization_query = follow_organization_query.filter(
voter_id=voter_id, organization_we_vote_id=organization_we_vote_id)
follow_organization_query = follow_organization_query.order_by('id')
follow_organization_list = list(follow_organization_query)
follow_organization_list_found = positive_value_exists(len(follow_organization_list))
success = True
status += 'FOLLOW_ORGANIZATION_FOUND_WITH_VOTER_ID_AND_ORGANIZATION_WE_VOTE_ID '
if follow_organization_list_found:
follow_organization_on_stage = follow_organization_list.pop()
follow_organization_on_stage_id = follow_organization_on_stage.id
# Now cycle through remaining list and delete
for one_follow_organization in follow_organization_list:
one_follow_organization.delete()
print_to_log(logger, exception_message_optional="FollowOrganization duplicates removed.")
except Exception as e:
handle_exception(e, logger,
exception_message="Error trying to delete duplicate FollowOrganization entries.")
except FollowOrganization.DoesNotExist:
error_result = False
exception_does_not_exist = True
success = True
status += 'FOLLOW_ORGANIZATION_NOT_FOUND_DoesNotExist '
if positive_value_exists(follow_organization_on_stage_id):
follow_organization_on_stage_found = True
is_following = follow_organization_on_stage.is_following()
is_not_following = follow_organization_on_stage.is_not_following()
is_ignoring = follow_organization_on_stage.is_ignoring()
else:
follow_organization_on_stage_found = False
is_following = False
is_not_following = True
is_ignoring = False
results = {
'status': status,
'success': success,
'follow_organization_found': follow_organization_on_stage_found,
'follow_organization_id': follow_organization_on_stage_id,
'follow_organization': follow_organization_on_stage,
'is_following': is_following,
'is_not_following': is_not_following,
'is_ignoring': is_ignoring,
'error_result': error_result,
'DoesNotExist': exception_does_not_exist,
'MultipleObjectsReturned': exception_multiple_object_returned,
}
return results
def retrieve_voter_following_org_status(self, voter_id, voter_we_vote_id,
organization_id, organization_we_vote_id, read_only=False):
"""
Retrieve one follow entry so we can see if a voter is following or ignoring a particular org
"""
if not positive_value_exists(voter_id) and positive_value_exists(voter_we_vote_id):
# We need voter_id to call retrieve_follow_organization
voter_manager = VoterManager()
voter_id = voter_manager.fetch_local_id_from_we_vote_id(voter_we_vote_id)
if not positive_value_exists(voter_id) and \
not (positive_value_exists(organization_id) or positive_value_exists(organization_we_vote_id)):
results = {
'status': 'RETRIEVE_VOTER_FOLLOWING_MISSING_VARIABLES',
'success': False,
'follow_organization_found': False,
'follow_organization_id': 0,
'follow_organization': FollowOrganization(),
'is_following': False,
'is_not_following': True,
'is_ignoring': False,
'error_result': True,
'DoesNotExist': False,
'MultipleObjectsReturned': False,
}
return results
return self.retrieve_follow_organization(
0, voter_id, organization_id, organization_we_vote_id, read_only=read_only)
def update_or_create_suggested_organization_to_follow(self, viewer_voter_we_vote_id, organization_we_vote_id,
from_twitter=False):
"""
Create or update the SuggestedOrganizationToFollow table with suggested organizations from Twitter ids I follow
or organizations that my friends follow.
:param viewer_voter_we_vote_id:
:param organization_we_vote_id:
:param from_twitter:
:return:
"""
status = ''
try:
suggested_organization_to_follow, created = SuggestedOrganizationToFollow.objects.update_or_create(
viewer_voter_we_vote_id=viewer_voter_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
defaults={
'viewer_voter_we_vote_id': viewer_voter_we_vote_id,
'organization_we_vote_id': organization_we_vote_id,
'from_twitter': from_twitter
}
)
suggested_organization_to_follow_saved = True
success = True
status += "SUGGESTED_ORGANIZATION_TO_FOLLOW_UPDATED "
except Exception as e:
suggested_organization_to_follow_saved = False
suggested_organization_to_follow = SuggestedOrganizationToFollow()
success = False
status += "SUGGESTED_ORGANIZATION_TO_FOLLOW_NOT_UPDATED " + str(e) + ' '
results = {
'success': success,
'status': status,
'suggested_organization_to_follow_saved': suggested_organization_to_follow_saved,
'suggested_organization_to_follow': suggested_organization_to_follow,
}
return results
def retrieve_suggested_organization_to_follow_list(self, viewer_voter_we_vote_id, from_twitter=False):
"""
Retrieve suggested organizations that I follow from the SuggestedOrganizationToFollow table.
:param viewer_voter_we_vote_id:
:param from_twitter:
:return:
"""
suggested_organization_to_follow_list = []
status = ''
try:
suggested_organization_to_follow_queryset = SuggestedOrganizationToFollow.objects.all()
suggested_organization_to_follow_list = suggested_organization_to_follow_queryset.filter(
viewer_voter_we_vote_id__iexact=viewer_voter_we_vote_id,
from_twitter=from_twitter)
if len(suggested_organization_to_follow_list):
success = True
suggested_organization_to_follow_list_found = True
status += "SUGGESTED_ORGANIZATION_TO_FOLLOW_RETRIEVED "
else:
success = True
suggested_organization_to_follow_list_found = False
status += "NO_SUGGESTED_ORGANIZATION_TO_FOLLOW_LIST_RETRIEVED "
except SuggestedOrganizationToFollow.DoesNotExist:
# No data found. Try again below
success = True
suggested_organization_to_follow_list_found = False
status += 'NO_SUGGESTED_ORGANIZATION_TO_FOLLOW_LIST_RETRIEVED_DoesNotExist '
except Exception as e:
success = False
suggested_organization_to_follow_list_found = False
status += "SUGGESTED_ORGANIZATION_TO_FOLLOW_LIST_NOT_RETRIEVED " + str(e) + ' '
results = {
'success': success,
'status': status,
'suggested_organization_to_follow_list_found': suggested_organization_to_follow_list_found,
'suggested_organization_to_follow_list': suggested_organization_to_follow_list,
}
return results
class FollowOrganizationList(models.Model):
"""
A way to retrieve all of the follow_organization information
"""
def fetch_follow_organization_by_voter_id_count(self, voter_id):
follow_organization_list = self.retrieve_follow_organization_by_voter_id(voter_id)
return len(follow_organization_list)
def retrieve_follow_organization_by_voter_id(self, voter_id, auto_followed_from_twitter_suggestion=False,
read_only=False):
# Retrieve a list of follow_organization entries for this voter
follow_organization_list_found = False
following_status = FOLLOWING
follow_organization_list = {}
try:
# Should not default to 'readonly' since we sometimes save the results of this call
if read_only:
follow_organization_list = FollowOrganization.objects.using('readonly').all()
else:
follow_organization_list = FollowOrganization.objects.all()
follow_organization_list = follow_organization_list.filter(voter_id=voter_id)
follow_organization_list = follow_organization_list.filter(following_status=following_status)
if auto_followed_from_twitter_suggestion:
follow_organization_list = follow_organization_list.filter(
auto_followed_from_twitter_suggestion=auto_followed_from_twitter_suggestion)
if len(follow_organization_list):
follow_organization_list_found = True
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
if follow_organization_list_found:
return follow_organization_list
else:
follow_organization_list = {}
return follow_organization_list
def retrieve_follow_organization_by_own_organization_we_vote_id(self, organization_we_vote_id,
auto_followed_from_twitter_suggestion=False):
# Retrieve a list of followed organization entries by voter_linked_organization_we_vote_id for voter guides
follow_organization_list_found = False
following_status = FOLLOWING
follow_organization_list = []
try:
follow_organization_list = FollowOrganization.objects.all()
follow_organization_list = follow_organization_list.filter(
voter_linked_organization_we_vote_id=organization_we_vote_id)
follow_organization_list = follow_organization_list.filter(following_status=following_status)
if auto_followed_from_twitter_suggestion:
follow_organization_list = follow_organization_list.filter(
auto_followed_from_twitter_suggestion=auto_followed_from_twitter_suggestion)
if len(follow_organization_list):
follow_organization_list_found = True
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
if follow_organization_list_found:
return follow_organization_list
else:
follow_organization_list = []
return follow_organization_list
def retrieve_ignore_organization_by_voter_id(self, voter_id, read_only=False):
# Retrieve a list of follow_organization entries for this voter
follow_organization_list_found = False
following_status = FOLLOW_IGNORE
follow_organization_list = {}
try:
if positive_value_exists(read_only):
follow_organization_list = FollowOrganization.objects.using('readonly').all()
else:
follow_organization_list = FollowOrganization.objects.all()
follow_organization_list = follow_organization_list.filter(voter_id=voter_id)
follow_organization_list = follow_organization_list.filter(following_status=following_status)
if len(follow_organization_list):
follow_organization_list_found = True
except Exception as e:
handle_record_not_found_exception(e, logger=logger)
if follow_organization_list_found:
return follow_organization_list
else:
follow_organization_list = {}
return follow_organization_list
def retrieve_follow_organization_by_voter_id_simple_id_array(self, voter_id, return_we_vote_id=False,
auto_followed_from_twitter_suggestion=False,
read_only=False):
follow_organization_list_manager = FollowOrganizationList()
follow_organization_list = \
follow_organization_list_manager.retrieve_follow_organization_by_voter_id(
voter_id, auto_followed_from_twitter_suggestion, read_only=read_only)
follow_organization_list_simple_array = []
if len(follow_organization_list):
voter_manager = VoterManager()
voter_linked_organization_we_vote_id = \
voter_manager.fetch_linked_organization_we_vote_id_from_local_id(voter_id)
for follow_organization in follow_organization_list:
if not read_only:
# Heal the data by making sure the voter's linked_organization_we_vote_id exists and is accurate
if positive_value_exists(voter_linked_organization_we_vote_id) \
and voter_linked_organization_we_vote_id != \
follow_organization.voter_linked_organization_we_vote_id:
try:
follow_organization.voter_linked_organization_we_vote_id = \
voter_linked_organization_we_vote_id
follow_organization.save()
except Exception as e:
status = 'FAILED_TO_UPDATE_FOLLOW_ORGANIZATION-voter_id ' + str(voter_id)
handle_record_not_saved_exception(e, logger=logger, exception_message_optional=status)
if return_we_vote_id:
follow_organization_list_simple_array.append(follow_organization.organization_we_vote_id)
else:
follow_organization_list_simple_array.append(follow_organization.organization_id)
return follow_organization_list_simple_array
def retrieve_followed_organization_by_organization_we_vote_id_simple_id_array(
self, organization_we_vote_id, return_we_vote_id=False,
auto_followed_from_twitter_suggestion=False):
follow_organization_list_manager = FollowOrganizationList()
follow_organization_list = \
follow_organization_list_manager.retrieve_follow_organization_by_own_organization_we_vote_id(
organization_we_vote_id, auto_followed_from_twitter_suggestion)
follow_organization_list_simple_array = []
if len(follow_organization_list):
for follow_organization in follow_organization_list:
if return_we_vote_id:
follow_organization_list_simple_array.append(follow_organization.organization_we_vote_id)
else:
follow_organization_list_simple_array.append(follow_organization.organization_id)
return follow_organization_list_simple_array
def fetch_followers_list_by_organization_we_vote_id(
self, organization_we_vote_id, return_voter_we_vote_id=False):
"""
Fetch a list of the voter_id or voter_we_vote_id of followers of organization_we_vote_id.
:param organization_we_vote_id:
:param return_voter_we_vote_id:
:return:
"""
follow_organization_list_manager = FollowOrganizationList()
followers_list = \
follow_organization_list_manager.retrieve_follow_organization_by_organization_we_vote_id(
organization_we_vote_id)
followers_list_simple_array = []
if len(followers_list):
voter_manager = VoterManager()
for follow_organization in followers_list:
if return_voter_we_vote_id:
voter_we_vote_id = voter_manager.fetch_we_vote_id_from_local_id(follow_organization.voter_id)
if positive_value_exists(voter_we_vote_id):
followers_list_simple_array.append(voter_we_vote_id)
else:
if positive_value_exists(follow_organization.voter_id):
followers_list_simple_array.append(follow_organization.voter_id)
return followers_list_simple_array
def retrieve_followers_organization_by_organization_we_vote_id_simple_id_array(
self, organization_we_vote_id, return_we_vote_id=False,
auto_followed_from_twitter_suggestion=False):
"""
Retrieve the organization_id (or organization_we_vote_id) for each voter that follows organization_we_vote_id.
:param organization_we_vote_id:
:param return_we_vote_id:
:param auto_followed_from_twitter_suggestion:
:return:
"""
follow_organization_list_manager = FollowOrganizationList()
followers_organization_list = \
follow_organization_list_manager.retrieve_follow_organization_by_organization_we_vote_id(
organization_we_vote_id)
followers_organization_list_simple_array = []
if len(followers_organization_list):
for follow_organization in followers_organization_list:
if return_we_vote_id:
if positive_value_exists(follow_organization.voter_linked_organization_we_vote_id):
followers_organization_list_simple_array.append(
follow_organization.voter_linked_organization_we_vote_id)
else:
followers_organization_list_simple_array.append(follow_organization.organization_id)
return followers_organization_list_simple_array
def retrieve_ignore_organization_by_voter_id_simple_id_array(
self, voter_id, return_we_vote_id=False, read_only=False):
follow_organization_list_manager = FollowOrganizationList()
ignore_organization_list = \
follow_organization_list_manager.retrieve_ignore_organization_by_voter_id(voter_id, read_only=read_only)
ignore_organization_list_simple_array = []
if len(ignore_organization_list):
for ignore_organization in ignore_organization_list:
if return_we_vote_id:
ignore_organization_list_simple_array.append(ignore_organization.organization_we_vote_id)
else:
ignore_organization_list_simple_array.append(ignore_organization.organization_id)
return ignore_organization_list_simple_array
def retrieve_follow_organization_by_organization_id(self, organization_id):
# Retrieve a list of follow_organization entries for this organization
follow_organization_list_found = False
following_status = FOLLOWING
follow_organization_list = {}
try:
follow_organization_list = FollowOrganization.objects.all()
follow_organization_list = follow_organization_list.filter(organization_id=organization_id)
follow_organization_list = follow_organization_list.filter(following_status=following_status)
if len(follow_organization_list):
follow_organization_list_found = True
except Exception as e:
pass
if follow_organization_list_found:
return follow_organization_list
else:
follow_organization_list = {}
return follow_organization_list
def retrieve_follow_organization_by_organization_we_vote_id(self, organization_we_vote_id):
# Retrieve a list of follow_organization entries for this organization
follow_organization_list_found = False
following_status = FOLLOWING
follow_organization_list = {}
try:
follow_organization_list = FollowOrganization.objects.all()
follow_organization_list = follow_organization_list.filter(organization_we_vote_id=organization_we_vote_id)
follow_organization_list = follow_organization_list.filter(following_status=following_status)
if len(follow_organization_list):
follow_organization_list_found = True
except Exception as e:
pass
if follow_organization_list_found:
return follow_organization_list
else:
follow_organization_list = {}
return follow_organization_list
class SuggestedIssueToFollow(models.Model):
"""
This table stores possible suggested issues to follow
"""
viewer_voter_we_vote_id = models.CharField(
verbose_name="voter we vote id", max_length=255, null=True, blank=True, unique=False)
issue_we_vote_id = models.CharField(
verbose_name="issue we vote id", max_length=255, null=True, blank=True, unique=False)
# organization_we_vote_id_making_suggestion = models.CharField(
# verbose_name="organization we vote id making decision", max_length=255, null=True, blank=True, unique=False)
# from_twitter = models.BooleanField(verbose_name="from twitter", default=False)
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=True, auto_now=True)
# def fetch_other_organization_we_vote_id(self, one_we_vote_id):
# if one_we_vote_id == self.viewer_voter_we_vote_id:
# return self.viewee_voter_we_vote_id
# else:
# # If the we_vote_id passed in wasn't found, don't return another we_vote_id
# return ""
class SuggestedOrganizationToFollow(models.Model):
"""
This table stores possible suggested organizations from Twitter ids I follow or organizations that my friends follow.
"""
viewer_voter_we_vote_id = models.CharField(
verbose_name="voter we vote id person 1", max_length=255, null=True, blank=True, unique=False)
organization_we_vote_id = models.CharField(
verbose_name="organization we vote id person 2", max_length=255, null=True, blank=True, unique=False)
# organization_we_vote_id_making_suggestion = models.CharField(
# verbose_name="organization we vote id making decision", max_length=255, null=True, blank=True, unique=False)
from_twitter = models.BooleanField(verbose_name="from twitter", default=False)
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=True, auto_now=True)
def fetch_other_organization_we_vote_id(self, one_we_vote_id):
if one_we_vote_id == self.viewer_voter_we_vote_id:
return self.organization_we_vote_id
else:
# If the we_vote_id passed in wasn't found, don't return another we_vote_id
return ""<|fim▁end|> | count_result = None
try:
count_query = FollowIssue.objects.using('readonly').all() |
<|file_name|>isr.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! This file is not part of the zinc crate; it is linked separately, alongside
//! the ISRs for the platform.
#![allow(missing_docs)]
#[cfg(feature = "cpu_cortex-m0")]
#[path="cortex_m0/isr.rs"] pub mod isr_cortex_m0;
#[cfg(feature = "cpu_cortex-m3")]
#[path="cortex_m3/isr.rs"] pub mod isr_cortex_m3;
#[cfg(feature = "cpu_cortex-m4")]
#[path="cortex_m3/isr.rs"] pub mod isr_cortex_m4;
#[cfg(feature = "mcu_lpc17xx")]<|fim▁hole|>
#[cfg(feature = "mcu_tiva_c")]
#[path="tiva_c/isr.rs"] pub mod isr_tiva_c;<|fim▁end|> | #[path="lpc17xx/isr.rs"] pub mod isr_lpc17xx;
#[cfg(feature = "mcu_k20")]
#[path="k20/isr.rs"] pub mod isr_k20; |
<|file_name|>SchemaColumn.java<|end_file_name|><|fim▁begin|>/* This file is part of VoltDB.
* Copyright (C) 2008-2013 VoltDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
package org.voltdb.plannodes;<|fim▁hole|>import org.json_voltpatches.JSONObject;
import org.json_voltpatches.JSONStringer;
import org.voltdb.VoltType;
import org.voltdb.catalog.Database;
import org.voltdb.expressions.AbstractExpression;
import org.voltdb.expressions.TupleValueExpression;
/**
* This class encapsulates the data and operations needed to track columns
* in the planner.
*
*/
public class SchemaColumn
{
public enum Members {
COLUMN_NAME,
EXPRESSION,
}
/**
* Create a new SchemaColumn
* @param tableName The name of the table where this column originated,
* if any. Currently, internally created columns will be assigned
* the table name "VOLT_TEMP_TABLE" for disambiguation.
* @param columnName The name of this column, if any
* @param columnAlias The alias assigned to this column, if any
* @param expression The input expression which generates this
* column. SchemaColumn needs to have exclusive ownership
* so that it can adjust the index of any TupleValueExpressions
* without affecting other nodes/columns/plan iterations, so
* it clones this expression.
*/
public SchemaColumn(String tableName, String columnName,
String columnAlias, AbstractExpression expression)
{
m_tableName = tableName;
m_columnName = columnName;
m_columnAlias = columnAlias;
try
{
m_expression = (AbstractExpression) expression.clone();
}
catch (CloneNotSupportedException e)
{
throw new RuntimeException(e.getMessage());
}
}
/**
* Clone a schema column
*/
@Override
protected SchemaColumn clone()
{
return new SchemaColumn(m_tableName, m_columnName, m_columnAlias,
m_expression);
}
/**
* Return a copy of this SchemaColumn, but with the input expression
* replaced by an appropriate TupleValueExpression.
*/
public SchemaColumn copyAndReplaceWithTVE()
{
TupleValueExpression new_exp = null;
if (m_expression instanceof TupleValueExpression)
{
try
{
new_exp = (TupleValueExpression) m_expression.clone();
}
catch (CloneNotSupportedException e)
{
throw new RuntimeException(e.getMessage());
}
}
else
{
new_exp = new TupleValueExpression();
// XXX not sure this is right
new_exp.setTableName(m_tableName);
new_exp.setColumnName(m_columnName);
new_exp.setColumnAlias(m_columnAlias);
new_exp.setValueType(m_expression.getValueType());
new_exp.setValueSize(m_expression.getValueSize());
}
return new SchemaColumn(m_tableName, m_columnName, m_columnAlias,
new_exp);
}
public String getTableName()
{
return m_tableName;
}
public String getColumnName()
{
return m_columnName;
}
public String getColumnAlias()
{
return m_columnAlias;
}
public AbstractExpression getExpression()
{
return m_expression;
}
public VoltType getType()
{
return m_expression.getValueType();
}
public int getSize()
{
return m_expression.getValueSize();
}
/**
* Check if this SchemaColumn provides the column specified by the input
* arguments. A match is defined as matching both the table name and
* the column name if it is provided, otherwise matching the provided alias.
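* Example (illustrative; the literal names are hypothetical):
*   col.matches("VOTES", "VOTER_ID", null)     // true when table and column match
*   col.matches("VOTES", null, "TOTAL_VOTES")  // falls back to comparing the alias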
* @param tableName
* @param columnName
* @param columnAlias
* @return
*/
public boolean matches(String tableName, String columnName,
String columnAlias)
{
boolean retval = false;
if (tableName.equals(m_tableName))
{
if (columnName != null && !columnName.equals(""))
{
if (columnName.equals(m_columnName))
{
retval = true;
}
}
else if (columnAlias != null && !columnAlias.equals(""))
{
if (columnAlias.equals(m_columnAlias))
{
retval = true;
}
}
else if (tableName.equals("VOLT_TEMP_TABLE"))
{
retval = true;
}
else
{
throw new RuntimeException("Attempted to match a SchemaColumn " +
"but provided no name or alias.");
}
}
return retval;
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append("SchemaColumn:\n");
sb.append("\tTable Name: ").append(m_tableName).append("\n");
sb.append("\tColumn Name: ").append(m_columnName).append("\n");
sb.append("\tColumn Alias: ").append(m_columnAlias).append("\n");
sb.append("\tColumn Type: ").append(getType()).append("\n");
sb.append("\tColumn Size: ").append(getSize()).append("\n");
sb.append("\tExpression: ").append(m_expression.toString()).append("\n");
return sb.toString();
}
public void toJSONString(JSONStringer stringer) throws JSONException
{
stringer.object();
// Tell the EE that the column name is either a valid column
// alias or the original column name if no alias exists. This is a
// bit hacky, but it's the easiest way for the EE to generate
// a result set that has all the aliases that may have been specified
// by the user (thanks to chains of setOutputTable(getInputTable))
if (getColumnAlias() != null && !getColumnAlias().equals(""))
{
stringer.key(Members.COLUMN_NAME.name()).value(getColumnAlias());
}
else if (getColumnName() != null) {
stringer.key(Members.COLUMN_NAME.name()).value(getColumnName());
}
else
{
stringer.key(Members.COLUMN_NAME.name()).value("");
}
if (m_expression != null) {
stringer.key(Members.EXPRESSION.name());
stringer.object();
m_expression.toJSONString(stringer);
stringer.endObject();
}
else
{
stringer.key(Members.EXPRESSION.name()).value("");
}
stringer.endObject();
}
public static SchemaColumn fromJSONObject( JSONObject jobj, Database db ) throws JSONException {
String tableName = "";
String columnName = "";
String columnAlias = "";
AbstractExpression expression = null;
if( !jobj.isNull( Members.COLUMN_NAME.name() ) ){
columnName = jobj.getString( Members.COLUMN_NAME.name() );
}
if( !jobj.isNull( Members.EXPRESSION.name() ) ) {
expression = AbstractExpression.fromJSONObject( jobj.getJSONObject( Members.EXPRESSION.name() ), db);
}
return new SchemaColumn( tableName, columnName, columnAlias, expression );
}
private String m_tableName;
private String m_columnName;
private String m_columnAlias;
private AbstractExpression m_expression;
}<|fim▁end|> |
import org.json_voltpatches.JSONException; |
<|file_name|>TempFileUtilsTest.java<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2017 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.api.utils;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
public class TempFileUtilsTest {
@Test
public void createTempDirectory() throws IOException {
File dir = TempFileUtils.createTempDirectory();
try {
assertThat(dir.exists(), is(true));
assertThat(dir.isDirectory(), is(true));
assertThat(dir.listFiles().length, is(0));<|fim▁hole|>
} finally {
FileUtils.deleteDirectory(dir);
}
}
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Combines sequences of arithmetic or logical instructions into single instructions.
//! For every instruction, try to combine one of its operands into itself. This
//! transforms linear data-dependency chains into trees.
use crate::analysis::analyzer::{
Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, FuncAnalyzer,
};
use crate::frontend::radeco_containers::RadecoFunction;
use crate::middle::ir::MOpcode;
use crate::middle::ssa::ssa_traits::*;
use crate::middle::ssa::ssastorage::SSAStorage;
use either::*;
use std::any::Any;
use std::borrow::Cow;
use std::collections::HashMap;
use std::fmt;
mod combine_rules;
type SSAValue = <SSAStorage as SSA>::ValueRef;
/// Represents binary operations that are effectively unary because one of the
/// operands is constant. In other words, represents curried binary operations.
/// e.g. `(|x| 7 & x)`, `(|x| x + 3)`, etc.
#[derive(Clone)]
pub struct CombinableOpInfo(MOpcode, CombinableOpConstInfo);
#[derive(Clone, Debug)]
pub enum CombinableOpConstInfo {
/// no const, like in `(|x| !x)`
Unary,
/// const is on the left, like in `(|x| 3 - x)`
Left(u64),
/// const is on the right, like in `(|x| x - 3)`
Right(u64),
}
#[derive(Debug)]
pub struct CombineChange {
/// Index of the node to combine.
node: SSAValue,
/// Left: The node and one of its args were combined. The tuple contains: the
/// structure of the combined node (if the node is a no-op then `None` is present)
/// and the index of the non-const operand of the combined node.
/// Right: The node and its args were combined into a constant value.
res: Either<(Option<CombinableOpInfo>, SSAValue), u64>,
}
impl Change for CombineChange {
fn as_any(&self) -> &dyn Any {
self
}
}
enum CombErr {
/// The op-info is not combinable.
NoComb,
/// Skip this substitution.
Skip,
/// An abort request was raised.
Abort,
}
const NAME: &str = "combiner";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::Combiner,
requires: REQUIRES,
uses_policy: true,
};
#[derive(Debug)]
pub struct Combiner {
/// Nodes that could potentially be combined into another
combine_candidates: HashMap<SSAValue, (SSAValue, CombinableOpInfo)>,
}
impl Combiner {
pub fn new() -> Self {
Combiner {
combine_candidates: HashMap::new(),
}
}
/// Returns `Some(new_node)` if one of `cur_node`'s operands can be combined
/// into `cur_node`, resulting in `new_node`; or, if `cur_node` could be
/// simplified, resulting in `new_node`. In both cases `Action::Apply` was returned
/// by the policy function.
/// Returns `Err(CombErr::NoComb)` if no simplification can occur.
/// Returns `Err(CombErr::Skip)` if the policy function returned `Action::Skip`.
/// Returns `Err(CombErr::Abort)` if the policy function returned `Action::Abort`.
fn visit_node<T: FnMut(Box<dyn Change>) -> Action>(
&mut self,
cur_node: SSAValue,
ssa: &mut SSAStorage,
policy: &mut T,
) -> Result<SSAValue, CombErr> {
// bail if non-combinable
let extracted = extract_opinfo(cur_node, ssa).ok_or(CombErr::NoComb)?;
match extracted {
Left((sub_node, cur_opinfo, cur_vt)) => {
radeco_trace!(
"trying to combine ({:?} = {:?} {:?})",
cur_node,
sub_node,
cur_opinfo
);
let opt_new_node =
self.make_combined_node(cur_node, &cur_opinfo, cur_vt, sub_node, ssa, policy);
match opt_new_node {
Ok(Left((new_node, new_sub_node, new_opinfo))) => {
radeco_trace!(
" {:?} ==> ({:?} = {:?} {:?})",
cur_node,
new_node,
new_sub_node,
new_opinfo
);
self.combine_candidates
.insert(new_node, (new_sub_node, new_opinfo));
Ok(new_node)
}
Ok(Right(new_node)) => {
radeco_trace!(" {:?} ==> no-op", cur_node);
Ok(new_node)
}
Err(comb_err) => {
if let CombErr::NoComb = comb_err {
// no change; still add to `combine_candidates`
self.combine_candidates
.insert(cur_node, (sub_node, cur_opinfo));
}
Err(comb_err)
}
}
}
Right(c_val) => {
let action = policy(Box::new(CombineChange {
node: cur_node,
res: Right(c_val),
}));
match action {
Action::Apply => {
// combined to constant
radeco_trace!("{:?} = {:#x}", cur_node, c_val);
let c_node = ssa.insert_const(c_val, None).ok_or(CombErr::NoComb)?;
Ok(c_node)
}
Action::Skip => Err(CombErr::Skip),
Action::Abort => Err(CombErr::Abort),
}
}
}
}
/// Returns `Left(new_node, new_sub_node, new_opinfo)` if `cur_opinfo`
/// combined with an operand or if `cur_opinfo` was simplified.
/// Returns `Right(new_node)` if `cur_opinfo` canceled with an
/// operand to make a no-op or was originally a no-op.
/// Returns `Err(CombErr::NoComb)` if no combination or simplification exists.
/// Returns `Err(CombErr::Skip)` if the policy function returned `Action::Skip`.
/// Returns `Err(CombErr::Abort)` if the policy function returned `Action::Abort`.
fn make_combined_node<T: FnMut(Box<dyn Change>) -> Action>(
&self,
cur_node: SSAValue,
cur_opinfo: &CombinableOpInfo,
cur_vt: ValueInfo,
sub_node: SSAValue,
ssa: &mut SSAStorage,
policy: &mut T,
) -> Result<Either<(SSAValue, SSAValue, CombinableOpInfo), SSAValue>, CombErr> {
let (new_opinfo, new_sub_node) = self
.combine_opinfo(cur_opinfo, sub_node)
.map(|(oi, sn)| (Cow::Owned(oi), sn))
.unwrap_or((Cow::Borrowed(cur_opinfo), sub_node));
// simplify
match simplify_opinfo(&new_opinfo) {
Some(Some(simpl_new_opinfo)) => {
let action = policy(Box::new(CombineChange {
node: cur_node,
res: Left((Some(simpl_new_opinfo.clone()), new_sub_node)),
}));
match action {
Action::Apply => {
radeco_trace!(
" simplified ({:?}) into ({:?})",
new_opinfo,
simpl_new_opinfo
);
// make the new node
let new_node =
make_opinfo_node(cur_vt, simpl_new_opinfo.clone(), new_sub_node, ssa)
.ok_or(CombErr::NoComb)?;
Ok(Left((new_node, new_sub_node, simpl_new_opinfo)))
}
Action::Skip => Err(CombErr::Skip),
Action::Abort => Err(CombErr::Abort),
}
}
Some(None) => {
let action = policy(Box::new(CombineChange {
node: cur_node,
res: Left((None, new_sub_node)),
}));
match action {
Action::Apply => {
radeco_trace!(" simplified ({:?}) into no-op", new_opinfo);
Ok(Right(new_sub_node))
}
Action::Skip => Err(CombErr::Skip),
Action::Abort => Err(CombErr::Abort),
}
}
None => {
// no simplification
match new_opinfo {
Cow::Borrowed(_) => Err(CombErr::NoComb),
Cow::Owned(new_opinfo) => {
let action = policy(Box::new(CombineChange {
node: cur_node,
res: Left((Some(new_opinfo.clone()), new_sub_node)),
}));
match action {
Action::Apply => {
// combined, but no further simplification
let new_node =
make_opinfo_node(cur_vt, new_opinfo.clone(), new_sub_node, ssa)
.ok_or(CombErr::NoComb)?;
Ok(Left((new_node, new_sub_node, new_opinfo)))
}
Action::Skip => Err(CombErr::Skip),
Action::Abort => Err(CombErr::Abort),
}
}
}
}
}
}
/// Tries to combine `sub_node` into `cur_opinfo`.
/// Returns `None` if no combination exists.
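/// For example (illustrative): if `sub_node` was produced by `(|x| x + 3)` and
/// `cur_opinfo` is `(|x| x + 2)`, the combined op-info is `(|x| x + 5)`, now
/// applied directly to `sub_node`'s own operand.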
fn combine_opinfo(
&self,
cur_opinfo: &CombinableOpInfo,
sub_node: SSAValue,
) -> Option<(CombinableOpInfo, SSAValue)> {
let &(sub_sub_node, ref sub_opinfo) = self.combine_candidates.get(&sub_node)?;
let new_opinfo = combine_rules::combine_opinfo(cur_opinfo, sub_opinfo)?;
radeco_trace!(
" combined ({:?} {:?}) into ({:?})",
sub_opinfo,
cur_opinfo,
new_opinfo
);
Some((new_opinfo, sub_sub_node))
}
}
impl Analyzer for Combiner {
fn info(&self) -> &'static AnalyzerInfo {
&INFO
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl FuncAnalyzer for Combiner {
fn analyze<T: FnMut(Box<dyn Change>) -> Action>(
&mut self,
func: &mut RadecoFunction,
policy: Option<T>,
) -> Option<Box<dyn AnalyzerResult>> {
let ssa = func.ssa_mut();
let mut policy = policy.expect("A policy function must be provided");
for node in ssa.inorder_walk() {
let res = self.visit_node(node, ssa, &mut policy);
if let Ok(repl_node) = res {
let blk = ssa.block_for(node).unwrap();
let addr = ssa.address(node).unwrap();
ssa.replace_value(node, repl_node);
if !ssa.is_constant(repl_node) && ssa.address(repl_node).is_none() {
ssa.insert_into_block(repl_node, blk, addr);
}
}
if let Err(CombErr::Abort) = res {
return None;
}
}
None
}
}
/// Creates an SSA node from the given `CombinableOpInfo`.
/// Returns `None` on SSA error.
fn make_opinfo_node(
vt: ValueInfo,
opinfo: CombinableOpInfo,
sub_node: SSAValue,
ssa: &mut SSAStorage,
) -> Option<SSAValue> {
use self::CombinableOpConstInfo as COCI;
let ret = ssa.insert_op(opinfo.0, vt, None)?;
match opinfo.1 {
COCI::Unary => {
ssa.op_use(ret, 0, sub_node);
}
COCI::Left(new_c) => {
let new_cnode = ssa.insert_const(new_c, None)?;
ssa.op_use(ret, 0, new_cnode);
ssa.op_use(ret, 1, sub_node);
}
COCI::Right(new_c) => {
let new_cnode = ssa.insert_const(new_c, None)?;
ssa.op_use(ret, 0, sub_node);
ssa.op_use(ret, 1, new_cnode);
}
};
Some(ret)
}
/// Returns an equivalent `CombinableOpInfo`, but "simpler" in some sense.
/// Currently, this converts `OpAdd`s or `OpSub`s with "negative" constants into
/// equivalent operations with positive constants.
/// Returns `Some(None)` if `info` is a no-op.
/// Returns `None` if no simplification exists.
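/// For instance (illustrative), `(|x| x + 0xFFFF_FFFF_FFFF_FFFF)` is the
/// two's-complement form of `(|x| x - 1)`, so it is rewritten to the latter.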
fn simplify_opinfo(info: &CombinableOpInfo) -> Option<Option<CombinableOpInfo>> {
use self::CombinableOpConstInfo as COCI;
use self::CombinableOpInfo as COI;
use crate::middle::ir::MOpcode::*;
match info {
COI(OpAdd, COCI::Left(0))
| COI(OpAdd, COCI::Right(0))
| COI(OpSub, COCI::Right(0))
| COI(OpAnd, COCI::Left(0xFFFFFFFFFFFFFFFF))
| COI(OpAnd, COCI::Right(0xFFFFFFFFFFFFFFFF))
| COI(OpOr, COCI::Left(0))
| COI(OpOr, COCI::Right(0))
| COI(OpXor, COCI::Left(0))
| COI(OpXor, COCI::Right(0)) => Some(None),
COI(OpAdd, COCI::Left(c)) | COI(OpAdd, COCI::Right(c)) if *c > u64::max_value() / 2 => {
let c = OpSub.eval_binop(0, *c).unwrap();
Some(Some(COI(OpSub, COCI::Right(c))))
}
COI(OpSub, COCI::Right(c)) if *c > u64::max_value() / 2 => {
let c = OpSub.eval_binop(0, *c).unwrap();
Some(Some(COI(OpAdd, COCI::Left(c))))
}
_ => None,
}
}
/// Returns `Some(Left(_))` if `cur_node` has exactly one non-const operand.
/// Returns `Some(Right(_))` if `cur_node` has all const operands.
/// Returns `None` if `cur_node` is non-combinable.
fn extract_opinfo(
cur_node: SSAValue,
ssa: &SSAStorage,
) -> Option<Either<(SSAValue, CombinableOpInfo, ValueInfo), u64>> {
use self::CombinableOpConstInfo as COCI;
// bail if non-`NodeType::Op`
let (cur_opcode, cur_vt) = extract_opcode(cur_node, ssa)?;
let cur_operands = ssa.operands_of(cur_node);
match cur_operands.as_slice() {
&[sub_node] => {
let cur_opinfo = CombinableOpInfo(cur_opcode, COCI::Unary);
Some(Left((sub_node, cur_opinfo, cur_vt)))
}
&[sub_node1, sub_node2] => {
match (ssa.constant(sub_node1), ssa.constant(sub_node2)) {
(Some(c1), Some(c2)) => {
// this is const_prop's job, but we can do this here too
// bail if `cur_opcode` is non-evalable, since that also
// implies it's non-combinable
let res_val = cur_opcode.eval_binop(c1, c2)?;
Some(Right(res_val))
}
(None, Some(c)) => {
let cur_opinfo = CombinableOpInfo(cur_opcode, COCI::Right(c));
Some(Left((sub_node1, cur_opinfo, cur_vt)))
}
(Some(c), None) => {
let cur_opinfo = CombinableOpInfo(cur_opcode, COCI::Left(c));
Some(Left((sub_node2, cur_opinfo, cur_vt)))
}
(None, None) => None,
}
}
_ => None,
}
}
fn extract_opcode(node: SSAValue, ssa: &SSAStorage) -> Option<(MOpcode, ValueInfo)> {
if let NodeData {
vt,
nt: NodeType::Op(opcode),
} = ssa.node_data(node).ok()?
{
Some((opcode, vt))
} else {
None
}
}<|fim▁hole|>
match self.1 {
COCI::Unary => fmt.write_fmt(format_args!("-> ({:?} .)", self.0)),
COCI::Left(c) => fmt.write_fmt(format_args!("-> ({:#x} {:?} .)", c, self.0)),
COCI::Right(c) => fmt.write_fmt(format_args!("-> (. {:?} {:#x})", self.0, c)),
}
}
}<|fim▁end|> |
impl fmt::Debug for CombinableOpInfo {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
use self::CombinableOpConstInfo as COCI; |
<|file_name|>struct-no-fields-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// except according to those terms.
struct Foo;
fn g3() {
let _mid_tuple = (Foo { }, 2);
//~^ ERROR: structure literal must either have at least one field
}
fn main() {}<|fim▁end|> | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup, find_packages
#from pip.req import parse_requirements  # unused; pip.req was removed in modern pip
#REQUIREMENTS_FILE = os.path.join( os.path.dirname(__file__), 'requirements.openshift.txt')
PROJECT_NAME = '<your-project-name>'
AUTHOR_NAME = '<your-name>'
AUTHOR_EMAIL = '<your-email-address>'
PROJECT_URL = ''
DESCRIPTION = '<your-project-description>'
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
setup(name=PROJECT_NAME,<|fim▁hole|> author_email=AUTHOR_EMAIL,
url=PROJECT_URL,
packages=find_packages(),
include_package_data=True,
description=DESCRIPTION,
)<|fim▁end|> | version='1.0',
author=AUTHOR_NAME, |
<|file_name|>gr-thread-list_test.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import '../../../test/common-test-setup-karma';
import './gr-thread-list';
import {CommentSide, SpecialFilePath} from '../../../constants/constants';
import {CommentTabState} from '../../../types/events';
import {
compareThreads,
GrThreadList,
__testOnly_SortDropdownState,
} from './gr-thread-list';
import {queryAll} from '../../../test/test-utils';
import {accountOrGroupKey} from '../../../utils/account-util';
import {tap} from '@polymer/iron-test-helpers/mock-interactions';
import {
createAccountDetailWithId,
createParsedChange,
createThread,
} from '../../../test/test-data-generators';
import {
AccountId,
NumericChangeId,
PatchSetNum,
Timestamp,
} from '../../../api/rest-api';
import {RobotId, UrlEncodedCommentId} from '../../../types/common';
import {CommentThread} from '../../../utils/comment-util';
import {query, queryAndAssert} from '../../../utils/common-util';
import {GrAccountLabel} from '../../shared/gr-account-label/gr-account-label';
const basicFixture = fixtureFromElement('gr-thread-list');
suite('gr-thread-list tests', () => {
let element: GrThreadList;
setup(async () => {
element = basicFixture.instantiate();
element.changeNum = 123 as NumericChangeId;
element.change = createParsedChange();
element.account = createAccountDetailWithId();
element.threads = [
{
comments: [
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000001 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 4 as PatchSetNum,
id: 'ecf0b9fa_fe1a5f62' as UrlEncodedCommentId,
line: 5,
updated: '2015-12-01 15:15:15.000000000' as Timestamp,
message: 'test',
unresolved: true,
},
{
id: '503008e2_0ab203ee' as UrlEncodedCommentId,
path: '/COMMIT_MSG',
line: 5,
in_reply_to: 'ecf0b9fa_fe1a5f62' as UrlEncodedCommentId,
updated: '2015-12-01 15:16:15.000000000' as Timestamp,
message: 'draft',
unresolved: true,
__draft: true,
patch_set: '2' as PatchSetNum,
},
],
patchNum: 4 as PatchSetNum,
path: '/COMMIT_MSG',
line: 5,
rootId: 'ecf0b9fa_fe1a5f62' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
path: 'test.txt',
author: {
_account_id: 1000002 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 3 as PatchSetNum,
id: '09a9fb0a_1484e6cf' as UrlEncodedCommentId,
updated: '2015-12-02 15:16:15.000000000' as Timestamp,
message: 'Some comment on another patchset.',
unresolved: false,
},
],
patchNum: 3 as PatchSetNum,
path: 'test.txt',
rootId: '09a9fb0a_1484e6cf' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000002 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 2 as PatchSetNum,
id: '8caddf38_44770ec1' as UrlEncodedCommentId,
updated: '2015-12-03 15:16:15.000000000' as Timestamp,
message: 'Another unresolved comment',
unresolved: false,
},
],
patchNum: 2 as PatchSetNum,
path: '/COMMIT_MSG',
rootId: '8caddf38_44770ec1' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000003 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 2 as PatchSetNum,
id: 'scaddf38_44770ec1' as UrlEncodedCommentId,
line: 4,
updated: '2015-12-04 15:16:15.000000000' as Timestamp,
message: 'Yet another unresolved comment',
unresolved: true,
},
],
patchNum: 2 as PatchSetNum,
path: '/COMMIT_MSG',
line: 4,
rootId: 'scaddf38_44770ec1' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
id: 'zcf0b9fa_fe1a5f62' as UrlEncodedCommentId,
path: '/COMMIT_MSG',
line: 6,
updated: '2015-12-05 15:16:15.000000000' as Timestamp,
message: 'resolved draft',
unresolved: false,
__draft: true,
patch_set: '2' as PatchSetNum,
},
],
patchNum: 4 as PatchSetNum,
path: '/COMMIT_MSG',
line: 6,
rootId: 'zcf0b9fa_fe1a5f62' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
id: 'patchset_level_1' as UrlEncodedCommentId,
path: SpecialFilePath.PATCHSET_LEVEL_COMMENTS,
updated: '2015-12-06 15:16:15.000000000' as Timestamp,
message: 'patchset comment 1',
unresolved: false,
patch_set: '2' as PatchSetNum,
},
],
patchNum: 2 as PatchSetNum,
path: SpecialFilePath.PATCHSET_LEVEL_COMMENTS,
rootId: 'patchset_level_1' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
id: 'patchset_level_2' as UrlEncodedCommentId,
path: SpecialFilePath.PATCHSET_LEVEL_COMMENTS,
updated: '2015-12-07 15:16:15.000000000' as Timestamp,
message: 'patchset comment 2',
unresolved: false,
patch_set: '3' as PatchSetNum,
},
],
patchNum: 3 as PatchSetNum,
path: SpecialFilePath.PATCHSET_LEVEL_COMMENTS,
rootId: 'patchset_level_2' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000000 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 4 as PatchSetNum,
id: 'rc1' as UrlEncodedCommentId,
line: 5,
updated: '2015-12-08 15:16:15.000000000' as Timestamp,
message: 'test',
unresolved: true,
robot_id: 'rc1' as RobotId,
},
],
patchNum: 4 as PatchSetNum,
path: '/COMMIT_MSG',
line: 5,
rootId: 'rc1' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
{
comments: [
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000000 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 4 as PatchSetNum,
id: 'rc2' as UrlEncodedCommentId,
line: 7,
updated: '2015-12-09 15:16:15.000000000' as Timestamp,
message: 'test',
unresolved: true,
robot_id: 'rc2' as RobotId,
},
{
path: '/COMMIT_MSG',
author: {
_account_id: 1000000 as AccountId,
name: 'user',
username: 'user',
},
patch_set: 4 as PatchSetNum,
id: 'c2_1' as UrlEncodedCommentId,
line: 5,
updated: '2015-12-10 15:16:15.000000000' as Timestamp,
message: 'test',
unresolved: true,
},
],
patchNum: 4 as PatchSetNum,
path: '/COMMIT_MSG',
line: 7,
rootId: 'rc2' as UrlEncodedCommentId,
commentSide: CommentSide.REVISION,
},
];
await element.updateComplete;
});
suite('sort threads', () => {
test('sort all threads', () => {
element.sortDropdownValue = __testOnly_SortDropdownState.FILES;
assert.equal(element.getDisplayedThreads().length, 9);
const expected: UrlEncodedCommentId[] = [
'patchset_level_2' as UrlEncodedCommentId, // Posted on Patchset 3
'patchset_level_1' as UrlEncodedCommentId, // Posted on Patchset 2
'8caddf38_44770ec1' as UrlEncodedCommentId, // File level on COMMIT_MSG
'scaddf38_44770ec1' as UrlEncodedCommentId, // Line 4 on COMMIT_MSG
'rc1' as UrlEncodedCommentId, // Line 5 on COMMIT_MESSAGE newer
'ecf0b9fa_fe1a5f62' as UrlEncodedCommentId, // Line 5 on COMMIT_MESSAGE older
'zcf0b9fa_fe1a5f62' as UrlEncodedCommentId, // Line 6 on COMMIT_MSG
'rc2' as UrlEncodedCommentId, // Line 7 on COMMIT_MSG
'09a9fb0a_1484e6cf' as UrlEncodedCommentId, // File level on test.txt
];
const actual = element.getDisplayedThreads().map(t => t.rootId);
assert.sameOrderedMembers(actual, expected);
});
test('sort all threads by timestamp', () => {
element.sortDropdownValue = __testOnly_SortDropdownState.TIMESTAMP;
assert.equal(element.getDisplayedThreads().length, 9);
const expected: UrlEncodedCommentId[] = [
'rc2' as UrlEncodedCommentId,
'rc1' as UrlEncodedCommentId,
'patchset_level_2' as UrlEncodedCommentId,
'patchset_level_1' as UrlEncodedCommentId,
'zcf0b9fa_fe1a5f62' as UrlEncodedCommentId,
'scaddf38_44770ec1' as UrlEncodedCommentId,
'8caddf38_44770ec1' as UrlEncodedCommentId,
'09a9fb0a_1484e6cf' as UrlEncodedCommentId,
'ecf0b9fa_fe1a5f62' as UrlEncodedCommentId,
];
const actual = element.getDisplayedThreads().map(t => t.rootId);
assert.sameOrderedMembers(actual, expected);
});
});
test('renders', async () => {
await element.updateComplete;
expect(element).shadowDom.to.equal(/* HTML */ `
<div class="header">
<span class="sort-text">Sort By:</span>
<gr-dropdown-list id="sortDropdown"></gr-dropdown-list>
<span class="separator"></span>
<span class="filter-text">Filter By:</span>
<gr-dropdown-list id="filterDropdown"></gr-dropdown-list>
<span class="author-text">From:</span>
<gr-account-label
deselected=""
selectionchipstyle=""
nostatusicons=""
></gr-account-label>
<gr-account-label
deselected=""
selectionchipstyle=""
nostatusicons=""
></gr-account-label>
<gr-account-label
deselected=""
selectionchipstyle=""
nostatusicons=""
></gr-account-label>
<gr-account-label
deselected=""
selectionchipstyle=""
nostatusicons=""
></gr-account-label>
<gr-account-label
deselected=""
selectionchipstyle=""
nostatusicons=""
></gr-account-label>
</div>
<div id="threads" part="threads">
<gr-comment-thread
show-file-name=""
show-file-path=""
></gr-comment-thread>
<gr-comment-thread show-file-path=""></gr-comment-thread>
<div class="thread-separator"></div>
<gr-comment-thread
show-file-name=""
show-file-path=""
></gr-comment-thread>
<gr-comment-thread show-file-path=""></gr-comment-thread>
<div class="thread-separator"></div>
<gr-comment-thread
has-draft=""
show-file-name=""
show-file-path=""
></gr-comment-thread>
<gr-comment-thread show-file-path=""></gr-comment-thread>
<gr-comment-thread show-file-path=""></gr-comment-thread>
<div class="thread-separator"></div>
<gr-comment-thread
show-file-name=""
show-file-path=""
></gr-comment-thread>
<div class="thread-separator"></div>
<gr-comment-thread
has-draft=""
show-file-name=""
show-file-path=""
></gr-comment-thread>
</div>
`);
});
test('renders empty', async () => {
element.threads = [];
await element.updateComplete;
expect(queryAndAssert(element, 'div#threads')).dom.to.equal(/* HTML */ `
<div id="threads" part="threads">
<div><span>No comments</span></div>
</div>
`);
});
test('tapping single author chips', async () => {
element.account = createAccountDetailWithId(1);
await element.updateComplete;
const chips = Array.from(
queryAll<GrAccountLabel>(element, 'gr-account-label')
);
const authors = chips.map(chip => accountOrGroupKey(chip.account!)).sort();
assert.deepEqual(authors, [
1 as AccountId,
1000000 as AccountId,
1000001 as AccountId,
1000002 as AccountId,
1000003 as AccountId,
]);
assert.equal(element.threads.length, 9);
assert.equal(element.getDisplayedThreads().length, 9);
const chip = chips.find(chip => chip.account!._account_id === 1000001);
tap(chip!);
await element.updateComplete;
assert.equal(element.threads.length, 9);
assert.equal(element.getDisplayedThreads().length, 1);
assert.equal(
element.getDisplayedThreads()[0].comments[0].author?._account_id,
1000001 as AccountId
);
tap(chip!);
await element.updateComplete;
assert.equal(element.threads.length, 9);
assert.equal(element.getDisplayedThreads().length, 9);
});
test('tapping multiple author chips', async () => {
element.account = createAccountDetailWithId(1);
await element.updateComplete;
const chips = Array.from(
queryAll<GrAccountLabel>(element, 'gr-account-label')
);
tap(chips.find(chip => chip.account?._account_id === 1000001)!);
tap(chips.find(chip => chip.account?._account_id === 1000002)!);
await element.updateComplete;
assert.equal(element.threads.length, 9);
assert.equal(element.getDisplayedThreads().length, 3);
assert.equal(
element.getDisplayedThreads()[0].comments[0].author?._account_id,
1000002 as AccountId
);
assert.equal(
element.getDisplayedThreads()[1].comments[0].author?._account_id,
1000002 as AccountId
);
assert.equal(
element.getDisplayedThreads()[2].comments[0].author?._account_id,
1000001 as AccountId
);
});
test('show all comments', async () => {
const event = new CustomEvent('value-changed', {
detail: {value: CommentTabState.SHOW_ALL},
});
element.handleCommentsDropdownValueChange(event);
await element.updateComplete;
assert.equal(element.getDisplayedThreads().length, 9);
});
test('unresolved shows all unresolved comments', async () => {
const event = new CustomEvent('value-changed', {
detail: {value: CommentTabState.UNRESOLVED},
});
element.handleCommentsDropdownValueChange(event);
await element.updateComplete;
assert.equal(element.getDisplayedThreads().length, 4);
});
test('toggle drafts only shows threads with draft comments', async () => {
const event = new CustomEvent('value-changed', {
detail: {value: CommentTabState.DRAFTS},
});
element.handleCommentsDropdownValueChange(event);
await element.updateComplete;
assert.equal(element.getDisplayedThreads().length, 2);
});
suite('hideDropdown', () => {
test('header hidden for hideDropdown=true', async () => {
element.hideDropdown = true;
await element.updateComplete;
assert.isUndefined(query(element, '.header'));<|fim▁hole|>
test('header shown for hideDropdown=false', async () => {
element.hideDropdown = false;
await element.updateComplete;
assert.isDefined(query(element, '.header'));
});
});
suite('empty thread', () => {
setup(async () => {
element.threads = [];
await element.updateComplete;
});
test('default empty message should show', () => {
const threadsEl = queryAndAssert(element, '#threads');
assert.isTrue(threadsEl.textContent?.trim().includes('No comments'));
});
});
});
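// compareThreads defines the display order checked below: patchset-level
// comments first, then paths lexicographically, newer patch sets before older
// ones, file-level comments before line comments, and lower lines first.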
suite('compareThreads', () => {
let t1: CommentThread;
let t2: CommentThread;
const sortPredicate = (thread1: CommentThread, thread2: CommentThread) =>
compareThreads(thread1, thread2);
const checkOrder = (expected: CommentThread[]) => {
assert.sameOrderedMembers([t1, t2].sort(sortPredicate), expected);
assert.sameOrderedMembers([t2, t1].sort(sortPredicate), expected);
};
setup(() => {
t1 = createThread({});
t2 = createThread({});
});
test('patchset-level before file comments', () => {
t1.path = SpecialFilePath.PATCHSET_LEVEL_COMMENTS;
t2.path = SpecialFilePath.COMMIT_MESSAGE;
checkOrder([t1, t2]);
});
test('paths lexicographically', () => {
t1.path = 'a.txt';
t2.path = 'b.txt';
checkOrder([t1, t2]);
});
test('patchsets in reverse order', () => {
t1.patchNum = 2 as PatchSetNum;
t2.patchNum = 3 as PatchSetNum;
checkOrder([t2, t1]);
});
test('file level comment before line', () => {
t1.line = 123;
t2.line = 'FILE';
checkOrder([t2, t1]);
});
test('comments sorted by line', () => {
t1.line = 123;
t2.line = 321;
checkOrder([t1, t2]);
});
});<|fim▁end|> | }); |
<|file_name|>test-async.js<|end_file_name|><|fim▁begin|>/**
* NOTE: We are in the process of migrating these tests to Mocha. If you are
* adding a new test, consider creating a new spec file in mocha_tests/
*/
var async = require('../lib/async');
// Minimal Function.prototype.bind polyfill for older environments; like the
// native implementation, the bound function returns the target's result.
if (!Function.prototype.bind) {
    Function.prototype.bind = function (thisArg) {
        var args = Array.prototype.slice.call(arguments, 1);
        var self = this;
        return function () {
            return self.apply(thisArg, args.concat(Array.prototype.slice.call(arguments)));
        };
    };
}
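// Shared iterator helpers used throughout the suites below. The timed ones
// delay by a multiple of the item value, so completion order follows the
// values rather than the input order.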
function eachIterator(args, x, callback) {
setTimeout(function(){
args.push(x);
callback();
}, x*25);
}
function forEachOfIterator(args, value, key, callback) {
setTimeout(function(){
args.push(key, value);
callback();
}, value*25);
}
function mapIterator(call_order, x, callback) {
setTimeout(function(){
call_order.push(x);
callback(null, x*2);
}, x*25);
}
function filterIterator(x, callback) {
setTimeout(function(){
callback(x % 2);
}, x*25);
}
function detectIterator(call_order, x, callback) {
setTimeout(function(){
call_order.push(x);
callback(x == 2);
}, x*25);
}
function eachNoCallbackIterator(test, x, callback) {
test.equal(x, 1);
callback();
test.done();
}
function forEachOfNoCallbackIterator(test, x, key, callback) {
test.equal(x, 1);
test.equal(key, "a");
callback();
test.done();
}
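// Builds a keyed task object whose timeouts (125/200/50 ms) complete in the
// order three, one, two, letting tests assert both call order and named
// results.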
function getFunctionsObject(call_order) {
return {
one: function(callback){
setTimeout(function(){
call_order.push(1);
callback(null, 1);
}, 125);
},
two: function(callback){
setTimeout(function(){
call_order.push(2);
callback(null, 2);
}, 200);
},
three: function(callback){
setTimeout(function(){
call_order.push(3);
callback(null, 3,3);
}, 50);
}
};
}
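// True in browsers and browserify bundles, where node-only tests (vm,
// domains) are skipped.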
function isBrowser() {
return (typeof process === "undefined") ||
(process + "" !== "[object process]"); // browserify
}
exports['applyEach'] = function (test) {
test.expect(5);
var call_order = [];
var one = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('one');
cb(null, 1);
}, 100);
};
var two = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('two');
cb(null, 2);
}, 50);
};
var three = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('three');
cb(null, 3);
}, 150);
};
async.applyEach([one, two, three], 5, function (err) {
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, ['two', 'one', 'three']);
test.done();
});
};
exports['applyEachSeries'] = function (test) {
test.expect(5);
var call_order = [];
var one = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('one');
cb(null, 1);
}, 100);
};
var two = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('two');
cb(null, 2);
}, 50);
};
var three = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('three');
cb(null, 3);
}, 150);
};
async.applyEachSeries([one, two, three], 5, function (err) {
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, ['one', 'two', 'three']);
test.done();
});
};
exports['applyEach partial application'] = function (test) {
test.expect(4);
var call_order = [];
var one = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('one');
cb(null, 1);
}, 100);
};
var two = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('two');
cb(null, 2);
}, 50);
};
var three = function (val, cb) {
test.equal(val, 5);
setTimeout(function () {
call_order.push('three');
cb(null, 3);
}, 150);
};
async.applyEach([one, two, three])(5, function (err) {
if (err) throw err;
test.same(call_order, ['two', 'one', 'three']);
test.done();
});
};
exports['seq'] = function (test) {
test.expect(5);
var add2 = function (n, cb) {
test.equal(n, 3);
setTimeout(function () {
cb(null, n + 2);
}, 50);
};
var mul3 = function (n, cb) {
test.equal(n, 5);
setTimeout(function () {
cb(null, n * 3);
}, 15);
};
var add1 = function (n, cb) {
test.equal(n, 15);
setTimeout(function () {
cb(null, n + 1);
}, 100);
};
var add2mul3add1 = async.seq(add2, mul3, add1);
add2mul3add1(3, function (err, result) {
if (err) {
return test.done(err);
}
test.ok(err === null, err + " passed instead of 'null'");
test.equal(result, 16);
test.done();
});
};
exports['seq error'] = function (test) {
test.expect(3);
var testerr = new Error('test');
var add2 = function (n, cb) {
test.equal(n, 3);
setTimeout(function () {
cb(null, n + 2);
}, 50);
};
var mul3 = function (n, cb) {
test.equal(n, 5);
setTimeout(function () {
cb(testerr);
}, 15);
};
var add1 = function (n, cb) {
test.ok(false, 'add1 should not get called');
setTimeout(function () {
cb(null, n + 1);
}, 100);
};
var add2mul3add1 = async.seq(add2, mul3, add1);
add2mul3add1(3, function (err) {
test.equal(err, testerr);
test.done();
});
};
exports['seq binding'] = function (test) {
test.expect(4);
var testcontext = {name: 'foo'};
var add2 = function (n, cb) {
test.equal(this, testcontext);
setTimeout(function () {
cb(null, n + 2);
}, 50);
};
var mul3 = function (n, cb) {
test.equal(this, testcontext);
setTimeout(function () {
cb(null, n * 3);
}, 15);
};
var add2mul3 = async.seq(add2, mul3);
add2mul3.call(testcontext, 3, function (err, result) {
if (err) {
return test.done(err);
}
test.equal(this, testcontext);
test.equal(result, 15);
test.done();
});
};
exports['seq without callback'] = function (test) {
test.expect(2);
var testcontext = {name: 'foo'};
var add2 = function (n, cb) {
test.equal(this, testcontext);
setTimeout(function () {
cb(null, n + 2);
}, 50);
};
var mul3 = function () {
test.equal(this, testcontext);
setTimeout(function () {
test.done();
}, 15);
};
var add2mul3 = async.seq(add2, mul3);
add2mul3.call(testcontext, 3);
};
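// async.auto runs a dependency graph: a task declared as ['dep1', 'dep2', fn]
// starts only after all of its named dependencies have completed.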
exports['auto'] = function(test){
var callOrder = [];
async.auto({
task1: ['task2', function(callback){
setTimeout(function(){
callOrder.push('task1');
callback();
}, 25);
}],
task2: function(callback){
setTimeout(function(){
callOrder.push('task2');
callback();
}, 50);
},
task3: ['task2', function(callback){
callOrder.push('task3');
callback();
}],
task4: ['task1', 'task2', function(callback){
callOrder.push('task4');
callback();
}],
task5: ['task2', function(callback){
setTimeout(function(){
callOrder.push('task5');
callback();
}, 0);
}],
task6: ['task2', function(callback){
callOrder.push('task6');
callback();
}]
},
function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(callOrder, ['task2','task6','task3','task5','task1','task4']);
test.done();
});
};
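// Each task below snapshots the set of currently running tasks as its result;
// the final assertion checks that no snapshot exceeds the requested
// concurrency.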
exports['auto concurrency'] = function (test) {
var concurrency = 2;
var runningTasks = [];
var makeCallback = function(taskName) {
return function(callback) {
runningTasks.push(taskName);
setTimeout(function(){
// Each task returns the array of running tasks as results.
var result = runningTasks.slice(0);
runningTasks.splice(runningTasks.indexOf(taskName), 1);
callback(null, result);
});
};
};
async.auto({
task1: ['task2', makeCallback('task1')],
task2: makeCallback('task2'),
task3: ['task2', makeCallback('task3')],
task4: ['task1', 'task2', makeCallback('task4')],
task5: ['task2', makeCallback('task5')],
task6: ['task2', makeCallback('task6')]
}, concurrency, function(err, results){
Object.keys(results).forEach(function(taskName) {
test.ok(results[taskName].length <= concurrency);
});
test.done();
});
};
exports['auto petrify'] = function (test) {
var callOrder = [];
async.auto({
task1: ['task2', function (callback) {
setTimeout(function () {
callOrder.push('task1');
callback();
}, 100);
}],
task2: function (callback) {
setTimeout(function () {
callOrder.push('task2');
callback();
}, 200);
},
task3: ['task2', function (callback) {
callOrder.push('task3');
callback();
}],
task4: ['task1', 'task2', function (callback) {
callOrder.push('task4');
callback();
}]
},
function (err) {
if (err) throw err;
test.same(callOrder, ['task2', 'task3', 'task1', 'task4']);
test.done();
});
};
exports['auto results'] = function(test){
var callOrder = [];
async.auto({
task1: ['task2', function(callback, results){
test.same(results.task2, 'task2');
setTimeout(function(){
callOrder.push('task1');
callback(null, 'task1a', 'task1b');
}, 25);
}],
task2: function(callback){
setTimeout(function(){
callOrder.push('task2');
callback(null, 'task2');
}, 50);
},
task3: ['task2', function(callback, results){
test.same(results.task2, 'task2');
callOrder.push('task3');
callback(null);
}],
task4: ['task1', 'task2', function(callback, results){
test.same(results.task1, ['task1a','task1b']);
test.same(results.task2, 'task2');
callOrder.push('task4');
callback(null, 'task4');
}]
},
function(err, results){
test.same(callOrder, ['task2','task3','task1','task4']);
test.same(results, {task1: ['task1a','task1b'], task2: 'task2', task3: undefined, task4: 'task4'});
test.done();
});
};
exports['auto empty object'] = function(test){
async.auto({}, function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.done();
});
};
exports['auto error'] = function(test){
test.expect(1);
async.auto({
task1: function(callback){
callback('testerror');
},
task2: ['task1', function(callback){
test.ok(false, 'task2 should not be called');
callback();
}],
task3: function(callback){
callback('testerror2');
}
},
function(err){
test.equals(err, 'testerror');
});
setTimeout(test.done, 100);
};
exports['auto no callback'] = function(test){
async.auto({
task1: function(callback){callback();},
task2: ['task1', function(callback){callback(); test.done();}]
});
};
exports['auto concurrency no callback'] = function(test){
async.auto({
task1: function(callback){callback();},
task2: ['task1', function(callback){callback(); test.done();}]
}, 1);
};
exports['auto error should pass partial results'] = function(test) {
async.auto({
task1: function(callback){
callback(false, 'result1');
},
task2: ['task1', function(callback){
callback('testerror', 'result2');
}],
task3: ['task2', function(){
test.ok(false, 'task3 should not be called');
}]
},
function(err, results){
test.equals(err, 'testerror');
test.equals(results.task1, 'result1');
test.equals(results.task2, 'result2');
test.done();
});
};
// Issue 24 on github: https://github.com/caolan/async/issues#issue/24
// Issue 76 on github: https://github.com/caolan/async/issues#issue/76
exports['auto removeListener has side effect on loop iterator'] = function(test) {
async.auto({
task1: ['task3', function(/*callback*/) { test.done(); }],
task2: ['task3', function(/*callback*/) { /* by design: DON'T call callback */ }],
task3: function(callback) { callback(); }
});
};
// Issue 410 on github: https://github.com/caolan/async/issues/410
exports['auto calls callback multiple times'] = function(test) {
if (isBrowser()) {
// node only test
test.done();
return;
}
var finalCallCount = 0;
var domain = require('domain').create();
domain.on('error', function (e) {
// ignore test error
if (!e._test_error) {
return test.done(e);
}
});
domain.run(function () {
async.auto({
task1: function(callback) { callback(null); },
task2: ['task1', function(callback) { callback(null); }]
},
// Error throwing final callback. This should only run once
function() {
finalCallCount++;
var e = new Error("An error");
e._test_error = true;
throw e;
});
});
setTimeout(function () {
test.equal(finalCallCount, 1,
"Final auto callback should only be called once"
);
test.done();
}, 10);
};
exports['auto calls callback multiple times with parallel functions'] = function(test) {
test.expect(1);
async.auto({
task1: function(callback) { setTimeout(callback,0,"err"); },
task2: function(callback) { setTimeout(callback,0,"err"); }
},
// Error throwing final callback. This should only run once
function(err) {
test.equal(err, "err");
test.done();
});
};
// Issue 462 on github: https://github.com/caolan/async/issues/462
exports['auto modifying results causes final callback to run early'] = function(test) {
async.auto({
task1: function(callback, results){
results.inserted = true;
callback(null, 'task1');
},
task2: function(callback){
setTimeout(function(){
callback(null, 'task2');
}, 50);
},
task3: function(callback){
setTimeout(function(){
callback(null, 'task3');
}, 100);
}
},
function(err, results){
test.equal(results.inserted, true);
test.ok(results.task3, 'task3');
test.done();
});
};
// Issue 263 on github: https://github.com/caolan/async/issues/263
exports['auto prevent dead-locks due to nonexistent dependencies'] = function(test) {
test.throws(function () {
async.auto({
task1: ['noexist', function(callback){
callback(null, 'task1');
}]
});
}, Error);
test.done();
};
// Issue 263 on github: https://github.com/caolan/async/issues/263
exports['auto prevent dead-locks due to cyclic dependencies'] = function(test) {
test.throws(function () {
async.auto({
task1: ['task2', function(callback){
callback(null, 'task1');
}],
task2: ['task1', function(callback){
callback(null, 'task2');
}]
});
}, Error);
test.done();
};
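// async.retry re-invokes the task until it succeeds or the configured
// attempts (a plain count or a {times, interval} object) are exhausted,
// passing the last error and result to the final callback.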
// Issue 306 on github: https://github.com/caolan/async/issues/306
exports['retry when attempt succeeds'] = function(test) {
var failed = 3;
var callCount = 0;
var expectedResult = 'success';
function fn(callback) {
callCount++;
failed--;
if (!failed) callback(null, expectedResult);
else callback(true); // respond with error
}
async.retry(fn, function(err, result){
test.ok(err === null, err + " passed instead of 'null'");
test.equal(callCount, 3, 'did not retry the correct number of times');
test.equal(result, expectedResult, 'did not return the expected result');
test.done();
});
};
exports['retry when all attempts fail'] = function(test) {
var times = 3;
var callCount = 0;
var error = 'ERROR';
var erroredResult = 'RESULT';
function fn(callback) {
callCount++;
callback(error + callCount, erroredResult + callCount); // respond with indexed values
}
async.retry(times, fn, function(err, result){
test.equal(callCount, 3, "did not retry the correct number of times");
test.equal(err, error + times, "Incorrect error was returned");
test.equal(result, erroredResult + times, "Incorrect result was returned");
test.done();
});
};
exports['retry fails with invalid arguments'] = function(test) {
test.throws(function() {
async.retry("");
});
test.throws(function() {
async.retry();
});
test.throws(function() {
async.retry(function() {}, 2, function() {});
});
test.done();
};
exports['retry with interval when all attempts fail'] = function(test) {
var times = 3;
var interval = 500;
var callCount = 0;
var error = 'ERROR';
var erroredResult = 'RESULT';
function fn(callback) {
callCount++;
callback(error + callCount, erroredResult + callCount); // respond with indexed values
}
var start = new Date().getTime();
async.retry({ times: times, interval: interval}, fn, function(err, result){
var now = new Date().getTime();
var duration = now - start;
        test.ok(duration > (interval * (times - 1)), 'did not include interval');
test.equal(callCount, 3, "did not retry the correct number of times");
test.equal(err, error + times, "Incorrect error was returned");
test.equal(result, erroredResult + times, "Incorrect result was returned");
test.done();
});
};
exports['retry as an embedded task'] = function(test) {
var retryResult = 'RETRY';
var fooResults;
var retryResults;
async.auto({
foo: function(callback, results){
fooResults = results;
callback(null, 'FOO');
},
retry: async.retry(function(callback, results) {
retryResults = results;
callback(null, retryResult);
})
}, function(err, results){
test.equal(results.retry, retryResult, "Incorrect result was returned from retry function");
test.equal(fooResults, retryResults, "Incorrect results were passed to retry function");
test.done();
});
};
exports['retry as an embedded task with interval'] = function(test) {
var start = new Date().getTime();
var opts = {times: 5, interval: 100};
async.auto({
foo: function(callback){
callback(null, 'FOO');
},
retry: async.retry(opts, function(callback) {
callback('err');
})
}, function(){
var duration = new Date().getTime() - start;
        var expectedMinimumDuration = (opts.times - 1) * opts.interval;
test.ok(duration >= expectedMinimumDuration, "The duration should have been greater than " + expectedMinimumDuration + ", but was " + duration);
test.done();
});
};
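// waterfall runs its tasks in series, feeding the results of each task's
// callback in as the arguments of the next task.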
exports['waterfall'] = {
'basic': function(test){
test.expect(7);
var call_order = [];
async.waterfall([
function(callback){
call_order.push('fn1');
setTimeout(function(){callback(null, 'one', 'two');}, 0);
},
function(arg1, arg2, callback){
call_order.push('fn2');
test.equals(arg1, 'one');
test.equals(arg2, 'two');
setTimeout(function(){callback(null, arg1, arg2, 'three');}, 25);
},
function(arg1, arg2, arg3, callback){
call_order.push('fn3');
test.equals(arg1, 'one');
test.equals(arg2, 'two');
test.equals(arg3, 'three');
callback(null, 'four');
},
function(arg4, callback){
call_order.push('fn4');
test.same(call_order, ['fn1','fn2','fn3','fn4']);
callback(null, 'test');
}
], function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.done();
});
},
'empty array': function(test){
async.waterfall([], function(err){
if (err) throw err;
test.done();
});
},
'non-array': function(test){
async.waterfall({}, function(err){
test.equals(err.message, 'First argument to waterfall must be an array of functions');
test.done();
});
},
'no callback': function(test){
async.waterfall([
function(callback){callback();},
function(callback){callback(); test.done();}
]);
},
'async': function(test){
var call_order = [];
async.waterfall([
function(callback){
call_order.push(1);
callback();
call_order.push(2);
},
function(callback){
call_order.push(3);
callback();
},
function(){
test.same(call_order, [1,2,3]);
test.done();
}
]);
},
'error': function(test){
test.expect(1);
async.waterfall([
function(callback){
callback('error');
},
function(callback){
test.ok(false, 'next function should not be called');
callback();
}
], function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
},
'multiple callback calls': function(test){
var call_order = [];
var arr = [
function(callback){
call_order.push(1);
// call the callback twice. this should call function 2 twice
callback(null, 'one', 'two');
callback(null, 'one', 'two');
},
function(arg1, arg2, callback){
call_order.push(2);
callback(null, arg1, arg2, 'three');
},
function(arg1, arg2, arg3, callback){
call_order.push(3);
callback(null, 'four');
},
function(/*arg4*/){
call_order.push(4);
arr[3] = function(){
call_order.push(4);
test.same(call_order, [1,2,2,3,3,4,4]);
test.done();
};
}
];
async.waterfall(arr);
},
'call in another context': function(test) {
if (isBrowser()) {
// node only test
test.done();
return;
}
var vm = require('vm');
var sandbox = {
async: async,
test: test
};
var fn = "(" + (function () {
async.waterfall([function (callback) {
callback();
}], function (err) {
if (err) {
return test.done(err);
}
test.done();
});
}).toString() + "())";
vm.runInNewContext(fn, sandbox);
}
};
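// parallel starts every task immediately; results are collected in task order
// (or under each task's key for the object form), not completion order.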
exports['parallel'] = function(test){
var call_order = [];
async.parallel([
function(callback){
setTimeout(function(){
call_order.push(1);
callback(null, 1);
}, 50);
},
function(callback){
setTimeout(function(){
call_order.push(2);
callback(null, 2);
}, 100);
},
function(callback){
setTimeout(function(){
call_order.push(3);
callback(null, 3,3);
}, 25);
}
],
function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [3,1,2]);
test.same(results, [1,2,[3,3]]);
test.done();
});
};
exports['parallel empty array'] = function(test){
async.parallel([], function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(results, []);
test.done();
});
};
exports['parallel error'] = function(test){
async.parallel([
function(callback){
callback('error', 1);
},
function(callback){
callback('error2', 2);
}
],
function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 100);
};
exports['parallel no callback'] = function(test){
async.parallel([
function(callback){callback();},
function(callback){callback(); test.done();},
]);
};
exports['parallel object'] = function(test){
var call_order = [];
async.parallel(getFunctionsObject(call_order), function(err, results){
test.equals(err, null);
test.same(call_order, [3,1,2]);
test.same(results, {
one: 1,
two: 2,
three: [3,3]
});
test.done();
});
};
// Issue 10 on github: https://github.com/caolan/async/issues#issue/10
exports['parallel falsy return values'] = function (test) {
function taskFalse(callback) {
async.nextTick(function() {
callback(null, false);
});
}
function taskUndefined(callback) {
async.nextTick(function() {
callback(null, undefined);
});
}
function taskEmpty(callback) {
async.nextTick(function() {
callback(null);
});
}
function taskNull(callback) {
async.nextTick(function() {
callback(null, null);
});
}
async.parallel(
[taskFalse, taskUndefined, taskEmpty, taskNull],
function(err, results) {
test.equal(results.length, 4);
test.strictEqual(results[0], false);
test.strictEqual(results[1], undefined);
test.strictEqual(results[2], undefined);
test.strictEqual(results[3], null);
test.done();
}
);
};
exports['parallel limit'] = function(test){
var call_order = [];
async.parallelLimit([
function(callback){
setTimeout(function(){
call_order.push(1);
callback(null, 1);
}, 50);
},
function(callback){
setTimeout(function(){
call_order.push(2);
callback(null, 2);
}, 100);
},
function(callback){
setTimeout(function(){
call_order.push(3);
callback(null, 3,3);
}, 25);
}
],
2,
function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [1,3,2]);
test.same(results, [1,2,[3,3]]);
test.done();
});
};
exports['parallel limit empty array'] = function(test){
async.parallelLimit([], 2, function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(results, []);
test.done();
});
};
exports['parallel limit error'] = function(test){
async.parallelLimit([
function(callback){
callback('error', 1);
},
function(callback){
callback('error2', 2);
}
],
1,
function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 100);
};
exports['parallel limit no callback'] = function(test){
async.parallelLimit([
function(callback){callback();},
function(callback){callback(); test.done();},
], 1);
};
exports['parallel limit object'] = function(test){
var call_order = [];
async.parallelLimit(getFunctionsObject(call_order), 2, function(err, results){
test.equals(err, null);
test.same(call_order, [1,3,2]);
test.same(results, {
one: 1,
two: 2,
three: [3,3]
});
test.done();
});
};
exports['parallel call in another context'] = function(test) {
if (isBrowser()) {
// node only test
test.done();
return;
}
var vm = require('vm');
var sandbox = {
async: async,
test: test
};
var fn = "(" + (function () {
async.parallel([function (callback) {
callback();
}], function (err) {
if (err) {
return test.done(err);
}
test.done();
});
}).toString() + "())";
vm.runInNewContext(fn, sandbox);
};
exports['parallel does not continue replenishing after error'] = function (test) {
var started = 0;
var arr = [
funcToCall,
funcToCall,
funcToCall,
funcToCall,
funcToCall,
funcToCall,
funcToCall,
funcToCall,
funcToCall,
];
var delay = 10;
var limit = 3;
var maxTime = 10 * arr.length;
function funcToCall(callback) {
started ++;
if (started === 3) {
return callback(new Error ("Test Error"));
}
setTimeout(function(){
callback();
}, delay);
}
async.parallelLimit(arr, limit, function(){});
setTimeout(function(){
test.equal(started, 3);
test.done();
}, maxTime);
};
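// series runs one task at a time, preserving task order in both execution and
// results; like parallel, it also accepts a keyed object of tasks.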
exports['series'] = {
'series': function(test){
var call_order = [];
async.series([
function(callback){
setTimeout(function(){
call_order.push(1);
callback(null, 1);
}, 25);
},
function(callback){
setTimeout(function(){
call_order.push(2);
callback(null, 2);
}, 50);
},
function(callback){
setTimeout(function(){
call_order.push(3);
callback(null, 3,3);
}, 15);
}
],
function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(results, [1,2,[3,3]]);
test.same(call_order, [1,2,3]);
test.done();
});
},
'empty array': function(test){
async.series([], function(err, results){
test.equals(err, null);
test.same(results, []);
test.done();
});
},
'error': function(test){
test.expect(1);
async.series([
function(callback){
callback('error', 1);
},
function(callback){
test.ok(false, 'should not be called');
callback('error2', 2);
}
],
function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 100);
},
'no callback': function(test){
async.series([
function(callback){callback();},
function(callback){callback(); test.done();},
]);
},
'object': function(test){
var call_order = [];
async.series(getFunctionsObject(call_order), function(err, results){
test.equals(err, null);
test.same(results, {
one: 1,
two: 2,
three: [3,3]
});
test.same(call_order, [1,2,3]);
test.done();
});
},
'call in another context': function(test) {
if (isBrowser()) {
// node only test
test.done();
return;
}
var vm = require('vm');
var sandbox = {
async: async,
test: test
};
var fn = "(" + (function () {
async.series([function (callback) {
callback();
}], function (err) {
if (err) {
return test.done(err);
}
test.done();
});
}).toString() + "())";
vm.runInNewContext(fn, sandbox);
},
// Issue 10 on github: https://github.com/caolan/async/issues#issue/10
'falsy return values': function (test) {
function taskFalse(callback) {
async.nextTick(function() {
callback(null, false);
});
}
function taskUndefined(callback) {
async.nextTick(function() {
callback(null, undefined);
});
}
function taskEmpty(callback) {
async.nextTick(function() {
callback(null);
});
}
function taskNull(callback) {
async.nextTick(function() {
callback(null, null);
});
}
async.series(
[taskFalse, taskUndefined, taskEmpty, taskNull],
function(err, results) {
test.equal(results.length, 4);
test.strictEqual(results[0], false);
test.strictEqual(results[1], undefined);
test.strictEqual(results[2], undefined);
test.strictEqual(results[3], null);
test.done();
}
);
}
};
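// async.iterator turns an array of functions into a chain: calling a link
// invokes its function with the given arguments and returns the next link,
// or undefined past the end.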
exports['iterator'] = function(test){
var call_order = [];
var iterator = async.iterator([
function(){call_order.push(1);},
function(arg1){
test.equals(arg1, 'arg1');
call_order.push(2);
},
function(arg1, arg2){
test.equals(arg1, 'arg1');
test.equals(arg2, 'arg2');
call_order.push(3);
}
]);
iterator();
test.same(call_order, [1]);
var iterator2 = iterator();
test.same(call_order, [1,1]);
var iterator3 = iterator2('arg1');
test.same(call_order, [1,1,2]);
var iterator4 = iterator3('arg1', 'arg2');
test.same(call_order, [1,1,2,3]);
test.equals(iterator4, undefined);
test.done();
};
exports['iterator empty array'] = function(test){
var iterator = async.iterator([]);
test.equals(iterator(), undefined);
test.equals(iterator.next(), undefined);
test.done();
};
exports['iterator.next'] = function(test){
var call_order = [];
var iterator = async.iterator([
function(){call_order.push(1);},
function(arg1){
test.equals(arg1, 'arg1');
call_order.push(2);
},
function(arg1, arg2){
test.equals(arg1, 'arg1');
test.equals(arg2, 'arg2');
call_order.push(3);
}
]);
var fn = iterator.next();
var iterator2 = fn('arg1');
test.same(call_order, [2]);
iterator2('arg1','arg2');
test.same(call_order, [2,3]);
test.equals(iterator2.next(), undefined);
test.done();
};
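// each applies the iterator to all items in parallel, eachSeries one at a
// time, and eachLimit with at most `limit` calls in flight.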
exports['each'] = function(test){
var args = [];
async.each([1,3,2], eachIterator.bind(this, args), function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, [1,2,3]);
test.done();
});
};
exports['each extra callback'] = function(test){
var count = 0;
async.each([1,3,2], function(val, callback) {
count++;
callback();
test.throws(callback);
if (count == 3) {
test.done();
}
});
};
exports['each empty array'] = function(test){
test.expect(1);
async.each([], function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['each empty array, with other property on the array'] = function(test){
test.expect(1);
var myArray = [];
myArray.myProp = "anything";
async.each(myArray, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['each error'] = function(test){
test.expect(1);
async.each([1,2,3], function(x, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
};
exports['each no callback'] = function(test){
async.each([1], eachNoCallbackIterator.bind(this, test));
};
exports['forEach alias'] = function (test) {
test.strictEqual(async.each, async.forEach);
test.done();
};
exports['forEachOf'] = function(test){
var args = [];
async.forEachOf({ a: 1, b: 2 }, forEachOfIterator.bind(this, args), function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, ["a", 1, "b", 2]);
test.done();
});
};
exports['forEachOf - instant resolver'] = function(test){
test.expect(1);
var args = [];
async.forEachOf({ a: 1, b: 2 }, function(x, k, cb) {
args.push(k, x);
cb();
}, function(){
// ensures done callback isn't called before all items iterated
test.same(args, ["a", 1, "b", 2]);
test.done();
});
};
exports['forEachOf empty object'] = function(test){
test.expect(1);
async.forEachOf({}, function(value, key, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err) {
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['forEachOf empty array'] = function(test){
test.expect(1);
async.forEachOf([], function(value, key, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err) {
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['forEachOf error'] = function(test){
test.expect(1);
async.forEachOf({ a: 1, b: 2 }, function(value, key, callback) {
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
};
exports['forEachOf no callback'] = function(test){
async.forEachOf({ a: 1 }, forEachOfNoCallbackIterator.bind(this, test));
};
exports['eachOf alias'] = function(test){
test.equals(async.eachOf, async.forEachOf);
test.done();
};
exports['forEachOf with array'] = function(test){
var args = [];
async.forEachOf([ "a", "b" ], forEachOfIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, [0, "a", 1, "b"]);
test.done();
});
};
exports['eachSeries'] = function(test){
var args = [];
async.eachSeries([1,3,2], eachIterator.bind(this, args), function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, [1,3,2]);
test.done();
});
};
exports['eachSeries empty array'] = function(test){
test.expect(1);
async.eachSeries([], function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['eachSeries array modification'] = function(test) {
test.expect(1);
var arr = [1, 2, 3, 4];
async.eachSeries(arr, function (x, callback) {
async.setImmediate(callback);
}, function () {
test.ok(true, 'should call callback');
});
arr.pop();
arr.splice(0, 1);
setTimeout(test.done, 50);
};
// bug #782. Remove in next major release
exports['eachSeries single item'] = function (test) {
test.expect(1);
var sync = true;
async.eachSeries([1], function (i, cb) {
cb(null);
}, function () {
test.ok(sync, "callback not called on same tick");
});
sync = false;
test.done();
};
exports['eachSeries error'] = function(test){
test.expect(2);
var call_order = [];
async.eachSeries([1,2,3], function(x, callback){
call_order.push(x);
callback('error');
}, function(err){
test.same(call_order, [1]);
test.equals(err, 'error');
});
setTimeout(test.done, 50);
};
exports['eachSeries no callback'] = function(test){
async.eachSeries([1], eachNoCallbackIterator.bind(this, test));
};
exports['eachLimit'] = function(test){
var args = [];
var arr = [0,1,2,3,4,5,6,7,8,9];
async.eachLimit(arr, 2, function(x,callback){
setTimeout(function(){
args.push(x);
callback();
}, x*5);
}, function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, arr);
test.done();
});
};
exports['eachLimit empty array'] = function(test){
test.expect(1);
async.eachLimit([], 2, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['eachLimit limit exceeds size'] = function(test){
var args = [];
var arr = [0,1,2,3,4,5,6,7,8,9];
async.eachLimit(arr, 20, eachIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, arr);
test.done();
});
};
exports['eachLimit limit equal size'] = function(test){
var args = [];
var arr = [0,1,2,3,4,5,6,7,8,9];
async.eachLimit(arr, 10, eachIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, arr);
test.done();
});
};
exports['eachLimit zero limit'] = function(test){
test.expect(1);
async.eachLimit([0,1,2,3,4,5], 0, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['eachLimit error'] = function(test){
test.expect(2);
var arr = [0,1,2,3,4,5,6,7,8,9];
var call_order = [];
async.eachLimit(arr, 3, function(x, callback){
call_order.push(x);
if (x === 2) {
callback('error');
}
}, function(err){
test.same(call_order, [0,1,2]);
test.equals(err, 'error');
});
setTimeout(test.done, 25);
};
exports['eachLimit no callback'] = function(test){
async.eachLimit([1], 1, eachNoCallbackIterator.bind(this, test));
};
exports['eachLimit synchronous'] = function(test){
var args = [];
var arr = [0,1,2];
async.eachLimit(arr, 5, function(x,callback){
args.push(x);
callback();
}, function(err){
if (err) throw err;
test.same(args, arr);
test.done();
});
};
exports['eachLimit does not continue replenishing after error'] = function (test) {
var started = 0;
var arr = [0,1,2,3,4,5,6,7,8,9];
var delay = 10;
var limit = 3;
var maxTime = 10 * arr.length;
async.eachLimit(arr, limit, function(x, callback) {
started ++;
if (started === 3) {
return callback(new Error ("Test Error"));
}
setTimeout(function(){
callback();
}, delay);
}, function(){});
setTimeout(function(){
test.equal(started, 3);
test.done();
}, maxTime);
};
exports['forEachSeries alias'] = function (test) {
test.strictEqual(async.eachSeries, async.forEachSeries);
test.done();
};
exports['forEachOfSeries'] = function(test){
var args = [];
async.forEachOfSeries({ a: 1, b: 2 }, forEachOfIterator.bind(this, args), function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, [ "a", 1, "b", 2 ]);
test.done();
});
};
exports['forEachOfSeries empty object'] = function(test){
test.expect(1);
async.forEachOfSeries({}, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['forEachOfSeries error'] = function(test){
test.expect(2);
var call_order = [];
async.forEachOfSeries({ a: 1, b: 2 }, function(value, key, callback){
call_order.push(value, key);
callback('error');
}, function(err){
test.same(call_order, [ 1, "a" ]);
test.equals(err, 'error');
});
setTimeout(test.done, 50);
};
exports['forEachOfSeries no callback'] = function(test){
async.forEachOfSeries({ a: 1 }, forEachOfNoCallbackIterator.bind(this, test));
};
exports['forEachOfSeries with array'] = function(test){
var args = [];
async.forEachOfSeries([ "a", "b" ], forEachOfIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, [ 0, "a", 1, "b" ]);
test.done();
});
};
exports['eachOfLimit alias'] = function(test){
test.equals(async.eachOfLimit, async.forEachOfLimit);
test.done();
};
exports['eachOfSeries alias'] = function(test){
test.equals(async.eachOfSeries, async.forEachOfSeries);
test.done();
};
exports['forEachLimit alias'] = function (test) {
test.strictEqual(async.eachLimit, async.forEachLimit);
test.done();
};
exports['forEachOfLimit'] = function(test){
var args = [];
var obj = { a: 1, b: 2, c: 3, d: 4 };
async.forEachOfLimit(obj, 2, function(value, key, callback){
setTimeout(function(){
args.push(value, key);
callback();
}, value * 5);
}, function(err){
test.ok(err === null, err + " passed instead of 'null'");
test.same(args, [ 1, "a", 2, "b", 3, "c", 4, "d" ]);
test.done();
});
};
exports['forEachOfLimit empty object'] = function(test){
test.expect(1);
async.forEachOfLimit({}, 2, function(value, key, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['forEachOfLimit limit exceeds size'] = function(test){
var args = [];
var obj = { a: 1, b: 2, c: 3, d: 4, e: 5 };
async.forEachOfLimit(obj, 10, forEachOfIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, [ "a", 1, "b", 2, "c", 3, "d", 4, "e", 5 ]);
test.done();
});
};
exports['forEachOfLimit limit equal size'] = function(test){
var args = [];
var obj = { a: 1, b: 2, c: 3, d: 4, e: 5 };
async.forEachOfLimit(obj, 5, forEachOfIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, [ "a", 1, "b", 2, "c", 3, "d", 4, "e", 5 ]);
test.done();
});
};
exports['forEachOfLimit zero limit'] = function(test){
test.expect(1);
async.forEachOfLimit({ a: 1, b: 2 }, 0, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
};
exports['forEachOfLimit error'] = function(test){
test.expect(2);
var obj = { a: 1, b: 2, c: 3, d: 4, e: 5 };
var call_order = [];
async.forEachOfLimit(obj, 3, function(value, key, callback){
call_order.push(value, key);
if (value === 2) {
callback('error');
}
}, function(err){
test.same(call_order, [ 1, "a", 2, "b" ]);
test.equals(err, 'error');
});
setTimeout(test.done, 25);
};
exports['forEachOfLimit no callback'] = function(test){
async.forEachOfLimit({ a: 1 }, 1, forEachOfNoCallbackIterator.bind(this, test));
};
exports['forEachOfLimit synchronous'] = function(test){
var args = [];
var obj = { a: 1, b: 2 };
async.forEachOfLimit(obj, 5, forEachOfIterator.bind(this, args), function(err){
if (err) throw err;
test.same(args, [ "a", 1, "b", 2 ]);
test.done();
});
};
exports['forEachOfLimit with array'] = function(test){
var args = [];
var arr = [ "a", "b" ];
async.forEachOfLimit(arr, 1, forEachOfIterator.bind(this, args), function (err) {
if (err) throw err;
test.same(args, [ 0, "a", 1, "b" ]);
test.done();
});
};
exports['map'] = {
'basic': function(test){
var call_order = [];
async.map([1,3,2], mapIterator.bind(this, call_order), function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [1,2,3]);
test.same(results, [2,6,4]);
test.done();
});
},
'map original untouched': function(test){
var a = [1,2,3];
async.map(a, function(x, callback){
callback(null, x*2);
}, function(err, results){
test.same(results, [2,4,6]);
test.same(a, [1,2,3]);
test.done();
});
},
'map without main callback': function(test){
var a = [1,2,3];
var r = [];
async.map(a, function(x, callback){
r.push(x);
callback(null);
if (r.length >= a.length) {
test.same(r, a);
test.done();
}
});
},
'map error': function(test){
test.expect(1);
async.map([1,2,3], function(x, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
},
'map undefined array': function(test){
test.expect(2);
async.map(undefined, function(x, callback){
callback();
}, function(err, result){
test.equals(err, null);
test.same(result, []);
});
setTimeout(test.done, 50);
},
'map object': function (test) {
async.map({a: 1, b: 2, c: 3}, function (val, callback) {
callback(null, val * 2);
}, function (err, result) {
if (err) throw err;
test.equals(Object.prototype.toString.call(result), '[object Object]');
test.same(result, {a: 2, b: 4, c: 6});
test.done();
});
},
'mapSeries': function(test){
var call_order = [];
async.mapSeries([1,3,2], mapIterator.bind(this, call_order), function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [1,3,2]);
test.same(results, [2,6,4]);
test.done();
});
},
'mapSeries error': function(test){
test.expect(1);
async.mapSeries([1,2,3], function(x, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
},
'mapSeries undefined array': function(test){
test.expect(2);
async.mapSeries(undefined, function(x, callback){
callback();
}, function(err, result){
test.equals(err, null);
test.same(result, []);
});
setTimeout(test.done, 50);
},
'mapSeries object': function (test) {
async.mapSeries({a: 1, b: 2, c: 3}, function (val, callback) {
callback(null, val * 2);
}, function (err, result) {
if (err) throw err;
test.same(result, {a: 2, b: 4, c: 6});
test.done();
});
},
'mapLimit': function(test){
var call_order = [];
async.mapLimit([2,4,3], 2, mapIterator.bind(this, call_order), function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [2,4,3]);
test.same(results, [4,8,6]);
test.done();
});
},
'mapLimit empty array': function(test){
test.expect(1);
async.mapLimit([], 2, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
},
'mapLimit undefined array': function(test){
test.expect(2);
async.mapLimit(undefined, 2, function(x, callback){
callback();
}, function(err, result){
test.equals(err, null);
test.same(result, []);
});
setTimeout(test.done, 50);
},
'mapLimit limit exceeds size': function(test){
var call_order = [];
async.mapLimit([0,1,2,3,4,5,6,7,8,9], 20, mapIterator.bind(this, call_order), function(err, results){
test.same(call_order, [0,1,2,3,4,5,6,7,8,9]);
test.same(results, [0,2,4,6,8,10,12,14,16,18]);
test.done();
});
},
'mapLimit limit equal size': function(test){
var call_order = [];
async.mapLimit([0,1,2,3,4,5,6,7,8,9], 10, mapIterator.bind(this, call_order), function(err, results){
test.same(call_order, [0,1,2,3,4,5,6,7,8,9]);
test.same(results, [0,2,4,6,8,10,12,14,16,18]);
test.done();
});
},
'mapLimit zero limit': function(test){
test.expect(2);
async.mapLimit([0,1,2,3,4,5], 0, function(x, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err, results){
test.same(results, []);
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
},
'mapLimit error': function(test){
test.expect(2);
var arr = [0,1,2,3,4,5,6,7,8,9];
var call_order = [];
async.mapLimit(arr, 3, function(x, callback){
call_order.push(x);
if (x === 2) {
callback('error');
}
}, function(err){
test.same(call_order, [0,1,2]);
test.equals(err, 'error');
});
setTimeout(test.done, 25);
},
'mapLimit does not continue replenishing after error': function (test) {
var started = 0;
var arr = [0,1,2,3,4,5,6,7,8,9];
var delay = 10;
var limit = 3;
var maxTime = 10 * arr.length;
async.mapLimit(arr, limit, function(x, callback) {
started ++;
if (started === 3) {
return callback(new Error ("Test Error"));
}
setTimeout(function(){
callback();
}, delay);
}, function(){});
setTimeout(function(){
test.equal(started, 3);
test.done();
}, maxTime);
}
};
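// reduce folds the collection left to right, threading the memo through each
// async iterator call; reduceRight does the same from the right.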
exports['reduce'] = function(test){
var call_order = [];
async.reduce([1,2,3], 0, function(a, x, callback){
call_order.push(x);
callback(null, a + x);
}, function(err, result){
test.ok(err === null, err + " passed instead of 'null'");
test.equals(result, 6);
test.same(call_order, [1,2,3]);
test.done();
});
};
exports['reduce async with non-reference memo'] = function(test){
async.reduce([1,3,2], 0, function(a, x, callback){
setTimeout(function(){callback(null, a + x);}, Math.random()*100);
}, function(err, result){
test.equals(result, 6);
test.done();
});
};
exports['reduce error'] = function(test){
test.expect(1);
async.reduce([1,2,3], 0, function(a, x, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
};
exports['inject alias'] = function(test){
test.equals(async.inject, async.reduce);
test.done();
};
exports['foldl alias'] = function(test){
test.equals(async.foldl, async.reduce);
test.done();
};
exports['reduceRight'] = function(test){
var call_order = [];
var a = [1,2,3];
async.reduceRight(a, 0, function(a, x, callback){
call_order.push(x);
callback(null, a + x);
}, function(err, result){
test.equals(result, 6);
test.same(call_order, [3,2,1]);
test.same(a, [1,2,3]);
test.done();
});
};
exports['foldr alias'] = function(test){
test.equals(async.foldr, async.reduceRight);
test.done();
};
exports['transform implicitly determines memo if not provided'] = function(test){
async.transform([1,2,3], function(memo, x, v, callback){
memo.push(x + 1);
callback();
}, function(err, result){
test.same(result, [2, 3, 4]);
test.done();
});
};
exports['transform async with object memo'] = function(test){
test.expect(2);
async.transform([1,3,2], {}, function(memo, v, k, callback){
setTimeout(function() {
memo[k] = v;
callback();
});
}, function(err, result) {
test.equals(err, null);
test.same(result, {
0: 1,
1: 3,
2: 2
});
test.done();
});
};
exports['transform iterating object'] = function(test){
test.expect(2);
async.transform({a: 1, b: 3, c: 2}, function(memo, v, k, callback){
setTimeout(function() {
memo[k] = v + 1;
callback();
});
}, function(err, result) {
test.equals(err, null);
test.same(result, {a: 2, b: 4, c: 3});
test.done();
});
};
exports['transform error'] = function(test){
async.transform([1,2,3], function(a, v, k, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
test.done();
});
};
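// filter keeps the values whose async truth test passes; note that in this
// API version the truth-test callback takes no error argument.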
exports['filter'] = function(test){
async.filter([3,1,2], filterIterator, function(results){
test.same(results, [3,1]);
test.done();
});
};
exports['filter original untouched'] = function(test){
var a = [3,1,2];
async.filter(a, function(x, callback){
callback(x % 2);
}, function(results){
test.same(results, [3,1]);
test.same(a, [3,1,2]);
test.done();
});
};
exports['filterSeries'] = function(test){
async.filterSeries([3,1,2], filterIterator, function(results){
test.same(results, [3,1]);
test.done();
});
};
exports['select alias'] = function(test){
test.equals(async.select, async.filter);
test.done();
};
exports['selectSeries alias'] = function(test){
test.equals(async.selectSeries, async.filterSeries);
test.done();
};
exports['reject'] = function(test){
test.expect(1);
async.reject([3,1,2], filterIterator, function(results){
test.same(results, [2]);
test.done();
});
};
exports['reject original untouched'] = function(test){
test.expect(2);
var a = [3,1,2];
async.reject(a, function(x, callback){
callback(x % 2);
}, function(results){
test.same(results, [2]);
test.same(a, [3,1,2]);
test.done();
});
};
exports['rejectSeries'] = function(test){
test.expect(1);
async.rejectSeries([3,1,2], filterIterator, function(results){
test.same(results, [2]);
test.done();
});
};
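// Shared harness for the *Limit variants below: it records the visit order,
// asserts every input item was processed, then hands the final arguments to
// an optional done callback.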
function testLimit(test, arr, limitFunc, limit, iter, done) {
var args = [];
limitFunc(arr, limit, function(x) {
args.push(x);
iter.apply(this, arguments);
}, function() {
test.same(args, arr);
if (done) done.apply(this, arguments);
else test.done();
});
}
exports['rejectLimit'] = function(test) {
test.expect(2);
testLimit(test, [5, 4, 3, 2, 1], async.rejectLimit, 2, function(v, next) {
next(v % 2);
}, function(x) {
test.same(x, [4, 2]);
test.done();
});
};
exports['filterLimit'] = function(test) {
test.expect(2);
testLimit(test, [5, 4, 3, 2, 1], async.filterLimit, 2, function(v, next) {
next(v % 2);
}, function(x) {
test.same(x, [5, 3, 1]);
test.done();
});
};
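// some resolves true as soon as any item passes the test; every resolves
// false as soon as any item fails. Both short-circuit, as the early-return
// and short-circuit tests below verify.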
exports['some true'] = function(test){
test.expect(1);
async.some([3,1,2], function(x, callback){
setTimeout(function(){callback(x === 1);}, 0);
}, function(result){
test.equals(result, true);
test.done();
});
};
exports['some false'] = function(test){
test.expect(1);
async.some([3,1,2], function(x, callback){
setTimeout(function(){callback(x === 10);}, 0);
}, function(result){
test.equals(result, false);
test.done();
});
};
exports['some early return'] = function(test){
test.expect(1);
var call_order = [];
async.some([1,2,3], function(x, callback){
setTimeout(function(){
call_order.push(x);
callback(x === 1);
}, x*25);
}, function(){
call_order.push('callback');
});
setTimeout(function(){
test.same(call_order, [1,'callback',2,3]);
test.done();
}, 100);
};
exports['someLimit true'] = function(test){
async.someLimit([3,1,2], 2, function(x, callback){
setTimeout(function(){callback(x === 2);}, 0);
}, function(result){
test.equals(result, true);
test.done();
});
};
exports['someLimit false'] = function(test){
async.someLimit([3,1,2], 2, function(x, callback){
setTimeout(function(){callback(x === 10);}, 0);
}, function(result){
test.equals(result, false);
test.done();
});
};
exports['everyLimit true'] = function(test){
async.everyLimit([3,1,2], 1, function(x, callback){
setTimeout(function(){callback(x > 1);}, 0);
}, function(result){
test.equals(result, true);
test.done();
});
};
exports['everyLimit false'] = function(test){
async.everyLimit([3,1,2], 2, function(x, callback){
setTimeout(function(){callback(x === 2);}, 0);
}, function(result){
test.equals(result, false);
test.done();
});
};
exports['everyLimit short-circuit'] = function(test){
test.expect(2);
var calls = 0;
async.everyLimit([3,1,2], 1, function(x, callback){
calls++;
callback(x === 1);
}, function(result){
test.equals(result, false);
test.equals(calls, 1);
test.done();
});
};
exports['someLimit short-circuit'] = function(test){
test.expect(2);
var calls = 0;
async.someLimit([3,1,2], 1, function(x, callback){
calls++;
callback(x === 1);
}, function(result){
test.equals(result, true);
test.equals(calls, 2);
test.done();
});
};
exports['any alias'] = function(test){
test.equals(async.any, async.some);
test.done();
};
exports['every true'] = function(test){
test.expect(1);
async.every([1,2,3], function(x, callback){
setTimeout(function(){callback(true);}, 0);
}, function(result){
test.equals(result, true);
test.done();
});
};
exports['every false'] = function(test){
test.expect(1);
async.every([1,2,3], function(x, callback){
setTimeout(function(){callback(x % 2);}, 0);
}, function(result){
test.equals(result, false);
test.done();
});
};
exports['every early return'] = function(test){
test.expect(1);
var call_order = [];
async.every([1,2,3], function(x, callback){
setTimeout(function(){
call_order.push(x);
callback(x === 1);
}, x*25);
}, function(){
call_order.push('callback');
});
setTimeout(function(){
test.same(call_order, [1,2,'callback',3]);
test.done();
}, 100);
};
exports['all alias'] = function(test){
test.equals(async.all, async.every);
test.done();
};
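// detect yields the first value passing the async truth test; in the parallel
// form, completion timing decides which match counts as "first".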
exports['detect'] = function(test){
test.expect(2);
var call_order = [];
async.detect([3,2,1], detectIterator.bind(this, call_order), function(result){
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function(){
test.same(call_order, [1,2,'callback',3]);
test.done();
}, 100);
};
exports['detect - multiple matches'] = function(test){
test.expect(2);
var call_order = [];
async.detect([3,2,2,1,2], detectIterator.bind(this, call_order), function(result){
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function(){
test.same(call_order, [1,2,'callback',2,2,3]);
test.done();
}, 100);
};
exports['detectSeries'] = function(test){
test.expect(2);
var call_order = [];
async.detectSeries([3,2,1], detectIterator.bind(this, call_order), function(result){
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function(){
test.same(call_order, [3,2,'callback']);
test.done();
}, 200);
};
exports['detectSeries - multiple matches'] = function(test){
test.expect(2);
var call_order = [];
async.detectSeries([3,2,2,1,2], detectIterator.bind(this, call_order), function(result){
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function(){
test.same(call_order, [3,2,'callback']);
test.done();
}, 200);
};
exports['detectSeries - ensure stop'] = function (test) {
test.expect(1);
async.detectSeries([1, 2, 3, 4, 5], function (num, cb) {
if (num > 3) throw new Error("detectSeries did not stop iterating");
cb(num === 3);
}, function (result) {
test.equals(result, 3);
test.done();
});
};
exports['detectLimit'] = function(test){
test.expect(2);
var call_order = [];
async.detectLimit([3, 2, 1], 2, detectIterator.bind(this, call_order), function(result) {
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function() {
test.same(call_order, [2, 'callback', 3]);
test.done();
}, 100);
};
exports['detectLimit - multiple matches'] = function(test){
test.expect(2);
var call_order = [];
async.detectLimit([3,2,2,1,2], 2, detectIterator.bind(this, call_order), function(result){
call_order.push('callback');
test.equals(result, 2);
});
setTimeout(function(){
test.same(call_order, [2, 'callback', 3]);
test.done();
}, 100);
};
exports['detectLimit - ensure stop'] = function (test) {
test.expect(1);
async.detectLimit([1, 2, 3, 4, 5], 2, function (num, cb) {
if (num > 4) throw new Error("detectLimit did not stop iterating");
cb(num === 3);
}, function (result) {
test.equals(result, 3);
test.done();
});
};
exports['sortBy'] = function(test){
test.expect(2);
async.sortBy([{a:1},{a:15},{a:6}], function(x, callback){
setTimeout(function(){callback(null, x.a);}, 0);
}, function(err, result){
test.ok(err === null, err + " passed instead of 'null'");
test.same(result, [{a:1},{a:6},{a:15}]);
test.done();
});
};
exports['sortBy inverted'] = function(test){
test.expect(1);
async.sortBy([{a:1},{a:15},{a:6}], function(x, callback){
setTimeout(function(){callback(null, x.a*-1);}, 0);
}, function(err, result){
test.same(result, [{a:15},{a:6},{a:1}]);
test.done();
});
};
exports['sortBy error'] = function(test){
test.expect(1);
var error = new Error('asdas');
async.sortBy([{a:1},{a:15},{a:6}], function(x, callback){
async.setImmediate(function(){
callback(error);
});
}, function(err){
test.equal(err, error);
test.done();
});
};
exports['apply'] = function(test){
test.expect(6);
var fn = function(){
test.same(Array.prototype.slice.call(arguments), [1,2,3,4]);
};
async.apply(fn, 1, 2, 3, 4)();
async.apply(fn, 1, 2, 3)(4);
async.apply(fn, 1, 2)(3, 4);
async.apply(fn, 1)(2, 3, 4);
async.apply(fn)(1, 2, 3, 4);
test.equals(
async.apply(function(name){return 'hello ' + name;}, 'world')(),
'hello world'
);
test.done();
};
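// An illustrative sketch (not part of the upstream suite): async.apply is most useful
// for pre-binding arguments to tasks handed to control-flow helpers. The doubling
// iterator below is hypothetical.
exports['apply with control flow (sketch)'] = function(test){
    test.expect(1);
    var double_ = function (n, callback) {
        callback(null, n * 2);
    };
    async.parallel([
        async.apply(double_, 1),
        async.apply(double_, 2)
    ], function (err, results) {
        test.same(results, [2, 4]);
        test.done();
    });
};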
// generates tests for console functions such as async.log
var console_fn_tests = function(name){
if (typeof console !== 'undefined') {
exports[name] = function(test){
test.expect(5);
var fn = function(arg1, callback){
test.equals(arg1, 'one');
setTimeout(function(){callback(null, 'test');}, 0);
};
var fn_err = function(arg1, callback){
test.equals(arg1, 'one');
setTimeout(function(){callback('error');}, 0);
};
var _console_fn = console[name];
var _error = console.error;
console[name] = function(val){
test.equals(val, 'test');
test.equals(arguments.length, 1);
console.error = function(val){
test.equals(val, 'error');
console[name] = _console_fn;
console.error = _error;
test.done();
};
async[name](fn_err, 'one');
};
async[name](fn, 'one');
};
exports[name + ' with multiple result params'] = function(test){
test.expect(1);
var fn = function(callback){callback(null,'one','two','three');};
var _console_fn = console[name];
var called_with = [];
console[name] = function(x){
called_with.push(x);
};
async[name](fn);
test.same(called_with, ['one','two','three']);
console[name] = _console_fn;
test.done();
};
}
// browser-only test
exports[name + ' without console.' + name] = function(test){
if (typeof window !== 'undefined') {
var _console = window.console;
window.console = undefined;
var fn = function(callback){callback(null, 'val');};
var fn_err = function(callback){callback('error');};
async[name](fn);
async[name](fn_err);
window.console = _console;
}
test.done();
};
};
exports['times'] = {
'times': function(test) {
test.expect(2);
async.times(5, function(n, next) {
next(null, n);
}, function(err, results) {
test.ok(err === null, err + " passed instead of 'null'");
test.same(results, [0,1,2,3,4]);
test.done();
});
},
'times 3': function(test){
test.expect(1);
var args = [];
async.times(3, function(n, callback){
setTimeout(function(){
args.push(n);
callback();
}, n * 25);
}, function(err){
if (err) throw err;
test.same(args, [0,1,2]);
test.done();
});
},
'times 0': function(test){
test.expect(1);
async.times(0, function(n, callback){
test.ok(false, 'iterator should not be called');
callback();
}, function(err){
if (err) throw err;
test.ok(true, 'should call callback');
});
setTimeout(test.done, 25);
},
'times error': function(test){
test.expect(1);
async.times(3, function(n, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
},
'timesSeries': function(test){
test.expect(2);
var call_order = [];
async.timesSeries(5, function(n, callback){
setTimeout(function(){
call_order.push(n);
callback(null, n);
}, 100 - n * 10);
}, function(err, results){
test.same(call_order, [0,1,2,3,4]);
test.same(results, [0,1,2,3,4]);
test.done();
});
},
'timesSeries error': function(test){
test.expect(1);
async.timesSeries(5, function(n, callback){
callback('error');
}, function(err){
test.equals(err, 'error');
});
setTimeout(test.done, 50);
},
'timesLimit': function(test){
test.expect(7);
var limit = 2;
var running = 0;
async.timesLimit(5, limit, function (i, next) {
running++;
test.ok(running <= limit && running > 0, running);
setTimeout(function () {
running--;
next(null, i * 2);
}, (3 - i) * 10);
}, function(err, results){
test.ok(err === null, err + " passed instead of 'null'");
test.same(results, [0, 2, 4, 6, 8]);
test.done();
});
}
};
console_fn_tests('log');
console_fn_tests('dir');
/*console_fn_tests('info');
console_fn_tests('warn');
console_fn_tests('error');*/
exports['nextTick'] = function(test){
test.expect(1);
var call_order = [];
async.nextTick(function(){call_order.push('two');});
call_order.push('one');
setTimeout(function(){
test.same(call_order, ['one','two']);
test.done();
}, 50);
};
exports['nextTick in the browser'] = function(test){
if (!isBrowser()) {
// skip this test in node
return test.done();
}
test.expect(1);
var call_order = [];
async.nextTick(function(){call_order.push('two');});
call_order.push('one');
setTimeout(function(){
test.same(call_order, ['one','two']);
}, 50);
setTimeout(test.done, 100);
};
exports['noConflict - node only'] = function(test){
if (!isBrowser()) {
// node only test
test.expect(3);
var fs = require('fs');
var vm = require('vm');
var filename = __dirname + '/../lib/async.js';
fs.readFile(filename, function(err, content){
if(err) return test.done();
var s = vm.createScript(content, filename);
var s2 = vm.createScript(
content + 'this.async2 = this.async.noConflict();',
filename
);
var sandbox1 = {async: 'oldvalue'};
s.runInNewContext(sandbox1);
test.ok(sandbox1.async);
var sandbox2 = {async: 'oldvalue'};
s2.runInNewContext(sandbox2);
test.equals(sandbox2.async, 'oldvalue');
test.ok(sandbox2.async2);
test.done();
});
}
else test.done();
};
exports['concat'] = function(test){
test.expect(3);
var call_order = [];
var iterator = function (x, cb) {
setTimeout(function(){
call_order.push(x);
var r = [];
while (x > 0) {
r.push(x);
x--;
}
cb(null, r);
}, x*25);
};
async.concat([1,3,2], iterator, function(err, results){
test.same(results, [1,2,1,3,2,1]);
test.same(call_order, [1,2,3]);
test.ok(err === null, err + " passed instead of 'null'");
test.done();
});
};
exports['concat error'] = function(test){
test.expect(1);
var iterator = function (x, cb) {
cb(new Error('test error'));
};
async.concat([1,2,3], iterator, function(err){
test.ok(err);
test.done();
});
};
exports['concatSeries'] = function(test){
test.expect(3);
var call_order = [];
var iterator = function (x, cb) {
setTimeout(function(){
call_order.push(x);
var r = [];
while (x > 0) {
r.push(x);
x--;
}
cb(null, r);
}, x*25);
};
async.concatSeries([1,3,2], iterator, function(err, results){
test.same(results, [1,3,2,1,2,1]);
test.same(call_order, [1,3,2]);
test.ok(err === null, err + " passed instead of 'null'");
test.done();
});
};
exports['until'] = function (test) {
test.expect(4);
var call_order = [];
var count = 0;
async.until(
function () {
call_order.push(['test', count]);
return (count == 5);
},
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function (err, result) {
test.ok(err === null, err + " passed instead of 'null'");
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['test', 0],
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5],
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doUntil'] = function (test) {
test.expect(4);
var call_order = [];
var count = 0;
async.doUntil(
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function () {
call_order.push(['test', count]);
return (count == 5);
},
function (err, result) {
test.ok(err === null, err + " passed instead of 'null'");
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5]
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doUntil callback params'] = function (test) {
test.expect(3);
var call_order = [];
var count = 0;
async.doUntil(
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function (c) {
call_order.push(['test', c]);
return (c == 5);
},
function (err, result) {
if (err) throw err;
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5]
]);
test.equals(count, 5);
test.done();
}
);
};
exports['whilst'] = function (test) {
test.expect(4);
var call_order = [];
var count = 0;
async.whilst(
function () {
call_order.push(['test', count]);
return (count < 5);
},
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function (err, result) {
test.ok(err === null, err + " passed instead of 'null'");
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['test', 0],
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5],
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doWhilst'] = function (test) {
test.expect(4);
var call_order = [];
var count = 0;
async.doWhilst(
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function () {
call_order.push(['test', count]);
return (count < 5);
},
function (err, result) {
test.ok(err === null, err + " passed instead of 'null'");
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5]
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doWhilst callback params'] = function (test) {
test.expect(3);
var call_order = [];
var count = 0;
async.doWhilst(
function (cb) {
call_order.push(['iterator', count]);
count++;
cb(null, count);
},
function (c) {
call_order.push(['test', c]);
return (c < 5);
},
function (err, result) {
if (err) throw err;
test.equals(result, 5, 'last result passed through');
test.same(call_order, [
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5]
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doWhilst - error'] = function (test) {
test.expect(1);
var error = new Error('asdas');
async.doWhilst(
function (cb) {
cb(error);
},
function () {},
function (err) {
test.equal(err, error);
test.done();
}
);
};
exports['during'] = function (test) {
var call_order = [];
var count = 0;
async.during(
function (cb) {
call_order.push(['test', count]);
cb(null, count < 5);
},
function (cb) {
call_order.push(['iterator', count]);
count++;
cb();
},
function (err) {
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [
['test', 0],
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5],
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doDuring'] = function (test) {
var call_order = [];
var count = 0;
async.doDuring(
function (cb) {
call_order.push(['iterator', count]);
count++;
cb();
},
function (cb) {
call_order.push(['test', count]);
cb(null, count < 5);
},
function (err) {
test.ok(err === null, err + " passed instead of 'null'");
test.same(call_order, [
['iterator', 0], ['test', 1],
['iterator', 1], ['test', 2],
['iterator', 2], ['test', 3],
['iterator', 3], ['test', 4],
['iterator', 4], ['test', 5],
]);
test.equals(count, 5);
test.done();
}
);
};
exports['doDuring - error test'] = function (test) {
test.expect(1);
var error = new Error('asdas');
async.doDuring(
function (cb) {
cb(error);
},
function () {},
function (err) {
test.equal(err, error);
test.done();
}
);
};
exports['doDuring - error iterator'] = function (test) {
test.expect(1);
var error = new Error('asdas');
async.doDuring(
function (cb) {
cb(null);
},
function (cb) {
cb(error);
},
function (err) {
test.equal(err, error);
test.done();
}
);
};
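// A sketch added for illustration: what separates `during` from `whilst` is that the
// test function is asynchronous and reports its verdict via callback(err, truthValue).
exports['during async test function (sketch)'] = function (test) {
    test.expect(1);
    var count = 0;
    async.during(
        function (cb) {
            setTimeout(function () { cb(null, count < 3); }, 5);
        },
        function (cb) {
            count++;
            cb();
        },
        function () {
            test.equal(count, 3);
            test.done();
        }
    );
};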
exports['whilst optional callback'] = function (test) {
var counter = 0;
async.whilst(
function () { return counter < 2; },
function (cb) {
counter++;
cb();
}
);
test.equal(counter, 2);
test.done();
};
exports['queue'] = {
'queue': function (test) {
test.expect(17);
var call_order = [],
delays = [160,80,240,80];
// worker1: --1-4
// worker2: -2---3
// order of completion: 2,1,4,3
var q = async.queue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', 'arg');
}, delays.splice(0,1)[0]);
}, 2);
q.push(1, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 1);
call_order.push('callback ' + 1);
});
q.push(2, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 2);
call_order.push('callback ' + 2);
});
q.push(3, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 3);
});
q.push(4, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 4);
});
test.equal(q.length(), 4);
test.equal(q.concurrency, 2);
q.drain = function () {
test.same(call_order, [
'process 2', 'callback 2',
'process 1', 'callback 1',
'process 4', 'callback 4',
'process 3', 'callback 3'
]);
test.equal(q.concurrency, 2);
test.equal(q.length(), 0);
test.done();
};
},
'default concurrency': function (test) {
test.expect(17);
var call_order = [],
delays = [160,80,240,80];
// order of completion: 1,2,3,4
var q = async.queue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', 'arg');
}, delays.splice(0,1)[0]);
});
q.push(1, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 3);
call_order.push('callback ' + 1);
});
q.push(2, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 2);
call_order.push('callback ' + 2);
});
q.push(3, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 1);
call_order.push('callback ' + 3);
});
q.push(4, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 4);
});
test.equal(q.length(), 4);
test.equal(q.concurrency, 1);
q.drain = function () {
test.same(call_order, [
'process 1', 'callback 1',
'process 2', 'callback 2',
'process 3', 'callback 3',
'process 4', 'callback 4'
]);
test.equal(q.concurrency, 1);
test.equal(q.length(), 0);
test.done();
};
},
'zero concurrency': function(test){
test.expect(1);
test.throws(function () {
async.queue(function (task, callback) {
callback(null, task);
}, 0);
});
test.done();
},
'error propagation': function(test){
test.expect(1);
var results = [];
var q = async.queue(function (task, callback) {
callback(task.name === 'foo' ? new Error('fooError') : null);
}, 2);
q.drain = function() {
test.deepEqual(results, ['bar', 'fooError']);
test.done();
};
q.push({name: 'bar'}, function (err) {
if(err) {
results.push('barError');
return;
}
results.push('bar');
});
q.push({name: 'foo'}, function (err) {
if(err) {
results.push('fooError');
return;
}
results.push('foo');
});
},
    // The original queue implementation allowed the concurrency to be changed only
    // on the same event loop during which a task was added to the queue. This
    // test aims to be more robust:
    // Start with a concurrency of 1. Wait until a later event loop and change
    // the concurrency to 2. Wait again for a later loop, then verify the concurrency.
    // Repeat that one more time by changing the concurrency to 5.
'changing concurrency': function (test) {
test.expect(3);
var q = async.queue(function(task, callback){
setTimeout(function(){
callback();
}, 100);
}, 1);
for(var i = 0; i < 50; i++){
q.push('');
}
q.drain = function(){
test.done();
};
setTimeout(function(){
test.equal(q.concurrency, 1);
q.concurrency = 2;
setTimeout(function(){
test.equal(q.running(), 2);
q.concurrency = 5;
setTimeout(function(){
test.equal(q.running(), 5);
}, 500);
}, 500);
}, 500);
},
'push without callback': function (test) {
test.expect(1);
var call_order = [],
delays = [160,80,240,80];
// worker1: --1-4
// worker2: -2---3
// order of completion: 2,1,4,3
var q = async.queue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', 'arg');
}, delays.splice(0,1)[0]);
}, 2);
q.push(1);
q.push(2);
q.push(3);
q.push(4);
setTimeout(function () {
test.same(call_order, [
'process 2',
'process 1',
'process 4',
'process 3'
]);
test.done();
}, 800);
},
'push with non-function': function (test) {
test.expect(1);
var q = async.queue(function () {}, 1);
test.throws(function () {
q.push({}, 1);
});
test.done();
},
'unshift': function (test) {
test.expect(1);
var queue_order = [];
var q = async.queue(function (task, callback) {
queue_order.push(task);
callback();
}, 1);
q.unshift(4);
q.unshift(3);
q.unshift(2);
q.unshift(1);
setTimeout(function () {
test.same(queue_order, [ 1, 2, 3, 4 ]);
test.done();
}, 100);
},
'too many callbacks': function (test) {
test.expect(1);
var q = async.queue(function (task, callback) {
callback();
test.throws(function() {
callback();
});
test.done();
}, 2);
q.push(1);
},
'bulk task': function (test) {
test.expect(9);
var call_order = [],
delays = [160,80,240,80];
// worker1: --1-4
// worker2: -2---3
// order of completion: 2,1,4,3
var q = async.queue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', task);
}, delays.splice(0,1)[0]);
}, 2);
q.push( [1,2,3,4], function (err, arg) {
test.equal(err, 'error');
call_order.push('callback ' + arg);
});
test.equal(q.length(), 4);
test.equal(q.concurrency, 2);
setTimeout(function () {
test.same(call_order, [
'process 2', 'callback 2',
'process 1', 'callback 1',
'process 4', 'callback 4',
'process 3', 'callback 3'
]);
test.equal(q.concurrency, 2);
test.equal(q.length(), 0);
test.done();
}, 800);
},
'idle': function(test) {
test.expect(7);
var q = async.queue(function (task, callback) {
// Queue is busy when workers are running
test.equal(q.idle(), false);
callback();
}, 1);
// Queue is idle before anything added
test.equal(q.idle(), true);
q.unshift(4);
q.unshift(3);
q.unshift(2);
q.unshift(1);
// Queue is busy when tasks added
test.equal(q.idle(), false);
q.drain = function() {
// Queue is idle after drain
test.equal(q.idle(), true);
test.done();
};
},
'pause': function(test) {
test.expect(3);
var call_order = [],
task_timeout = 100,
pause_timeout = 300,
resume_timeout = 500,
tasks = [ 1, 2, 3, 4, 5, 6 ],
elapsed = (function () {
var start = (new Date()).valueOf();
return function () {
return Math.round(((new Date()).valueOf() - start) / 100) * 100;
};
})();
var q = async.queue(function (task, callback) {
call_order.push('process ' + task);
call_order.push('timeout ' + elapsed());
callback();
});
function pushTask () {
var task = tasks.shift();
if (!task) { return; }
setTimeout(function () {
q.push(task);
pushTask();
}, task_timeout);
}
pushTask();
setTimeout(function () {
q.pause();
test.equal(q.paused, true);
}, pause_timeout);
setTimeout(function () {
q.resume();
test.equal(q.paused, false);
}, resume_timeout);
setTimeout(function () {
test.same(call_order, [
'process 1', 'timeout 100',
'process 2', 'timeout 200',
'process 3', 'timeout 500',
'process 4', 'timeout 500',
'process 5', 'timeout 500',
'process 6', 'timeout 600'
]);
test.done();
}, 800);
},
'pause in worker with concurrency': function(test) {
test.expect(1);
var call_order = [];
var q = async.queue(function (task, callback) {
if (task.isLongRunning) {
q.pause();
setTimeout(function () {
call_order.push(task.id);
q.resume();
callback();
}, 500);
}
else {
call_order.push(task.id);
callback();
}
}, 10);
q.push({ id: 1, isLongRunning: true});
q.push({ id: 2 });
q.push({ id: 3 });
q.push({ id: 4 });
q.push({ id: 5 });
setTimeout(function () {
test.same(call_order, [1, 2, 3, 4, 5]);
test.done();
}, 1000);
},
'pause with concurrency': function(test) {
test.expect(4);
var call_order = [],
task_timeout = 100,
pause_timeout = 50,
resume_timeout = 300,
tasks = [ 1, 2, 3, 4, 5, 6 ],
elapsed = (function () {
var start = (new Date()).valueOf();
return function () {
return Math.round(((new Date()).valueOf() - start) / 100) * 100;
};
})();
var q = async.queue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
call_order.push('timeout ' + elapsed());
callback();
}, task_timeout);
}, 2);
q.push(tasks);
setTimeout(function () {
q.pause();
test.equal(q.paused, true);
}, pause_timeout);
setTimeout(function () {
q.resume();
test.equal(q.paused, false);
}, resume_timeout);
setTimeout(function () {
test.equal(q.running(), 2);
}, resume_timeout + 10);
setTimeout(function () {
test.same(call_order, [
'process 1', 'timeout 100',
'process 2', 'timeout 100',
'process 3', 'timeout 400',
'process 4', 'timeout 400',
'process 5', 'timeout 500',
'process 6', 'timeout 500'
]);
test.done();
}, 800);
},
'start paused': function (test) {
test.expect(2);
var q = async.queue(function (task, callback) {
setTimeout(function () {
callback();
}, 40);
}, 2);
q.pause();
q.push([1, 2, 3]);
setTimeout(function () {
q.resume();
}, 5);
setTimeout(function () {
test.equal(q.tasks.length, 1);
test.equal(q.running(), 2);
q.resume();
}, 15);
q.drain = function () {
test.done();
};
},
'kill': function (test) {
test.expect(1);
var q = async.queue(function (task, callback) {
setTimeout(function () {
test.ok(false, "Function should never be called");
callback();
}, 300);
}, 1);
q.drain = function() {
test.ok(false, "Function should never be called");
};
q.push(0);
q.kill();
setTimeout(function() {
test.equal(q.length(), 0);
test.done();
}, 600);
},
'events': function(test) {
test.expect(4);
var calls = [];
var q = async.queue(function(task, cb) {
// nop
calls.push('process ' + task);
async.setImmediate(cb);
}, 10);
q.concurrency = 3;
q.saturated = function() {
test.ok(q.length() == 3, 'queue should be saturated now');
calls.push('saturated');
};
q.empty = function() {
test.ok(q.length() === 0, 'queue should be empty now');
calls.push('empty');
};
q.drain = function() {
test.ok(
q.length() === 0 && q.running() === 0,
'queue should be empty now and no more workers should be running'
);
calls.push('drain');
test.same(calls, [
'saturated',
'process foo',
'process bar',
'process zoo',
'foo cb',
'process poo',
'bar cb',
'empty',
'process moo',
'zoo cb',
'poo cb',
'moo cb',
'drain'
]);
test.done();
};
q.push('foo', function () {calls.push('foo cb');});
q.push('bar', function () {calls.push('bar cb');});
q.push('zoo', function () {calls.push('zoo cb');});
q.push('poo', function () {calls.push('poo cb');});
q.push('moo', function () {calls.push('moo cb');});
},
'empty': function(test) {
test.expect(2);
var calls = [];
var q = async.queue(function(task, cb) {
// nop
calls.push('process ' + task);
async.setImmediate(cb);
}, 3);
q.drain = function() {
test.ok(
q.length() === 0 && q.running() === 0,
'queue should be empty now and no more workers should be running'
);
calls.push('drain');
test.same(calls, [
'drain'
]);
test.done();
};
q.push([]);
},
'saturated': function (test) {
test.expect(1);
var saturatedCalled = false;
var q = async.queue(function(task, cb) {
async.setImmediate(cb);
}, 2);
q.saturated = function () {
saturatedCalled = true;
};
q.drain = function () {
test.ok(saturatedCalled, "saturated not called");
test.done();
};
setTimeout(function () {
q.push(['foo', 'bar', 'baz', 'moo']);
}, 10);
},
'started': function(test) {
test.expect(2);
var q = async.queue(function(task, cb) {
cb(null, task);
});
test.equal(q.started, false);
q.push([]);
test.equal(q.started, true);
test.done();
}
};
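// A minimal usage sketch (assumed API, consistent with the tests above): a queue takes a
// worker and a concurrency, accepts per-task callbacks, and fires `drain` when empty.
//
//     var q = async.queue(function (task, callback) {
//         console.log('processing ' + task.name);
//         callback();
//     }, 2);
//     q.drain = function () { console.log('all items have been processed'); };
//     q.push({name: 'one'}, function (err) { console.log('finished processing one'); });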
exports['priorityQueue'] = {
'priorityQueue': function (test) {
test.expect(17);
var call_order = [];
// order of completion: 2,1,4,3
var q = async.priorityQueue(function (task, callback) {
call_order.push('process ' + task);
callback('error', 'arg');
}, 1);
q.push(1, 1.4, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 2);
call_order.push('callback ' + 1);
});
q.push(2, 0.2, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 3);
call_order.push('callback ' + 2);
});
q.push(3, 3.8, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 3);
});
q.push(4, 2.9, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 1);
call_order.push('callback ' + 4);
});
test.equal(q.length(), 4);
test.equal(q.concurrency, 1);
q.drain = function () {
test.same(call_order, [
'process 2', 'callback 2',
'process 1', 'callback 1',
'process 4', 'callback 4',
'process 3', 'callback 3'
]);
test.equal(q.concurrency, 1);
test.equal(q.length(), 0);
test.done();
};
},
'concurrency': function (test) {
test.expect(17);
var call_order = [],
delays = [160,80,240,80];
// worker1: --2-3
// worker2: -1---4
// order of completion: 1,2,3,4
var q = async.priorityQueue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', 'arg');
}, delays.splice(0,1)[0]);
}, 2);
q.push(1, 1.4, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 2);
call_order.push('callback ' + 1);
});
q.push(2, 0.2, function (err, arg) {<|fim▁hole|> test.equal(arg, 'arg');
test.equal(q.length(), 1);
call_order.push('callback ' + 2);
});
q.push(3, 3.8, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 3);
});
q.push(4, 2.9, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(q.length(), 0);
call_order.push('callback ' + 4);
});
test.equal(q.length(), 4);
test.equal(q.concurrency, 2);
q.drain = function () {
test.same(call_order, [
'process 1', 'callback 1',
'process 2', 'callback 2',
'process 3', 'callback 3',
'process 4', 'callback 4'
]);
test.equal(q.concurrency, 2);
test.equal(q.length(), 0);
test.done();
};
}
};
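// A sketch of the priority semantics exercised above (illustrative only): lower priority
// values are processed first, which is why the 0.2 task ran before the 1.4 task.
//
//     var q = async.priorityQueue(worker, 1);   // `worker` is hypothetical
//     q.push('routine', 2);
//     q.push('urgent', 1);    // processed before 'routine'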
exports['cargo'] = {
'cargo': function (test) {
test.expect(19);
var call_order = [],
delays = [160, 160, 80];
// worker: --12--34--5-
// order of completion: 1,2,3,4,5
var c = async.cargo(function (tasks, callback) {
setTimeout(function () {
call_order.push('process ' + tasks.join(' '));
callback('error', 'arg');
}, delays.shift());
}, 2);
c.push(1, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(c.length(), 3);
call_order.push('callback ' + 1);
});
c.push(2, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(c.length(), 3);
call_order.push('callback ' + 2);
});
test.equal(c.length(), 2);
// async push
setTimeout(function () {
c.push(3, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(c.length(), 1);
call_order.push('callback ' + 3);
});
}, 60);
setTimeout(function () {
c.push(4, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(c.length(), 1);
call_order.push('callback ' + 4);
});
test.equal(c.length(), 2);
c.push(5, function (err, arg) {
test.equal(err, 'error');
test.equal(arg, 'arg');
test.equal(c.length(), 0);
call_order.push('callback ' + 5);
});
}, 120);
setTimeout(function () {
test.same(call_order, [
'process 1 2', 'callback 1', 'callback 2',
'process 3 4', 'callback 3', 'callback 4',
'process 5' , 'callback 5'
]);
test.equal(c.length(), 0);
test.done();
}, 800);
},
'without callback': function (test) {
test.expect(1);
var call_order = [],
delays = [160,80,240,80];
// worker: --1-2---34-5-
// order of completion: 1,2,3,4,5
var c = async.cargo(function (tasks, callback) {
setTimeout(function () {
call_order.push('process ' + tasks.join(' '));
callback('error', 'arg');
}, delays.shift());
}, 2);
c.push(1);
setTimeout(function () {
c.push(2);
}, 120);
setTimeout(function () {
c.push(3);
c.push(4);
c.push(5);
}, 180);
setTimeout(function () {
test.same(call_order, [
'process 1',
'process 2',
'process 3 4',
'process 5'
]);
test.done();
}, 800);
},
'bulk task': function (test) {
test.expect(7);
var call_order = [],
delays = [120,40];
// worker: -123-4-
// order of completion: 1,2,3,4
var c = async.cargo(function (tasks, callback) {
setTimeout(function () {
call_order.push('process ' + tasks.join(' '));
callback('error', tasks.join(' '));
}, delays.shift());
}, 3);
c.push( [1,2,3,4], function (err, arg) {
test.equal(err, 'error');
call_order.push('callback ' + arg);
});
test.equal(c.length(), 4);
setTimeout(function () {
test.same(call_order, [
'process 1 2 3', 'callback 1 2 3',
'callback 1 2 3', 'callback 1 2 3',
'process 4', 'callback 4',
]);
test.equal(c.length(), 0);
test.done();
}, 800);
},
'drain once': function (test) {
test.expect(1);
var c = async.cargo(function (tasks, callback) {
callback();
}, 3);
var drainCounter = 0;
c.drain = function () {
drainCounter++;
};
for(var i = 0; i < 10; i++){
c.push(i);
}
setTimeout(function(){
test.equal(drainCounter, 1);
test.done();
}, 500);
},
'drain twice': function (test) {
test.expect(1);
var c = async.cargo(function (tasks, callback) {
callback();
}, 3);
var loadCargo = function(){
for(var i = 0; i < 10; i++){
c.push(i);
}
};
var drainCounter = 0;
c.drain = function () {
drainCounter++;
};
loadCargo();
setTimeout(loadCargo, 500);
setTimeout(function(){
test.equal(drainCounter, 2);
test.done();
}, 1000);
},
'events': function(test) {
test.expect(4);
var calls = [];
var q = async.cargo(function(task, cb) {
// nop
calls.push('process ' + task);
async.setImmediate(cb);
}, 1);
q.concurrency = 3;
q.saturated = function() {
test.ok(q.length() == 3, 'cargo should be saturated now');
calls.push('saturated');
};
q.empty = function() {
test.ok(q.length() === 0, 'cargo should be empty now');
calls.push('empty');
};
q.drain = function() {
test.ok(
q.length() === 0 && q.running() === 0,
'cargo should be empty now and no more workers should be running'
);
calls.push('drain');
test.same(calls, [
'saturated',
'process foo',
'process bar',
'process zoo',
'foo cb',
'process poo',
'bar cb',
'empty',
'process moo',
'zoo cb',
'poo cb',
'moo cb',
'drain'
]);
test.done();
};
q.push('foo', function () {calls.push('foo cb');});
q.push('bar', function () {calls.push('bar cb');});
q.push('zoo', function () {calls.push('zoo cb');});
q.push('poo', function () {calls.push('poo cb');});
q.push('moo', function () {calls.push('moo cb');});
},
'expose payload': function (test) {
test.expect(5);
var called_once = false;
        var cargo = async.cargo(function(tasks, cb) {
if (!called_once) {
test.equal(cargo.payload, 1);
test.ok(tasks.length === 1, 'should start with payload = 1');
} else {
test.equal(cargo.payload, 2);
                test.ok(tasks.length === 2, 'next call should have payload = 2');
}
called_once = true;
setTimeout(cb, 25);
}, 1);
cargo.drain = function () {
test.done();
};
test.equals(cargo.payload, 1);
cargo.push([1, 2, 3]);
setTimeout(function () {
cargo.payload = 2;
}, 15);
}
};
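// A sketch contrasting cargo with queue (illustrative only): a cargo worker receives an
// *array* of up to `payload` tasks per invocation rather than a single task.
//
//     var c = async.cargo(function (tasks, callback) {
//         console.log(tasks);   // e.g. [1, 2] with payload = 2
//         callback();
//     }, 2);
//     c.push([1, 2, 3]);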
exports['memoize'] = {
'memoize': function (test) {
test.expect(5);
var call_order = [];
var fn = function (arg1, arg2, callback) {
async.setImmediate(function () {
call_order.push(['fn', arg1, arg2]);
callback(null, arg1 + arg2);
});
};
var fn2 = async.memoize(fn);
fn2(1, 2, function (err, result) {
test.ok(err === null, err + " passed instead of 'null'");
test.equal(result, 3);
fn2(1, 2, function (err, result) {
test.equal(result, 3);
fn2(2, 2, function (err, result) {
test.equal(result, 4);
test.same(call_order, [['fn',1,2], ['fn',2,2]]);
test.done();
});
});
});
},
'maintains asynchrony': function (test) {
test.expect(3);
var call_order = [];
var fn = function (arg1, arg2, callback) {
call_order.push(['fn', arg1, arg2]);
async.setImmediate(function () {
call_order.push(['cb', arg1, arg2]);
callback(null, arg1 + arg2);
});
};
var fn2 = async.memoize(fn);
fn2(1, 2, function (err, result) {
test.equal(result, 3);
fn2(1, 2, function (err, result) {
test.equal(result, 3);
async.nextTick(memoize_done);
call_order.push('tick3');
});
call_order.push('tick2');
});
call_order.push('tick1');
function memoize_done() {
var async_call_order = [
['fn',1,2], // initial async call
'tick1', // async caller
['cb',1,2], // async callback
// ['fn',1,2], // memoized // memoized async body
'tick2', // handler for first async call
// ['cb',1,2], // memoized // memoized async response body
'tick3' // handler for memoized async call
];
test.same(call_order, async_call_order);
test.done();
}
},
'unmemoize': function(test) {
test.expect(4);
var call_order = [];
var fn = function (arg1, arg2, callback) {
call_order.push(['fn', arg1, arg2]);
async.setImmediate(function () {
callback(null, arg1 + arg2);
});
};
var fn2 = async.memoize(fn);
var fn3 = async.unmemoize(fn2);
fn3(1, 2, function (err, result) {
test.equal(result, 3);
fn3(1, 2, function (err, result) {
test.equal(result, 3);
fn3(2, 2, function (err, result) {
test.equal(result, 4);
test.same(call_order, [['fn',1,2], ['fn',1,2], ['fn',2,2]]);
test.done();
});
});
});
},
    'unmemoize a non-memoized function': function(test) {
test.expect(1);
var fn = function (arg1, arg2, callback) {
callback(null, arg1 + arg2);
};
var fn2 = async.unmemoize(fn);
fn2(1, 2, function(err, result) {
test.equal(result, 3);
});
test.done();
},
'error': function (test) {
test.expect(1);
var testerr = new Error('test');
var fn = function (arg1, arg2, callback) {
callback(testerr, arg1 + arg2);
};
async.memoize(fn)(1, 2, function (err) {
test.equal(err, testerr);
});
test.done();
},
'multiple calls': function (test) {
test.expect(3);
var fn = function (arg1, arg2, callback) {
test.ok(true);
setTimeout(function(){
callback(null, arg1, arg2);
}, 10);
};
var fn2 = async.memoize(fn);
fn2(1, 2, function(err, result) {
test.equal(result, 1, 2);
});
fn2(1, 2, function(err, result) {
test.equal(result, 1, 2);
test.done();
});
},
'custom hash function': function (test) {
test.expect(2);
var testerr = new Error('test');
var fn = function (arg1, arg2, callback) {
callback(testerr, arg1 + arg2);
};
var fn2 = async.memoize(fn, function () {
return 'custom hash';
});
fn2(1, 2, function (err, result) {
test.equal(result, 3);
fn2(2, 2, function (err, result) {
test.equal(result, 3);
test.done();
});
});
},
'manually added memo value': function (test) {
test.expect(1);
var fn = async.memoize(function() {
test(false, "Function should never be called");
});
fn.memo["foo"] = ["bar"];
fn("foo", function(val) {
test.equal(val, "bar");
test.done();
});
}
};
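// A sketch of the caching behaviour assumed by the tests above (`slowLookup` is
// hypothetical): unless a custom hasher is supplied, memoize keys its cache on the
// first argument, which is why the 'custom hash function' test can collapse distinct
// argument lists onto one cache entry.
//
//     var fast = async.memoize(slowLookup);                              // keyed on first arg
//     var one = async.memoize(slowLookup, function () { return 'k'; });  // single shared key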
exports['ensureAsync'] = {
'defer sync functions': function (test) {
test.expect(6);
var sync = true;
async.ensureAsync(function (arg1, arg2, cb) {
test.equal(arg1, 1);
test.equal(arg2, 2);
cb(null, 4, 5);
})(1, 2, function (err, arg4, arg5) {
test.equal(err, null);
test.equal(arg4, 4);
test.equal(arg5, 5);
test.ok(!sync, 'callback called on same tick');
test.done();
});
sync = false;
},
'do not defer async functions': function (test) {
test.expect(6);
var sync = false;
async.ensureAsync(function (arg1, arg2, cb) {
test.equal(arg1, 1);
test.equal(arg2, 2);
async.setImmediate(function () {
sync = true;
cb(null, 4, 5);
sync = false;
});
})(1, 2, function (err, arg4, arg5) {
test.equal(err, null);
test.equal(arg4, 4);
test.equal(arg5, 5);
test.ok(sync, 'callback called on next tick');
test.done();
});
},
'double wrapping': function (test) {
test.expect(6);
var sync = true;
async.ensureAsync(async.ensureAsync(function (arg1, arg2, cb) {
test.equal(arg1, 1);
test.equal(arg2, 2);
cb(null, 4, 5);
}))(1, 2, function (err, arg4, arg5) {
test.equal(err, null);
test.equal(arg4, 4);
test.equal(arg5, 5);
test.ok(!sync, 'callback called on same tick');
test.done();
});
sync = false;
}
};
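// A sketch of why ensureAsync exists (illustrative; `hugeArray`, `process` and `done`
// are hypothetical): wrapping a synchronous iterator keeps series helpers from
// recursing on the same tick, avoiding stack overflows on large inputs.
//
//     async.eachSeries(hugeArray, async.ensureAsync(function (item, cb) {
//         cb(null, process(item));
//     }), done);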
exports['constant'] = function (test) {
test.expect(5);
var f = async.constant(42, 1, 2, 3);
f(function (err, value, a, b, c) {
test.ok(!err);
test.ok(value === 42);
test.ok(a === 1);
test.ok(b === 2);
test.ok(c === 3);
test.done();
});
};
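// An illustrative sketch: constant is handy for seeding the first step of a waterfall.
//
//     async.waterfall([
//         async.constant(42),
//         function (value, next) { next(null, value + 1); }
//     ], function (err, result) { /* result === 43 */ });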
exports['asyncify'] = {
'asyncify': function (test) {
var parse = async.asyncify(JSON.parse);
parse("{\"a\":1}", function (err, result) {
test.ok(!err);
test.ok(result.a === 1);
test.done();
});
},
'asyncify null': function (test) {
var parse = async.asyncify(function() {
return null;
});
parse("{\"a\":1}", function (err, result) {
test.ok(!err);
test.ok(result === null);
test.done();
});
},
'variable numbers of arguments': function (test) {
async.asyncify(function (x, y, z) {
test.ok(arguments.length === 3);
test.ok(x === 1);
test.ok(y === 2);
test.ok(z === 3);
})(1, 2, 3, function () {});
test.done();
},
'catch errors': function (test) {
async.asyncify(function () {
throw new Error("foo");
})(function (err) {
test.ok(err);
test.ok(err.message === "foo");
test.done();
});
},
'dont catch errors in the callback': function (test) {
try {
async.asyncify(function () {})(function (err) {
if (err) {
return test.done(new Error("should not get an error here"));
}
throw new Error("callback error");
});
} catch (e) {
test.ok(e.message === "callback error");
test.done();
}
},
'promisified': [
'native-promise-only',
'bluebird',
'es6-promise',
'rsvp'
].reduce(function(promises, name) {
if (isBrowser()) {
// node only test
            return promises;
}
var Promise = require(name);
if (typeof Promise.Promise === 'function') {
Promise = Promise.Promise;
}
promises[name] = {
'resolve': function(test) {
var promisified = function(argument) {
return new Promise(function (resolve) {
setTimeout(function () {
resolve(argument + " resolved");
}, 15);
});
};
async.asyncify(promisified)("argument", function (err, value) {
if (err) {
return test.done(new Error("should not get an error here"));
}
test.ok(value === "argument resolved");
test.done();
});
},
'reject': function(test) {
var promisified = function(argument) {
return new Promise(function (resolve, reject) {
reject(argument + " rejected");
});
};
async.asyncify(promisified)("argument", function (err) {
test.ok(err);
test.ok(err.message === "argument rejected");
test.done();
});
}
};
return promises;
}, {})
};<|fim▁end|> | test.equal(err, 'error'); |
<|file_name|>content.py<|end_file_name|><|fim▁begin|>import re
import uuid
from xmodule.assetstore.assetmgr import AssetManager
XASSET_LOCATION_TAG = 'c4x'
XASSET_SRCREF_PREFIX = 'xasset:'
XASSET_THUMBNAIL_TAIL_NAME = '.jpg'
STREAM_DATA_CHUNK_SIZE = 1024
import os
import logging
import StringIO
from urlparse import urlparse, urlunparse, parse_qsl
from urllib import urlencode
from opaque_keys.edx.locator import AssetLocator
from opaque_keys.edx.keys import CourseKey, AssetKey
from opaque_keys import InvalidKeyError
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.exceptions import NotFoundError
from PIL import Image
class StaticContent(object):
def __init__(self, loc, name, content_type, data, last_modified_at=None, thumbnail_location=None, import_path=None,
length=None, locked=False):
self.location = loc
        self.name = name  # a display string which can be edited, and thus not part of the location, which must stay fixed
self.content_type = content_type
self._data = data
self.length = length
self.last_modified_at = last_modified_at
self.thumbnail_location = thumbnail_location
# optional information about where this file was imported from. This is needed to support import/export
# cycles
self.import_path = import_path<|fim▁hole|> self.locked = locked
@property
def is_thumbnail(self):
return self.location.category == 'thumbnail'
@staticmethod
def generate_thumbnail_name(original_name, dimensions=None):
"""
- original_name: Name of the asset (typically its location.name)
- dimensions: `None` or a tuple of (width, height) in pixels
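        For illustration (hypothetical input): ('photo.png', (50, 50)) yields 'photo-png-50x50.jpg'.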
"""
name_root, ext = os.path.splitext(original_name)
if not ext == XASSET_THUMBNAIL_TAIL_NAME:
name_root = name_root + ext.replace(u'.', u'-')
if dimensions:
width, height = dimensions # pylint: disable=unpacking-non-sequence
name_root += "-{}x{}".format(width, height)
return u"{name_root}{extension}".format(
name_root=name_root,
extension=XASSET_THUMBNAIL_TAIL_NAME,
)
@staticmethod
def compute_location(course_key, path, revision=None, is_thumbnail=False):
"""
Constructs a location object for static content.
- course_key: the course that this asset belongs to
- path: is the name of the static asset
- revision: is the object's revision information
- is_thumbnail: is whether or not we want the thumbnail version of this
asset
"""
path = path.replace('/', '_')
return course_key.make_asset_key(
'asset' if not is_thumbnail else 'thumbnail',
AssetLocator.clean_keeping_underscores(path)
).for_branch(None)
def get_id(self):
return self.location
@property
def data(self):
return self._data
ASSET_URL_RE = re.compile(r"""
/?c4x/
(?P<org>[^/]+)/
(?P<course>[^/]+)/
(?P<category>[^/]+)/
(?P<name>[^/]+)
""", re.VERBOSE | re.IGNORECASE)
@staticmethod
def is_c4x_path(path_string):
"""
        Returns True if the given path appears to be a c4x link, based on its leading element.
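        For illustration, a hypothetical matching path: '/c4x/edX/DemoX/asset/images_logo.png'.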
"""
return StaticContent.ASSET_URL_RE.match(path_string) is not None
@staticmethod
def get_static_path_from_location(location):
"""
        This utility static method takes a location identifier and creates a 'durable' /static/... URL representation of it.
This link is 'durable' as it can maintain integrity across cloning of courseware across course-ids, e.g. reruns of
courses.
In the LMS/CMS, we have runtime link-rewriting, so at render time, this /static/... format will get translated into
the actual /c4x/... path which the client needs to reference static content
"""
if location is not None:
return u"/static/{name}".format(name=location.name)
else:
return None
@staticmethod
def get_base_url_path_for_course_assets(course_key):
if course_key is None:
return None
assert isinstance(course_key, CourseKey)
placeholder_id = uuid.uuid4().hex
# create a dummy asset location with a fake but unique name. strip off the name, and return it
url_path = StaticContent.serialize_asset_key_with_slash(
course_key.make_asset_key('asset', placeholder_id).for_branch(None)
)
return url_path.replace(placeholder_id, '')
@staticmethod
def get_location_from_path(path):
"""
Generate an AssetKey for the given path (old c4x/org/course/asset/name syntax)
"""
try:
return AssetKey.from_string(path)
except InvalidKeyError:
# TODO - re-address this once LMS-11198 is tackled.
if path.startswith('/'):
# try stripping off the leading slash and try again
return AssetKey.from_string(path[1:])
@staticmethod
def get_asset_key_from_path(course_key, path):
"""
Parses a path, extracting an asset key or creating one.
Args:
course_key: key to the course which owns this asset
path: the path to said content
Returns:
AssetKey: the asset key that represents the path
"""
# Clean up the path, removing any static prefix and any leading slash.
if path.startswith('/static/'):
path = path[len('/static/'):]
path = path.lstrip('/')
try:
return AssetKey.from_string(path)
except InvalidKeyError:
# If we couldn't parse the path, just let compute_location figure it out.
# It's most likely a path like /image.png or something.
return StaticContent.compute_location(course_key, path)
@staticmethod
def get_canonicalized_asset_path(course_key, path, base_url):
"""
Returns a fully-qualified path to a piece of static content.
If a static asset CDN is configured, this path will include it.
Otherwise, the path will simply be relative.
Args:
course_key: key to the course which owns this asset
path: the path to said content
Returns:
string: fully-qualified path to asset
"""
# Break down the input path.
_, _, relative_path, params, query_string, fragment = urlparse(path)
# Convert our path to an asset key if it isn't one already.
asset_key = StaticContent.get_asset_key_from_path(course_key, relative_path)
# Check the status of the asset to see if this can be served via CDN aka publicly.
serve_from_cdn = False
try:
content = AssetManager.find(asset_key, as_stream=True)
is_locked = getattr(content, "locked", True)
serve_from_cdn = not is_locked
except (ItemNotFoundError, NotFoundError):
# If we can't find the item, just treat it as if it's locked.
serve_from_cdn = False
# Update any query parameter values that have asset paths in them. This is for assets that
# require their own after-the-fact values, like a Flash file that needs the path of a config
# file passed to it e.g. /static/visualization.swf?configFile=/static/visualization.xml
query_params = parse_qsl(query_string)
updated_query_params = []
for query_name, query_value in query_params:
if query_value.startswith("/static/"):
new_query_value = StaticContent.get_canonicalized_asset_path(course_key, query_value, base_url)
updated_query_params.append((query_name, new_query_value))
else:
updated_query_params.append((query_name, query_value))
serialized_asset_key = StaticContent.serialize_asset_key_with_slash(asset_key)
base_url = base_url if serve_from_cdn else ''
return urlunparse((None, base_url, serialized_asset_key, params, urlencode(updated_query_params), fragment))
def stream_data(self):
yield self._data
@staticmethod
def serialize_asset_key_with_slash(asset_key):
"""
Legacy code expects the serialized asset key to start w/ a slash; so, do that in one place
:param asset_key:
"""
url = unicode(asset_key)
if not url.startswith('/'):
url = '/' + url # TODO - re-address this once LMS-11198 is tackled.
return url
class StaticContentStream(StaticContent):
def __init__(self, loc, name, content_type, stream, last_modified_at=None, thumbnail_location=None, import_path=None,
length=None, locked=False):
super(StaticContentStream, self).__init__(loc, name, content_type, None, last_modified_at=last_modified_at,
thumbnail_location=thumbnail_location, import_path=import_path,
length=length, locked=locked)
self._stream = stream
def stream_data(self):
while True:
chunk = self._stream.read(STREAM_DATA_CHUNK_SIZE)
if len(chunk) == 0:
break
yield chunk
def stream_data_in_range(self, first_byte, last_byte):
"""
Stream the data between first_byte and last_byte (included)
"""
self._stream.seek(first_byte)
position = first_byte
while True:
if last_byte < position + STREAM_DATA_CHUNK_SIZE - 1:
chunk = self._stream.read(last_byte - position + 1)
yield chunk
break
chunk = self._stream.read(STREAM_DATA_CHUNK_SIZE)
position += STREAM_DATA_CHUNK_SIZE
yield chunk
def close(self):
self._stream.close()
def copy_to_in_mem(self):
self._stream.seek(0)
content = StaticContent(self.location, self.name, self.content_type, self._stream.read(),
last_modified_at=self.last_modified_at, thumbnail_location=self.thumbnail_location,
import_path=self.import_path, length=self.length, locked=self.locked)
return content
class ContentStore(object):
'''
Abstraction for all ContentStore providers (e.g. MongoDB)
'''
def save(self, content):
raise NotImplementedError
def find(self, filename):
raise NotImplementedError
def get_all_content_for_course(self, course_key, start=0, maxresults=-1, sort=None, filter_params=None):
'''
Returns a list of static assets for a course, followed by the total number of assets.
By default all assets are returned, but start and maxresults can be provided to limit the query.
The return format is a list of asset data dictionaries.
The asset data dictionaries have the following keys:
asset_key (:class:`opaque_keys.edx.AssetKey`): The key of the asset
displayname: The human-readable name of the asset
        uploadDate (datetime.datetime): The date and time that the file was uploaded
contentType: The mimetype string of the asset
md5: An md5 hash of the asset content
'''
raise NotImplementedError
def delete_all_course_assets(self, course_key):
"""
Delete all of the assets which use this course_key as an identifier
:param course_key:
"""
raise NotImplementedError
def copy_all_course_assets(self, source_course_key, dest_course_key):
"""
Copy all the course assets from source_course_key to dest_course_key
"""
raise NotImplementedError
def generate_thumbnail(self, content, tempfile_path=None, dimensions=None):
"""Create a thumbnail for a given image.
Returns a tuple of (StaticContent, AssetKey)
`content` is the StaticContent representing the image you want to make a
thumbnail out of.
`tempfile_path` is a string path to the location of a file to read from
in order to grab the image data, instead of relying on `content.data`
`dimensions` is an optional param that represents (width, height) in
pixels. It defaults to None.
"""
thumbnail_content = None
# use a naming convention to associate originals with the thumbnail
thumbnail_name = StaticContent.generate_thumbnail_name(
content.location.name, dimensions=dimensions
)
thumbnail_file_location = StaticContent.compute_location(
content.location.course_key, thumbnail_name, is_thumbnail=True
)
# if we're uploading an image, then let's generate a thumbnail so that we can
# serve it up when needed without having to rescale on the fly
if content.content_type is not None and content.content_type.split('/')[0] == 'image':
try:
# use PIL to do the thumbnail generation (http://www.pythonware.com/products/pil/)
# My understanding is that PIL will maintain aspect ratios while restricting
# the max-height/width to be whatever you pass in as 'size'
# @todo: move the thumbnail size to a configuration setting?!?
if tempfile_path is None:
im = Image.open(StringIO.StringIO(content.data))
else:
im = Image.open(tempfile_path)
                # I've seen some exceptions from the PIL library when trying to save paletted
# PNG files to JPEG. Per the google-universe, they suggest converting to RGB first.
im = im.convert('RGB')
if not dimensions:
dimensions = (128, 128)
im.thumbnail(dimensions, Image.ANTIALIAS)
thumbnail_file = StringIO.StringIO()
im.save(thumbnail_file, 'JPEG')
thumbnail_file.seek(0)
# store this thumbnail as any other piece of content
thumbnail_content = StaticContent(thumbnail_file_location, thumbnail_name,
'image/jpeg', thumbnail_file)
self.save(thumbnail_content)
except Exception, e:
# log and continue as thumbnails are generally considered as optional
logging.exception(u"Failed to generate thumbnail for {0}. Exception: {1}".format(content.location, str(e)))
return thumbnail_content, thumbnail_file_location
def ensure_indexes(self):
"""
Ensure that all appropriate indexes are created that are needed by this modulestore, or raise
an exception if unable to.
"""
pass<|fim▁end|> | |
<|file_name|>test_geomutils.py<|end_file_name|><|fim▁begin|>#!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
__version__='''$Id: test_geomutils.py 3355 2009-01-08 14:58:44Z jonas $'''<|fim▁hole|>setOutDir(__name__)
class GeomTestCase(unittest.TestCase):
def test_padding(self):
"Test reportlab.lib.boxstuff.normalizePadding."
from reportlab.lib.geomutils import normalizeTRBL
paddings = (
(4, (4, 4, 4, 4)),
((0, 1), (0, 1, 0, 1)),
((0, 1, 2), (0, 1, 2, 1)),
((0, 1, 2, 3), (0, 1, 2, 3)),
)
for pin, pout in paddings:
pres = normalizeTRBL(pin)
assert pres == pout, "normalizeTRBL(%s) returned %s, expected %s" % (pin, pres, pout)
def makeSuite():
return makeSuiteForClasses(GeomTestCase)
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())<|fim▁end|> | __doc__="""Tests for geometry utility functions."""
import unittest
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from os import path
<|fim▁hole|><|fim▁end|> | current_dir = path.dirname(__file__)
sys.path.insert(0, path.join(path.dirname(current_dir), 'wdom')) |
<|file_name|>channel_mpsc.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::io;
use std::io::{Error, ErrorKind};
use std::sync::mpsc;
///
/// Handles the channel implementation when in process channels are enabled.
///
pub type PayloadSender = MsgSender<Payload>;
pub type PayloadReceiver = MsgReceiver<Payload>;
impl PayloadSenderHelperMethods for PayloadSender {
fn send_payload(&self, payload: Payload) -> Result<(), Error> {
self.send(payload)
}
}
impl PayloadReceiverHelperMethods for PayloadReceiver {
fn recv_payload(&self) -> Result<Payload, Error> {
self.recv()
}
fn to_mpsc_receiver(self) -> Receiver<Payload> {
self.rx
}
}
pub struct MsgReceiver<T> {
rx: mpsc::Receiver<T>,
}
impl<T> MsgReceiver<T> {
pub fn recv(&self) -> Result<T, Error> {
use std::error::Error;
self.rx.recv().map_err(|e| io::Error::new(ErrorKind::Other, e.description()))
}
}
#[derive(Clone)]
pub struct MsgSender<T> {
tx: mpsc::Sender<T>,
}
impl<T> MsgSender<T> {
pub fn send(&self, data: T) -> Result<(), Error> {
self.tx.send(data).map_err(|_| Error::new(ErrorKind::Other, "cannot send on closed channel"))
}
}
pub fn payload_channel() -> Result<(PayloadSender, PayloadReceiver), Error> {
let (tx, rx) = mpsc::channel();
Ok((PayloadSender { tx }, PayloadReceiver { rx }))
}
pub fn msg_channel<T>() -> Result<(MsgSender<T>, MsgReceiver<T>), Error> {
let (tx, rx) = mpsc::channel();
Ok((MsgSender { tx }, MsgReceiver { rx }))
}
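// A minimal usage sketch (not part of this module): create a typed channel and round-trip
// a value through it.
//
//     let (tx, rx) = msg_channel::<u32>().expect("channel creation");
//     tx.send(7).expect("send");
//     assert_eq!(rx.recv().expect("recv"), 7);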
///
/// These serialize methods are needed to satisfy the compiler
/// which uses these implementations for IPC, and also for the
/// recording tool. The recording tool only outputs messages
/// that don't contain Senders or Receivers, so in theory
/// these should never be called in the in-process config.
/// If they are called, there may be a bug in the messages
/// that the replay tool is writing.
///
impl<T> Serialize for MsgReceiver<T> {
fn serialize<S: Serializer>(&self, _: S) -> Result<S::Ok, S::Error> {
unreachable!();
}
}
impl<T> Serialize for MsgSender<T> {
fn serialize<S: Serializer>(&self, _: S) -> Result<S::Ok, S::Error> {<|fim▁hole|>impl<'de, T> Deserialize<'de> for MsgReceiver<T> {
fn deserialize<D>(_: D) -> Result<MsgReceiver<T>, D::Error>
where D: Deserializer<'de> {
unreachable!();
}
}
impl<'de, T> Deserialize<'de> for MsgSender<T> {
fn deserialize<D>(_: D) -> Result<MsgSender<T>, D::Error>
where D: Deserializer<'de> {
unreachable!();
}
}<|fim▁end|> | unreachable!();
}
}
|
<|file_name|>unnamed_argument_mode.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn good(_a: &int) {
}
// unnamed argument &int is now parse x: &int
fn called(_f: |&int|) {
}
pub fn main() {
called(good);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># https://djangosnippets.org/snippets/690/
import re
from django.template.defaultfilters import slugify
def unique_slugify(instance, value, slug_field_name='slug', queryset=None,
slug_separator='-'):
"""
Calculates and stores a unique slug of ``value`` for an instance.
``slug_field_name`` should be a string matching the name of the field to
store the slug in (and the field to check against for uniqueness).
``queryset`` usually doesn't need to be explicitly provided - it'll default
to using the ``.all()`` queryset from the model's default manager.
"""
slug_field = instance._meta.get_field(slug_field_name)
slug = getattr(instance, slug_field.attname)
slug_len = slug_field.max_length
# Sort out the initial slug, limiting its length if necessary.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create the queryset if one wasn't explicitly provided and exclude the
# current instance from the queryset.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
    # Find a unique slug. If one matches, add '-2' to the end and try again
# (then '-3', etc).
next = 2
while not slug or queryset.filter(**{slug_field_name: slug}):
slug = original_slug
end = '%s%s' % (slug_separator, next)
if slug_len and len(slug) + len(end) > slug_len:
slug = slug[:slug_len-len(end)]
slug = _slug_strip(slug, slug_separator)
slug = '%s%s' % (slug, end)
next += 1
setattr(instance, slug_field.attname, slug)
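# A hypothetical usage sketch (the model and its fields are assumptions, not
# part of this snippet): call unique_slugify from save() so each saved
# instance gets a distinct slug ('my-title', 'my-title-2', ...).
#
#     class Article(models.Model):
#         title = models.CharField(max_length=100)
#         slug = models.SlugField(max_length=100)
#
#         def save(self, *args, **kwargs):
#             unique_slugify(self, self.title)
#             super(Article, self).save(*args, **kwargs)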
def _slug_strip(value, separator='-'):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of
the default '-' separator with the new separator.
"""
separator = separator or ''<|fim▁hole|> else:
re_sep = '(?:-|%s)' % re.escape(separator)
# Remove multiple instances and if an alternate separator is provided,
# replace the default '-' separator.
if separator != re_sep:
value = re.sub('%s+' % re_sep, separator, value)
# Remove separator from the beginning and end of the slug.
if separator:
if separator != '-':
re_sep = re.escape(separator)
value = re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
return value<|fim▁end|> | if separator == '-' or not separator:
re_sep = '-' |
<|file_name|>PMMLDocumentMetadataProviderTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.editors.expressions.types.function.supplementary.pmml;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.soup.commons.util.Sets;
import org.kie.workbench.common.dmn.api.definition.model.Definitions;
import org.kie.workbench.common.dmn.api.definition.model.Import;
import org.kie.workbench.common.dmn.api.definition.model.ImportDMN;
import org.kie.workbench.common.dmn.api.definition.model.ImportPMML;
import org.kie.workbench.common.dmn.api.editors.included.DMNImportTypes;
import org.kie.workbench.common.dmn.api.editors.included.PMMLDocumentMetadata;
import org.kie.workbench.common.dmn.api.editors.included.PMMLIncludedModel;
import org.kie.workbench.common.dmn.api.editors.included.PMMLModelMetadata;
import org.kie.workbench.common.dmn.api.editors.included.PMMLParameterMetadata;
import org.kie.workbench.common.dmn.api.property.dmn.LocationURI;
import org.kie.workbench.common.dmn.client.editors.included.imports.IncludedModelsPageStateProviderImpl;
import org.kie.workbench.common.dmn.client.graph.DMNGraphUtils;
import org.kie.workbench.common.dmn.client.service.DMNClientServicesProxy;
import org.kie.workbench.common.stunner.core.client.service.ServiceCallback;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.diagram.Metadata;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.backend.vfs.Path;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyListOf;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class PMMLDocumentMetadataProviderTest {
@Mock
private DMNGraphUtils graphUtils;
@Mock
private DMNClientServicesProxy clientServicesProxy;
@Mock
private IncludedModelsPageStateProviderImpl stateProvider;
@Mock
private Path dmnModelPath;
@Captor
private ArgumentCaptor<List<PMMLIncludedModel>> pmmlIncludedModelsArgumentCaptor;
@Captor
private ArgumentCaptor<ServiceCallback<List<PMMLDocumentMetadata>>> callbackArgumentCaptor;
private Definitions definitions;
private PMMLDocumentMetadataProvider provider;
@Before
public void setup() {
this.definitions = new Definitions();
this.provider = new PMMLDocumentMetadataProvider(graphUtils,
clientServicesProxy,
stateProvider);
final Diagram diagram = mock(Diagram.class);
final Metadata metadata = mock(Metadata.class);
when(stateProvider.getDiagram()).thenReturn(Optional.of(diagram));
when(diagram.getMetadata()).thenReturn(metadata);
when(metadata.getPath()).thenReturn(dmnModelPath);
when(graphUtils.getDefinitions(diagram)).thenReturn(definitions);
}
@Test
@SuppressWarnings("unchecked")
public void testLoadPMMLIncludedDocumentsDMNModelPath() {
provider.loadPMMLIncludedDocuments();
verify(clientServicesProxy).loadPMMLDocumentsFromImports(eq(dmnModelPath),
anyListOf(PMMLIncludedModel.class),
any(ServiceCallback.class));
}
@Test
@SuppressWarnings("unchecked")
public void testLoadPMMLIncludedDocumentsPMMLIncludedModels() {
final Import dmn = new ImportDMN("dmn",
new LocationURI("dmn-location"),
DMNImportTypes.DMN.getDefaultNamespace());
final Import pmml = new ImportPMML("pmml",
new LocationURI("pmml-location"),
DMNImportTypes.PMML.getDefaultNamespace());
dmn.getName().setValue("dmn");
pmml.getName().setValue("pmml");
definitions.getImport().add(dmn);
definitions.getImport().add(pmml);
provider.loadPMMLIncludedDocuments();
verify(clientServicesProxy).loadPMMLDocumentsFromImports(any(Path.class),
pmmlIncludedModelsArgumentCaptor.capture(),
any(ServiceCallback.class));
final List<PMMLIncludedModel> actualIncludedModels = pmmlIncludedModelsArgumentCaptor.getValue();
assertThat(actualIncludedModels).hasSize(1);
final PMMLIncludedModel pmmlIncludedModel = actualIncludedModels.get(0);
assertThat(pmmlIncludedModel.getModelName()).isEqualTo("pmml");
assertThat(pmmlIncludedModel.getPath()).isEqualTo("pmml-location");
assertThat(pmmlIncludedModel.getImportType()).isEqualTo(DMNImportTypes.PMML.getDefaultNamespace());
}
@Test
public void testGetPMMLDocumentNames() {
final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
pmmlDocuments.add(new PMMLDocumentMetadata("path1",
"zDocument1",
DMNImportTypes.PMML.getDefaultNamespace(),
Collections.emptyList()));
pmmlDocuments.add(new PMMLDocumentMetadata("path2",
"aDocument2",
DMNImportTypes.PMML.getDefaultNamespace(),
Collections.emptyList()));
final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();<|fim▁hole|>
final List<String> documentNames = provider.getPMMLDocumentNames();
assertThat(documentNames).containsSequence("aDocument2", "zDocument1");
}
private ServiceCallback<List<PMMLDocumentMetadata>> loadPMMLIncludedDocuments() {
provider.loadPMMLIncludedDocuments();
verify(clientServicesProxy).loadPMMLDocumentsFromImports(any(Path.class),
anyListOf(PMMLIncludedModel.class),
callbackArgumentCaptor.capture());
return callbackArgumentCaptor.getValue();
}
@Test
public void testGetPMMLDocumentModelNames() {
final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
pmmlDocuments.add(new PMMLDocumentMetadata("path",
"document",
DMNImportTypes.PMML.getDefaultNamespace(),
asList(new PMMLModelMetadata("zModel1",
Collections.emptySet()),
new PMMLModelMetadata("aModel2",
Collections.emptySet()))));
final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();
callback.onSuccess(pmmlDocuments);
final List<String> modelNames = provider.getPMMLDocumentModels("document");
assertThat(modelNames).containsSequence("aModel2", "zModel1");
assertThat(provider.getPMMLDocumentModels("unknown")).isEmpty();
}
@Test
public void testGetPMMLDocumentModelParameterNames() {
final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
pmmlDocuments.add(new PMMLDocumentMetadata("path",
"document",
DMNImportTypes.PMML.getDefaultNamespace(),
singletonList(new PMMLModelMetadata("model",
new Sets.Builder<PMMLParameterMetadata>()
.add(new PMMLParameterMetadata("zParameter1"))
.add(new PMMLParameterMetadata("aParameter2"))
.build()))));
final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();
callback.onSuccess(pmmlDocuments);
final List<String> modelNames = provider.getPMMLDocumentModelParameterNames("document", "model");
assertThat(modelNames).containsSequence("aParameter2", "zParameter1");
assertThat(provider.getPMMLDocumentModelParameterNames("unknown", "unknown")).isEmpty();
}
}<|fim▁end|> |
callback.onSuccess(pmmlDocuments); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import click
def incomplete(package):
click.echo('{} packages not yet implemented'.format(package))
@click.group()
def run():
'''Build packages inside Docker containers.'''
pass
@click.command()
@click.option('--image', '-i', help='image to build in', required=True)
def rpm(image):
package = click.style('RPM', fg='red', bold=True)
incomplete(package)
@click.command()
@click.option('--image', '-i', help='image to build in', required=True)
def deb(image):
package = click.style('Debian', fg='magenta', bold=True)
incomplete(package)<|fim▁hole|>@click.command()
@click.option('--image', '-i', help='image to build in', required=True)
def arch(image):
package = click.style('Arch', fg='cyan', bold=True)
incomplete(package)
run.add_command(rpm)
run.add_command(deb)
run.add_command(arch)
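# Hypothetical invocation sketch (the installed console-script name is an
# assumption, not defined in this file):
#
#     $ builder rpm --image fedora:latest
#     RPM packages not yet implemented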
# vim: ft=python sw=4 ts=4 et<|fim▁end|> | |
<|file_name|>global-styles.js<|end_file_name|><|fim▁begin|>import { createGlobalStyle } from 'styled-components';
const GlobalStyle = createGlobalStyle`
.ant-breadcrumb {
display: flex;
align-items: center;
font-size: 10px;
color: #818181;
border-bottom: 1px solid #ccc;<|fim▁hole|><|fim▁end|> | }
`;
export default GlobalStyle; |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>use uuid::Uuid;
use std::str::FromStr;
use std::fmt;
use version;
#[derive(Clone,Debug)]
pub struct SessionConfig {
pub user_agent: String,
pub device_id: String,
}
impl Default for SessionConfig {
fn default() -> SessionConfig {
let device_id = Uuid::new_v4().hyphenated().to_string();
SessionConfig {
user_agent: version::version_string(),
device_id: device_id,
}
}
}
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum Bitrate {
Bitrate96,
Bitrate160,
Bitrate320,
}
impl FromStr for Bitrate {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"96" => Ok(Bitrate::Bitrate96),
"160" => Ok(Bitrate::Bitrate160),
"320" => Ok(Bitrate::Bitrate320),
_ => Err(()),
}
}
}
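// A minimal usage sketch (assumed, not from the original file): parse a
// bitrate string such as a CLI flag, falling back to the default (160kbps)
// on unrecognised input.
//
//     let bitrate = "320".parse::<Bitrate>().unwrap_or_default();
//     assert_eq!(bitrate, Bitrate::Bitrate320);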
impl Default for Bitrate {
fn default() -> Bitrate {
Bitrate::Bitrate160
}
}
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum DeviceType {
Unknown = 0,<|fim▁hole|> TV = 5,
AVR = 6,
STB = 7,
AudioDongle = 8,
}
impl FromStr for DeviceType {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
use self::DeviceType::*;
match s.to_lowercase().as_ref() {
"computer" => Ok(Computer),
"tablet" => Ok(Tablet),
"smartphone" => Ok(Smartphone),
"speaker" => Ok(Speaker),
"tv" => Ok(TV),
"avr" => Ok(AVR),
"stb" => Ok(STB),
"audiodongle" => Ok(AudioDongle),
_ => Err(()),
}
}
}
impl fmt::Display for DeviceType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::DeviceType::*;
match *self {
Unknown => f.write_str("Unknown"),
Computer => f.write_str("Computer"),
Tablet => f.write_str("Tablet"),
Smartphone => f.write_str("Smartphone"),
Speaker => f.write_str("Speaker"),
TV => f.write_str("TV"),
AVR => f.write_str("AVR"),
STB => f.write_str("STB"),
AudioDongle => f.write_str("AudioDongle"),
}
}
}
impl Default for DeviceType {
fn default() -> DeviceType {
DeviceType::Speaker
}
}
#[derive(Clone,Debug)]
pub struct PlayerConfig {
pub bitrate: Bitrate,
pub onstart: Option<String>,
pub onstop: Option<String>,
}
impl Default for PlayerConfig {
fn default() -> PlayerConfig {
PlayerConfig {
bitrate: Bitrate::default(),
onstart: None,
onstop: None,
}
}
}
#[derive(Clone,Debug)]
pub struct ConnectConfig {
pub name: String,
pub device_type: DeviceType,
}<|fim▁end|> | Computer = 1,
Tablet = 2,
Smartphone = 3,
Speaker = 4, |
<|file_name|>screen_error_message.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview Offline message screen implementation.
*/
cr.define('login', function() {
// Screens that should have offline message overlay.
const MANAGED_SCREENS = ['gaia-signin'];
// Network state constants.
const NET_STATE = {
OFFLINE: 0,
ONLINE: 1,
PORTAL: 2
};
// Error reasons which are passed to updateState_() method.
const ERROR_REASONS = {
PROXY_AUTH_CANCELLED: 'frame error:111',
PROXY_CONNECTION_FAILED: 'frame error:130',
PROXY_CONFIG_CHANGED: 'proxy changed',
LOADING_TIMEOUT: 'loading timeout',
PORTAL_DETECTED: 'portal detected'
};
// Frame loading errors.
const NET_ERROR = {
ABORTED_BY_USER: 3
};
// Link which starts guest session for captive portal fixing.
const FIX_CAPTIVE_PORTAL_ID = 'captive-portal-fix-link';
// Id of the element which holds current network name.
const CURRENT_NETWORK_NAME_ID = 'captive-portal-network-name';
// Link which triggers frame reload.
const RELOAD_PAGE_ID = 'proxy-error-retry-link';
/**
* Creates a new offline message screen div.
* @constructor
* @extends {HTMLDivElement}
*/
var ErrorMessageScreen = cr.ui.define('div');
/**
* Registers with Oobe.
*/
ErrorMessageScreen.register = function() {
var screen = $('error-message');
ErrorMessageScreen.decorate(screen);
// Note that ErrorMessageScreen is not registered with Oobe because
// it is shown on top of sign-in screen instead of as an independent screen.
};
ErrorMessageScreen.prototype = {
__proto__: HTMLDivElement.prototype,
/** @inheritDoc */
decorate: function() {
chrome.send('loginAddNetworkStateObserver',
['login.ErrorMessageScreen.updateState']);
cr.ui.DropDown.decorate($('offline-networks-list'));
this.updateLocalizedContent_();
},
/**
* Updates localized content of the screen that is not updated via template.
*/
updateLocalizedContent_: function() {
$('captive-portal-message-text').innerHTML = localStrings.getStringF(
'captivePortalMessage',
'<b id="' + CURRENT_NETWORK_NAME_ID + '"></b>',
'<a id="' + FIX_CAPTIVE_PORTAL_ID + '" class="signin-link" href="#">',
'</a>');
$(FIX_CAPTIVE_PORTAL_ID).onclick = function() {
chrome.send('showCaptivePortal');
};
$('proxy-message-text').innerHTML = localStrings.getStringF(
'proxyMessageText',
'<a id="' + RELOAD_PAGE_ID + '" class="signin-link" href="#">',
'</a>');
$(RELOAD_PAGE_ID).onclick = function() {
var currentScreen = Oobe.getInstance().currentScreen;
        // Schedules an immediate retry.
currentScreen.doReload();
};
$('error-guest-signin').innerHTML = localStrings.getStringF(
'guestSignin',
'<a id="error-guest-signin-link" class="signin-link" href="#">',
'</a>');
$('error-guest-signin-link').onclick = function() {
chrome.send('launchIncognito');
};
$('error-offline-login').innerHTML = localStrings.getStringF(
'offlineLogin',
'<a id="error-offline-login-link" class="signin-link" href="#">',
'</a>');
$('error-offline-login-link').onclick = function() {
chrome.send('offlineLogin', []);
};
},
onBeforeShow: function(lastNetworkType) {
var currentScreen = Oobe.getInstance().currentScreen;
cr.ui.DropDown.show('offline-networks-list', false, lastNetworkType);
$('error-guest-signin').hidden = $('guestSignin').hidden ||
!$('add-user-header-bar-item').hidden;
$('error-offline-login').hidden = !currentScreen.isOfflineAllowed;
},
onBeforeHide: function() {
cr.ui.DropDown.hide('offline-networks-list');
},
update: function() {
chrome.send('loginRequestNetworkState',
['login.ErrorMessageScreen.updateState',
'update']);
},
/**
* Shows or hides offline message based on network on/offline state.
* @param {Integer} state Current state of the network (see NET_STATE).
* @param {string} network Name of the current network.
* @param {string} reason Reason the callback was called.
* @param {int} lastNetworkType Last active network type.
*/
updateState_: function(state, network, reason, lastNetworkType) {
var currentScreen = Oobe.getInstance().currentScreen;
var offlineMessage = this;
var isOnline = (state == NET_STATE.ONLINE);
var isUnderCaptivePortal = (state == NET_STATE.PORTAL);
var isProxyError = reason == ERROR_REASONS.PROXY_AUTH_CANCELLED ||
reason == ERROR_REASONS.PROXY_CONNECTION_FAILED;
var shouldOverlay = MANAGED_SCREENS.indexOf(currentScreen.id) != -1 &&
!currentScreen.isLocal;
var isTimeout = false;
if (reason == ERROR_REASONS.PROXY_CONFIG_CHANGED && shouldOverlay &&
!offlineMessage.classList.contains('hidden') &&
offlineMessage.classList.contains('show-captive-portal')) {
        // Schedules an immediate retry.
currentScreen.doReload();
        console.log('Retry page load since proxy settings have been changed');
}
// Fake portal state for loading timeout.
if (reason == ERROR_REASONS.LOADING_TIMEOUT) {
isOnline = false;
isUnderCaptivePortal = true;
isTimeout = true;
}
// Portal was detected via generate_204 redirect on Chrome side.
      // A subsequent call to show the dialog does nothing if it is already shown.
if (reason == ERROR_REASONS.PORTAL_DETECTED) {
isOnline = false;
isUnderCaptivePortal = true;
}
if (!isOnline && shouldOverlay) {
console.log('Show offline message: state=' + state +
                    ', network=' + network + ', reason=' + reason +
', isUnderCaptivePortal=' + isUnderCaptivePortal);
offlineMessage.onBeforeShow(lastNetworkType);
if (isUnderCaptivePortal && !isProxyError) {
          // In case of a timeout we suspect that the network might be
          // a captive portal, but would like to check that first.
// Otherwise (signal from flimflam / generate_204 got redirected)
// show dialog right away.
if (isTimeout)
chrome.send('fixCaptivePortal');
else
chrome.send('showCaptivePortal');
} else {
chrome.send('hideCaptivePortal');
}
if (isUnderCaptivePortal) {
if (isProxyError) {
offlineMessage.classList.remove('show-offline-message');
offlineMessage.classList.remove('show-captive-portal');
offlineMessage.classList.add('show-proxy-error');
} else {
$(CURRENT_NETWORK_NAME_ID).textContent = network;
offlineMessage.classList.remove('show-offline-message');
offlineMessage.classList.remove('show-proxy-error');
offlineMessage.classList.add('show-captive-portal');
}
} else {
offlineMessage.classList.remove('show-captive-portal');
offlineMessage.classList.remove('show-proxy-error');
offlineMessage.classList.add('show-offline-message');
}
offlineMessage.classList.remove('hidden');
offlineMessage.classList.remove('faded');
if (!currentScreen.classList.contains('faded')) {
currentScreen.classList.add('faded');
currentScreen.addEventListener('webkitTransitionEnd',
function f(e) {
currentScreen.removeEventListener('webkitTransitionEnd', f);
if (currentScreen.classList.contains('faded'))
currentScreen.classList.add('hidden');
});
}
chrome.send('networkErrorShown');
} else {
chrome.send('hideCaptivePortal');
if (!offlineMessage.classList.contains('faded')) {
console.log('Hide offline message.');
offlineMessage.onBeforeHide();
offlineMessage.classList.add('faded');
offlineMessage.addEventListener('webkitTransitionEnd',<|fim▁hole|> });
currentScreen.classList.remove('hidden');
currentScreen.classList.remove('faded');
// Forces a reload for Gaia screen on hiding error message.
if (currentScreen.id == 'gaia-signin')
currentScreen.doReload();
}
}
},
// Request network state update with loading timeout as reason.
showLoadingTimeoutError: function() {
// Shows error message if it is not shown already.
if (this.classList.contains('hidden')) {
chrome.send('loginRequestNetworkState',
['login.ErrorMessageScreen.updateState',
ERROR_REASONS.LOADING_TIMEOUT]);
}
}
};
/**
* Network state changed callback.
* @param {Integer} state Current state of the network (see NET_STATE).
* @param {string} network Name of the current network.
* @param {string} reason Reason the callback was called.
* @param {int} lastNetworkType Last active network type.
*/
ErrorMessageScreen.updateState = function(
state, network, reason, lastNetworkType) {
$('error-message').updateState_(state, network, reason, lastNetworkType);
};
/**
* Handler for iframe's error notification coming from the outside.
* For more info see C++ class 'SnifferObserver' which calls this method.
* @param {number} error Error code.
*/
ErrorMessageScreen.onFrameError = function(error) {
console.log('Gaia frame error = ' + error);
if (error == NET_ERROR.ABORTED_BY_USER) {
// Gaia frame was reloaded. Nothing to do here.
return;
}
$('gaia-signin').onFrameError(error);
// Offline and simple captive portal cases are handled by the
// NetworkStateInformer, so only the case when browser is online is
// valuable.
if (window.navigator.onLine) {
// Check current network state if currentScreen is a managed one.
var currentScreen = Oobe.getInstance().currentScreen;
if (MANAGED_SCREENS.indexOf(currentScreen.id) != -1) {
chrome.send('loginRequestNetworkState',
['login.ErrorMessageScreen.maybeRetry',
'frame error:' + error]);
}
}
};
/**
   * Network state callback where we decide whether to schedule a retry.
*/
ErrorMessageScreen.maybeRetry =
function(state, network, reason, lastNetworkType) {
console.log('ErrorMessageScreen.maybeRetry, state=' + state +
', network=' + network);
// No retry if we are not online.
if (state != NET_STATE.ONLINE)
return;
var currentScreen = Oobe.getInstance().currentScreen;
if (MANAGED_SCREENS.indexOf(currentScreen.id) != -1) {
this.updateState(NET_STATE.PORTAL, network, reason, lastNetworkType);
// Schedules a retry.
currentScreen.scheduleRetry();
}
};
/**
* Updates screen localized content like links since they're not updated
* via template.
*/
ErrorMessageScreen.updateLocalizedContent = function() {
$('error-message').updateLocalizedContent_();
};
return {
ErrorMessageScreen: ErrorMessageScreen
};
});<|fim▁end|> | function f(e) {
offlineMessage.removeEventListener('webkitTransitionEnd', f);
if (offlineMessage.classList.contains('faded'))
offlineMessage.classList.add('hidden'); |
<|file_name|>devmapper_wrapper.go<|end_file_name|><|fim▁begin|>// +build linux,cgo
package devicemapper // import "github.com/docker/docker/pkg/devicemapper"
/*
#define _GNU_SOURCE
#include <libdevmapper.h>
#include <linux/fs.h> // FIXME: present only for BLKGETSIZE64, maybe we can remove it?
// FIXME: Can't we find a way to do the logging in pure Go?
extern void DevmapperLogCallback(int level, char *file, int line, int dm_errno_or_class, char *str);
static void log_cb(int level, const char *file, int line, int dm_errno_or_class, const char *f, ...)
{
char *buffer = NULL;
va_list ap;
int ret;
va_start(ap, f);
ret = vasprintf(&buffer, f, ap);
va_end(ap);
if (ret < 0) {
// memory allocation failed -- should never happen?
return;
}
DevmapperLogCallback(level, (char *)file, line, dm_errno_or_class, buffer);
free(buffer);
}
static void log_with_errno_init()
{
dm_log_with_errno_init(log_cb);
}
*/
import "C"
import (
"reflect"
"unsafe"
)
type (
cdmTask C.struct_dm_task
)
// IOCTL consts
const (
BlkGetSize64 = C.BLKGETSIZE64
BlkDiscard = C.BLKDISCARD
)
// Devicemapper cookie flags.
const (
DmUdevDisableSubsystemRulesFlag = C.DM_UDEV_DISABLE_SUBSYSTEM_RULES_FLAG
DmUdevDisableDiskRulesFlag = C.DM_UDEV_DISABLE_DISK_RULES_FLAG
DmUdevDisableOtherRulesFlag = C.DM_UDEV_DISABLE_OTHER_RULES_FLAG
DmUdevDisableLibraryFallback = C.DM_UDEV_DISABLE_LIBRARY_FALLBACK
)
// DeviceMapper mapped functions.
var (
DmGetLibraryVersion = dmGetLibraryVersionFct
DmGetNextTarget = dmGetNextTargetFct
DmSetDevDir = dmSetDevDirFct
DmTaskAddTarget = dmTaskAddTargetFct
DmTaskCreate = dmTaskCreateFct
DmTaskDestroy = dmTaskDestroyFct
DmTaskGetDeps = dmTaskGetDepsFct
DmTaskGetInfo = dmTaskGetInfoFct
DmTaskGetDriverVersion = dmTaskGetDriverVersionFct
DmTaskRun = dmTaskRunFct
DmTaskSetAddNode = dmTaskSetAddNodeFct
DmTaskSetCookie = dmTaskSetCookieFct<|fim▁hole|> DmTaskSetSector = dmTaskSetSectorFct
DmUdevWait = dmUdevWaitFct
DmUdevSetSyncSupport = dmUdevSetSyncSupportFct
DmUdevGetSyncSupport = dmUdevGetSyncSupportFct
DmCookieSupported = dmCookieSupportedFct
LogWithErrnoInit = logWithErrnoInitFct
DmTaskDeferredRemove = dmTaskDeferredRemoveFct
DmTaskGetInfoWithDeferred = dmTaskGetInfoWithDeferredFct
)
func free(p *C.char) {
C.free(unsafe.Pointer(p))
}
func dmTaskDestroyFct(task *cdmTask) {
C.dm_task_destroy((*C.struct_dm_task)(task))
}
func dmTaskCreateFct(taskType int) *cdmTask {
return (*cdmTask)(C.dm_task_create(C.int(taskType)))
}
func dmTaskRunFct(task *cdmTask) int {
ret, _ := C.dm_task_run((*C.struct_dm_task)(task))
return int(ret)
}
func dmTaskSetNameFct(task *cdmTask, name string) int {
Cname := C.CString(name)
defer free(Cname)
return int(C.dm_task_set_name((*C.struct_dm_task)(task), Cname))
}
func dmTaskSetMessageFct(task *cdmTask, message string) int {
Cmessage := C.CString(message)
defer free(Cmessage)
return int(C.dm_task_set_message((*C.struct_dm_task)(task), Cmessage))
}
func dmTaskSetSectorFct(task *cdmTask, sector uint64) int {
return int(C.dm_task_set_sector((*C.struct_dm_task)(task), C.uint64_t(sector)))
}
func dmTaskSetCookieFct(task *cdmTask, cookie *uint, flags uint16) int {
cCookie := C.uint32_t(*cookie)
defer func() {
*cookie = uint(cCookie)
}()
return int(C.dm_task_set_cookie((*C.struct_dm_task)(task), &cCookie, C.uint16_t(flags)))
}
func dmTaskSetAddNodeFct(task *cdmTask, addNode AddNodeType) int {
return int(C.dm_task_set_add_node((*C.struct_dm_task)(task), C.dm_add_node_t(addNode)))
}
func dmTaskAddTargetFct(task *cdmTask,
start, size uint64, ttype, params string) int {
Cttype := C.CString(ttype)
defer free(Cttype)
Cparams := C.CString(params)
defer free(Cparams)
return int(C.dm_task_add_target((*C.struct_dm_task)(task), C.uint64_t(start), C.uint64_t(size), Cttype, Cparams))
}
func dmTaskGetDepsFct(task *cdmTask) *Deps {
Cdeps := C.dm_task_get_deps((*C.struct_dm_task)(task))
if Cdeps == nil {
return nil
}
// golang issue: https://github.com/golang/go/issues/11925
hdr := reflect.SliceHeader{
Data: uintptr(unsafe.Pointer(uintptr(unsafe.Pointer(Cdeps)) + unsafe.Sizeof(*Cdeps))),
Len: int(Cdeps.count),
Cap: int(Cdeps.count),
}
devices := *(*[]C.uint64_t)(unsafe.Pointer(&hdr))
deps := &Deps{
Count: uint32(Cdeps.count),
Filler: uint32(Cdeps.filler),
}
for _, device := range devices {
deps.Device = append(deps.Device, uint64(device))
}
return deps
}
func dmTaskGetInfoFct(task *cdmTask, info *Info) int {
Cinfo := C.struct_dm_info{}
defer func() {
info.Exists = int(Cinfo.exists)
info.Suspended = int(Cinfo.suspended)
info.LiveTable = int(Cinfo.live_table)
info.InactiveTable = int(Cinfo.inactive_table)
info.OpenCount = int32(Cinfo.open_count)
info.EventNr = uint32(Cinfo.event_nr)
info.Major = uint32(Cinfo.major)
info.Minor = uint32(Cinfo.minor)
info.ReadOnly = int(Cinfo.read_only)
info.TargetCount = int32(Cinfo.target_count)
}()
return int(C.dm_task_get_info((*C.struct_dm_task)(task), &Cinfo))
}
func dmTaskGetDriverVersionFct(task *cdmTask) string {
buffer := C.malloc(128)
defer C.free(buffer)
res := C.dm_task_get_driver_version((*C.struct_dm_task)(task), (*C.char)(buffer), 128)
if res == 0 {
return ""
}
return C.GoString((*C.char)(buffer))
}
func dmGetNextTargetFct(task *cdmTask, next unsafe.Pointer, start, length *uint64, target, params *string) unsafe.Pointer {
var (
Cstart, Clength C.uint64_t
CtargetType, Cparams *C.char
)
defer func() {
*start = uint64(Cstart)
*length = uint64(Clength)
*target = C.GoString(CtargetType)
*params = C.GoString(Cparams)
}()
nextp := C.dm_get_next_target((*C.struct_dm_task)(task), next, &Cstart, &Clength, &CtargetType, &Cparams)
return nextp
}
func dmUdevSetSyncSupportFct(syncWithUdev int) {
C.dm_udev_set_sync_support(C.int(syncWithUdev))
}
func dmUdevGetSyncSupportFct() int {
return int(C.dm_udev_get_sync_support())
}
func dmUdevWaitFct(cookie uint) int {
return int(C.dm_udev_wait(C.uint32_t(cookie)))
}
func dmCookieSupportedFct() int {
return int(C.dm_cookie_supported())
}
func logWithErrnoInitFct() {
C.log_with_errno_init()
}
func dmSetDevDirFct(dir string) int {
Cdir := C.CString(dir)
defer free(Cdir)
return int(C.dm_set_dev_dir(Cdir))
}
func dmGetLibraryVersionFct(version *string) int {
buffer := C.CString(string(make([]byte, 128)))
defer free(buffer)
defer func() {
*version = C.GoString(buffer)
}()
return int(C.dm_get_library_version(buffer, 128))
}<|fim▁end|> | DmTaskSetMessage = dmTaskSetMessageFct
DmTaskSetName = dmTaskSetNameFct |
<|file_name|>show.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Http, Response } from '@angular/http';
import { Show, ShowResponse } from './show';
import { DiskService } from '../disk/disk.service';
import { SettingsService } from '../settings';
import { UserService } from '../userinfo/user.service';
import { Logger } from '../logger';
import { Observable } from 'rxjs/Observable';
import '../rxjs-operators';
@Injectable()
export class ShowService {
private showGetUrl = 'regularfilmshow/';
private showVoteUrl(id: string) {<|fim▁hole|> private http: Http,
private diskService: DiskService,
private settings: SettingsService,
private logger: Logger,
) {}
getShow(index: number, limit: number): Observable<ShowResponse> {
let params = "?limit="+limit+"&page="+index;
let url = this.settings.api_base() + this.showGetUrl + params;
return this.http.get(url)
.map(this.extractData)
.catch(this.logger.errorHandler);
}
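  // A hypothetical usage sketch (the consuming component and its fields are
  // assumptions, not part of this file):
  //
  //   this.showService.getShow(0, 10)
  //     .subscribe(body => this.shows = body.objects);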
private extractData = (res: Response) => {
let body = res.json();
let i: number;
if(body.objects){
for (i = 0; i < body.objects.length; i++) {
body.objects[i].film_1 = this.diskService.getDiskFullUrl(body.objects[i].film_1);
body.objects[i].film_2 = this.diskService.getDiskFullUrl(body.objects[i].film_2);
body.objects[i].film_3 = this.diskService.getDiskFullUrl(body.objects[i].film_3);
}
}
return body || {};
}
vote(showId: number, filmId: number): Observable<any> {
let url = this.showVoteUrl(String(showId));
let body = {film_id: filmId};
return this.http.post(url, body)
.map(this.extractVoteResponse)
.catch(this.logger.errorHandler);
}
private extractVoteResponse = (res: Response) => {
let body = res.json();
return body || {};
}
}<|fim▁end|> | return this.settings.api_base() + 'regularfilmshow/' + id + '/vote/';
}
constructor( |
<|file_name|>status.py<|end_file_name|><|fim▁begin|>"""
This example demonstrates how status works
"""
from juju import jasyncio
from juju import loop
import logging
import sys
from logging import getLogger
from juju.model import Model
from juju.status import formatted_status
LOG = getLogger(__name__)<|fim▁hole|>
async def main():
model = Model()
await model.connect_current()
application = await model.deploy(
'cs:ubuntu-10',
application_name='ubuntu',
series='trusty',
channel='stable',
)
await jasyncio.sleep(10)
    # Print the status repeatedly to observe how it
    # evolves over about a minute
for i in range(12):
try:
# By setting raw to True, the returned
# entry contains a FullStatus object with
# all the available status data.
# status = await model.status(raw=True)
status = await formatted_status(model)
print(status)
except Exception as e:
print(e)
await jasyncio.sleep(5)
await application.remove()
await model.disconnect()
if __name__ == '__main__':
loop.run(main())<|fim▁end|> | logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Numeric traits and functions for generic mathematics
//!
//! These are implemented for the primitive numeric types in `std::{u8, u16,
//! u32, u64, usize, i8, i16, i32, i64, isize, f32, f64}`.
#![stable(feature = "rust1", since = "1.0.0")]
#![allow(missing_docs)]
pub use core::num::{Zero, One};
pub use core::num::{FpCategory, ParseIntError, ParseFloatError};
pub use core::num::{wrapping, Wrapping};
#[cfg(test)] use cmp::PartialEq;
#[cfg(test)] use fmt;
#[cfg(test)] use marker::Copy;
#[cfg(test)] use ops::{Add, Sub, Mul, Div, Rem};
/// Helper function for testing numeric operations
#[cfg(test)]
pub fn test_num<T>(ten: T, two: T) where
T: PartialEq
+ Add<Output=T> + Sub<Output=T>
+ Mul<Output=T> + Div<Output=T>
+ Rem<Output=T> + fmt::Debug
+ Copy
{
assert_eq!(ten.add(two), ten + two);
assert_eq!(ten.sub(two), ten - two);
assert_eq!(ten.mul(two), ten * two);
assert_eq!(ten.div(two), ten / two);
assert_eq!(ten.rem(two), ten % two);
}
#[cfg(test)]
mod tests {
use super::*;
use i8;
use i16;
use i32;
use i64;
use isize;
use u8;
use u16;
use u32;
use u64;
use usize;
use string::ToString;
use ops::Mul;
#[test]
fn test_saturating_add_uint() {
use usize::MAX;
assert_eq!(3_usize.saturating_add(5_usize), 8_usize);
assert_eq!(3_usize.saturating_add(MAX-1), MAX);
assert_eq!(MAX.saturating_add(MAX), MAX);
assert_eq!((MAX-2).saturating_add(1), MAX-1);
}
#[test]
fn test_saturating_sub_uint() {
use usize::MAX;
assert_eq!(5_usize.saturating_sub(3_usize), 2_usize);
assert_eq!(3_usize.saturating_sub(5_usize), 0_usize);
assert_eq!(0_usize.saturating_sub(1_usize), 0_usize);
assert_eq!((MAX-1).saturating_sub(MAX), 0);
}
#[test]
fn test_saturating_add_int() {
use isize::{MIN,MAX};
assert_eq!(3i32.saturating_add(5), 8);
assert_eq!(3isize.saturating_add(MAX-1), MAX);
assert_eq!(MAX.saturating_add(MAX), MAX);
assert_eq!((MAX-2).saturating_add(1), MAX-1);
assert_eq!(3i32.saturating_add(-5), -2);
assert_eq!(MIN.saturating_add(-1), MIN);
assert_eq!((-2isize).saturating_add(-MAX), MIN);
}
#[test]
fn test_saturating_sub_int() {
use isize::{MIN,MAX};
assert_eq!(3i32.saturating_sub(5), -2);
assert_eq!(MIN.saturating_sub(1), MIN);
assert_eq!((-2isize).saturating_sub(MAX), MIN);
assert_eq!(3i32.saturating_sub(-5), 8);
assert_eq!(3isize.saturating_sub(-(MAX-1)), MAX);
assert_eq!(MAX.saturating_sub(-MAX), MAX);
assert_eq!((MAX-2).saturating_sub(-1), MAX-1);
}
#[test]
fn test_checked_add() {
let five_less = usize::MAX - 5;
assert_eq!(five_less.checked_add(0), Some(usize::MAX - 5));
assert_eq!(five_less.checked_add(1), Some(usize::MAX - 4));
assert_eq!(five_less.checked_add(2), Some(usize::MAX - 3));
assert_eq!(five_less.checked_add(3), Some(usize::MAX - 2));
assert_eq!(five_less.checked_add(4), Some(usize::MAX - 1));
assert_eq!(five_less.checked_add(5), Some(usize::MAX));
assert_eq!(five_less.checked_add(6), None);
assert_eq!(five_less.checked_add(7), None);
}
#[test]
fn test_checked_sub() {
assert_eq!(5_usize.checked_sub(0), Some(5));
assert_eq!(5_usize.checked_sub(1), Some(4));
assert_eq!(5_usize.checked_sub(2), Some(3));
assert_eq!(5_usize.checked_sub(3), Some(2));
assert_eq!(5_usize.checked_sub(4), Some(1));
assert_eq!(5_usize.checked_sub(5), Some(0));
assert_eq!(5_usize.checked_sub(6), None);
assert_eq!(5_usize.checked_sub(7), None);
}
#[test]
fn test_checked_mul() {
let third = usize::MAX / 3;
assert_eq!(third.checked_mul(0), Some(0));
assert_eq!(third.checked_mul(1), Some(third));
assert_eq!(third.checked_mul(2), Some(third * 2));
assert_eq!(third.checked_mul(3), Some(third * 3));
assert_eq!(third.checked_mul(4), None);
}
macro_rules! test_is_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).is_power_of_two(), false);
assert_eq!((1 as $T).is_power_of_two(), true);
assert_eq!((2 as $T).is_power_of_two(), true);
assert_eq!((3 as $T).is_power_of_two(), false);
assert_eq!((4 as $T).is_power_of_two(), true);
assert_eq!((5 as $T).is_power_of_two(), false);
assert_eq!(($T::MAX / 2 + 1).is_power_of_two(), true);
}
)
}
test_is_power_of_two!{ test_is_power_of_two_u8, u8 }
test_is_power_of_two!{ test_is_power_of_two_u16, u16 }
test_is_power_of_two!{ test_is_power_of_two_u32, u32 }
test_is_power_of_two!{ test_is_power_of_two_u64, u64 }
test_is_power_of_two!{ test_is_power_of_two_uint, usize }
macro_rules! test_next_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).next_power_of_two(), 1);
let mut next_power = 1;
for i in 1 as $T..40 {
assert_eq!(i.next_power_of_two(), next_power);
if i == next_power { next_power *= 2 }
}
}
)
}
test_next_power_of_two! { test_next_power_of_two_u8, u8 }
test_next_power_of_two! { test_next_power_of_two_u16, u16 }
test_next_power_of_two! { test_next_power_of_two_u32, u32 }
test_next_power_of_two! { test_next_power_of_two_u64, u64 }
test_next_power_of_two! { test_next_power_of_two_uint, usize }
macro_rules! test_checked_next_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).checked_next_power_of_two(), Some(1));
assert!(($T::MAX / 2).checked_next_power_of_two().is_some());
assert_eq!(($T::MAX - 1).checked_next_power_of_two(), None);
assert_eq!($T::MAX.checked_next_power_of_two(), None);
let mut next_power = 1;
for i in 1 as $T..40 {
assert_eq!(i.checked_next_power_of_two(), Some(next_power));
if i == next_power { next_power *= 2 }
}
}
)
}
test_checked_next_power_of_two! { test_checked_next_power_of_two_u8, u8 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u16, u16 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u32, u32 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u64, u64 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_uint, usize }
#[test]
fn test_pow() {
fn naive_pow<T: Mul<Output=T> + One + Copy>(base: T, exp: usize) -> T {
let one: T = T::one();
(0..exp).fold(one, |acc, _| acc * base)
}
macro_rules! assert_pow {
(($num:expr, $exp:expr) => $expected:expr) => {{
let result = $num.pow($exp);
assert_eq!(result, $expected);
assert_eq!(result, naive_pow($num, $exp));
}}
}
assert_pow!((3u32, 0 ) => 1);
assert_pow!((5u32, 1 ) => 5);
assert_pow!((-4i32, 2 ) => 16);
assert_pow!((8u32, 3 ) => 512);
assert_pow!((2u64, 50) => 1125899906842624);
}
#[test]
fn test_uint_to_str_overflow() {
let mut u8_val: u8 = 255;
assert_eq!(u8_val.to_string(), "255");
u8_val = u8_val.wrapping_add(1);
assert_eq!(u8_val.to_string(), "0");
let mut u16_val: u16 = 65_535;
assert_eq!(u16_val.to_string(), "65535");
u16_val = u16_val.wrapping_add(1);
assert_eq!(u16_val.to_string(), "0");
let mut u32_val: u32 = 4_294_967_295;
assert_eq!(u32_val.to_string(), "4294967295");
u32_val = u32_val.wrapping_add(1);
assert_eq!(u32_val.to_string(), "0");
let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(u64_val.to_string(), "18446744073709551615");
u64_val = u64_val.wrapping_add(1);
assert_eq!(u64_val.to_string(), "0");
}
fn from_str<T: ::str::FromStr>(t: &str) -> Option<T> {
::str::FromStr::from_str(t).ok()
}
#[test]
fn test_uint_from_str_overflow() {
let mut u8_val: u8 = 255;
assert_eq!(from_str::<u8>("255"), Some(u8_val));
assert_eq!(from_str::<u8>("256"), None);
<|fim▁hole|> assert_eq!(from_str::<u8>("-1"), None);
let mut u16_val: u16 = 65_535;
assert_eq!(from_str::<u16>("65535"), Some(u16_val));
assert_eq!(from_str::<u16>("65536"), None);
u16_val = u16_val.wrapping_add(1);
assert_eq!(from_str::<u16>("0"), Some(u16_val));
assert_eq!(from_str::<u16>("-1"), None);
let mut u32_val: u32 = 4_294_967_295;
assert_eq!(from_str::<u32>("4294967295"), Some(u32_val));
assert_eq!(from_str::<u32>("4294967296"), None);
u32_val = u32_val.wrapping_add(1);
assert_eq!(from_str::<u32>("0"), Some(u32_val));
assert_eq!(from_str::<u32>("-1"), None);
let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(from_str::<u64>("18446744073709551615"), Some(u64_val));
assert_eq!(from_str::<u64>("18446744073709551616"), None);
u64_val = u64_val.wrapping_add(1);
assert_eq!(from_str::<u64>("0"), Some(u64_val));
assert_eq!(from_str::<u64>("-1"), None);
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use prelude::v1::*;
#[bench]
fn bench_pow_function(b: &mut Bencher) {
let v = (0..1024).collect::<Vec<u32>>();
b.iter(|| {v.iter().fold(0u32, |old, new| old.pow(*new as u32));});
}
}<|fim▁end|> | u8_val = u8_val.wrapping_add(1);
assert_eq!(from_str::<u8>("0"), Some(u8_val)); |
<|file_name|>CharTypes.cpp<|end_file_name|><|fim▁begin|>#include "CharTypes.h"
namespace GeneHunter {
<|fim▁hole|>bool firstMatchSecond( char c1, char c2 )
{
if ( c1 == c2 ) return true;
if ( c1 == 'U' and c2 == 'T' ) return true;
if ( c1 == 'T' and c2 == 'U' ) return true;
if ( c1 == 'K' and ( c2 == 'G' or c2 == 'T' )) return true;
if ( c1 == 'S' and ( c2 == 'G' or c2 == 'C' )) return true;
if ( c1 == 'R' and ( c2 == 'G' or c2 == 'A' )) return true;
if ( c1 == 'M' and ( c2 == 'A' or c2 == 'C' )) return true;
if ( c1 == 'W' and ( c2 == 'A' or c2 == 'T' )) return true;
if ( c1 == 'Y' and ( c2 == 'T' or c2 == 'C' )) return true;
return false;
}
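// A minimal usage sketch (assumed, not part of the original file): c1 may be
// an ambiguous IUPAC nucleotide code, so 'K' (G or T) matches G but not A.
//
//     assert(GeneHunter::firstMatchSecond('K', 'G'));
//     assert(!GeneHunter::firstMatchSecond('K', 'A'));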
} // namespace GeneHunter<|fim▁end|> | |
<|file_name|>GenEdLookup.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
import pickle
<|fim▁hole|> picklepath = "data\\dietrich_gen_eds.p"
try:
with open(picklepath,'rb') as file:
gen_eds = pickle.load(file)
except:
df = pd.read_csv(fileName,names=['Dept','Num','Title','1','2'])
gen_eds = set(df['Dept'].values)
with open(picklepath,'wb') as file:
pickle.dump(gen_eds,file)
return cNum in gen_eds
'''
genEdubility = lookupGenEd(73100, "dietrich")
print("73100")
print('Is Gen Ed?:', genEdubility)
print()
genEdubility = lookupGenEd(70100, "tepper")
print("70100")
print('Is Gen Ed?:', genEdubility)
print()
genEdubility = lookupGenEd(15322, "scs")
print("15322")
print('Is Gen Ed?:', genEdubility)
print()
'''<|fim▁end|> | # Return 0 or 1 based on whether Course fulfills a General Education Requirement
def lookupGenEd(cNum, college):
fileName = "data/Dietrich Gen Eds.csv"
|
<|file_name|>mozvisibility.js<|end_file_name|><|fim▁begin|>;(function() {
MozVisibility = {
_MAX_TRIES: 10,
_date: new Date,
_tries: 0,
_timer: null,
_isVisible: undefined,
_proxy: function(fn, context) {
context = context || window;
return function() {
fn.apply(context, arguments);
};
},
_getEvent: function() {
if (!this._event) {
this._event = document.createEvent('HTMLEvents');
this._event.initEvent('mozvisibilitychange', true, true);
this._event.eventName = 'mozvisibilitychange';
}
return this._event;
},
_setVisibilityState: function(state) {
this._isVisible = (state === 'visible');
document.mozVisibilityState = state;
document.mozHidden = !this._isVisible;
document.dispatchEvent(this._getEvent());
},
_visibilityCheck: function() {
this._date = new Date;
this._tries = 0;<|fim▁hole|> },
_invisibilityCheckTimeoutTemplate: function() {
var newdate = new Date;
var delta = newdate - this._date;
this._date = newdate;
this._tries++;
if (delta > 1000) {
this._setVisibilityState('hidden');
} else if (this._tries < this._MAX_TRIES) {
this._timer = setTimeout(this._invisibilityCheckTimeout, 0);
}
},
_onFocus: function() {
clearTimeout(this._timer);
if (!this._isVisible) {
this._setVisibilityState('visible');
}
},
_onBlur: function() {
if (!this._isVisible) {
return;
}
this._visibilityCheck();
},
canBeEmulated: function() {
var rmozilla = /(mozilla)(?:.*? rv:([\w.]+))?/,
ua = navigator.userAgent.toLowerCase();
var match = ua.indexOf('compatible') < 0 && rmozilla.exec(ua) || [];
return (window.top === window && // not in IFRAME
match[2] && parseInt(match[2]) >= 5 && // Firefox 5.0+
!document.visibilityState && !document.MozVisibilityState); // visibility API is not already supported
},
emulate: function() {
if (!this.canBeEmulated()) {
return false;
}
this._invisibilityCheckTimeout = this._proxy(this._invisibilityCheckTimeoutTemplate, this);
window.addEventListener("focus", this._proxy(this._onFocus, this), false);
window.addEventListener("blur", this._proxy(this._onBlur, this), false);
this._visibilityCheck();
return true;
}
};
MozVisibility.emulate();
})();<|fim▁end|> | this._timer = setTimeout(this._invisibilityCheckTimeout, 0); |
<|file_name|>ychenik_search.js<|end_file_name|><|fim▁begin|>(function ($) {
Drupal.ychenikSearch = {};
/**
* jQuery plugin.
*/
$.fn.ychenikSearch = function (stars) {
stars = stars || 0;
this.each(function () {
$(this).addClass('ychenik-search').find('label').each(function () {
var context = this.form;
var $label = $(this);
if (!$label.attr('for')) {
return;
}
var $field = $('#' + $label.attr('for'), context);
if (!$field.length || !$field.is('input:text,input:password,textarea')) {
return;
}
// Store the initial field value, in case the browser is going to
// automatically fill it in upon focus.
var initial_value = $field.val();
if (initial_value != '') {
// Firefox doesn't like .hide() here for some reason.
$label.css('display', 'none');
}
$label.parent().addClass('ychenik-search-wrapper');
$label.addClass('ychenik-search-label');
$field.addClass('ychenik-search-field');
if (stars === 0) {
$label.find('.form-required').hide();
}
else if (stars === 2) {
$label.find('.form-required').insertAfter($field).prepend(' ');
}
$field.focus(function () {
// Some browsers (e.g., Firefox) are automatically inserting a stored
// username and password into login forms. In case the password field is
// manually emptied afterwards, and the user jumps back to the username
// field (without changing it), and forth to the password field, then
// the browser automatically re-inserts the password again. Therefore,
// we also need to test against the initial field value.<|fim▁hole|> });
$field.blur(function () {
if ($field.val() === '') {
$label.fadeIn('slow');
}
});
// Chrome adds passwords after page load, so we need to track changes.
$field.change(function () {
if ($field.get(0) != document.activeElement) {
if ($field.val() === '') {
$label.fadeIn('fast');
}
else {
$label.css('display', 'none');
}
}
});
});
});
};
/**
* Attach compact forms behavior to all enabled forms upon page load.
*/
Drupal.behaviors.ychenikSearch = {
attach: function (context, settings) {
if (!settings || !settings.ychenikSearch) {
return;
}
$('#' + settings.ychenikSearch.forms.join(',#'), context).ychenikSearch(settings.ychenikSearch.stars);
// Safari adds passwords without triggering any event after page load.
// We therefore need to wait a bit and then check for field values.
if ($.browser.safari) {
setTimeout(Drupal.ychenikSearch.fixSafari, 200);
}
}
};
/**
* Checks for field values and hides the corresponding label if non-empty.
*
* @todo Convert $.fn.compactForm to always use a function like this.
*/
Drupal.ychenikSearch.fixSafari = function () {
$('label.ychenik-search-label').each(function () {
var $label = $(this);
var context = this.form;
if ($('#' + $label.attr('for'), context).val() != '') {
$label.css('display', 'none');
}
});
}
})(jQuery);<|fim▁end|> | if ($field.val() === initial_value || $field.val() === '') {
$label.fadeOut('fast');
} |
<|file_name|>pre_gen_project.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
    assert project_slug.isidentifier(), 'Project slug should be a valid Python identifier!'
<|file_name|>SbcTest.java<|end_file_name|><|fim▁begin|>package com.nes.processor;
import com.nes.NesAbstractTst;
import org.junit.Test;
/**
*
* @author Dmitry
*/
public class SbcTest extends NesAbstractTst {
@Test
public void testSbc() {
String[] lines;
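        // On the 6502, SBC computes A - M - (1 - C), so subtracting with the
        // carry clear (clc) removes one extra: 0x50 - 0x05 - 1 = 0x4a, while
        // with the carry set (sec) it is 0x50 - 0x05 = 0x4b.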
lines = new String[]{
"clc",
"lda #$50",
"sbc #$5"
};
testAlu(lines, 0x4a, 0x00, 0, 0xfd, 0x606, true, false, false, false);
lines = new String[]{
"sec",
"lda #$50",
"sbc #$5"
};
testAlu(lines, 0x4b, 0x00, 0, 0xfd, 0x606, true, false, false, false);
lines = new String[]{
"sec",
"lda #$5",
"sbc #$55"
};
testAlu(lines, 0xb0, 0x00, 0, 0xfd, 0x606, false, false, true, false);
lines = new String[]{
"clc",
"lda #$80",
"sbc #$20"
};
testAlu(lines, 0x5f, 0x00, 0, 0xfd, 0x606, true, false, false, true);
lines = new String[]{
"clc",
"lda #$20",
"sbc #$80"
<|fim▁hole|>
testAlu(lines, 0x9f, 0x00, 0, 0xfd, 0x606, false, false, true, true);
lines = new String[]{
"sec",
"lda #$20",
"sbc #$80"
};
testAlu(lines, 0xa0, 0x00, 0, 0xfd, 0x606, false, false, true, true);
}
}<|fim▁end|> | };
|
<|file_name|>extern-crosscrate.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast
//aux-build:extern-crosscrate-source.rs
extern mod externcallback(vers = "0.1");
<|fim▁hole|> externcallback::rustrt::rust_dbg_call(externcallback::cb, n)
}
}
pub fn main() {
let result = fact(10u);
debug!("result = %?", result);
assert!(result == 3628800u);
}<|fim▁end|> | fn fact(n: uint) -> uint {
unsafe {
debug!("n = %?", n); |
<|file_name|>filter-4.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from gwpy.plotter import FrequencySeriesPlot
plot = FrequencySeriesPlot(whiteasd, dispasd, sep=True, sharex=True, label=None) |
<|file_name|>jquery.bgiframe.unminified.js<|end_file_name|><|fim▁begin|>/*! Copyright (c) 2013 Brandon Aaron (http://brandonaaron.net)
* Licensed under the MIT License (LICENSE.txt).
*
* Version 3.0.0
*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['jquery'], factory);
} else {
// Browser globals
factory(jQuery);
}
}(function ($) {
$.fn.bgiframe = function(s) {
s = $.extend({
top : 'auto', // auto == borderTopWidth
left : 'auto', // auto == borderLeftWidth
width : 'auto', // auto == offsetWidth
height : 'auto', // auto == offsetHeight
opacity : true,
src : 'javascript:false;',
        conditional : /MSIE 6.0/.test(navigator.userAgent) // expression or function. return false to prevent iframe insertion
}, s);
// wrap conditional in a function if it isn't already<|fim▁hole|>
var $iframe = $('<iframe class="bgiframe"frameborder="0"tabindex="-1"src="'+s.src+'"'+
'style="display:block;position:absolute;z-index:-1;"/>');
return this.each(function() {
var $this = $(this);
if ( s.conditional(this) === false ) { return; }
var existing = $this.children('iframe.bgiframe');
var $el = existing.length === 0 ? $iframe.clone() : existing;
$el.css({
'top': s.top == 'auto' ?
((parseInt($this.css('borderTopWidth'),10)||0)*-1)+'px' : prop(s.top),
'left': s.left == 'auto' ?
((parseInt($this.css('borderLeftWidth'),10)||0)*-1)+'px' : prop(s.left),
'width': s.width == 'auto' ? (this.offsetWidth + 'px') : prop(s.width),
'height': s.height == 'auto' ? (this.offsetHeight + 'px') : prop(s.height),
'opacity': s.opacity === true ? 0 : undefined
});
if ( existing.length === 0 ) {
$this.prepend($el);
}
});
};
// old alias
$.fn.bgIframe = $.fn.bgiframe;
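    // A hypothetical usage sketch (the selector is an assumption): shim an
    // absolutely-positioned dropdown so it renders above <select> elements
    // in IE6.
    //
    //     $('#dropdown').bgiframe();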
function prop(n) {
return n && n.constructor === Number ? n + 'px' : n;
}
}));<|fim▁end|> | if (!$.isFunction(s.conditional)) {
var condition = s.conditional;
s.conditional = function() { return condition; };
} |
<|file_name|>Request.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _url = require('url');
var _url2 = _interopRequireDefault(_url);
var _qs = require('qs');
var _qs2 = _interopRequireDefault(_qs);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
* Request is generated each time the user navigates.
* @param {string} path
* @param {object} query
* @param {object} params
*
* @property {string} path - The request path.
* @property {object} query - If the hash contains a query part, it is treated as a query string.
* @property {object} params - An object containing properties mapped to the named route “parameters”.
*/
var Request = function () {
function Request(path, query, params) {
_classCallCheck(this, Request);
this.path = path;
this.query = query;
this.params = params;
}
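  // A hypothetical usage sketch (route pattern and values are assumptions,
  // not from this file): a visit to #/users/42?tab=posts on a '/users/:id'
  // route could be modelled as
  //
  //     new Request('/users/42', { tab: 'posts' }, { id: '42' });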
/**
* create a new Request object
* @param {string} path<|fim▁hole|> */
_createClass(Request, null, [{
key: 'create',
value: function create(path, query, keys, results) {
var params = Object.create(null);
keys.forEach(function (key, index) {
return params[key.name] = results[index + 1];
});
return new Request(path, _qs2.default.parse(query), params);
}
}]);
return Request;
}();
exports.default = Request;
//# sourceMappingURL=Request.js.map<|fim▁end|> | * @param {string} query
* @param {array} keys
* @param {array} results |
<|file_name|>test_topo_map.py<|end_file_name|><|fim▁begin|>import json
from c2corg_api.models.route import Route
from c2corg_api.models.topo_map import ArchiveTopoMap, TopoMap, MAP_TYPE
from c2corg_api.models.topo_map_association import TopoMapAssociation
from c2corg_api.models.waypoint import Waypoint
from c2corg_api.tests.search import reset_search_index
from c2corg_api.models.common.attributes import quality_types
from shapely.geometry import shape, Polygon
from c2corg_api.models.document import (
DocumentGeometry, ArchiveDocumentLocale, DocumentLocale)
from c2corg_api.views.document import DocumentRest
from c2corg_api.tests.views import BaseDocumentTestRest
class TestTopoMapRest(BaseDocumentTestRest):
def setUp(self): # noqa
self.set_prefix_and_model(
"/maps", MAP_TYPE, TopoMap, ArchiveTopoMap, ArchiveDocumentLocale)
BaseDocumentTestRest.setUp(self)
self._add_test_data()
def test_get_collection(self):
body = self.get_collection()
doc = body['documents'][0]
self.assertNotIn('geometry', doc)
def test_get_collection_paginated(self):
self.app.get("/maps?offset=invalid", status=400)
self.assertResultsEqual(
self.get_collection({'offset': 0, 'limit': 0}), [], 4)
self.assertResultsEqual(
self.get_collection({'offset': 0, 'limit': 1}),
[self.map4.document_id], 4)
self.assertResultsEqual(
self.get_collection({'offset': 0, 'limit': 2}),
[self.map4.document_id, self.map3.document_id], 4)
self.assertResultsEqual(
self.get_collection({'offset': 1, 'limit': 2}),
[self.map3.document_id, self.map2.document_id], 4)
def test_get_collection_lang(self):
self.get_collection_lang()
def test_get_collection_search(self):
reset_search_index(self.session)
self.assertResultsEqual(
self.get_collection_search({'l': 'en'}),
[self.map4.document_id, self.map1.document_id], 2)
def test_get(self):
body = self.get(self.map1)
self._assert_geometry(body)
self.assertNotIn('maps', body)
def test_get_cooked(self):
self.get_cooked(self.map1)
def test_get_cooked_with_defaulting(self):
self.get_cooked_with_defaulting(self.map1)
def test_get_lang(self):
self.get_lang(self.map1)
def test_get_new_lang(self):
self.get_new_lang(self.map1)
def test_get_404(self):
self.get_404()
def test_get_caching(self):<|fim▁hole|> self.get_caching(self.map1)
def test_get_info(self):
body, locale = self.get_info(self.map1, 'en')
self.assertEqual(locale.get('lang'), 'en')
def test_post_not_moderator(self):
headers = self.add_authorization_header(username='contributor')
self.app_post_json(
self._prefix, {}, headers=headers,
expect_errors=True, status=403)
def test_post_error(self):
body = self.post_error({}, user='moderator')
errors = body.get('errors')
self.assertEqual(len(errors), 2)
self.assertCorniceRequired(errors[0], 'locales')
self.assertCorniceRequired(errors[1], 'geometry')
def test_post_missing_title(self):
body_post = {
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'geometry': {
'id': 5678, 'version': 6789,
'geom_detail': '{"type":"Polygon","coordinates":[[[668519.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668519.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en'}
]
}
self.post_missing_title(body_post, user='moderator')
def test_post_non_whitelisted_attribute(self):
body = {
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'protected': True,
'geometry': {
'id': 5678, 'version': 6789,
'geom_detail': '{"type":"Polygon","coordinates":[[[668519.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668519.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy'}
]
}
self.post_non_whitelisted_attribute(body, user='moderator')
def test_post_missing_content_type(self):
self.post_missing_content_type({})
def test_post_success(self):
body = {
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'geometry': {
'id': 5678, 'version': 6789,
'geom_detail': '{"type":"Polygon","coordinates":[[[668518.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668518.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy'}
]
}
body, doc = self.post_success(body, user='moderator')
self.assertIsNotNone(body['geometry'].get('geom_detail'))
version = doc.versions[0]
archive_map = version.document_archive
self.assertEqual(archive_map.editor, 'IGN')
self.assertEqual(archive_map.scale, '25000')
self.assertEqual(archive_map.code, '3432OT')
archive_locale = version.document_locales_archive
self.assertEqual(archive_locale.lang, 'en')
self.assertEqual(archive_locale.title, 'Lac d\'Annecy')
archive_geometry = version.document_geometry_archive
self.assertEqual(archive_geometry.version, doc.geometry.version)
        self.assertIsNotNone(archive_geometry.geom_detail)
# check that a link for intersecting documents is created
links = self.session.query(TopoMapAssociation). \
filter(
TopoMapAssociation.topo_map_id == doc.document_id). \
order_by(TopoMapAssociation.document_id). \
all()
self.assertEqual(len(links), 2)
self.assertEqual(links[0].document_id, self.waypoint1.document_id)
self.check_cache_version(self.waypoint1.document_id, 2)
self.assertEqual(links[1].document_id, self.route.document_id)
self.check_cache_version(self.route.document_id, 2)
def test_put_wrong_document_id(self):
body = {
'document': {
'document_id': '9999999',
'version': self.map1.version,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
self.put_wrong_document_id(body, user='moderator')
def test_put_wrong_document_version(self):
body = {
'document': {
'document_id': self.map1.document_id,
'version': -9999,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
self.put_wrong_version(body, self.map1.document_id, user='moderator')
def test_put_wrong_locale_version(self):
body = {
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': -9999}
]
}
}
self.put_wrong_version(body, self.map1.document_id, user='moderator')
def test_put_wrong_ids(self):
body = {
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'editor': 'IGN',
'scale': '25000',
'code': '3432OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
self.put_wrong_ids(body, self.map1.document_id, user='moderator')
def test_put_no_document(self):
self.put_put_no_document(self.map1.document_id, user='moderator')
def test_put_success_all(self):
body = {
'message': 'Update',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3433OT',
'geometry': {
'version': self.map1.geometry.version,
'geom_detail': '{"type":"Polygon","coordinates":[[[668519.249382151,5728802.39591739],[668518.249382151,5745465.66808356],[689156.247019149,5745465.66808356],[689156.247019149,5728802.39591739],[668519.249382151,5728802.39591739]]]}' # noqa
},
'locales': [
{'lang': 'en', 'title': 'New title',
'version': self.locale_en.version}
]
}
}
(body, map1) = self.put_success_all(body, self.map1, user='moderator')
self.assertEqual(map1.code, '3433OT')
locale_en = map1.get_locale('en')
self.assertEqual(locale_en.title, 'New title')
# version with lang 'en'
versions = map1.versions
version_en = self.get_latest_version('en', versions)
archive_locale = version_en.document_locales_archive
self.assertEqual(archive_locale.title, 'New title')
archive_document_en = version_en.document_archive
self.assertEqual(archive_document_en.scale, '25000')
self.assertEqual(archive_document_en.code, '3433OT')
archive_geometry_en = version_en.document_geometry_archive
self.assertEqual(archive_geometry_en.version, 2)
# version with lang 'fr'
version_fr = self.get_latest_version('fr', versions)
archive_locale = version_fr.document_locales_archive
self.assertEqual(archive_locale.title, 'Lac d\'Annecy')
# check that the links to intersecting documents are updated
links = self.session.query(TopoMapAssociation). \
filter(
TopoMapAssociation.topo_map_id == self.map1.document_id). \
all()
self.assertEqual(len(links), 2)
self.assertEqual(links[0].document_id, self.waypoint1.document_id)
self.check_cache_version(self.waypoint1.document_id, 2)
self.assertEqual(links[1].document_id, self.route.document_id)
self.check_cache_version(self.route.document_id, 2)
# waypoint 2 is no longer associated, the cache key was incremented
self.check_cache_version(self.waypoint2.document_id, 2)
def test_put_success_figures_only(self):
body = {
'message': 'Changing figures',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3433OT',
'locales': [
{'lang': 'en', 'title': 'Lac d\'Annecy',
'version': self.locale_en.version}
]
}
}
(body, map1) = self.put_success_figures_only(
body, self.map1, user='moderator')
self.assertEqual(map1.code, '3433OT')
def test_put_success_lang_only(self):
body = {
'message': 'Changing lang',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3431OT',
'locales': [
{'lang': 'en', 'title': 'New title',
'version': self.locale_en.version}
]
}
}
(body, map1) = self.put_success_lang_only(
body, self.map1, user='moderator')
self.assertEqual(
map1.get_locale('en').title, 'New title')
def test_put_success_new_lang(self):
"""Test updating a document by adding a new locale.
"""
body = {
'message': 'Adding lang',
'document': {
'document_id': self.map1.document_id,
'version': self.map1.version,
'quality': quality_types[1],
'editor': 'IGN',
'scale': '25000',
'code': '3431OT',
'locales': [
{'lang': 'es', 'title': 'Lac d\'Annecy'}
]
}
}
(body, map1) = self.put_success_new_lang(
body, self.map1, user='moderator')
self.assertEqual(map1.get_locale('es').title, 'Lac d\'Annecy')
def _assert_geometry(self, body):
self.assertIsNotNone(body.get('geometry'))
geometry = body.get('geometry')
self.assertIsNotNone(geometry.get('version'))
self.assertIsNotNone(geometry.get('geom_detail'))
geom = geometry.get('geom_detail')
polygon = shape(json.loads(geom))
self.assertIsInstance(polygon, Polygon)
def _add_test_data(self):
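        # map1 gets locales, a geometry and an initial version; map2/map3 stay bare; map4 only gets locales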
self.map1 = TopoMap(editor='IGN', scale='25000', code='3431OT')
self.locale_en = DocumentLocale(lang='en', title='Lac d\'Annecy')
self.locale_fr = DocumentLocale(lang='fr', title='Lac d\'Annecy')
self.map1.locales.append(self.locale_en)
self.map1.locales.append(self.locale_fr)
self.map1.geometry = DocumentGeometry(
geom_detail='SRID=3857;POLYGON((611774 5706934,611774 5744215,'
'642834 5744215,642834 5706934,611774 5706934))')
self.session.add(self.map1)
self.session.flush()
user_id = self.global_userids['contributor']
DocumentRest.create_new_version(self.map1, user_id)
self.map2 = TopoMap(
editor='IGN', scale='25000', code='3432OT')
self.session.add(self.map2)
self.map3 = TopoMap(
editor='IGN', scale='25000', code='3433OT')
self.session.add(self.map3)
self.map4 = TopoMap(
editor='IGN', scale='25000', code='3434OT')
self.map4.locales.append(DocumentLocale(
lang='en', title='Lac d\'Annecy'))
self.map4.locales.append(DocumentLocale(
lang='fr', title='Lac d\'Annecy'))
self.session.add(self.map4)
self.session.flush()
self.waypoint1 = Waypoint(
waypoint_type='summit',
geometry=DocumentGeometry(
geom='SRID=3857;POINT(677461.381691516 5740879.44638645)')
)
self.waypoint2 = Waypoint(
waypoint_type='summit',
geometry=DocumentGeometry(
geom='SRID=3857;POINT(693666.031687976 5741108.7574713)')
)
route_geom = 'SRID=3857;LINESTRING(668518 5728802, 668528 5728812)'
self.route = Route(
activities=['skitouring'],
geometry=DocumentGeometry(geom_detail=route_geom))
self.session.add_all([self.waypoint1, self.waypoint2, self.route])
self.session.add(TopoMapAssociation(
document=self.waypoint2, topo_map=self.map1))
self.session.flush()<|fim▁end|> | |
<|file_name|>steamapi.js<|end_file_name|><|fim▁begin|>"use strict";
var request = require(__dirname + "/request");
var db = require(__dirname + "/db");
/**
* Steam utils
*/
var steamapi = {};
/**
* Request to our api
* @param {string} type
* @param {string[]} ids
* @param {function} callback
*/
steamapi.request = function (type, ids, callback) {
if (!ids.length) {
callback({});
return;
}
var res = {};
var missingIds = [];
for (var i = 0; i < ids.length; i++) {
var id = ids[i];
var steamData = steamapi.getDataForId(type, id);
if (steamData) {
res[id] = steamData;
} else {
missingIds.push(id);
}
}
if (missingIds.length) {
request.get("https://scripts.0x.at/steamapi/api.php?action=" + type + "&ids=" + missingIds.join(","), false, function (result) {
if (result !== null) {
var steamData = null;
var data = JSON.parse(result);
if (type == "bans") {
for (var i = 0; i < data.players.length; i++) {
steamData = data.players[i];
steamapi.saveDataForId(type, steamData.SteamId, steamData);
res[steamData.SteamId] = steamData;
}
}
if (type == "summaries") {
                    if (data.response) {
for (var playerIndex in data.response.players) {
if (data.response.players.hasOwnProperty(playerIndex)) {
steamData = data.response.players[playerIndex];
steamapi.saveDataForId(type, steamData.steamid, steamData);
res[steamData.steamid] = steamData;
}
}
}
}
}
callback(res);
});
} else {
callback(res);
}
};
/**
* Get db data for steamid
* @param {string} type
* @param {string} id
* @returns {*}
*/
steamapi.getDataForId = function (type, id) {
var sdb = db.get("steamapi");
var playerData = sdb.get(id).value();
if (!playerData || !playerData[type]) return null;
if (playerData[type].timestamp < (new Date().getTime() / 1000 - 86400)) {
delete playerData[type];
}
return playerData[type] || null;
};
/**
* Save db data for steamid
* @param {string} type
* @param {string} id
* @param {object} data
* @returns {*}
*/
steamapi.saveDataForId = function (type, id, data) {
var sdb = db.get("steamapi");
var playerData = sdb.get(id).value();
if (!playerData) playerData = {};
data.timestamp = new Date().getTime() / 1000;
playerData[type] = data;
sdb.set(id, playerData).value();
};
/**
* Delete old entries
*/
steamapi.cleanup = function () {
try {
var data = db.get("steamapi").value();
var timeout = new Date() / 1000 - 86400;
for (var steamId in data) {
if (data.hasOwnProperty(steamId)) {
var entries = data[steamId];
for (var entryIndex in entries) {
if (entries.hasOwnProperty(entryIndex)) {
var entryRow = entries[entryIndex];
if (entryRow.timestamp < timeout) {
delete entries[entryIndex];
}
}
}
}
}
db.get("steamapi").setState(data);<|fim▁hole|> }
};
// each 30 minutes cleanup the steamapi db and remove old entries
setInterval(steamapi.cleanup, 30 * 60 * 1000);
steamapi.cleanup();
module.exports = steamapi;<|fim▁end|> | } catch (e) {
console.error(new Date(), "Steamapi cleanup failed", e, e.stack); |
<|file_name|>parse_options.cpp<|end_file_name|><|fim▁begin|>/**
\file parse_options.cpp
\author [email protected]
\copyright ABY - A Framework for Efficient Mixed-protocol Secure Two-party Computation
Copyright (C) 2015 Engineering Cryptographic Protocols Group, TU Darmstadt
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
\brief Parse Options Implementation
*/
#include "parse_options.h"
/**
* takes a string in the Format "c i i i ..."
* (1 char followed by potentially many integers) and returns a vector of all i
* @param str the string to tokenize
* @param tokens the result vector of wire id
*/
void tokenize_verilog(const std::string& str, std::vector<uint32_t>& tokens, const std::string& delimiters) {
tokens.clear();
// Skip delimiters at beginning. Skip first two characters (1 Char + 1 Space)
std::string::size_type lastPos = str.find_first_not_of(delimiters, 2);
// Find first "non-delimiter".
std::string::size_type pos = str.find_first_of(delimiters, lastPos);
while (std::string::npos != pos || std::string::npos != lastPos) {
// Found a token, add it to the vector.
tokens.push_back(atoi(str.substr(lastPos, pos - lastPos).c_str()));
// Skip delimiters. Note the "not_of"
lastPos = str.find_first_not_of(delimiters, pos);
// Find next "non-delimiter"
pos = str.find_first_of(delimiters, lastPos);
}
}
/**
* takes a string in the Format "i|i|i|..."
* (integers separated by '|') and returns a vector of all integers
* @param str the string to tokenize
* @param tokens the result vector of wire id
*/
void tokenize(const std::string& str, std::vector<uint32_t>& tokens, const std::string& delimiters) {
tokens.clear();
// Skip delimiters at beginning
std::string::size_type lastPos = str.find_first_not_of(delimiters, 0);
// Find first "non-delimiter".
std::string::size_type pos = str.find_first_of(delimiters, lastPos);
while (std::string::npos != pos || std::string::npos != lastPos) {
// Found a token, add it to the vector.
tokens.push_back(atoi(str.substr(lastPos, pos - lastPos).c_str()));
// Skip delimiters. Note the "not_of"
lastPos = str.find_first_not_of(delimiters, pos);
// Find next "non-delimiter"
pos = str.find_first_of(delimiters, lastPos);
}
}
int32_t parse_options(int32_t* argcp, char*** argvp, parsing_ctx* options, uint32_t nops) {
int result = 0;
bool skip;
uint32_t i;
if(*argcp < 2)
return 0;
while ((*argcp) > 1) {
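		// stop at the first argument that is not a single-character "-x" option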
if ((*argvp)[1][0] != '-' || (*argvp)[1][1] == '\0' || (*argvp)[1][2] != '\0')
return result;
for (i = 0, skip = false; i < nops && !skip; i++) {
if (((*argvp)[1][1]) == options[i].opt_name) {
switch (options[i].type) {
case T_NUM:
if (isdigit((*argvp)[2][0])) {
++*argvp;
--*argcp;
*((uint32_t*) options[i].val) = atoi((*argvp)[1]);
}
break;
case T_DOUBLE:
++*argvp;
--*argcp;
*((double*) options[i].val) = atof((*argvp)[1]);
break;
case T_STR:
++*argvp;
--*argcp;
*((std::string*) options[i].val) = (*argvp)[1];
break;
case T_FLAG:<|fim▁hole|> *((bool*) options[i].val) = true;
break;
}
++result;
++*argvp;
--*argcp;
options[i].set = true;
skip = true;
}
}
}
for (i = 0; i < nops; i++) {
if (options[i].required && !options[i].set)
return 0;
}
return 1;
}
void print_usage(std::string progname, parsing_ctx* options, uint32_t nops) {
uint32_t i;
std::cout << "Usage: " << progname << std::endl;
for (i = 0; i < nops; i++) {
std::cout << " -" << options[i].opt_name << " [" << options[i].help_str << (options[i].required ? ", required" : ", optional") << "]" << std::endl;
}
std::cout << std::endl << "Program exiting" << std::endl;
}<|fim▁end|> | |
<|file_name|>SimDigital.cpp<|end_file_name|><|fim▁begin|>/*************** <auto-copyright.pl BEGIN do not edit this line> **************
*
* VR Juggler is (C) Copyright 1998-2011 by Iowa State University
*
* Original Authors:
* Allen Bierbaum, Christopher Just,
* Patrick Hartling, Kevin Meinert,
* Carolina Cruz-Neira, Albert Baker
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*************** <auto-copyright.pl END do not edit this line> ***************/
#include <gadget/gadgetConfig.h>
#include <jccl/Config/ConfigElement.h>
#include <gadget/Devices/Sim/SimDigital.h>
namespace gadget
{
/** Default Constructor */
SimDigital::SimDigital()
{
vprDEBUG(vprDBG_ALL, vprDBG_VERB_LVL)<<"*** SimDigital::SimDigital()\n"<< vprDEBUG_FLUSH;
}
/** Destructor */
SimDigital::~SimDigital()
{
//vprDEBUG(vprDBG_ALL, vprDBG_VERB_LVL)<<"*** SimDigital::~SimDigital()\n"<< vprDEBUG_FLUSH;
}
std::string SimDigital::getElementType()
{
return "simulated_digital_device";
}
bool SimDigital::config(jccl::ConfigElementPtr element)
{
//vprDEBUG(vprDBG_ALL, vprDBG_VERB_LVL)<<"*** SimDigital::config()\n"<< vprDEBUG_FLUSH;
if (! (Input::config(element) && Digital::config(element) &&
SimInput::config(element)) )
{
return false;
}
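   // Gather the configured key_pair elements and build the simulated key list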
std::vector<jccl::ConfigElementPtr> key_list;
int key_count = element->getNum("key_pair");
for ( int i = 0; i < key_count; ++i )
{
key_list.push_back(element->getProperty<jccl::ConfigElementPtr>("key_pair", i));
}
mSimKeys = readKeyList(key_list);
return true;
}
/**
* Updates the state of the digital data vector.
*
* @note Digital is on when key is held down.
* When key is release, digital goes to off state.
*/
void SimDigital::updateData()
{
//vprDEBUG(vprDBG_ALL, vprDBG_VERB_LVL)<<"*** SimDigital::updateData()\n"<< vprDEBUG_FLUSH;
std::vector<DigitalData> digital_data_sample(mSimKeys.size()); // The digital data that makes up the sample
// -- Update digital data --- //
for (unsigned int i = 0; i < mSimKeys.size(); ++i)
{
// Set the time for the digital data to the KeyboardMouse timestamp
digital_data_sample[i].setTime(mKeyboardMouse->getTimeStamp());
// ON if keys pressed, OFF otherwise.
digital_data_sample[i] = checkKeyPair(mSimKeys[i]) ? DigitalState::ON
: DigitalState::OFF;
}
// Add a sample
addDigitalSample(digital_data_sample);
swapDigitalBuffers();
}
<|fim▁hole|><|fim▁end|> | } // End of gadget namespace |
<|file_name|>test_commands.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from testtools import content
from pbr.tests import base
class TestCommands(base.BaseTestCase):
def test_custom_build_py_command(self):
"""Test custom build_py command.
Test that a custom subclass of the build_py command runs when listed in
the commands [global] option, rather than the normal build command.
"""
stdout, stderr, return_code = self.run_setup('build_py')
self.addDetail('stdout', content.text_content(stdout))
self.addDetail('stderr', content.text_content(stderr))
self.assertIn('Running custom build_py command.', stdout)
self.assertEqual(0, return_code)
def test_custom_deb_version_py_command(self):
"""Test custom deb_version command."""
stdout, stderr, return_code = self.run_setup('deb_version')
self.addDetail('stdout', content.text_content(stdout))
self.addDetail('stderr', content.text_content(stderr))
self.assertIn('Extracting deb version', stdout)
self.assertEqual(0, return_code)
def test_custom_rpm_version_py_command(self):
"""Test custom rpm_version command."""
stdout, stderr, return_code = self.run_setup('rpm_version')
self.addDetail('stdout', content.text_content(stdout))
self.addDetail('stderr', content.text_content(stderr))
self.assertIn('Extracting rpm version', stdout)
self.assertEqual(0, return_code)
def test_freeze_command(self):
"""Test that freeze output is sorted in a case-insensitive manner."""
stdout, stderr, return_code = self.run_pbr('freeze')
self.assertEqual(0, return_code)
pkgs = []
        for line in stdout.split('\n'):
            pkgs.append(line.split('==')[0].lower())
pkgs_sort = sorted(pkgs[:])
self.assertEqual(pkgs_sort, pkgs)<|fim▁end|> | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# |
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package core
import (
"crypto/sha256"
"errors"
"strings"
"time"
"github.com/OpenBazaar/wallet-interface"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/golang/protobuf/ptypes"
google_protobuf "github.com/golang/protobuf/ptypes/timestamp"
util "gx/ipfs/QmNiJuT8Ja3hMVpBHXv3Q6dwmperaQ6JjLtpMQgMCD7xvx/go-ipfs-util"
ma "gx/ipfs/QmWWQ2Txc2c6tqjsBpzg5Ar652cHPGNsQQp2SejkNmkUMb/go-multiaddr"
ps "gx/ipfs/QmXauCuJzmzapetmC6W4TuDJLL1yFFrVzSHoWv8YdbmnxH/go-libp2p-peerstore"
mh "gx/ipfs/QmZyZDi491cCNTLfAhwcaDii2Kg4pwKRkhqQzURGDvY6ua/go-multihash"
cid "gx/ipfs/QmcZfnkapfECQGcLZaf9B79NRg7cRa9EnZh4LSbkCzwNvY/go-cid"
"github.com/OpenBazaar/openbazaar-go/pb"
)
// EncodeCID - Hash with SHA-256 and encode as a multihash
func EncodeCID(b []byte) (*cid.Cid, error) {
multihash, err := EncodeMultihash(b)
if err != nil {
return nil, err
}
id := cid.NewCidV1(cid.Raw, *multihash)
return id, err<|fim▁hole|>
// EncodeMultihash - sha256 encode
func EncodeMultihash(b []byte) (*mh.Multihash, error) {
h := sha256.Sum256(b)
encoded, err := mh.Encode(h[:], mh.SHA2_256)
if err != nil {
return nil, err
}
multihash, err := mh.Cast(encoded)
if err != nil {
return nil, err
}
return &multihash, err
}
// ExtractIDFromPointer Certain pointers, such as moderators, contain a peerID. This function
// will extract the ID from the underlying PeerInfo object.
func ExtractIDFromPointer(pi ps.PeerInfo) (string, error) {
if len(pi.Addrs) == 0 {
return "", errors.New("PeerInfo object has no addresses")
}
addr := pi.Addrs[0]
if addr.Protocols()[0].Code != ma.P_IPFS {
return "", errors.New("IPFS protocol not found in address")
}
val, err := addr.ValueForProtocol(ma.P_IPFS)
if err != nil {
return "", err
}
return val, nil
}
// FormatRFC3339PB returns the given `google_protobuf.Timestamp` as a RFC3339
// formatted string
func FormatRFC3339PB(ts google_protobuf.Timestamp) string {
return util.FormatRFC3339(time.Unix(ts.Seconds, int64(ts.Nanos)).UTC())
}
// BuildTransactionRecords - Used by the GET order API to build transaction records suitable to be included in the order response
func (n *OpenBazaarNode) BuildTransactionRecords(contract *pb.RicardianContract, records []*wallet.TransactionRecord, state pb.OrderState) ([]*pb.TransactionRecord, *pb.TransactionRecord, error) {
paymentRecords := []*pb.TransactionRecord{}
payments := make(map[string]*pb.TransactionRecord)
// Consolidate any transactions with multiple outputs into a single record
for _, r := range records {
record, ok := payments[r.Txid]
if ok {
record.Value += r.Value
payments[r.Txid] = record
} else {
tx := new(pb.TransactionRecord)
tx.Txid = r.Txid
tx.Value = r.Value
ts, err := ptypes.TimestampProto(r.Timestamp)
if err != nil {
return paymentRecords, nil, err
}
tx.Timestamp = ts
ch, err := chainhash.NewHashFromStr(tx.Txid)
if err != nil {
return paymentRecords, nil, err
}
confirmations, height, err := n.Wallet.GetConfirmations(*ch)
if err != nil {
return paymentRecords, nil, err
}
tx.Height = height
tx.Confirmations = confirmations
payments[r.Txid] = tx
}
}
for _, rec := range payments {
paymentRecords = append(paymentRecords, rec)
}
var refundRecord *pb.TransactionRecord
if contract != nil && (state == pb.OrderState_REFUNDED || state == pb.OrderState_DECLINED || state == pb.OrderState_CANCELED) && contract.BuyerOrder != nil && contract.BuyerOrder.Payment != nil {
// For multisig we can use the outgoing from the payment address
if contract.BuyerOrder.Payment.Method == pb.Order_Payment_MODERATED || state == pb.OrderState_DECLINED || state == pb.OrderState_CANCELED {
for _, rec := range payments {
if rec.Value < 0 {
refundRecord = new(pb.TransactionRecord)
refundRecord.Txid = rec.Txid
refundRecord.Value = -rec.Value
refundRecord.Confirmations = rec.Confirmations
refundRecord.Height = rec.Height
refundRecord.Timestamp = rec.Timestamp
break
}
}
} else if contract.Refund != nil && contract.Refund.RefundTransaction != nil && contract.Refund.Timestamp != nil {
refundRecord = new(pb.TransactionRecord)
// Direct we need to use the transaction info in the contract's refund object
ch, err := chainhash.NewHashFromStr(contract.Refund.RefundTransaction.Txid)
if err != nil {
return paymentRecords, refundRecord, err
}
confirmations, height, err := n.Wallet.GetConfirmations(*ch)
if err != nil {
				return paymentRecords, refundRecord, err
}
refundRecord.Txid = contract.Refund.RefundTransaction.Txid
refundRecord.Value = int64(contract.Refund.RefundTransaction.Value)
refundRecord.Timestamp = contract.Refund.Timestamp
refundRecord.Confirmations = confirmations
refundRecord.Height = height
}
}
return paymentRecords, refundRecord, nil
}
// NormalizeCurrencyCode standardizes the format for the given currency code
func NormalizeCurrencyCode(currencyCode string) string {
return strings.ToUpper(currencyCode)
}<|fim▁end|> | } |
<|file_name|>cloud9.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
use rusoto_cloud9::{Cloud9, Cloud9Client, ListEnvironmentsRequest};
use rusoto_core::Region;
#[test]
fn should_list_environments() {
let client = Cloud9Client::new(Region::UsEast1);
let request = ListEnvironmentsRequest::default();
let result = client.list_environments(request).sync().unwrap();
println!("{:#?}", result);
}<|fim▁end|> | #![cfg(feature = "cloud9")]
extern crate rusoto_core;
extern crate rusoto_cloud9; |
<|file_name|>FileSerializer.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2012 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.<|fim▁hole|> *
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.processor.serializer;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItem;
import org.apache.log4j.Logger;
import org.dom4j.Document;
import org.orbeon.oxf.common.OXFException;
import org.orbeon.oxf.pipeline.api.PipelineContext;
import org.orbeon.oxf.pipeline.api.XMLReceiver;
import org.orbeon.oxf.processor.*;
import org.orbeon.oxf.processor.serializer.store.ResultStore;
import org.orbeon.oxf.processor.serializer.store.ResultStoreOutputStream;
import org.orbeon.oxf.util.LoggerFactory;
import org.orbeon.oxf.util.NetUtils;
import org.orbeon.oxf.xforms.processor.XFormsResourceServer;
import org.orbeon.oxf.xml.XMLUtils;
import org.orbeon.oxf.xml.XPathUtils;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
/**
* The File Serializer serializes text and binary documents to files on disk.
*/
public class FileSerializer extends ProcessorImpl {
private static Logger logger = LoggerFactory.createLogger(FileSerializer.class);
public static final String FILE_SERIALIZER_CONFIG_NAMESPACE_URI = "http://orbeon.org/oxf/xml/file-serializer-config";
public static final String DIRECTORY_PROPERTY = "directory";
// NOTE: Those are also in HttpSerializerBase
private static final boolean DEFAULT_FORCE_CONTENT_TYPE = false;
private static final boolean DEFAULT_IGNORE_DOCUMENT_CONTENT_TYPE = false;
private static final boolean DEFAULT_FORCE_ENCODING = false;
private static final boolean DEFAULT_IGNORE_DOCUMENT_ENCODING = false;
private static final boolean DEFAULT_APPEND = false;
private static final boolean DEFAULT_MAKE_DIRECTORIES = false;
static {
try {
// Create factory
DocumentBuilderFactory documentBuilderFactory = (DocumentBuilderFactory) Class.forName("orbeon.apache.xerces.jaxp.DocumentBuilderFactoryImpl").newInstance();
// Configure factory
documentBuilderFactory.setNamespaceAware(true);
}
catch (Exception e) {
throw new OXFException(e);
}
}
public FileSerializer() {
addInputInfo(new ProcessorInputOutputInfo(INPUT_CONFIG, FILE_SERIALIZER_CONFIG_NAMESPACE_URI));
addInputInfo(new ProcessorInputOutputInfo(INPUT_DATA));
// We don't declare the "data" output here, as this is an optional output.
        // If we declare it, the XPL engine won't be happy when we don't connect anything to that output.
}
private static class Config {
private String directory;
private String file;
private String scope;
private boolean proxyResult;
private String url;
private boolean append;
private boolean makeDirectories;
private boolean cacheUseLocalCache;
private boolean forceContentType;
private String requestedContentType;
private boolean ignoreDocumentContentType;
private boolean forceEncoding;
private String requestedEncoding;
private boolean ignoreDocumentEncoding;
public Config(Document document) {
// Directory and file
directory = XPathUtils.selectStringValueNormalize(document, "/config/directory");
file = XPathUtils.selectStringValueNormalize(document, "/config/file");
// Scope
scope = XPathUtils.selectStringValueNormalize(document, "/config/scope");
// Proxy result
proxyResult = ProcessorUtils.selectBooleanValue(document, "/config/proxy-result", false);
// URL
url = XPathUtils.selectStringValueNormalize(document, "/config/url");
// Cache control
cacheUseLocalCache = ProcessorUtils.selectBooleanValue(document, "/config/cache-control/use-local-cache", CachedSerializer.DEFAULT_CACHE_USE_LOCAL_CACHE);
// Whether to append or not
append = ProcessorUtils.selectBooleanValue(document, "/config/append", DEFAULT_APPEND);
// Whether to append or not
makeDirectories = ProcessorUtils.selectBooleanValue(document, "/config/make-directories", DEFAULT_MAKE_DIRECTORIES);
// Content-type and Encoding
requestedContentType = XPathUtils.selectStringValueNormalize(document, "/config/content-type");
forceContentType = ProcessorUtils.selectBooleanValue(document, "/config/force-content-type", DEFAULT_FORCE_CONTENT_TYPE);
// TODO: We don't seem to be using the content type in the file serializer.
// Maybe this is something that was left over from the days when the file serializer was also serializing XML.
            if (forceContentType && (requestedContentType == null || requestedContentType.equals("")))
                throw new OXFException("The force-content-type element requires a content-type element.");
ignoreDocumentContentType = ProcessorUtils.selectBooleanValue(document, "/config/ignore-document-content-type", DEFAULT_IGNORE_DOCUMENT_CONTENT_TYPE);
requestedEncoding = XPathUtils.selectStringValueNormalize(document, "/config/encoding");
forceEncoding = ProcessorUtils.selectBooleanValue(document, "/config/force-encoding", DEFAULT_FORCE_ENCODING);
if (forceEncoding && (requestedEncoding == null || requestedEncoding.equals("")))
throw new OXFException("The force-encoding element requires an encoding element.");
ignoreDocumentEncoding = ProcessorUtils.selectBooleanValue(document, "/config/ignore-document-encoding", DEFAULT_IGNORE_DOCUMENT_ENCODING);
}
public String getDirectory() {
return directory;
}
public String getFile() {
return file;
}
public String getScope() {
return scope;
}
public boolean isProxyResult() {
return proxyResult;
}
public String getUrl() {
return url;
}
public boolean isAppend() {
return append;
}
public boolean isMakeDirectories() {
return makeDirectories;
}
public boolean isCacheUseLocalCache() {
return cacheUseLocalCache;
}
public boolean isForceContentType() {
return forceContentType;
}
public boolean isForceEncoding() {
return forceEncoding;
}
public boolean isIgnoreDocumentContentType() {
return ignoreDocumentContentType;
}
public boolean isIgnoreDocumentEncoding() {
return ignoreDocumentEncoding;
}
public String getRequestedContentType() {
return requestedContentType;
}
public String getRequestedEncoding() {
return requestedEncoding;
}
}
@Override
public void start(PipelineContext context) {
try {
// Read config
final Config config = readCacheInputAsObject(context, getInputByName(INPUT_CONFIG), new CacheableInputReader<Config>() {
public Config read(PipelineContext context, ProcessorInput input) {
return new Config(readInputAsDOM4J(context, input));
}
});
final ProcessorInput dataInput = getInputByName(INPUT_DATA);
// Get file object
final String directory = config.getDirectory() != null ? config.getDirectory() : getPropertySet().getString(DIRECTORY_PROPERTY);
final File file = NetUtils.getFile(directory, config.getFile(), config.getUrl(), getLocationData(), config.isMakeDirectories());
// NOTE: Caching here is broken, so we never cache. This is what we should do in case
// we want caching:
// o for a given file, store a hash of the content stored (or the input key?)
// o then when we check whether we need to modify the file, check against the key
// AND the validity
// Delete file if it exists, unless we append
if (!config.isAppend() && file.exists()) {
final boolean deleted = file.delete();
// We test on file.exists() here again so we don't complain that the file can't be deleted if it got
// deleted just between our last test and the delete operation.
if (!deleted && file.exists())
throw new OXFException("Can't delete file: " + file);
}
// Create file if needed
file.createNewFile();
FileOutputStream fileOutputStream = new FileOutputStream(file, config.isAppend());
writeToFile(context, config, dataInput, fileOutputStream);
} catch (Exception e) {
throw new OXFException(e);
}
}
private void writeToFile(PipelineContext context, final Config config, ProcessorInput dataInput, final OutputStream fileOutputStream) throws IOException {
try {
if (config.cacheUseLocalCache) {
// If caching of the data is enabled, use the caching API
// We return a ResultStore
final boolean[] read = new boolean[1];
ResultStore filter = (ResultStore) readCacheInputAsObject(context, dataInput, new CacheableInputReader() {
public Object read(PipelineContext context, ProcessorInput input) {
read[0] = true;
if (logger.isDebugEnabled())
logger.debug("Output not cached");
try {
ResultStoreOutputStream resultStoreOutputStream = new ResultStoreOutputStream(fileOutputStream);
readInputAsSAX(context, input, new BinaryTextXMLReceiver(null, resultStoreOutputStream, true,
config.forceContentType, config.requestedContentType, config.ignoreDocumentContentType,
config.forceEncoding, config.requestedEncoding, config.ignoreDocumentEncoding));
resultStoreOutputStream.close();
return resultStoreOutputStream;
} catch (IOException e) {
throw new OXFException(e);
}
}
});
// If the output was obtained from the cache, just write it
if (!read[0]) {
if (logger.isDebugEnabled())
logger.debug("Serializer output cached");
filter.replay(fileOutputStream);
}
} else {
// Caching is not enabled
readInputAsSAX(context, dataInput, new BinaryTextXMLReceiver(null, fileOutputStream, true,
config.forceContentType, config.requestedContentType, config.ignoreDocumentContentType,
config.forceEncoding, config.requestedEncoding, config.ignoreDocumentEncoding));
fileOutputStream.close();
}
} finally {
if (fileOutputStream != null)
fileOutputStream.close();
}
}
/**
* Case where a response must be generated.
*/
@Override
public ProcessorOutput createOutput(String name) {
final ProcessorOutput output = new ProcessorOutputImpl(FileSerializer.this, name) {
public void readImpl(PipelineContext pipelineContext, XMLReceiver xmlReceiver) {
OutputStream fileOutputStream = null;
try {
//Get the input and config
final Config config = getConfig(pipelineContext);
final ProcessorInput dataInput = getInputByName(INPUT_DATA);
// Determine scope
final int scope;
if ("request".equals(config.getScope())) {
scope = NetUtils.REQUEST_SCOPE;
} else if ("session".equals(config.getScope())) {
scope = NetUtils.SESSION_SCOPE;
} else if ("application".equals(config.getScope())) {
scope = NetUtils.APPLICATION_SCOPE;
} else {
throw new OXFException("Invalid context requested: " + config.getScope());
}
// We use the commons fileupload utilities to write to file
final FileItem fileItem = NetUtils.prepareFileItem(scope);
fileOutputStream = fileItem.getOutputStream();
writeToFile(pipelineContext, config, dataInput, fileOutputStream);
// Create file if it doesn't exist
final File storeLocation = ((DiskFileItem) fileItem).getStoreLocation();
storeLocation.createNewFile();
// Get the url of the file
final String resultURL;
{
final String localURL = ((DiskFileItem) fileItem).getStoreLocation().toURI().toString();
if ("session".equals(config.getScope()) && config.isProxyResult())
resultURL = XFormsResourceServer.jProxyURI(localURL, config.getRequestedContentType());
else
resultURL = localURL;
}
xmlReceiver.startDocument();
xmlReceiver.startElement("", "url", "url", XMLUtils.EMPTY_ATTRIBUTES);
xmlReceiver.characters(resultURL.toCharArray(), 0, resultURL.length());
xmlReceiver.endElement("", "url", "url");
xmlReceiver.endDocument();
}
catch (SAXException e) {
throw new OXFException(e);
}
catch (IOException e) {
throw new OXFException(e);
}
finally {
if (fileOutputStream != null) {
try {
fileOutputStream.close();
}
catch (IOException e) {
throw new OXFException(e);
}
}
}
}
};
addOutput(name, output);
return output;
}
protected Config getConfig(PipelineContext pipelineContext) {
// Read config
return readCacheInputAsObject(pipelineContext, getInputByName(INPUT_CONFIG), new CacheableInputReader<Config>() {
public Config read(PipelineContext context, ProcessorInput input) {
return new Config(readInputAsDOM4J(context, input));
}
});
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 by Clearcode <http://clearcode.cc>
# and associates (see AUTHORS).
# This file is part of matchbox.
# matchbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# matchbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># along with matchbox. If not, see <http://www.gnu.org/licenses/>.
"""Main matchbox module."""
from matchbox.box import MatchBox
from matchbox.index import MatchIndex
__version__ = "1.1.1"
__all__ = ("MatchBox", "MatchIndex")<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License |
<|file_name|>SyspropsMapWrapper.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.Pair;
/**
* A wrapper that exposes a read-only {@link Map} access to the system
* properties. Any attempt to modify it will throw {@link UnsupportedOperationException}.
 * The mapper uses the {@link #SYSPROPS_MAPPED_PREFIX} to filter and access
 * only these properties, ignoring all others.
*
* @author <a href="mailto:[email protected]">Apache MINA SSHD Project</a>
*/
public final class SyspropsMapWrapper implements Map<String, Object> {
/**
* Prefix of properties used by the mapper to identify SSHD related settings
*/
public static final String SYSPROPS_MAPPED_PREFIX = "org.apache.sshd.config";
/**
* The one and only wrapper instance
*/
public static final SyspropsMapWrapper INSTANCE = new SyspropsMapWrapper();
/**
* A {@link PropertyResolver} with no parent that exposes the system properties
*/
public static final PropertyResolver SYSPROPS_RESOLVER = new PropertyResolver() {
@Override
public Map<String, Object> getProperties() {
return SyspropsMapWrapper.INSTANCE;
}
@Override
public PropertyResolver getParentPropertyResolver() {
return null;
}
@Override
public String toString() {
return "SYSPROPS";
}
};
private SyspropsMapWrapper() {
super();
}
@Override
public void clear() {
throw new UnsupportedOperationException("sysprops#clear() N/A");
}
@Override
public boolean containsKey(Object key) {
return get(key) != null;
}
@Override
public boolean containsValue(Object value) {
// not the most efficient implementation, but we do not expect it to be called much
Properties props = System.getProperties();
for (String key : props.stringPropertyNames()) {
if (!isMappedSyspropKey(key)) {
continue;
}
Object v = props.getProperty(key);
if (Objects.equals(v, value)) {
return true;
}
}
return false;
}
@Override
public Set<Entry<String, Object>> entrySet() {
Properties props = System.getProperties();
// return a copy in order to avoid concurrent modifications
Set<Entry<String, Object>> entries =
new TreeSet<Entry<String, Object>>(Pair.<String, Object>byKeyEntryComparator());
for (String key : props.stringPropertyNames()) {
if (!isMappedSyspropKey(key)) {
continue;
}
Object v = props.getProperty(key);
if (v != null) {
entries.add(new Pair<>(getUnmappedSyspropKey(key), v));
}
}
return entries;
}
@Override
public Object get(Object key) {
return (key instanceof String) ? System.getProperty(getMappedSyspropKey(key)) : null;
}
@Override
public boolean isEmpty() {
return GenericUtils.isEmpty(keySet());
}
@Override
public Set<String> keySet() {
Properties props = System.getProperties();
Set<String> keys = new TreeSet<>();
// filter out any non-SSHD properties
for (String key : props.stringPropertyNames()) {
if (isMappedSyspropKey(key)) {
keys.add(getUnmappedSyspropKey(key));
}
}
return keys;
}
@Override
public Object put(String key, Object value) {
throw new UnsupportedOperationException("sysprops#put(" + key + ")[" + value + "] N/A");
}
@Override
public void putAll(Map<? extends String, ? extends Object> m) {
throw new UnsupportedOperationException("sysprops#putAll(" + m + ") N/A");
}
@Override<|fim▁hole|>
@Override
public int size() {
return GenericUtils.size(keySet());
}
@Override
public Collection<Object> values() {
Properties props = System.getProperties();
// return a copy in order to avoid concurrent modifications
List<Object> values = new ArrayList<>(props.size());
for (String key : props.stringPropertyNames()) {
if (!isMappedSyspropKey(key)) {
continue;
}
Object v = props.getProperty(key);
if (v != null) {
values.add(v);
}
}
return values;
}
@Override
public String toString() {
return Objects.toString(System.getProperties(), null);
}
/**
* @param key Key to be tested
* @return {@code true} if key starts with {@link #SYSPROPS_MAPPED_PREFIX}
* and continues with a dot followed by some characters
*/
public static boolean isMappedSyspropKey(String key) {
return (GenericUtils.length(key) > (SYSPROPS_MAPPED_PREFIX.length() + 1))
&& key.startsWith(SYSPROPS_MAPPED_PREFIX)
&& (key.charAt(SYSPROPS_MAPPED_PREFIX.length()) == '.');
}
/**
* @param key Key to be transformed
* @return The "pure" key name if a mapped one, same as input otherwise
* @see #isMappedSyspropKey(String)
*/
public static String getUnmappedSyspropKey(Object key) {
String s = Objects.toString(key);
return isMappedSyspropKey(s) ? s.substring(SYSPROPS_MAPPED_PREFIX.length() + 1 /* skip dot */) : s;
}
/**
* @param key The original key
* @return A key prefixed by {@link #SYSPROPS_MAPPED_PREFIX}
* @see #isMappedSyspropKey(String)
*/
public static String getMappedSyspropKey(Object key) {
return SYSPROPS_MAPPED_PREFIX + "." + key;
}
}<|fim▁end|> | public Object remove(Object key) {
throw new UnsupportedOperationException("sysprops#remove(" + key + ") N/A");
} |
<|file_name|>test_gem.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"Fully test this module's functionality through the use of fixtures."
from megacosm.generators import Gem<|fim▁hole|>import fixtures
class TestGem(unittest.TestCase):
def setUp(self):
""" """
self.redis = fakeredis.FakeRedis()
fixtures.gem.import_fixtures(self)
def tearDown(self):
self.redis.flushall()
def test_random_gem(self):
""" """
gem = Gem(self.redis)
self.assertNotEquals('', gem.text)
self.assertNotEquals('', gem.count)
self.assertNotEquals('', gem.color)
self.assertNotEquals('', str(gem))
def test_static_gem(self):
""" """
gem = Gem(self.redis, {'text':'foo bar', 'count':3, 'color':'green'})
self.assertEqual('Foo bar', gem.text)
self.assertEqual(3, gem.count)
self.assertEqual('green', gem.color)<|fim▁end|> | import unittest2 as unittest
import fakeredis
from config import TestConfiguration |
<|file_name|>nfc.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { IconBaseProps } from 'react-icon-base';<|fim▁hole|><|fim▁end|> | export default class MdNfc extends React.Component<IconBaseProps, any> { } |
<|file_name|>hws_cascaded_configer.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
__author__ = 'q00222219@huawei'
import time
from heat.openstack.common import log as logging
import heat.engine.resources.cloudmanager.commonutils as commonutils
import heat.engine.resources.cloudmanager.constant as constant
import heat.engine.resources.cloudmanager.exception as exception
LOG = logging.getLogger(__name__)
class CascadedConfiger(object):
def __init__(self, public_ip_api, api_ip, domain, user, password,
cascading_domain, cascading_api_ip, cascaded_domain,
cascaded_api_ip, cascaded_api_subnet_gateway):
self.public_ip_api = public_ip_api
self.api_ip = api_ip
self.domain = domain
self.user = user
self.password = password
self.cascading_domain = cascading_domain
self.cascading_api_ip = cascading_api_ip
self.cascaded_domain = cascaded_domain
self.cascaded_ip = cascaded_api_ip
self.gateway = cascaded_api_subnet_gateway
def do_config(self):
start_time = time.time()
LOG.info("start config cascaded, cascaded: %s" % self.domain)
# wait cascaded tunnel can visit
commonutils.check_host_status(host=self.public_ip_api,
user=self.user,
password=self.password,
retry_time=500, interval=1)
# config cascaded host<|fim▁hole|> cost_time = time.time() - start_time
LOG.info("first config success, cascaded: %s, cost time: %d"
% (self.domain, cost_time))
# check config result
for i in range(3):
try:
# check 90s
commonutils.check_host_status(
host=self.public_ip_api,
user=constant.VcloudConstant.ROOT,
password=constant.VcloudConstant.ROOT_PWD,
retry_time=15,
interval=1)
LOG.info("cascaded api is ready..")
break
except exception.CheckHostStatusFailure:
if i == 2:
LOG.error("check cascaded api failed ...")
break
LOG.error("check cascaded api error, "
"retry config cascaded ...")
self._config_az_cascaded()
cost_time = time.time() - start_time
LOG.info("config cascaded success, cascaded: %s, cost_time: %d"
% (self.domain, cost_time))
def _config_az_cascaded(self):
LOG.info("start config cascaded host, host: %s" % self.api_ip)
# modify dns server address
address = "/%(cascading_domain)s/%(cascading_ip)s,/%(cascaded_domain)s/%(cascaded_ip)s" \
% {"cascading_domain": self.cascading_domain,
"cascading_ip": self.cascading_api_ip,
"cascaded_domain":self.cascaded_domain,
"cascaded_ip":self.cascaded_ip}
for i in range(30):
try:
commonutils.execute_cmd_without_stdout(
host=self.public_ip_api,
user=self.user,
password=self.password,
cmd='cd %(dir)s; source /root/adminrc; sh %(script)s replace %(address)s'
% {"dir": constant.PublicConstant.SCRIPTS_DIR,
"script": constant.PublicConstant.
MODIFY_DNS_SERVER_ADDRESS,
"address": address})
break
except exception.SSHCommandFailure as e:
LOG.error("modify cascaded dns address error, cascaded: "
"%s, error: %s"
% (self.domain, e.format_message()))
time.sleep(1)
LOG.info(
"config cascaded dns address success, cascaded: %s"
% self.public_ip_api)
return True<|fim▁end|> | self._config_az_cascaded()
|
<|file_name|>11.py<|end_file_name|><|fim▁begin|># In the 20x20 grid below, four numbers along a diagonal line have been marked in red.
# 08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
# 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
# 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
# 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
# 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
# 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
# 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
# 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
# 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
# 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
# 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
# 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
# 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
# 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
# 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
# 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
# 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
# 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
# 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
# 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
# The product of these numbers is 26 x 63 x 78 x 14 = 1788696.
# What is the greatest product of four adjacent numbers in the same direction (up, down,
# left, right, or diagonally) in the 20x20 grid?
text = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 \
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 \
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 \
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 \
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 \<|fim▁hole|>24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 \
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 \
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 \
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 \
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 \
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 \
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 \
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 \
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 \
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 \
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 \
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48'
nums = [int(x) for x in text.split(' ')]
ans = 0
size = 20
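# horizontal (row) and vertical (column) runs of four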
for i in range(0, size):
for j in range(3, size):
tmp = nums[j - 3 + i * size] * nums[j - 2 + i * size] \
* nums[j - 1 + i * size] * nums[j + i * size]
ans = max(ans, tmp)
tmp = nums[i + (j - 3) * size] * nums[i + (j - 2) * size] \
* nums[i + (j - 1) * size] * nums[i + j * size]
ans = max(ans, tmp)
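# diagonal and anti-diagonal runs of four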
for i in range(3, size):
for j in range(3, size):
tmp = nums[j - 3 + (i - 3) * size] * nums[j - 2 + (i - 2) * size] \
* nums[j - 1 + (i - 1) * size] * nums[j + i * size]
ans = max(ans, tmp)
tmp = nums[j + (i - 3) * size] * nums[j - 1 + (i - 2) * size] \
* nums[j - 2 + (i - 1) * size] * nums[j - 3 + i * size]
ans = max(ans, tmp)
print ans<|fim▁end|> | 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 \
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 \
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 \ |
<|file_name|>bootstrap.js<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
$(document).ready(function(){
var oldAction = $('#comment-form').attr("action");
hljs.initHighlightingOnLoad();
$('#coolness div').hover(function(){
$('#coolness .second').fadeOut(500);
}, function(){
        $('#coolness .second').stop(false, true).fadeIn(1000);
});
$(".reply a").click(function() {
var add = this.className;
var action = oldAction + '/' + add;<|fim▁hole|>
console.log($('#comment-form').attr("action"));
});
});<|fim▁end|> | $("#comment-form").attr("action", action); |
<|file_name|>stock.py<|end_file_name|><|fim▁begin|>import numpy as np
class Stock:
"""
Class to represent the data and ratios of a stock.
"""
def __init__(self, eps, dps, roe=0):
'''
eps: earnings per share.
dps: dividends per share.
roe: fractional return on equity, default to 0.
'''
self.eps = np.array(eps).astype(float)
self.dps = np.array(dps).astype(float)
self.roe = np.array(roe).astype(float)
def retention_ratio(self):
'''
Calculates the retention ratio for a stock.
        Returns retention ratio (eps / dps) numpy array.
'''
return self.eps / self.dps
def dividend_cover(self):
'''<|fim▁hole|>
def payout_ratio(self):
'''
        Calculates the stock payout ratio as 1 / retention ratio.
Returns fractional payout ratio numpy array.
'''
return 1 / self.retention_ratio()
def growth_rate(self):
'''
Calculates the dividend growth rate:
        (1 - payout ratio) * return on equity
Returns the fractional expected growth rate numpy array.
'''
ones = np.ones_like(self.roe)
return (ones - self.payout_ratio()) * self.roe<|fim▁end|> | Returns the dividend cover for the stock (alias
for retention_ratio).
'''
return self.retention_ratio() |
<|file_name|>TabSet.js<|end_file_name|><|fim▁begin|>import React, { Fragment } from 'react'
import noop from '@utils/noop'
const DEFAULT_TAB = {
id: 'blank',
onClick: noop,
renderTab: () => <div />,
renderContents: () => <div />,
}
const TabSet = ({ activeTabId, buttons, onTabClicked = noop, showTabs = true, tabs = [] }) => {
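  // guarantee at least one tab so the component always has contents to render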
if (tabs.length === 0) tabs.push(DEFAULT_TAB)
const activeId = activeTabId || tabs[0].id
const activeTab = tabs.find(tab => tab.id === activeId) || tabs[0]
const className = [
'tab-set',
!showTabs ? 'no-tabs' : '',
].filter(Boolean).join(' ')
return (
<div className={className}>
{showTabs &&
<Fragment>
<ul className={`tabs ${activeTabId}`}>
{tabs.map((tab) => {
// eslint-disable-next-line no-param-reassign
if (!tab.onClick) tab.onClick = () => onTabClicked(tab.id)
const liClassName = [<|fim▁hole|> tab.className,
tab.id,
activeTab.id === tab.id && 'is-active',
].filter(Boolean).join(' ')
return <li key={tab.id} className={liClassName} onClick={tab.onClick}>{tab.tab}</li>
})}
<li className="buttons">{buttons}</li>
</ul>
</Fragment>
}
<div className="tab-contents">
{activeTab.contents}
</div>
</div>
)
}
TabSet.displayName = 'TabSet'
export default TabSet<|fim▁end|> | 'tab', |
<|file_name|>Robot_sr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="sr" sourcelanguage="en">
<context>
<name>CmdRobotAddToolShape</name>
<message>
<location filename="../../CommandInsertRobot.cpp" line="215"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="216"/>
<source>Add tool</source>
<translation>Додаj алат</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="217"/>
<source>Add a tool shape to the robot</source>
<translation>Додај алат за облик роботу</translation>
</message>
</context>
<context>
<name>CmdRobotConstraintAxle</name>
<message>
<location filename="../../Command.cpp" line="158"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../Command.cpp" line="159"/>
<source>Place robot...</source>
<translation>Поcтави робота...</translation>
</message>
<message>
<location filename="../../Command.cpp" line="160"/>
<source>Place a robot (experimental!)</source>
<translation>Поcтави робота (екcпериментално)</translation>
</message>
</context>
<context>
<name>CmdRobotCreateTrajectory</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="62"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="63"/>
<source>Create trajectory</source>
<translation>Направи путању</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="64"/>
<source>Create a new empty trajectory </source>
<translation>Направи нову празну путању </translation>
</message>
</context>
<context>
<name>CmdRobotEdge2Trac</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="320"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="321"/>
<source>Edge to Trajectory...</source>
<translation>Руб у Путању...</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="322"/>
<source>Generate a Trajectory from a set of edges</source>
<translation>Створи путању од више ивица</translation>
</message>
</context>
<context>
<name>CmdRobotExportKukaCompact</name>
<message>
<location filename="../../CommandExport.cpp" line="50"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="51"/>
<source>Kuka compact subroutine...</source>
<translation>Kuka компактни подпрограм...</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="52"/>
<source>Export the trajectory as a compact KRL subroutine.</source>
<translation>Извези путању као компактни KRL подпрограм.</translation>
</message>
</context>
<context>
<name>CmdRobotExportKukaFull</name>
<message>
<location filename="../../CommandExport.cpp" line="112"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="113"/>
<source>Kuka full subroutine...</source>
<translation>Kuka цео подпрограм...</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="114"/>
<source>Export the trajectory as a full KRL subroutine.</source>
<translation>Извези путању као потпун KRL подпрограм.</translation><|fim▁hole|> </message>
</context>
<context>
<name>CmdRobotInsertKukaIR125</name>
<message>
<location filename="../../CommandInsertRobot.cpp" line="174"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="175"/>
<source>Kuka IR125</source>
<translation>Kuka IR125</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="176"/>
<source>Insert a Kuka IR125 into the document.</source>
<translation>Уметни Kuka IR125 у документ.</translation>
</message>
</context>
<context>
<name>CmdRobotInsertKukaIR16</name>
<message>
<location filename="../../CommandInsertRobot.cpp" line="93"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="94"/>
<source>Kuka IR16</source>
<translation>Kuka IR16</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="95"/>
<source>Insert a Kuka IR16 into the document.</source>
<translation>Уметни Kuka IR16 у документ.</translation>
</message>
</context>
<context>
<name>CmdRobotInsertKukaIR210</name>
<message>
<location filename="../../CommandInsertRobot.cpp" line="134"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="135"/>
<source>Kuka IR210</source>
<translation>Kuka IR210</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="136"/>
<source>Insert a Kuka IR210 into the document.</source>
<translation>Уметни Kuka IR210 у документ.</translation>
</message>
</context>
<context>
<name>CmdRobotInsertKukaIR500</name>
<message>
<location filename="../../CommandInsertRobot.cpp" line="51"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="52"/>
<source>Kuka IR500</source>
<translation>Kuka IR500</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="53"/>
<source>Insert a Kuka IR500 into the document.</source>
<translation>Уметни Kuka IR500 у документ.</translation>
</message>
</context>
<context>
<name>CmdRobotInsertWaypoint</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="95"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="96"/>
<source>Insert in trajectory</source>
<translation>Уметни путању</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="97"/>
<source>Insert robot Tool location into trajectory</source>
<translation>Уметни локацију Алата робота у путању</translation>
</message>
</context>
<context>
<name>CmdRobotInsertWaypointPreselect</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="152"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="153"/>
<source>Insert in trajectory</source>
<translation>Уметни путању</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="154"/>
<source>Insert preselection position into trajectory (W)</source>
<translation>Уметни претходно изабрану позицију у путању (W)</translation>
</message>
</context>
<context>
<name>CmdRobotRestoreHomePos</name>
<message>
<location filename="../../Command.cpp" line="104"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../Command.cpp" line="105"/>
<location filename="../../Command.cpp" line="106"/>
<source>Move to home</source>
<translation>Помери на почетну позицију</translation>
</message>
</context>
<context>
<name>CmdRobotSetDefaultOrientation</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="216"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="217"/>
<source>Set default orientation</source>
<translation>Подеcи подразумевану оријентацију</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="218"/>
<source>Set the default orientation for subsequent commands for waypoint creation</source>
<translation>Постави подразумевану оријентацију за пратеће команде стварања тачака путање</translation>
</message>
</context>
<context>
<name>CmdRobotSetDefaultValues</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="254"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="255"/>
<source>Set default values</source>
<translation>Постави подразумеване вредности</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="256"/>
<source>Set the default values for speed, acceleration and continuity for subsequent commands of waypoint creation</source>
<translation>Подеcи подразумеване вредноcти за брзину, убрзање и континуитет за пратеће команде стварања тачака путање</translation>
</message>
</context>
<context>
<name>CmdRobotSetHomePos</name>
<message>
<location filename="../../Command.cpp" line="55"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../Command.cpp" line="56"/>
<location filename="../../Command.cpp" line="57"/>
<source>Set the home position</source>
<translation>Постави почетну позицију</translation>
</message>
</context>
<context>
<name>CmdRobotSimulate</name>
<message>
<location filename="../../Command.cpp" line="199"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../Command.cpp" line="200"/>
<source>Simulate a trajectory</source>
<translation>Cимулирај путању</translation>
</message>
<message>
<location filename="../../Command.cpp" line="201"/>
<source>Run a simulation on a trajectory</source>
<translation>Покрени cимулацију на путањи</translation>
</message>
</context>
<context>
<name>CmdRobotTrajectoryCompound</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="431"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="432"/>
<source>Trajectory compound...</source>
<translation>Cпој путање...</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="433"/>
<source>Group and connect some trajectories to one</source>
<translation>Групиши и cпоји неке путање у једну</translation>
</message>
</context>
<context>
<name>CmdRobotTrajectoryDressUp</name>
<message>
<location filename="../../CommandTrajectory.cpp" line="384"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="385"/>
<source>Dress-up trajectory...</source>
<translation>Опреми путању...</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="386"/>
<source>Create a dress-up object which overrides some aspects of a trajectory</source>
<translation>Направи опремљен објекат који ће надглаcати неке аcпекте путање</translation>
</message>
</context>
<context>
<name>Gui::TaskView::TaskWatcherCommands</name>
<message>
<location filename="../../Workbench.cpp" line="53"/>
<source>Trajectory tools</source>
<translation>Алати путање</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="54"/>
<source>Robot tools</source>
<translation>Алати робота</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="55"/>
<source>Insert Robot</source>
<translation>Уметни Робота</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../../Command.cpp" line="75"/>
<location filename="../../Command.cpp" line="124"/>
<location filename="../../Command.cpp" line="224"/>
<location filename="../../Command.cpp" line="253"/>
<location filename="../../CommandExport.cpp" line="65"/>
<location filename="../../CommandExport.cpp" line="127"/>
<location filename="../../CommandInsertRobot.cpp" line="234"/>
<location filename="../../CommandTrajectory.cpp" line="111"/>
<location filename="../../CommandTrajectory.cpp" line="167"/>
<location filename="../../CommandTrajectory.cpp" line="184"/>
<location filename="../../CommandTrajectory.cpp" line="412"/>
<source>Wrong selection</source>
<translation>Погрешан избор</translation>
</message>
<message>
<location filename="../../Command.cpp" line="76"/>
<source>Select one Robot to set home position</source>
<translation>Изабери једног Робота да поcтавиш почетну позицију</translation>
</message>
<message>
<location filename="../../Command.cpp" line="125"/>
<source>Select one Robot</source>
<translation>Изабери једног Робота</translation>
</message>
<message>
<location filename="../../Command.cpp" line="225"/>
<location filename="../../Command.cpp" line="254"/>
<location filename="../../CommandExport.cpp" line="66"/>
<location filename="../../CommandExport.cpp" line="128"/>
<location filename="../../CommandTrajectory.cpp" line="112"/>
<source>Select one Robot and one Trajectory object.</source>
<translation>Одабери једног Робота и један објекат Путање.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="230"/>
<source>Trajectory not valid</source>
<translation>Путања је неважећа</translation>
</message>
<message>
<location filename="../../Command.cpp" line="231"/>
<source>You need at least two waypoints in a trajectory to simulate.</source>
<translation>Потребне cу бар две тачке путање за cимулацију.</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="88"/>
<location filename="../../CommandExport.cpp" line="150"/>
<source>KRL file</source>
<translation>KRL датотека</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="89"/>
<location filename="../../CommandExport.cpp" line="151"/>
<source>All Files</source>
<translation>Све датотеке</translation>
</message>
<message>
<location filename="../../CommandExport.cpp" line="90"/>
<location filename="../../CommandExport.cpp" line="152"/>
<source>Export program</source>
<translation>Извези програм</translation>
</message>
<message>
<location filename="../../CommandInsertRobot.cpp" line="235"/>
<source>Select one robot and one shape or VRML object.</source>
<translation>Изабери једног робота и један облик, или VRML објекат.</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="168"/>
<location filename="../../CommandTrajectory.cpp" line="185"/>
<source>Select one Trajectory object.</source>
<translation>Одабери један објекат Путање.</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="191"/>
<source>No preselection</source>
<translation>Нема предизбора</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="192"/>
<source>You have to hover above a geometry (Preselection) with the mouse to use this command. See documentation for details.</source>
<translation>Морате прелазити преко геометрије (предизбора) cа мишем,да би кориcтили ову команду.Погледајте документацију за детаље.</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="268"/>
<source>Set default speed</source>
<translation>Подеси подразумевану брзину</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="269"/>
<source>speed: (e.g. 1 m/s or 3 cm/s)</source>
<translation>брзина: (e.g. 1 m/s or 3 cm/s)</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="278"/>
<source>Set default continuity</source>
<translation>Подеси подразумевани континуитет</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="279"/>
<source>continuous ?</source>
<translation>непрекидно ?</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="285"/>
<source>Set default acceleration</source>
<translation>Подеcи подразумевано убрзање</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="286"/>
<source>acceleration: (e.g. 1 m/s^2 or 3 cm/s^2)</source>
<translation>убрзање: (e.g. 1 m/s^2 or 3 cm/s^2)</translation>
</message>
<message>
<location filename="../../CommandTrajectory.cpp" line="413"/>
<source>Select the Trajectory which you want to dress up.</source>
<translation>Одабери Путању коју желиш да опремиш.</translation>
</message>
<message>
<location filename="../../ViewProviderTrajectory.cpp" line="164"/>
<source>Modify</source>
<translation>Измени</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="81"/>
<source>No robot files installed</source>
<translation>Нема инсталираних робот датотека</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="82"/>
<source>Please visit %1 and copy the files to %2</source>
<translation>Молимо, посетите %1 и копирајте датотеке у %2</translation>
</message>
</context>
<context>
<name>RobotGui::DlgTrajectorySimulate</name>
<message>
<location filename="../../TrajectorySimulate.ui" line="20"/>
<source>Simulation</source>
<translation>Симулација</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="58"/>
<source>|<</source>
<translation>|<</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="89"/>
<source><</source>
<translation><</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="120"/>
<source>||</source>
<translation>||</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="151"/>
<source>|></source>
<translation>|></translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="182"/>
<source>></source>
<translation>></translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="213"/>
<source>>|</source>
<translation>>|</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="237"/>
<source>%</source>
<translation>%</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="272"/>
<source>Type</source>
<translation>Тип</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="277"/>
<source>Name</source>
<translation>Име</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="282"/>
<source>C</source>
<translation>C</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="287"/>
<source>V</source>
<translation>V</translation>
</message>
<message>
<location filename="../../TrajectorySimulate.ui" line="292"/>
<source>A</source>
<translation>A</translation>
</message>
</context>
<context>
<name>RobotGui::TaskEdge2TracParameter</name>
<message>
<location filename="../../TaskEdge2TracParameter.cpp" line="47"/>
<source>TaskEdge2TracParameter</source>
<translation type="unfinished">TaskEdge2TracParameter</translation>
</message>
</context>
<context>
<name>RobotGui::TaskRobot6Axis</name>
<message>
<location filename="../../TaskRobot6Axis.ui" line="14"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="22"/>
<source>A1</source>
<translation>A1</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="69"/>
<source>A2</source>
<translation>A2</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="116"/>
<source>A3</source>
<translation>A3</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="163"/>
<source>A4</source>
<translation>A4</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="210"/>
<source>A5</source>
<translation>A5</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="257"/>
<source>A6</source>
<translation>A6</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="313"/>
<source>TCP: (200.23,300.23,400.23,234,343,343)</source>
<translation>TCP: (200.23,300.23,400.23,234,343,343)</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="329"/>
<source>Tool: (0,0,400,0,0,0)</source>
<translation>Алатка: (0,0,400,0,0,0)</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.ui" line="345"/>
<source>...</source>
<translation>...</translation>
</message>
<message>
<location filename="../../TaskRobot6Axis.cpp" line="52"/>
<source>TaskRobot6Axis</source>
<translation type="unfinished">TaskRobot6Axis</translation>
</message>
</context>
<context>
<name>RobotGui::TaskRobotControl</name>
<message>
<location filename="../../TaskRobotControl.cpp" line="46"/>
<source>TaskRobotControl</source>
<translation type="unfinished">TaskRobotControl</translation>
</message>
</context>
<context>
<name>RobotGui::TaskRobotMessages</name>
<message>
<location filename="../../TaskRobotMessages.cpp" line="46"/>
<source>TaskRobotMessages</source>
<translation type="unfinished">TaskRobotMessages</translation>
</message>
</context>
<context>
<name>RobotGui::TaskTrajectory</name>
<message>
<location filename="../../TaskTrajectory.ui" line="20"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="58"/>
<source>|<</source>
<translation>|<</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="89"/>
<source><</source>
<translation><</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="120"/>
<source>||</source>
<translation>||</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="151"/>
<source>|></source>
<translation>|></translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="182"/>
<source>></source>
<translation>></translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="213"/>
<source>>|</source>
<translation>>|</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="237"/>
<source>%</source>
<translation>%</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="254"/>
<source>10 ms</source>
<translation>10 мс</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="259"/>
<source>50 ms</source>
<translation>50 мс</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="264"/>
<source>100 ms</source>
<translation>100 мс</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="269"/>
<source>500 ms</source>
<translation>500 мс</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="274"/>
<source>1 s</source>
<translation>1 с</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="291"/>
<source>Pos: (200.23, 300.23, 400.23, 234, 343 ,343)</source>
<translation>Поз: (200.23, 300.23, 400.23, 234, 343 ,343)</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="317"/>
<source>Type</source>
<translation>Тип</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="322"/>
<source>Name</source>
<translation>Име</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="327"/>
<source>C</source>
<translation>C</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="332"/>
<source>V</source>
<translation>V</translation>
</message>
<message>
<location filename="../../TaskTrajectory.ui" line="337"/>
<source>A</source>
<translation>A</translation>
</message>
<message>
<location filename="../../TaskTrajectory.cpp" line="44"/>
<source>Trajectory</source>
<translation type="unfinished">Trajectory</translation>
</message>
</context>
<context>
<name>RobotGui::TaskTrajectoryDressUpParameter</name>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.cpp" line="48"/>
<source>Dress Up Parameter</source>
<translation type="unfinished">Dress Up Parameter</translation>
</message>
</context>
<context>
<name>TaskEdge2TracParameter</name>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="14"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="29"/>
<source>Hide / Show</source>
<translation>Сакриј / Прикажи</translation>
</message>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="47"/>
<source>Edges: 0</source>
<translation>Ивице: 0</translation>
</message>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="61"/>
<source>Cluster: 0</source>
<translation>Јато: 0</translation>
</message>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="72"/>
<source>Sizing Value:</source>
<translation>Калибриcање Вредноcти:</translation>
</message>
<message>
<location filename="../../TaskEdge2TracParameter.ui" line="103"/>
<source>Use orientation of edge</source>
<translation>Користи оријентацију ивице</translation>
</message>
</context>
<context>
<name>TaskRobotControl</name>
<message>
<location filename="../../TaskRobotControl.ui" line="14"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="37"/>
<source>X+</source>
<translation>X+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="56"/>
<source>Y+</source>
<translation>Y+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="75"/>
<source>Z+</source>
<translation>Z+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="94"/>
<source>A+</source>
<translation>A+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="113"/>
<source>B+</source>
<translation>B+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="132"/>
<source>C+</source>
<translation>C+</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="151"/>
<source>X-</source>
<translation>X-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="170"/>
<source>Y-</source>
<translation>Y-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="189"/>
<source>Z-</source>
<translation>Z-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="208"/>
<source>A-</source>
<translation>A-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="227"/>
<source>B-</source>
<translation>B-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="246"/>
<source>C-</source>
<translation>C-</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="258"/>
<source>Tool 0</source>
<translation>Алатка 0</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="263"/>
<source>Tool</source>
<translation>Алатка</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="268"/>
<source>Base 0</source>
<translation>База 0</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="273"/>
<source>Base</source>
<translation>База</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="278"/>
<source>World</source>
<translation>Свет</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="287"/>
<source>50mm / 5°</source>
<translation>50мм / 5°</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="292"/>
<source>20mm / 2°</source>
<translation>20мм / 2°</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="297"/>
<source>10mm / 1°</source>
<translation>10мм / 1°</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="302"/>
<source>5mm / 0.5°</source>
<translation>5mm / 0.5°</translation>
</message>
<message>
<location filename="../../TaskRobotControl.ui" line="307"/>
<source>1mm / 0.1°</source>
<translation>1mm / 0.1°</translation>
</message>
</context>
<context>
<name>TaskRobotMessages</name>
<message>
<location filename="../../TaskRobotMessages.ui" line="14"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskRobotMessages.ui" line="20"/>
<source>clear</source>
<translation>очиcти</translation>
</message>
</context>
<context>
<name>TaskTrajectoryDressUpParameter</name>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="14"/>
<source>Form</source>
<translation>Образац</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="20"/>
<source>Speed & Acceleration:</source>
<translation>Брзина & Убрзање:</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="29"/>
<source>Speed:</source>
<translation>Брзина:</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="55"/>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="92"/>
<source>Use</source>
<translation>Користи</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="66"/>
<source>Accel:</source>
<translation>Убрз:</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="102"/>
<source>Don't change Cont</source>
<translation>Не мењај Наставак</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="107"/>
<source>Continues</source>
<translation>Непрекидно</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="112"/>
<source>Discontinues</source>
<translation>Испрекидано</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="127"/>
<source>Position and Orientation:</source>
<translation>Позиција и Оријентација:</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="136"/>
<source>(0,0,0),(0,0,0)</source>
<translation>(0,0,0),(0,0,0)</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="149"/>
<source>...</source>
<translation>...</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="159"/>
<source>Don't change Position & Orientation</source>
<translation>Не мењај Позицију & Оријентацију</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="164"/>
<source>Use Orientation</source>
<translation>Користи Оријентацију</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="169"/>
<source>Add Position</source>
<translation>Додај Позицију</translation>
</message>
<message>
<location filename="../../TaskTrajectoryDressUpParameter.ui" line="174"/>
<source>Add Orientation</source>
<translation>Додај Оријентацију</translation>
</message>
</context>
<context>
<name>Workbench</name>
<message>
<location filename="../../Workbench.cpp" line="49"/>
<source>Robot</source>
<translation>Робот</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="50"/>
<source>Insert Robots</source>
<translation>Уметни Робота</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="51"/>
<source>&Robot</source>
<translation>&Робот</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="52"/>
<source>Export trajectory</source>
<translation>Извези путању</translation>
</message>
</context>
</TS><|fim▁end|> |