prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>issue-3424.rs<|end_file_name|><|fim▁begin|>// xfail-fast
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// rustc --test ignores2.rs && ./ignores2
extern mod std;
use core::path::{Path};
type rsrc_loader = ~fn(path: &Path) -> result::Result<~str, ~str>;
#[test]
fn tester()
{
let loader: rsrc_loader = |_path| {result::Ok(~"more blah")};<|fim▁hole|> let path = path::from_str("blah");
assert!(loader(&path).is_ok());
}
pub fn main() {}<|fim▁end|>
| |
<|file_name|>ir.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::{Location, Named, WithLocation};
use graphql_syntax::{FloatValue, OperationKind};
use interner::StringKey;
use schema::{FieldID, Type, TypeReference};
use schema::{SDLSchema, Schema};
use std::fmt;
use std::sync::Arc;
// Definitions
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum ExecutableDefinition {
Operation(OperationDefinition),
Fragment(FragmentDefinition),
}
impl ExecutableDefinition {
pub fn has_directive(&self, directive_name: StringKey) -> bool {
match self {
ExecutableDefinition::Operation(node) => node
.directives
.iter()
.any(|d| d.name.item == directive_name),
ExecutableDefinition::Fragment(node) => node
.directives
.iter()
.any(|d| d.name.item == directive_name),
}
}
pub fn name_with_location(&self) -> WithLocation<StringKey> {
match self {
ExecutableDefinition::Operation(node) => node.name,
ExecutableDefinition::Fragment(node) => node.name,
}
}
}
/// A fully-typed mutation, query, or subscription definition
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct OperationDefinition {
pub kind: OperationKind,
pub name: WithLocation<StringKey>,
pub type_: Type,
pub variable_definitions: Vec<VariableDefinition>,
pub directives: Vec<Directive>,
pub selections: Vec<Selection>,
}
impl OperationDefinition {
pub fn is_query(&self) -> bool {
self.kind == OperationKind::Query
}
pub fn is_mutation(&self) -> bool {
self.kind == OperationKind::Mutation
}
pub fn is_subscription(&self) -> bool {
self.kind == OperationKind::Subscription
}
}
/// A fully-typed fragment definition
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct FragmentDefinition {
pub name: WithLocation<StringKey>,
pub variable_definitions: Vec<VariableDefinition>,
pub used_global_variables: Vec<VariableDefinition>,
pub type_condition: Type,
pub directives: Vec<Directive>,
pub selections: Vec<Selection>,
}
/// A variable definition of an operation or fragment
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct VariableDefinition {
pub name: WithLocation<StringKey>,
pub type_: TypeReference,
pub default_value: Option<ConstantValue>,
pub directives: Vec<Directive>,
}
impl VariableDefinition {
pub fn has_non_null_default_value(&self) -> bool {
match &self.default_value {
Some(value) => value.is_non_null(),
_ => false,
}
}
}
impl Named for VariableDefinition {
fn name(&self) -> StringKey {
self.name.item
}
}
// Selections
/// A selection within an operation or fragment
#[derive(Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
pub enum Selection {
FragmentSpread(Arc<FragmentSpread>),
InlineFragment(Arc<InlineFragment>),
LinkedField(Arc<LinkedField>),
ScalarField(Arc<ScalarField>),
Condition(Arc<Condition>),
}
impl Selection {
/// Get selection directives
/// This method will panic if called on the Selection::Condition
pub fn directives(&self) -> &[Directive] {
match self {
Selection::FragmentSpread(node) => &node.directives,
Selection::InlineFragment(node) => &node.directives,
Selection::ScalarField(node) => &node.directives,
Selection::LinkedField(node) => &node.directives,
Selection::Condition(_) => unreachable!("Unexpected `Condition` selection."),
}
}
/// Update Selection directives
/// This method will panic if called on the Selection::Condition
pub fn set_directives(&mut self, directives: Vec<Directive>) {
match self {
Selection::FragmentSpread(node) => {
Arc::make_mut(node).directives = directives;
}
Selection::InlineFragment(node) => {
Arc::make_mut(node).directives = directives;
}
Selection::ScalarField(node) => {
Arc::make_mut(node).directives = directives;
}
Selection::LinkedField(node) => {
Arc::make_mut(node).directives = directives;
}
Selection::Condition(_) => unreachable!("Unexpected `Condition` selection."),
};
}
}
impl fmt::Debug for Selection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Selection::FragmentSpread(node) => f.write_fmt(format_args!("{:#?}", node)),
Selection::InlineFragment(node) => f.write_fmt(format_args!("{:#?}", node)),
Selection::LinkedField(node) => f.write_fmt(format_args!("{:#?}", node)),
Selection::ScalarField(node) => f.write_fmt(format_args!("{:#?}", node)),
Selection::Condition(node) => f.write_fmt(format_args!("{:#?}", node)),
}
}
}
/// ... Name
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct FragmentSpread {
pub fragment: WithLocation<StringKey>,
pub arguments: Vec<Argument>,
pub directives: Vec<Directive>,
}
/// ... SelectionSet
/// ... on Type SelectionSet
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct InlineFragment {
pub type_condition: Option<Type>,
pub directives: Vec<Directive>,
pub selections: Vec<Selection>,
}
/// Name Arguments? SelectionSet
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct LinkedField {
pub alias: Option<WithLocation<StringKey>>,
pub definition: WithLocation<FieldID>,
pub arguments: Vec<Argument>,
pub directives: Vec<Directive>,
pub selections: Vec<Selection>,
}
impl LinkedField {
pub fn alias_or_name(&self, schema: &SDLSchema) -> StringKey {
if let Some(name) = self.alias {
name.item<|fim▁hole|> schema.field(self.definition.item).name
}
}
pub fn alias_or_name_location(&self) -> Location {
if let Some(name) = self.alias {
name.location
} else {
self.definition.location
}
}
}
/// Name Arguments?
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct ScalarField {
pub alias: Option<WithLocation<StringKey>>,
pub definition: WithLocation<FieldID>,
pub arguments: Vec<Argument>,
pub directives: Vec<Directive>,
}
impl ScalarField {
pub fn alias_or_name(&self, schema: &SDLSchema) -> StringKey {
if let Some(name) = self.alias {
name.item
} else {
schema.field(self.definition.item).name
}
}
pub fn alias_or_name_location(&self) -> Location {
if let Some(name) = self.alias {
name.location
} else {
self.definition.location
}
}
}
/// https://spec.graphql.org/June2018/#sec--skip
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Condition {
pub selections: Vec<Selection>,
pub value: ConditionValue,
pub passing_value: bool,
}
// Associated Types
/// @ Name Arguments?
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Directive {
pub name: WithLocation<StringKey>,
pub arguments: Vec<Argument>,
}
impl Named for Directive {
fn name(&self) -> StringKey {
self.name.item
}
}
/// Name : Value
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Argument {
pub name: WithLocation<StringKey>,
pub value: WithLocation<Value>,
}
impl Named for Argument {
fn name(&self) -> StringKey {
self.name.item
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Value {
Constant(ConstantValue),
Variable(Variable),
List(Vec<Value>),
Object(Vec<Argument>),
}
impl Value {
/// If the value is a constant string literal, return the value, otherwise None.
pub fn get_string_literal(&self) -> Option<StringKey> {
if let Value::Constant(ConstantValue::String(val)) = self {
Some(*val)
} else {
None
}
}
/// Return the constant string literal of this value.
/// Panics if the value is not a constant string literal.
pub fn expect_string_literal(&self) -> StringKey {
self.get_string_literal().unwrap_or_else(|| {
panic!("expected a string literal, got {:?}", self);
})
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Variable {
pub name: WithLocation<StringKey>,
pub type_: TypeReference,
}
/// Name : Value[Const]
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct ConstantArgument {
pub name: WithLocation<StringKey>,
pub value: WithLocation<ConstantValue>,
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum ConstantValue {
Int(i64),
Float(FloatValue),
String(StringKey),
Boolean(bool),
Null(),
Enum(StringKey),
List(Vec<ConstantValue>),
Object(Vec<ConstantArgument>),
}
impl ConstantValue {
pub fn is_null(&self) -> bool {
matches!(self, ConstantValue::Null())
}
pub fn is_non_null(&self) -> bool {
!self.is_null()
}
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum ConditionValue {
Constant(bool),
Variable(Variable),
}<|fim▁end|>
|
} else {
|
<|file_name|>stack.py<|end_file_name|><|fim▁begin|>from yowsup.stacks import YowStack
from .layer import EchoLayer
from yowsup.layers import YowLayerEvent
from yowsup.layers.auth import YowCryptLayer, YowAuthenticationProtocolLayer, AuthError
from yowsup.layers.coder import YowCoderLayer
from yowsup.layers.network import YowNetworkLayer
from yowsup.layers.protocol_messages import YowMessagesProtocolLayer
from yowsup.layers.protocol_media import YowMediaProtocolLayer
from yowsup.layers.stanzaregulator import YowStanzaRegulator
from yowsup.layers.protocol_receipts import YowReceiptProtocolLayer
from yowsup.layers.protocol_acks import YowAckProtocolLayer
from yowsup.layers.logger import YowLoggerLayer
from yowsup.layers.protocol_iq import YowIqProtocolLayer
from yowsup.layers.protocol_calls import YowCallsProtocolLayer
from yowsup.layers import YowParallelLayer
class YowsupEchoStack(object):<|fim▁hole|> if encryptionEnabled:
from yowsup.layers.axolotl import YowAxolotlLayer
layers = (
EchoLayer,
YowParallelLayer([YowAuthenticationProtocolLayer, YowMessagesProtocolLayer, YowReceiptProtocolLayer, YowAckProtocolLayer, YowMediaProtocolLayer, YowIqProtocolLayer, YowCallsProtocolLayer]),
YowAxolotlLayer,
YowLoggerLayer,
YowCoderLayer,
YowCryptLayer,
YowStanzaRegulator,
YowNetworkLayer
)
else:
layers = (
EchoLayer,
YowParallelLayer([YowAuthenticationProtocolLayer, YowMessagesProtocolLayer, YowReceiptProtocolLayer, YowAckProtocolLayer, YowMediaProtocolLayer, YowIqProtocolLayer, YowCallsProtocolLayer]),
YowLoggerLayer,
YowCoderLayer,
YowCryptLayer,
YowStanzaRegulator,
YowNetworkLayer
)
self.stack = YowStack(layers)
self.stack.setCredentials(credentials)
def start(self):
self.stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
self.stack.loop()
except AuthError as e:
print("Authentication Error: %s" % e.message)<|fim▁end|>
|
def __init__(self, credentials, encryptionEnabled = False):
|
<|file_name|>selectplace.py<|end_file_name|><|fim▁begin|>#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2003-2006 Donald N. Allingham
# Copyright (C) 2009-2010 Gary Burton
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
<|fim▁hole|>#
# internationalization
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from ..views.treemodels.placemodel import PlaceListModel
from .baseselector import BaseSelector
#-------------------------------------------------------------------------
#
# SelectPlace
#
#-------------------------------------------------------------------------
class SelectPlace(BaseSelector):
def _local_init(self):
"""
Perform local initialisation for this class
"""
self.width_key = 'interface.place-sel-width'
self.height_key = 'interface.place-sel-height'
def get_window_title(self):
return _("Select Place")
def get_model_class(self):
return PlaceListModel
def get_column_titles(self):
return [
(_('Title'), 350, BaseSelector.TEXT, 0),
(_('ID'), 75, BaseSelector.TEXT, 1),
(_('Street'), 75, BaseSelector.TEXT, 2),
(_('Locality'), 75, BaseSelector.TEXT, 3),
(_('City'), 75, BaseSelector.TEXT, 4),
(_('County'), 75, BaseSelector.TEXT, 5),
(_('State'), 75, BaseSelector.TEXT, 6),
(_('Country'), 75, BaseSelector.TEXT, 7),
(_('Parish'), 75, BaseSelector.TEXT, 9),
]
def get_from_handle_func(self):
return self.db.get_place_from_handle<|fim▁end|>
|
#-------------------------------------------------------------------------
|
<|file_name|>test_tokenize.py<|end_file_name|><|fim▁begin|>import unittest
from broca.tokenize import keyword, util, LemmaTokenizer
class KeywordTokenizeTests(unittest.TestCase):
def setUp(self):
self.docs = [
'This cat dog is running happy.',
'This cat dog runs sad.'
]
def test_overkill(self):
expected_t_docs = [
['cat dog', 'run', 'happy'],
['cat dog', 'run', 'sad']
]
t_docs = keyword.OverkillTokenizer(lemmatize=True,
min_count=1,
threshold=0.1).tokenize(self.docs)
self.assertEqual(t_docs, expected_t_docs)
def test_rake(self):
expected_t_docs = [
['cat dog', 'running happy'],
['cat dog runs sad']
]
t_docs = keyword.RAKETokenizer().tokenize(self.docs)
# Order not necessarily preserved
for i, output in enumerate(t_docs):
self.assertEqual(set(output), set(expected_t_docs[i]))
def test_apriori(self):
expected_t_docs = [
['cat dog'],
['cat dog']
]
t_docs = keyword.AprioriTokenizer().tokenize(self.docs)
self.assertEqual(t_docs, expected_t_docs)
def test_pos(self):
expected_t_docs = [
['cat dog'],
['cat dog']
]
t_docs = keyword.POSTokenizer().tokenize(self.docs)
self.assertEqual(t_docs, expected_t_docs)
def test_overkill_parallel(self):
expected_t_docs = [
['cat dog', 'run', 'happy'],
['cat dog', 'run', 'sad']
]
t_docs = keyword.OverkillTokenizer(lemmatize=True,
min_count=1,
threshold=0.1,
n_jobs=2).tokenize(self.docs)
self.assertEqual(t_docs, expected_t_docs)
def test_rake_parallel(self):
expected_t_docs = [
['cat dog', 'running happy'],
['cat dog runs sad']
]
t_docs = keyword.RAKETokenizer(n_jobs=-1).tokenize(self.docs)
# Order not necessarily preserved
for i, output in enumerate(t_docs):
self.assertEqual(set(output), set(expected_t_docs[i]))
class TokenizeTests(unittest.TestCase):
def setUp(self):
self.docs = [
'This cat dog is running happy.',
'This cat dog runs sad.'
]
def test_lemma(self):
expected_t_docs = [
['cat', 'dog', 'run', 'happy', '.'],
['cat', 'dog', 'run', 'sad', '.']
]
t_docs = LemmaTokenizer().tokenize(self.docs)
self.assertEqual(t_docs, expected_t_docs)
def test_prune(self):
t_docs = [<|fim▁hole|> expected_t_docs = [
['cat dog', 'happy', 'dog', 'dog'],
['cat dog', 'sad']
]
t_docs = util.prune(t_docs)
self.assertEqual(t_docs, expected_t_docs)<|fim▁end|>
|
['cat', 'cat dog', 'happy', 'dog', 'dog'],
['cat', 'cat dog', 'sad']
]
|
<|file_name|>04da9abf37e2_add_post_media.py<|end_file_name|><|fim▁begin|>"""add post media
Revision ID: 04da9abf37e2
Revises: 2e3a2882e5a4
Create Date: 2017-08-08 15:15:50.911420
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '04da9abf37e2'
down_revision = '2e3a2882e5a4'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('accounts', sa.Column('policy_keep_media', sa.Boolean(), server_default='FALSE', nullable=False))
op.add_column('posts', sa.Column('has_media', sa.Boolean(), server_default='FALSE', nullable=False))
# ### end Alembic commands ###<|fim▁hole|>def downgrade():
op.drop_column('posts', 'has_media')
op.drop_column('accounts', 'policy_keep_media')<|fim▁end|>
| |
<|file_name|>login.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { UserService, LoginCallback, LoggedInCallback } from '../services/user.service';
@Component({
selector: 'login',
styleUrls: ['./login.component.css'],
templateUrl: './login.component.html'
})
export class LoginComponent implements OnInit, LoginCallback, LoggedInCallback {
public user = {username: '', password: '', changePassword: ''};
public errorMessage: string;
public newPasswordRequired: boolean = false;
constructor(private router: Router, public userService: UserService) {
}
public ngOnInit() {
this.errorMessage = null;
this.userService.isAuthenticated(this);
}
public redirect(pagename: string) {
this.router.navigate(['/' + pagename]);
}<|fim▁hole|> public isLoggedIn(message: string, isLoggedIn: boolean) {
if (isLoggedIn) {
this.logout();
// console.log('user is already logged in. Off to the home page...');
// this.redirect('home');
}
console.log('user is not already logged in');
}
public loginCallback(message: string, result: any) {
console.log('loginCallback: message: ' + message + '. result: ' + result);
this.newPasswordRequired = false;
if (message != null) {
this.errorMessage = message;
console.log('result: ' + message);
} else if (result && result === 'New password required') {
this.errorMessage = 'AWS requires a new password';
console.log('AWS requires a new password');
this.newPasswordRequired = true;
} else {
console.log('result: ' + message);
this.redirect('home');
}
}
public login() {
if (this.user.username == null || this.user.password == null) {
this.errorMessage = 'All fields are required';
return;
}
this.errorMessage = null;
this.userService.login(this.user.username, this.user.password, this.user.changePassword, this);
}
public logout() {
this.userService.logout();
}
}<|fim▁end|>
| |
<|file_name|>selectors.ts<|end_file_name|><|fim▁begin|>import { formValueSelector, getFormValues } from 'redux-form';
import { createSelector } from 'reselect';
import { BookingProps } from '@waldur/booking/types';
import { getOfferingComponentsFilter } from '@waldur/marketplace/common/registry';
import { OfferingComponent } from '@waldur/marketplace/types';
import { RootState } from '@waldur/store/reducers';
import {
isOwnerOrStaff,
getCustomer,
getUser,
} from '@waldur/workspace/selectors';
import { FORM_ID, DRAFT } from './constants';
import { PlanFormData } from './types';
import { formatComponents } from './utils';
export const getOffering = (state: RootState) => state.marketplace.offering;
export const getStep = (state: RootState) => getOffering(state).step;
export const isLoading = (state: RootState) => getOffering(state).loading;
export const isLoaded = (state: RootState) => getOffering(state).loaded;
export const isErred = (state: RootState) => getOffering(state).erred;
export const getCategories = (state: RootState) =>
getOffering(state).categories;
export const getOfferingComponents = (state: RootState, type) =>
getOffering(state).plugins[type].components;
export const getOfferingLimits = (state: RootState, type) =>
getOffering(state).plugins[type].available_limits;
export const getForm = formValueSelector(FORM_ID);
export const getComponents = (
state: RootState,
type: string,
): OfferingComponent[] => {
const builtinComponents = getOfferingComponents(state, type);
const builtinTypes: string[] = builtinComponents.map((c) => c.type);
const formComponents: OfferingComponent[] = formatComponents(
getForm(state, 'components') || [],
);
let components = [
...builtinComponents,
...formComponents.filter((c) => !builtinTypes.includes(c.type)),
];
const offeringComponentsFilter = getOfferingComponentsFilter(type);
if (offeringComponentsFilter) {
const formData = getFormValues(FORM_ID)(state);
components = offeringComponentsFilter(formData, components);
}
return components;
};
export const getTypeLabel = (state: RootState): string => {
const option = getForm(state, 'type');
if (option) {
return option.label;
}
};
export const getType = (state: RootState): string => {
const option = getForm(state, 'type');
if (option) {
return option.value;
}
};
export const getCategory = (state: RootState) => getForm(state, 'category');
export const getAttributes = (state: RootState) => getForm(state, 'attributes');
export const getPlans = (state): PlanFormData[] => getForm(state, 'plans');
export const getPlanData = (state: RootState, planPath: string): PlanFormData =>
getForm(state, planPath);
export const getPlanPrice = (state: RootState, planPath: string) => {
const planData = getPlanData(state, planPath);
if (planData && planData.quotas && planData.prices) {
const type = getType(state);
const components = (type ? getComponents(state, type) : [])
.filter((component) => component.billing_type === 'fixed')
.map((component) => component.type);
const keys = Object.keys(planData.quotas).filter(
(key) => components.indexOf(key) !== -1,
);
return keys.reduce(
(total, item) =>
total + (planData.quotas[item] || 0) * (planData.prices[item] || 0),
0,
);
}
return 0;
};
export const isOfferingManagementDisabled = createSelector(
isOwnerOrStaff,
getOffering,
getCustomer,
getUser,
(ownerOrStaff, offeringState, customer, user) => {
if (!customer) {
return false;
}
if (!customer.is_service_provider) {
return true;
}
if (!ownerOrStaff) {
return true;<|fim▁hole|> offering &&
offering.state &&
offering.state !== DRAFT &&
!user.is_staff
) {
return true;
}
},
);
export const getSchedules = (state: RootState) =>
getForm(state, 'schedules') as BookingProps[];<|fim▁end|>
|
}
const offering = offeringState.offering;
if (
|
<|file_name|>query04.rs<|end_file_name|><|fim▁begin|>use timely::order::TotalOrder;
use timely::dataflow::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
use {Arrangements, Experiment, Collections};
// -- $ID$
// -- TPC-H/TPC-R Order Priority Checking Query (Q4)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// o_orderpriority,
// count(*) as order_count
// from
// orders
// where
// o_orderdate >= date ':1'
// and o_orderdate < date ':1' + interval '3' month
// and exists (
// select
// *
// from
// lineitem
// where
// l_orderkey = o_orderkey
// and l_commitdate < l_receiptdate
// )
// group by
// o_orderpriority
// order by
// o_orderpriority;
// :n -1
pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)
where G::Timestamp: Lattice+TotalOrder+Ord {
let lineitems =
collections
.lineitems()
.flat_map(|l| if l.commit_date < l.receipt_date { Some(l.order_key) } else { None })
.distinct_total();
collections
.orders()
.flat_map(|o|
if o.order_date >= ::types::create_date(1993, 7, 1) && o.order_date < ::types::create_date(1993, 10, 1) {
Some((o.order_key, o.order_priority))
}
else { None }
)
.semijoin(&lineitems)
.map(|(_k,v)| v)
.count_total()
// .inspect(|x| println!("{:?}", x))
.probe_with(probe);
}
pub fn query_arranged<G: Scope<Timestamp=usize>>(
scope: &mut G,
probe: &mut ProbeHandle<usize>,
experiment: &mut Experiment,
arrangements: &mut Arrangements,
)
where
G::Timestamp: Lattice+TotalOrder+Ord
{
let arrangements = arrangements.in_scope(scope, experiment);
experiment
.lineitem(scope)
.flat_map(|l| if l.commit_date < l.receipt_date { Some((l.order_key, ())) } else { None })
.distinct_total()
.join_core(&arrangements.order, |_k,&(),o| {
if o.order_date >= ::types::create_date(1993, 7, 1) && o.order_date < ::types::create_date(1993, 10, 1) {
Some(o.order_priority)
}
else {
None
}
})
.count_total()
.probe_with(probe);<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>gallery.routing.ts<|end_file_name|><|fim▁begin|>import {Routes} from '@angular/router';
import {GalleryCatsListComponent} from './list/galleryCatList.component';
import {SettingsResolver} from '../../core/SettingsResolver';<|fim▁hole|>import {GalleryCatFormComponent} from './form/galleryCatForm.component';
export const GalleryRoutes: Routes = [
{
path: '',
children: [
{
path: 'cats',
component: GalleryCatsListComponent
},
{
path: 'cats/:id/edit',
resolve: {settings: SettingsResolver},
component: GalleryCatFormComponent
},
{
path: 'cats/add',
resolve: {settings: SettingsResolver},
component: GalleryCatFormComponent
}
]
}
];<|fim▁end|>
| |
<|file_name|>synch.py<|end_file_name|><|fim▁begin|>"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
One repository to update them all
On mbed.org the mbed SDK is split up in multiple repositories, this script takes
care of updating them all.
"""
import sys
from copy import copy
from os import walk, remove, makedirs
from os.path import join, abspath, dirname, relpath, exists, isfile
from shutil import copyfile
from optparse import OptionParser
import re
import string
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from workspace_tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
from workspace_tools.paths import *
from workspace_tools.utils import run_cmd
MBED_URL = "mbed.org"
MBED_USER = "mbed_official"
changed = []
push_remote = True
quiet = False
commit_msg = ''
# Code that does have a mirror in the mbed SDK
# Tuple data: (repo_name, list_of_code_dirs, [team])
# team is optional - if not specified, the code is published under mbed_official
OFFICIAL_CODE = (
("mbed-dev" , MBED_BASE),
("mbed-rtos", RTOS),
("mbed-dsp" , DSP),
("mbed-rpc" , MBED_RPC),
("lwip" , LWIP_SOURCES+"/lwip"),
("lwip-sys", LWIP_SOURCES+"/lwip-sys"),
("Socket" , LWIP_SOURCES+"/Socket"),
("lwip-eth" , ETH_SOURCES+"/lwip-eth"),
("EthernetInterface", ETH_SOURCES+"/EthernetInterface"),
("USBDevice", USB),
("USBHost" , USB_HOST),
("CellularModem", CELLULAR_SOURCES),
("CellularUSBModem", CELLULAR_USB_SOURCES),
("UbloxUSBModem", UBLOX_SOURCES),
("UbloxModemHTTPClientTest", [TEST_DIR+"/net/cellular/http/common", TEST_DIR+"/net/cellular/http/ubloxusb"]),
("UbloxModemSMSTest", [TEST_DIR+"/net/cellular/sms/common", TEST_DIR+"/net/cellular/sms/ubloxusb"]),
("FATFileSystem", FAT_FS, "mbed-official"),
)
# Code that does have dependencies to libraries should point to
# the latest revision. By default, they point to a specific revision.
CODE_WITH_DEPENDENCIES = (
# Libraries
"EthernetInterface",
# RTOS Examples
"rtos_basic",
"rtos_isr",
"rtos_mail",
"rtos_mutex",
"rtos_queue",
"rtos_semaphore",
"rtos_signals",
"rtos_timer",
# Net Examples
"TCPEchoClient",
"TCPEchoServer",
"TCPSocket_HelloWorld",
"UDPSocket_HelloWorld",
"UDPEchoClient",
"UDPEchoServer",
"BroadcastReceive",
"BroadcastSend",
# mbed sources
"mbed-src-program",
)
# A list of regular expressions that will be checked against each directory
# name and skipped if they match.
IGNORE_DIRS = (
)
IGNORE_FILES = (
'COPYING',
'\.md',
"\.lib",
"\.bld"
)
def ignore_path(name, reg_exps):
for r in reg_exps:
if re.search(r, name):
return True
return False
class MbedRepository:
@staticmethod
def run_and_print(command, cwd):
stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
print(stdout)
def __init__(self, name, team = None):
self.name = name
self.path = join(MBED_ORG_PATH, name)
if team is None:
self.url = "http://" + MBED_URL + "/users/" + MBED_USER + "/code/%s/"
else:
self.url = "http://" + MBED_URL + "/teams/" + team + "/code/%s/"
if not exists(self.path):
# Checkout code
if not exists(MBED_ORG_PATH):
makedirs(MBED_ORG_PATH)
self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
else:
# Update
self.run_and_print(['hg', 'pull'], cwd=self.path)
self.run_and_print(['hg', 'update'], cwd=self.path)
def publish(self):<|fim▁hole|> stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
if stdout == '':
print "No changes"
return False
print stdout
if quiet:
commit = 'Y'
else:
commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
if commit == 'Y':
args = ['hg', 'commit', '-u', MBED_ORG_USER]
if commit_msg:
args = args + ['-m', commit_msg]
self.run_and_print(args, cwd=self.path)
if push_remote:
self.run_and_print(['hg', 'push'], cwd=self.path)
return True
# Check if a file is a text file or a binary file
# Taken from http://code.activestate.com/recipes/173220/
text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
_null_trans = string.maketrans("", "")
def is_text_file(filename):
block_size = 1024
def istext(s):
if "\0" in s:
return 0
if not s: # Empty files are considered text
return 1
# Get the non-text characters (maps a character to itself then
# use the 'remove' option to get rid of the text characters.)
t = s.translate(_null_trans, text_characters)
# If more than 30% non-text characters, then
# this is considered a binary file
if float(len(t))/len(s) > 0.30:
return 0
return 1
with open(filename) as f:
res = istext(f.read(block_size))
return res
# Return the line ending type for the given file ('cr' or 'crlf')
def get_line_endings(f):
examine_size = 1024
try:
tf = open(f, "rb")
lines, ncrlf = tf.readlines(examine_size), 0
tf.close()
for l in lines:
if l.endswith("\r\n"):
ncrlf = ncrlf + 1
return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
except:
return 'cr'
# Copy file to destination, but preserve destination line endings if possible
# This prevents very annoying issues with huge diffs that appear because of
# differences in line endings
def copy_with_line_endings(sdk_file, repo_file):
if not isfile(repo_file):
copyfile(sdk_file, repo_file)
return
is_text = is_text_file(repo_file)
if is_text:
sdk_le = get_line_endings(sdk_file)
repo_le = get_line_endings(repo_file)
if not is_text or sdk_le == repo_le:
copyfile(sdk_file, repo_file)
else:
print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
f = open(sdk_file, "rb")
data = f.read()
f.close()
f = open(repo_file, "wb")
data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n','\r\n')
f.write(data)
f.close()
def visit_files(path, visit):
for root, dirs, files in walk(path):
# Ignore hidden directories
for d in copy(dirs):
full = join(root, d)
if d.startswith('.'):
dirs.remove(d)
if ignore_path(full, IGNORE_DIRS):
print "Skipping '%s'" % full
dirs.remove(d)
for file in files:
if ignore_path(file, IGNORE_FILES):
continue
visit(join(root, file))
def update_repo(repo_name, sdk_paths, team_name):
    # Synchronize one mbed.org repository with the given SDK source dirs:
    # mirror files from the SDK into the repo, delete repo files that no
    # longer exist in any SDK path, then publish the result.
    repo = MbedRepository(repo_name, team_name)
    # copy files from mbed SDK to mbed_official repository
    def visit_mbed_sdk(sdk_file):
        # `sdk_path` is the loop variable below; Python closures bind late,
        # so this sees the SDK path of the walk currently in progress.
        repo_file = join(repo.path, relpath(sdk_file, sdk_path))
        repo_dir = dirname(repo_file)
        if not exists(repo_dir):
            makedirs(repo_dir)
        copy_with_line_endings(sdk_file, repo_file)
    for sdk_path in sdk_paths:
        visit_files(sdk_path, visit_mbed_sdk)
    # remove repository files that do not exist in the mbed SDK
    def visit_repo(repo_file):
        for sdk_path in sdk_paths:
            sdk_file = join(sdk_path, relpath(repo_file, repo.path))
            if exists(sdk_file):
                break
        else:
            # for/else: no SDK path contained the file -> delete it.
            remove(repo_file)
            print "remove: %s" % repo_file
    visit_files(repo.path, visit_repo)
    if repo.publish():
        # Record repositories that actually had changes pushed.
        changed.append(repo_name)
def update_code(repositories):
for r in repositories:
repo_name, sdk_dir = r[0], r[1]
team_name = r[2] if len(r) == 3 else None
print '\n=== Updating "%s" ===' % repo_name
sdk_dirs = [sdk_dir] if type(sdk_dir) != type([]) else sdk_dir
update_repo(repo_name, sdk_dirs, team_name)
def update_single_repo(repo):
repos = [r for r in OFFICIAL_CODE if r[0] == repo]
if not repos:
print "Repository '%s' not found" % repo
else:
update_code(repos)
def update_dependencies(repositories):
    # Re-point every dependency reference in the given repositories at the
    # latest revision of the dependency, then publish each repository.
    for repo_name in repositories:
        print '\n=== Updating "%s" ===' % repo_name
        # NOTE(review): MbedRepository is constructed without a team here,
        # unlike update_repo() -- confirm the team parameter is optional.
        repo = MbedRepository(repo_name)
        # point to the latest libraries
        def visit_repo(repo_file):
            # A dependency file holds a single "url#revision" reference;
            # truncating after the last '/' drops the pinned revision so
            # the latest one is picked up.
            with open(repo_file, "r") as f:
                url = f.read()
            with open(repo_file, "w") as f:
                f.write(url[:(url.rindex('/')+1)])
        # NOTE(review): visit_files is invoked with four arguments here --
        # confirm its signature accepts the extra ignore/extension filters
        # (MBED_REPO_EXT presumably restricts the walk to dependency files).
        visit_files(repo.path, visit_repo, None, MBED_REPO_EXT)
        if repo.publish():
            changed.append(repo_name)
def update_mbed():
    # Release a freshly built mbed library from BUILD_DIR (no team owner).
    update_repo("mbed", [join(BUILD_DIR, "mbed")], None)
def do_sync(options):
global push_remote, quiet, commit_msg, changed
push_remote = not options.nopush
quiet = options.quiet
commit_msg = options.msg
chnaged = []
if options.code:
update_code(OFFICIAL_CODE)
if options.dependencies:
update_dependencies(CODE_WITH_DEPENDENCIES)
if options.mbed:
update_mbed()
if options.repo:
update_single_repo(options.repo)
if changed:
print "Repositories with changes:", changed
return changed
if __name__ == '__main__':
    # Command-line entry point: each flag enables one synchronization
    # step; see do_sync() for how they combine.
    parser = OptionParser()
    parser.add_option("-c", "--code",
                      action="store_true", default=False,
                      help="Update the mbed_official code")
    parser.add_option("-d", "--dependencies",
                      action="store_true", default=False,
                      help="Update the mbed_official code dependencies")
    parser.add_option("-m", "--mbed",
                      action="store_true", default=False,
                      help="Release a build of the mbed library")
    parser.add_option("-n", "--nopush",
                      action="store_true", default=False,
                      help="Commit the changes locally only, don't push them")
    parser.add_option("", "--commit_message",
                      action="store", type="string", default='', dest='msg',
                      help="Commit message to use for all the commits")
    parser.add_option("-r", "--repository",
                      action="store", type="string", default='', dest='repo',
                      help="Synchronize only the given repository")
    parser.add_option("-q", "--quiet",
                      action="store_true", default=False,
                      help="Don't ask for confirmation before commiting or pushing")
    # Positional args are accepted but unused.
    (options, args) = parser.parse_args()
    do_sync(options)
|
# The maintainer has to evaluate the changes first and explicitly accept them
self.run_and_print(['hg', 'addremove'], cwd=self.path)
|
<|file_name|>test_api_check.py<|end_file_name|><|fim▁begin|>#---------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#---------------------------------------------------------------------------------------------
import unittest
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
# pylint: disable=line-too-long
from azure.cli.command_modules.resource._validators import (validate_resource_type,
validate_parent,
_resolve_api_version as resolve_api_version)
class TestApiCheck(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def setUp(self):
pass
def tearDown(self):
pass
    def test_resolve_api_provider_backup(self):
        """ Verifies provider is used as backup if api-version not specified. """
        # 'Mock/test' exposes 2016-01-01-preview and 2016-01-01; the most
        # recent non-preview version is expected to win.
        resource_type = validate_resource_type('Mock/test')
        self.assertEqual(resolve_api_version(self._get_mock_client(), resource_type), "2016-01-01")
<|fim▁hole|> resource_type = validate_resource_type('Mock/bar')
parent = validate_parent('foo/testfoo123')
self.assertEqual(
resolve_api_version(self._get_mock_client(), resource_type, parent),
"1999-01-01"
)
    def test_resolve_api_all_previews(self):
        """ Verifies most recent preview version returned only if there are no non-preview versions. """
        # 'Mock/preview' exposes only preview versions, so the newest
        # preview (2005-01-01-preview) is the expected fallback.
        resource_type = validate_resource_type('Mock/preview')
        self.assertEqual(
            resolve_api_version(self._get_mock_client(), resource_type),
            "2005-01-01-preview"
        )
    def _get_mock_client(self):
        # Build a mocked resource client whose single provider exposes the
        # resource types the tests above resolve against.
        client = MagicMock()
        provider = MagicMock()
        provider.resource_types = [
            self._get_mock_resource_type('skip', ['2000-01-01-preview', '2000-01-01']),
            self._get_mock_resource_type('test', ['2016-01-01-preview', '2016-01-01']),
            self._get_mock_resource_type('foo/bar', ['1999-01-01-preview', '1999-01-01']),
            self._get_mock_resource_type('preview', ['2005-01-01-preview', '2004-01-01-preview'])
        ]
        # providers.get(...) always returns this one provider.
        client.providers.get.return_value = provider
        return client
    def _get_mock_resource_type(self, name, api_versions): #pylint: disable=no-self-use
        # Mock carrying just the two attributes _resolve_api_version reads.
        rt = MagicMock()
        rt.resource_type = name
        rt.api_versions = api_versions
        return rt
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
def test_resolve_api_provider_with_parent_backup(self):
""" Verifies provider (with parent) is used as backup if api-version not specified. """
|
<|file_name|>old-suffixes-are-really-forbidden.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>
fn main() {
let a = 1_is; //~ ERROR invalid suffix
let b = 2_us; //~ ERROR invalid suffix
}<|fim▁end|>
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>layer_test.py<|end_file_name|><|fim▁begin|>import unittest
from .layer import *
class L(Layer):
    # Two-level test hierarchy: L2 declares a dependency on L1.
    L1 = LayerSpec("l1", "layer-1")
    L2 = LayerSpec("l2", "layer-2", [L1])
class TestLayer(unittest.TestCase):
    def testFromId(self):
        # FromId should round-trip each spec's id_ back to the spec itself.
        self.assertEqual(L.FromId(L.L1.id_), L.L1)
        self.assertEqual(L.FromId(L.L2.id_), L.L2)
    def testFromName(self):
        # FromName should round-trip each spec's name back to the spec.
        self.assertEqual(L.FromName(L.L1.name), L.L1)
        self.assertEqual(L.FromName(L.L2.name), L.L2)
def testAllLayers(self):<|fim▁hole|> def testIsSublayer(self):
self.assertTrue(L.IsSublayer(L.L1, L.L2))
self.assertFalse(Layer.IsSublayer(L.L2, L.L1))
def testTopLayer(self):
self.assertEqual(L.TopLayer(), L.L2)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
self.assertEqual(set(L.AllLayers()), set((L.L2, L.L1)))
|
<|file_name|>070_review_request_removed.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> return Promise.resolve()
.then(() => sendGitHubRequest(requestReviewRequestRemoved, header))
.then(resp => resp.msg === 'ok' ? Promise.resolve() : Promise.reject(resp))
.catch(err => Promise.reject(err));
}<|fim▁end|>
|
import { requestReviewRequestRemoved, header } from '../../../webhooks/github/PullRequestEvent';
import { sendGitHubRequest } from '../../../utils/utils';
export default function() {
|
<|file_name|>utfgrid_encode.cpp<|end_file_name|><|fim▁begin|>#include "utfgrid_encode.h"
#include <unordered_map>
#include <glog/logging.h>
#include <jsoncpp/json/value.h>
#include <mapnik/unicode.hpp>
// Visitor converting each alternative of a mapnik value variant into the
// corresponding Json::Value for UTFGrid serialization.
struct value_to_json_visitor {
    Json::Value operator() (const mapnik::value_null& val) {return Json::Value();}
    Json::Value operator() (const mapnik::value_bool& val) {return Json::Value(val);}
    // NOTE(review): value_integer is narrowed through a uint cast,
    // presumably because this jsoncpp build lacks a matching 64-bit
    // overload -- confirm attribute values never exceed the uint range.
    Json::Value operator() (const mapnik::value_integer& val) {return Json::Value(static_cast<uint>(val));}
    Json::Value operator() (const mapnik::value_double& val) {return Json::Value(val);}
    Json::Value operator() (const mapnik::value_unicode_string& val) {
        // Mapnik stores text as ICU unicode strings; emit them as UTF-8.
        std::string utf8_str;
        mapnik::to_utf8(val, utf8_str);
        return Json::Value(utf8_str);
    }
};
// Serialize a mapnik grid view into the UTFGrid JSON format: a "grid" of
// codepoint strings, the ordered "keys" array mapping codepoints back to
// feature keys, and a "data" object with each feature's attributes.
std::string encode_utfgrid(const mapnik::grid_view& utfgrid, uint size) {
    Json::Value root(Json::objectValue);
    Json::Value& jgrid = root["grid"];
    jgrid = Json::Value(Json::arrayValue);
    using lookup_type = mapnik::grid::lookup_type;
    using value_type = mapnik::grid::value_type;
    using feature_type = mapnik::grid::feature_type;
    using keys_type = std::unordered_map<lookup_type, value_type>;
    // key_order preserves first-seen order so index i in "keys" matches
    // codepoint (32 + i, adjusted for the skipped JSON-unsafe codepoints).
    std::vector<lookup_type> key_order;
    keys_type keys;
    const mapnik::grid::feature_key_type& feature_keys = utfgrid.get_feature_keys();
    // UTFGrid codepoints start at 32 (space).
    std::uint16_t codepoint = 32;
    for (uint y = 0; y < utfgrid.height(); y += size) {
        std::string line;
        const value_type* row = utfgrid.get_row(y);
        for (uint x = 0; x < utfgrid.width(); x += size) {
            value_type feature_id = row[x];
            auto feature_itr = feature_keys.find(feature_id);
            lookup_type val;
            if (feature_itr == feature_keys.end()) {
                feature_id = mapnik::grid::base_mask;
            } else {
                val = feature_itr->second;
            }
            auto key_iter = keys.find(val);
            if (key_iter == keys.end()) {
                // Create a new entry for this key. Skip the codepoints that
                // can't be encoded directly in JSON.
                if (codepoint == 34) ++codepoint; // Skip "
                else if (codepoint == 92) ++codepoint; // Skip backslash
                if (feature_id == mapnik::grid::base_mask) {
                    // Unkeyed cells share the empty-string key.
                    keys[""] = codepoint;
                    key_order.push_back("");
                } else {
                    keys[val] = codepoint;
                    key_order.push_back(val);
                }
                // NOTE(review): appends the raw uint16 bytes, so the grid
                // string's byte order follows host endianness -- confirm
                // downstream consumers expect this encoding.
                line.append(reinterpret_cast<char*>(&codepoint), sizeof(codepoint));
                ++codepoint;
            } else {
                line.append(reinterpret_cast<char*>(&key_iter->second), sizeof(key_iter->second));
            }
        }
        jgrid.append(Json::Value(line));
    }
    // Emit the ordered key list.  (These statements were missing from this
    // copy of the file, leaving "keys" empty and the braces unbalanced.)
    Json::Value& jkeys = root["keys"];
    jkeys = Json::Value(Json::arrayValue);
    for (const auto& key_id : key_order) {
        jkeys.append(key_id);
    }
    Json::Value& jdata = root["data"];
    const feature_type& g_features = utfgrid.get_grid_features();
    const std::set<std::string>& attributes = utfgrid.get_fields();
    feature_type::const_iterator feat_end = g_features.end();
    for (const std::string& key_item : key_order)
    {
        if (key_item.empty()) {
            // The empty key marks unkeyed cells; it carries no data.
            continue;
        }
        feature_type::const_iterator feat_itr = g_features.find(key_item);
        if (feat_itr == feat_end) {
            continue;
        }
        bool found = false;
        Json::Value jfeature(Json::objectValue);
        mapnik::feature_ptr feature = feat_itr->second;
        for (const std::string& attr : attributes) {
            value_to_json_visitor val_to_json;
            if (attr == "__id__") {
                // NOTE(review): feature ids are narrowed to uint here --
                // confirm they fit.
                jfeature[attr] = static_cast<uint>(feature->id());
            } else if (feature->has_key(attr)) {
                found = true;
                jfeature[attr] = mapnik::util::apply_visitor(val_to_json, feature->get(attr));
            }
        }
        // Only emit features that contributed at least one attribute.
        if (found) {
            jdata[feat_itr->first] = jfeature;
        }
    }
    return root.toStyledString();
}
|
jkeys = Json::Value(Json::arrayValue);
for (const auto& key_id : key_order) {
jkeys.append(key_id);
|
<|file_name|>interactions.go<|end_file_name|><|fim▁begin|>package customerinsights
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"net/http"
)
// InteractionsClient is the the Azure Customer Insights management API provides a RESTful set of web services that
// interact with Azure Customer Insights service to manage your resources. The API has entities that capture the
// relationship between an end user and the Azure Customer Insights service.
type InteractionsClient struct {
BaseClient
}
// NewInteractionsClient creates an instance of the InteractionsClient client.
func NewInteractionsClient(subscriptionID string) InteractionsClient {
return NewInteractionsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewInteractionsClientWithBaseURI creates an instance of the InteractionsClient client.
func NewInteractionsClientWithBaseURI(baseURI string, subscriptionID string) InteractionsClient {
return InteractionsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates an interaction or updates an existing interaction within a hub.
//
// resourceGroupName is the name of the resource group. hubName is the name of the hub. interactionName is the name of
// the interaction. parameters is parameters supplied to the CreateOrUpdate Interaction operation.
func (client InteractionsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, hubName string, interactionName string, parameters InteractionResourceFormat) (result InteractionsCreateOrUpdateFuture, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: interactionName,
Constraints: []validation.Constraint{{Target: "interactionName", Name: validation.MaxLength, Rule: 128, Chain: nil},
{Target: "interactionName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "interactionName", Name: validation.Pattern, Rule: `^[a-zA-Z][a-zA-Z0-9_]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewErrorWithValidationError(err, "customerinsights.InteractionsClient", "CreateOrUpdate")
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, hubName, interactionName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client InteractionsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, hubName string, interactionName string, parameters InteractionResourceFormat) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"interactionName": autorest.Encode("path", interactionName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-04-26"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsJSON(),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/interactions/{interactionName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client InteractionsClient) CreateOrUpdateSender(req *http.Request) (future InteractionsCreateOrUpdateFuture, err error) {
	sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
	// Wrap the request in a Future so the long-running create/update can be
	// polled; Done issues the initial request through the retrying sender.
	future.Future = azure.NewFuture(req)
	future.req = req
	_, err = future.Done(sender)
	if err != nil {
		return
	}
	// Accept 200 (completed) and 202 (accepted, provisioning continues).
	err = autorest.Respond(future.Response(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted))
	return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client InteractionsClient) CreateOrUpdateResponder(resp *http.Response) (result InteractionResourceFormat, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Get gets information about the specified interaction.
//
// resourceGroupName is the name of the resource group. hubName is the name of the hub. interactionName is the name of<|fim▁hole|> err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client InteractionsClient) GetPreparer(ctx context.Context, resourceGroupName string, hubName string, interactionName string, localeCode string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"interactionName": autorest.Encode("path", interactionName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-04-26"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(localeCode) > 0 {
queryParameters["locale-code"] = autorest.Encode("query", localeCode)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/interactions/{interactionName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client InteractionsClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client InteractionsClient) GetResponder(resp *http.Response) (result InteractionResourceFormat, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListByHub gets all interactions in the hub.
//
// resourceGroupName is the name of the resource group. hubName is the name of the hub. localeCode is locale of
// interaction to retrieve, default is en-us.
func (client InteractionsClient) ListByHub(ctx context.Context, resourceGroupName string, hubName string, localeCode string) (result InteractionListResultPage, err error) {
result.fn = client.listByHubNextResults
req, err := client.ListByHubPreparer(ctx, resourceGroupName, hubName, localeCode)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "ListByHub", nil, "Failure preparing request")
return
}
resp, err := client.ListByHubSender(req)
if err != nil {
result.ilr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "ListByHub", resp, "Failure sending request")
return
}
result.ilr, err = client.ListByHubResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "ListByHub", resp, "Failure responding to request")
}
return
}
// ListByHubPreparer prepares the ListByHub request.
func (client InteractionsClient) ListByHubPreparer(ctx context.Context, resourceGroupName string, hubName string, localeCode string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-04-26"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(localeCode) > 0 {
queryParameters["locale-code"] = autorest.Encode("query", localeCode)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/interactions", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByHubSender sends the ListByHub request. The method will close the
// http.Response Body if it receives an error.
func (client InteractionsClient) ListByHubSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListByHubResponder handles the response to the ListByHub request. The method always
// closes the http.Response Body.
func (client InteractionsClient) ListByHubResponder(resp *http.Response) (result InteractionListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByHubNextResults retrieves the next set of results, if any.
func (client InteractionsClient) listByHubNextResults(lastResults InteractionListResult) (result InteractionListResult, err error) {
req, err := lastResults.interactionListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "listByHubNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByHubSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "listByHubNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByHubResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "listByHubNextResults", resp, "Failure responding to next results request")
}
return
}
// ListByHubComplete enumerates all values, automatically crossing page boundaries as required.
func (client InteractionsClient) ListByHubComplete(ctx context.Context, resourceGroupName string, hubName string, localeCode string) (result InteractionListResultIterator, err error) {
result.page, err = client.ListByHub(ctx, resourceGroupName, hubName, localeCode)
return
}
// SuggestRelationshipLinks suggests relationships to create relationship links.
//
// resourceGroupName is the name of the resource group. hubName is the name of the hub. interactionName is the name of
// the interaction.
func (client InteractionsClient) SuggestRelationshipLinks(ctx context.Context, resourceGroupName string, hubName string, interactionName string) (result SuggestRelationshipLinksResponse, err error) {
req, err := client.SuggestRelationshipLinksPreparer(ctx, resourceGroupName, hubName, interactionName)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "SuggestRelationshipLinks", nil, "Failure preparing request")
return
}
resp, err := client.SuggestRelationshipLinksSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "SuggestRelationshipLinks", resp, "Failure sending request")
return
}
result, err = client.SuggestRelationshipLinksResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.InteractionsClient", "SuggestRelationshipLinks", resp, "Failure responding to request")
}
return
}
// SuggestRelationshipLinksPreparer prepares the SuggestRelationshipLinks request.
func (client InteractionsClient) SuggestRelationshipLinksPreparer(ctx context.Context, resourceGroupName string, hubName string, interactionName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"interactionName": autorest.Encode("path", interactionName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-04-26"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/interactions/{interactionName}/suggestRelationshipLinks", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// SuggestRelationshipLinksSender sends the SuggestRelationshipLinks request. The method will close the
// http.Response Body if it receives an error.
func (client InteractionsClient) SuggestRelationshipLinksSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// SuggestRelationshipLinksResponder handles the response to the SuggestRelationshipLinks request. The method always
// closes the http.Response Body.
func (client InteractionsClient) SuggestRelationshipLinksResponder(resp *http.Response) (result SuggestRelationshipLinksResponse, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|>
|
// the interaction. localeCode is locale of interaction to retrieve, default is en-us.
func (client InteractionsClient) Get(ctx context.Context, resourceGroupName string, hubName string, interactionName string, localeCode string) (result InteractionResourceFormat, err error) {
req, err := client.GetPreparer(ctx, resourceGroupName, hubName, interactionName, localeCode)
if err != nil {
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017-2021 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
#![cfg_attr(feature = "strict", deny(warnings))]
// Clippy lints we want to suppress due to style, or simply too noisy
// and not a priority right now.
#![allow(clippy::missing_safety_doc)]
#![allow(clippy::needless_return)]
#![allow(clippy::redundant_field_names)]<|fim▁hole|>#![allow(clippy::too_many_arguments)]
#![allow(clippy::assign_op_pattern)]
#![allow(clippy::manual_range_contains)]
#![allow(clippy::vec_init_then_push)]
#![allow(clippy::assertions_on_constants)]
#![allow(clippy::let_and_return)]
#![allow(clippy::needless_bool)]
#![allow(clippy::char_lit_as_u8)]
// To be fixed, but remove the noise for now.
#![allow(clippy::collapsible_if)]
#![allow(clippy::collapsible_else_if)]
#![allow(clippy::unnecessary_cast)]
#![allow(clippy::redundant_static_lifetimes)]
#![allow(clippy::bool_comparison)]
#![allow(clippy::for_loops_over_fallibles)]
#![allow(clippy::needless_lifetimes)]
#![allow(clippy::single_match)]
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::ptr_arg)]
#![allow(clippy::new_without_default)]
#![allow(clippy::needless_lifetimes)]
#![allow(clippy::match_ref_pats)]
#![allow(clippy::module_inception)]
#![allow(clippy::needless_range_loop)]
#![allow(clippy::enum_variant_names)]
#![allow(clippy::if_same_then_else)]
#![allow(clippy::match_like_matches_macro)]
#![allow(clippy::extra_unused_lifetimes)]
#![allow(clippy::mixed_case_hex_literals)]
#![allow(clippy::type_complexity)]
#![allow(clippy::nonminimal_bool)]
#![allow(clippy::never_loop)]
#![allow(clippy::large_enum_variant)]
#![allow(clippy::for_loops_over_fallibles)]
#![allow(clippy::explicit_counter_loop)]
#![allow(clippy::branches_sharing_code)]
#![allow(clippy::while_let_loop)]
#![allow(clippy::redundant_pattern_matching)]
#![allow(clippy::field_reassign_with_default)]
#[macro_use]
extern crate nom;
#[macro_use]
extern crate bitflags;
extern crate byteorder;
extern crate crc;
extern crate memchr;
#[macro_use]
extern crate num_derive;
extern crate widestring;
extern crate der_parser;
extern crate kerberos_parser;
extern crate tls_parser;
extern crate x509_parser;
#[macro_use]
extern crate suricata_derive;
#[macro_use]
pub mod log;
#[macro_use]
pub mod core;
#[macro_use]
pub mod common;
pub mod conf;
pub mod jsonbuilder;
#[macro_use]
pub mod applayer;
/// cbindgen:ignore
pub mod frames;
pub mod filecontainer;
pub mod filetracker;
pub mod kerberos;
#[cfg(feature = "lua")]
pub mod lua;
pub mod dns;
pub mod nfs;
pub mod ftp;
pub mod smb;
pub mod krb;
pub mod dcerpc;
pub mod modbus;
pub mod ike;
pub mod snmp;
pub mod ntp;
pub mod tftp;
pub mod dhcp;
pub mod sip;
pub mod rfb;
pub mod mqtt;
pub mod pgsql;
pub mod telnet;
pub mod applayertemplate;
pub mod rdp;
pub mod x509;
pub mod asn1;
pub mod mime;
pub mod ssh;
pub mod http2;
pub mod quic;
pub mod plugin;
pub mod util;
pub mod ffi;<|fim▁end|>
|
#![allow(clippy::len_zero)]
|
<|file_name|>biopet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Wrapper script for starting the Biopet JAR package
#
# This script is written for use with the Conda package manager and is copied
# from the peptide-shaker wrapper. Only the parameters are changed.
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
#
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'Biopet-0.9.0-be7838f2.jar'
default_jvm_mem_opts = ['-Xms512m', '-Xmx2g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the executable name of the Java interpreter."""
    # Prefer $JAVA_HOME/bin/java when it exists and is executable;
    # otherwise fall back to whatever 'java' resolves to on PATH.
    candidate_home = getenv('JAVA_HOME')
    relative_bin = os.path.join('bin', 'java')
    if not candidate_home:
        return 'java'
    full_path = os.path.join(candidate_home, relative_bin)
    if access(full_path, X_OK):
        return full_path
    return 'java'
def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
        (memory_options, prop_options, passthrough_options, exec_dir)
    (The original docstring claimed a 3-tuple; exec_dir is also returned.)
    """
    # mem_opts was referenced below but its initialization was missing
    # from this copy of the file -- restore it.
    mem_opts = []
    prop_opts = []
    pass_args = []
    exec_dir = None
    for arg in argv:
        if arg.startswith('-D'):
            prop_opts.append(arg)
        elif arg.startswith('-XX'):
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith('--exec_dir='):
            exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                # First use of this exec_dir: seed it with a copy of the
                # distribution living next to this wrapper script.
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
        else:
            pass_args.append(arg)
    # In the original shell script the test coded below read:
    # if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of the above shell code fragment
    # it is important to explictly check for equality with None
    # in the second condition, so a null envar value counts as True!
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts
    return (mem_opts, prop_opts, pass_args, exec_dir)
def main():
java = java_executable()
"""
PeptideShaker updates files relative to the path of the jar file.
In a multiuser setting, the option --exec_dir="exec_dir"
can be used as the location for the peptide-shaker distribution.
If the exec_dir dies not exist,
we copy the jar file, lib, and resources to the exec_dir directory.
"""
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()<|fim▁end|>
|
mem_opts = []
|
<|file_name|>extraction4.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import xml.etree.ElementTree as etree
import re
import datetime
#set the output file name for the 'good' data
#needs to be to a structured format - but dump to text for now
#clean_output = 'clean_out.csv'
clean_output = 'clean.txt'
#set the dirty output file where we'll dump the awkward lines
dirty_output = 'dirty_out.txt'
#open the clean output file
f2 = open(clean_output, 'w')
#open the clean output file
f3 = open(dirty_output, 'w')
#probably a better way of doing this - but set up a list of valide months to compare against (maybe move nearer to this code?)
month_list = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August','September', 'October', 'November', 'December']
ref = ("january", "february", "march", "april", "may", "june", "july", "august", "september", "october", "november", "december")
#initialise integer values for month and day
birth_day = 0
birth_month = 0
# First function: cleans out (invisible) ascii chars 132 and 160 from some lines which was causing problems
def remove_non_ascii_1(text):
return ''.join(i for i in text if ord(i)<128) #we need no characters below this normal pronting range
#Second Function - test_line() - split the line into words and return how many there are
# (just used to spot 2-word lines which indicate a day / month line)
def test_line (text):
words = text.split()
num_words = len(words)
#print "one " + str( words[0])
#print "two " + str( words[1])
return num_words
def muso_detail_split(text):
# initialise so we can use it as a flag if fails below
#worked = True
#split the line of text using commas as a delimiter
muso_bits = text.split(',')
try: #try to convert the contents of the last item - to an integer. If it is 1928 or 1957 for example, it should work
birth_year = int(muso_bits [-1])
#Grab everything before the first comma - that seems to be a constant for the name location
muso_name = muso_bits[0]
#split that name into first, middle and surname to be returned individually - using the space as a delimiter
# putting each into a list
muso_name_list = muso_name.split(" ")
muso_forname = muso_name_list[0] #choose the first item in the list - should be the forname
muso_surname = muso_name_list[-1] # choose the last item as the last name
#if there are more than 2 items in the list, assume that the second is a middle name
if len (muso_name_list) > 2:
muso_midname = muso_name_list[1]
else:
muso_midname = ""
#chuck away the first item as we dealt with that as the names at lines 12 - 20 above
#print muso_forname
#print muso_surname
muso_bits.remove(muso_bits[0])
#chuck away the last item - it was the year of birth (line 24)
muso_bits.remove(muso_bits[-1])
#we should be left with the instruments
instrm_list = list(muso_bits)
#that's it all sorted - keep these remaining items as a list of instruments / roles which we'll return as a list
############
# Needs to find and replace / in instrument list (e.g Campise entry)
muso_obj = [muso_forname, muso_midname, muso_surname, birth_year, instrm_list]
except ValueError:
# doesn't end with a single year we can't process it for now = write it out to the dirty file (and mark *** for future reference)
f3.write(str(birth_day) + " " + str(birth_month) +"\n")
f3.write(text + "*** " +"\n")
# return empty list
muso_obj = []
return muso_obj
def create_date(d,m,y):
date1 = datetime.date(y,m,d)
return date1
def date_comp(dc):
for month in ref:
if dc in month:
return ref.index(month) + 1
def find_death(line):
line = line.strip()
list1 = line.split(',')
try:
int_year = int(list1[1])
#print(int_year)
except:
pass
#print list[0]
list1[0] = list1[0].replace(".", " ")
#print list[0]
d_m = list1[0].split(" ")
d_m[0] = d_m[0].replace(".","").lower()
int_month = date_comp(d_m[0])
int_day = d_m[-1]
return str(int_year) + "-" + str(int_month) + "-" + str(int_day)
##################################
# main code starts here #
##################################
# grab the document as an xml object
tree = etree.parse('jazz_birthdays_manual_tidy.xml')
root = tree.getroot()
for child in root:
ignore_flag = False #used to identify elements with sub-elements <ulink> (ie Youtube links) as we don't want those
dod =""
for sub_child in child:
if sub_child is not None:
# if there is a sub-elemnent (which can only be ulink)
# set the flag to true and do nothing more with that line
ignore_flag = True
if not ignore_flag: #so no sub elements - so we need to to more checks
if child.text is not None: #not an empty <para/>
line_text = child.text.encode('utf-8') #encode the text
line_text = line_text.strip() # strip leading and trailing whitespace
line_text = remove_non_ascii_1(line_text) # call the function to clean out ascii chars 132 and 160 from some lines
nw = test_line (line_text)
if nw ==2:
#it can only be a date (as only they have two elements - day / month)
words = line_text.split()
tally = 0
if words[1] in month_list:
#take it that this is a date
# update the global vaiables with day and month * ordinal values*
# We can use these to build up a datetime object for each musician's birth
# (taking the year from the muso's line below
birth_day = int(words [0])
birth_month = month_list.index(words[1]) +1
else:
#take it that it is a musician line (as we've excluded the day / month lines )
find_substr = "(or"
if find_substr in line_text:
f3.write(str(birth_day) + " " + str(birth_month) +"\n")
f3.write(line_text +"\n")
else:
# we need to find death dates and split on those
# treating the second part as the death date
# and the first part as a general musician entry
death_text =""
deceased = re.search ("\(d\.(.*)\)", line_text)
# if "(d." found use that to split the string
if deceased:
split_dec = re.split ("\(d\.(.*)\)", line_text)
line_text = split_dec [0]
death_text = split_dec[1]
muso_parts = muso_detail_split (line_text)
# returned as muso_forname, muso_midname, muso_surname, birth_year, instrm_list
#print len (muso_parts)
if len (muso_parts) > 0:
#for part in muso_parts:
# print part
#print len(muso_parts)
#print muso_parts[3]
dob = create_date (birth_day, birth_month, muso_parts[3])
#dod = create_death_date (death_text)
if deceased:
print death_text
dod = find_death (death_text)<|fim▁hole|>
f2.write (muso_parts[2] + "\t" + muso_parts[0] +"\t" + muso_parts [1] +"\t" + str(dob) + "\t")
for inst in muso_parts [4]:
f2.write (inst + ", ")
#f deceased:
# f2.write ("Deceased \t")
if dod != "":
f2.write(dod)
f2.write("\n")
#f2.write("\n")
#print muso_parts
#for part in muso_parts:
# print part
#f3.write(line_text +"\n")
#print len(child)
# f.close()
f2.close()
f3.close()<|fim▁end|>
| |
<|file_name|>audio.rs<|end_file_name|><|fim▁begin|>//! Audio Functions
//!
//! # Example
//! ```no_run
//! use sdl2::audio::{AudioCallback, AudioSpecDesired};
//! use std::time::Duration;
//!
//! struct SquareWave {
//! phase_inc: f32,
//! phase: f32,
//! volume: f32
//! }
//!
//! impl AudioCallback for SquareWave {
//! type Channel = f32;
//!
//! fn callback(&mut self, out: &mut [f32]) {
//! // Generate a square wave
//! for x in out.iter_mut() {
//! *x = match self.phase {
//! 0.0...0.5 => self.volume,
//! _ => -self.volume
//! };
//! self.phase = (self.phase + self.phase_inc) % 1.0;
//! }
//! }
//! }
//!
//! let sdl_context = sdl2::init().unwrap();
//! let audio_subsystem = sdl_context.audio().unwrap();
//!
//! let desired_spec = AudioSpecDesired {
//! freq: Some(44100),
//! channels: Some(1), // mono
//! samples: None // default sample size
//! };
//!
//! let device = audio_subsystem.open_playback(None, &desired_spec, |spec| {
//! // initialize the audio callback
//! SquareWave {
//! phase_inc: 440.0 / spec.freq as f32,
//! phase: 0.0,
//! volume: 0.25
//! }
//! }).unwrap();
//!
//! // Start playback
//! device.resume();
//!
//! // Play for 2 seconds
//! std::thread::sleep(Duration::from_millis(2000));
//! ```
use std::ffi::{CStr, CString};
use num::FromPrimitive;
use libc::{c_int, c_void, uint8_t, c_char};
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::marker::PhantomData;
use std::mem;
use std::ptr;
use AudioSubsystem;
use get_error;
use rwops::RWops;
use sys::audio as ll;
impl AudioSubsystem {
/// Opens a new audio device given the desired parameters and callback.
#[inline]
pub fn open_playback<CB, F>(&self, device: Option<&str>, spec: &AudioSpecDesired, get_callback: F) -> Result<AudioDevice <CB>, String>
where CB: AudioCallback, F: FnOnce(AudioSpec) -> CB
{
AudioDevice::open_playback(self, device, spec, get_callback)
}
/// Opens a new audio device which uses queueing rather than older callback method.
#[inline]
pub fn open_queue<Channel>(&self, device: Option<&str>, spec: &AudioSpecDesired) -> Result<AudioQueue<Channel>, String> where Channel: AudioFormatNum
{
AudioQueue::open_queue(self, device, spec)
}
pub fn current_audio_driver(&self) -> &'static str {
unsafe {
let buf = ll::SDL_GetCurrentAudioDriver();
assert!(!buf.is_null());
CStr::from_ptr(buf as *const _).to_str().unwrap()
}
}
pub fn num_audio_playback_devices(&self) -> Option<u32> {
let result = unsafe { ll::SDL_GetNumAudioDevices(0) };
if result < 0 {
// SDL cannot retreive a list of audio devices. This is not necessarily an error (see the SDL2 docs).
None
} else {
Some(result as u32)
}
}
pub fn audio_playback_device_name(&self, index: u32) -> Result<String, String> {
unsafe {
let dev_name = ll::SDL_GetAudioDeviceName(index as c_int, 0);
if dev_name.is_null() {
Err(get_error())
} else {
let cstr = CStr::from_ptr(dev_name as *const _);
Ok(cstr.to_str().unwrap().to_owned())
}
}
}
}
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum AudioFormat {
/// Unsigned 8-bit samples
U8 = ll::AUDIO_U8 as isize,
/// Signed 8-bit samples
S8 = ll::AUDIO_S8 as isize,
/// Unsigned 16-bit samples, little-endian
U16LSB = ll::AUDIO_U16LSB as isize,
/// Unsigned 16-bit samples, big-endian
U16MSB = ll::AUDIO_U16MSB as isize,
/// Signed 16-bit samples, little-endian
S16LSB = ll::AUDIO_S16LSB as isize,
/// Signed 16-bit samples, big-endian
S16MSB = ll::AUDIO_S16MSB as isize,
/// Signed 32-bit samples, little-endian
S32LSB = ll::AUDIO_S32LSB as isize,
/// Signed 32-bit samples, big-endian
S32MSB = ll::AUDIO_S32MSB as isize,
/// 32-bit floating point samples, little-endian
F32LSB = ll::AUDIO_F32LSB as isize,
/// 32-bit floating point samples, big-endian
F32MSB = ll::AUDIO_F32MSB as isize
}
impl AudioFormat {
fn from_ll(raw: ll::SDL_AudioFormat) -> Option<AudioFormat> {
use self::AudioFormat::*;
match raw {
ll::AUDIO_U8 => Some(U8),
ll::AUDIO_S8 => Some(S8),
ll::AUDIO_U16LSB => Some(U16LSB),
ll::AUDIO_U16MSB => Some(U16MSB),
ll::AUDIO_S16LSB => Some(S16LSB),
ll::AUDIO_S16MSB => Some(S16MSB),
ll::AUDIO_S32LSB => Some(S32LSB),
ll::AUDIO_S32MSB => Some(S32MSB),
ll::AUDIO_F32LSB => Some(F32LSB),
ll::AUDIO_F32MSB => Some(F32MSB),
_ => None
}
}
fn to_ll(self) -> ll::SDL_AudioFormat {
self as ll::SDL_AudioFormat
}
}
#[cfg(target_endian = "little")]
impl AudioFormat {
/// Unsigned 16-bit samples, native endian
#[inline] pub fn u16_sys() -> AudioFormat { AudioFormat::U16LSB }
/// Signed 16-bit samples, native endian
#[inline] pub fn s16_sys() -> AudioFormat { AudioFormat::S16LSB }
/// Signed 32-bit samples, native endian
#[inline] pub fn s32_sys() -> AudioFormat { AudioFormat::S32LSB }
/// 32-bit floating point samples, native endian
#[inline] pub fn f32_sys() -> AudioFormat { AudioFormat::F32LSB }
}
#[cfg(target_endian = "big")]
impl AudioFormat {
/// Unsigned 16-bit samples, native endian
#[inline] pub fn u16_sys() -> AudioFormat { AudioFormat::U16MSB }
/// Signed 16-bit samples, native endian
#[inline] pub fn s16_sys() -> AudioFormat { AudioFormat::S16MSB }
/// Signed 32-bit samples, native endian
#[inline] pub fn s32_sys() -> AudioFormat { AudioFormat::S32MSB }
/// 32-bit floating point samples, native endian
#[inline] pub fn f32_sys() -> AudioFormat { AudioFormat::F32MSB }
}
#[repr(C)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum AudioStatus {
Stopped = ll::SDL_AUDIO_STOPPED as isize,
Playing = ll::SDL_AUDIO_PLAYING as isize,
Paused = ll::SDL_AUDIO_PAUSED as isize,
}
impl FromPrimitive for AudioStatus {
fn from_i64(n: i64) -> Option<AudioStatus> {
use self::AudioStatus::*;
Some( match n as ll::SDL_AudioStatus {
ll::SDL_AUDIO_STOPPED => Stopped,
ll::SDL_AUDIO_PLAYING => Playing,
ll::SDL_AUDIO_PAUSED => Paused,
_ => return None,
})
}<|fim▁hole|>
#[derive(Copy, Clone)]
pub struct DriverIterator {
length: i32,
index: i32
}
impl Iterator for DriverIterator {
type Item = &'static str;
#[inline]
fn next(&mut self) -> Option<&'static str> {
if self.index >= self.length {
None
} else {
unsafe {
let buf = ll::SDL_GetAudioDriver(self.index);
assert!(!buf.is_null());
self.index += 1;
Some(CStr::from_ptr(buf as *const _).to_str().unwrap())
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let l = self.length as usize;
(l, Some(l))
}
}
impl ExactSizeIterator for DriverIterator { }
/// Gets an iterator of all audio drivers compiled into the SDL2 library.
#[inline]
pub fn drivers() -> DriverIterator {
// This function is thread-safe and doesn't require the audio subsystem to be initialized.
// The list of drivers are read-only and statically compiled into SDL2, varying by platform.
// SDL_GetNumAudioDrivers can never return a negative value.
DriverIterator {
length: unsafe { ll::SDL_GetNumAudioDrivers() },
index: 0
}
}
pub struct AudioSpecWAV {
pub freq: i32,
pub format: AudioFormat,
pub channels: u8,
audio_buf: *mut u8,
audio_len: u32
}
impl AudioSpecWAV {
/// Loads a WAVE from the file path.
pub fn load_wav<P: AsRef<Path>>(path: P) -> Result<AudioSpecWAV, String> {
let mut file = try!(RWops::from_file(path, "rb"));
AudioSpecWAV::load_wav_rw(&mut file)
}
/// Loads a WAVE from the data source.
pub fn load_wav_rw(src: &mut RWops) -> Result<AudioSpecWAV, String> {
use std::mem::uninitialized;
use std::ptr::null_mut;
let mut desired = unsafe { uninitialized::<ll::SDL_AudioSpec>() };
let mut audio_buf: *mut u8 = null_mut();
let mut audio_len: u32 = 0;
unsafe {
let ret = ll::SDL_LoadWAV_RW(src.raw(), 0, &mut desired, &mut audio_buf, &mut audio_len);
if ret.is_null() {
Err(get_error())
} else {
Ok(AudioSpecWAV {
freq: desired.freq,
format: AudioFormat::from_ll(desired.format).unwrap(),
channels: desired.channels,
audio_buf: audio_buf,
audio_len: audio_len
})
}
}
}
pub fn buffer(&self) -> &[u8] {
use std::slice::from_raw_parts;
unsafe {
let ptr = self.audio_buf as *const u8;
let len = self.audio_len as usize;
from_raw_parts(ptr, len)
}
}
}
impl Drop for AudioSpecWAV {
fn drop(&mut self) {
unsafe { ll::SDL_FreeWAV(self.audio_buf); }
}
}
pub trait AudioCallback: Send
where Self::Channel: AudioFormatNum + 'static
{
type Channel;
fn callback(&mut self, &mut [Self::Channel]);
}
/// A phantom type for retreiving the SDL_AudioFormat of a given generic type.
/// All format types are returned as native-endian.
pub trait AudioFormatNum {
fn audio_format() -> AudioFormat;
fn zero() -> Self;
}
/// AUDIO_S8
impl AudioFormatNum for i8 {
fn audio_format() -> AudioFormat { AudioFormat::S8 }
fn zero() -> i8 { 0 }
}
/// AUDIO_U8
impl AudioFormatNum for u8 {
fn audio_format() -> AudioFormat { AudioFormat::U8 }
fn zero() -> u8 { 0 }
}
/// AUDIO_S16
impl AudioFormatNum for i16 {
fn audio_format() -> AudioFormat { AudioFormat::s16_sys() }
fn zero() -> i16 { 0 }
}
/// AUDIO_U16
impl AudioFormatNum for u16 {
fn audio_format() -> AudioFormat { AudioFormat::u16_sys() }
fn zero() -> u16 { 0 }
}
/// AUDIO_S32
impl AudioFormatNum for i32 {
fn audio_format() -> AudioFormat { AudioFormat::s32_sys() }
fn zero() -> i32 { 0 }
}
/// AUDIO_F32
impl AudioFormatNum for f32 {
fn audio_format() -> AudioFormat { AudioFormat::f32_sys() }
fn zero() -> f32 { 0.0 }
}
extern "C" fn audio_callback_marshall<CB: AudioCallback>
(userdata: *mut c_void, stream: *mut uint8_t, len: c_int) {
use std::slice::from_raw_parts_mut;
use std::mem::{size_of, transmute};
unsafe {
let mut cb_userdata: &mut CB = transmute(userdata);
let buf: &mut [CB::Channel] = from_raw_parts_mut(
stream as *mut CB::Channel,
len as usize / size_of::<CB::Channel>()
);
cb_userdata.callback(buf);
}
}
#[derive(Clone)]
pub struct AudioSpecDesired {
/// DSP frequency (samples per second). Set to None for the device's fallback frequency.
pub freq: Option<i32>,
/// Number of separate audio channels. Set to None for the device's fallback number of channels.
pub channels: Option<u8>,
/// Audio buffer size in samples (power of 2). Set to None for the device's fallback sample size.
pub samples: Option<u16>,
}
impl AudioSpecDesired {
fn convert_to_ll<CB: AudioCallback>(freq: Option<i32>, channels: Option<u8>, samples: Option<u16>, userdata: *mut CB) -> ll::SDL_AudioSpec {
use std::mem::transmute;
if let Some(freq) = freq { assert!(freq > 0); }
if let Some(channels) = channels { assert!(channels > 0); }
if let Some(samples) = samples { assert!(samples > 0); }
// A value of 0 means "fallback" or "default".
unsafe {
ll::SDL_AudioSpec {
freq: freq.unwrap_or(0),
format: <CB::Channel as AudioFormatNum>::audio_format().to_ll(),
channels: channels.unwrap_or(0),
silence: 0,
samples: samples.unwrap_or(0),
padding: 0,
size: 0,
callback: Some(audio_callback_marshall::<CB>
as extern "C" fn
(arg1: *mut c_void,
arg2: *mut uint8_t,
arg3: c_int)),
userdata: transmute(userdata)
}
}
}
fn convert_queue_to_ll<Channel: AudioFormatNum>(freq: Option<i32>, channels: Option<u8>, samples: Option<u16>) -> ll::SDL_AudioSpec {
if let Some(freq) = freq { assert!(freq > 0); }
if let Some(channels) = channels { assert!(channels > 0); }
if let Some(samples) = samples { assert!(samples > 0); }
// A value of 0 means "fallback" or "default".
ll::SDL_AudioSpec {
freq: freq.unwrap_or(0),
format: <Channel as AudioFormatNum>::audio_format().to_ll(),
channels: channels.unwrap_or(0),
silence: 0,
samples: samples.unwrap_or(0),
padding: 0,
size: 0,
callback: None,
userdata: 0 as *mut c_void
}
}
}
#[allow(missing_copy_implementations)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct AudioSpec {
pub freq: i32,
pub format: AudioFormat,
pub channels: u8,
pub silence: u8,
pub samples: u16,
pub size: u32
}
impl AudioSpec {
fn convert_from_ll(spec: ll::SDL_AudioSpec) -> AudioSpec {
AudioSpec {
freq: spec.freq,
format: AudioFormat::from_ll(spec.format).unwrap(),
channels: spec.channels,
silence: spec.silence,
samples: spec.samples,
size: spec.size
}
}
}
enum AudioDeviceID {
PlaybackDevice(ll::SDL_AudioDeviceID)
}
impl AudioDeviceID {
fn id(&self) -> ll::SDL_AudioDeviceID {
match self {
&AudioDeviceID::PlaybackDevice(id) => id
}
}
}
impl Drop for AudioDeviceID {
fn drop(&mut self) {
//! Shut down audio processing and close the audio device.
unsafe { ll::SDL_CloseAudioDevice(self.id()) }
}
}
/// Wraps SDL_AudioDeviceID and owns the callback data used by the audio device.
pub struct AudioQueue<Channel: AudioFormatNum> {
subsystem: AudioSubsystem,
device_id: AudioDeviceID,
phantom: PhantomData<Channel>
}
impl<Channel: AudioFormatNum> AudioQueue<Channel> {
/// Opens a new audio device given the desired parameters and callback.
pub fn open_queue(a: &AudioSubsystem, device: Option<&str>, spec: &AudioSpecDesired) -> Result<AudioQueue<Channel>, String> {
let desired = AudioSpecDesired::convert_queue_to_ll::<Channel>(spec.freq, spec.channels, spec.samples);
let mut obtained = unsafe { mem::uninitialized::<ll::SDL_AudioSpec>() };
unsafe {
let device = match device {
Some(device) => Some(CString::new(device).unwrap()),
None => None
};
let device_ptr = device.map_or(ptr::null(), |s| s.as_ptr());
let iscapture_flag = 0;
let device_id = ll::SDL_OpenAudioDevice(
device_ptr as *const c_char, iscapture_flag, &desired,
&mut obtained, 0
);
match device_id {
0 => {
Err(get_error())
},
id => {
let device_id = AudioDeviceID::PlaybackDevice(id);
Ok(AudioQueue {
subsystem: a.clone(),
device_id: device_id,
phantom: PhantomData::default()
})
}
}
}
}
#[inline]
pub fn subsystem(&self) -> &AudioSubsystem { &self.subsystem }
pub fn status(&self) -> AudioStatus {
unsafe {
let status = ll::SDL_GetAudioDeviceStatus(self.device_id.id());
FromPrimitive::from_i32(status as i32).unwrap()
}
}
/// Pauses playback of the audio device.
pub fn pause(&self) {
unsafe { ll::SDL_PauseAudioDevice(self.device_id.id(), 1) }
}
/// Starts playback of the audio device.
pub fn resume(&self) {
unsafe { ll::SDL_PauseAudioDevice(self.device_id.id(), 0) }
}
/// Adds data to the audio queue.
pub fn queue(&self, data: &[Channel]) -> bool {
let result = unsafe {ll::SDL_QueueAudio(self.device_id.id(), data.as_ptr() as *const c_void, (data.len() * mem::size_of::<Channel>()) as u32)};
result == 0
}
pub fn size(&self) -> u32 {
unsafe {ll::SDL_GetQueuedAudioSize(self.device_id.id())}
}
/// Clears all data from the current audio queue.
pub fn clear(&self) {
unsafe {ll::SDL_ClearQueuedAudio(self.device_id.id());}
}
}
/// Wraps SDL_AudioDeviceID and owns the callback data used by the audio device.
pub struct AudioDevice<CB: AudioCallback> {
subsystem: AudioSubsystem,
device_id: AudioDeviceID,
/// Store the callback to keep it alive for the entire duration of `AudioDevice`.
userdata: Box<CB>
}
impl<CB: AudioCallback> AudioDevice<CB> {
/// Opens a new audio device given the desired parameters and callback.
pub fn open_playback<F>(a: &AudioSubsystem, device: Option<&str>, spec: &AudioSpecDesired, get_callback: F) -> Result<AudioDevice <CB>, String>
where F: FnOnce(AudioSpec) -> CB
{
// SDL_OpenAudioDevice needs a userdata pointer, but we can't initialize the
// callback without the obtained AudioSpec.
// Create an uninitialized box that will be initialized after SDL_OpenAudioDevice.
let userdata: *mut CB = unsafe {
let b: Box<CB> = Box::new(mem::uninitialized());
mem::transmute(b)
};
let desired = AudioSpecDesired::convert_to_ll(spec.freq, spec.channels, spec.samples, userdata);
let mut obtained = unsafe { mem::uninitialized::<ll::SDL_AudioSpec>() };
unsafe {
let device = match device {
Some(device) => Some(CString::new(device).unwrap()),
None => None
};
let device_ptr = device.map_or(ptr::null(), |s| s.as_ptr());
let iscapture_flag = 0;
let device_id = ll::SDL_OpenAudioDevice(
device_ptr as *const c_char, iscapture_flag, &desired,
&mut obtained, 0
);
match device_id {
0 => {
Err(get_error())
},
id => {
let device_id = AudioDeviceID::PlaybackDevice(id);
let spec = AudioSpec::convert_from_ll(obtained);
let mut userdata: Box<CB> = mem::transmute(userdata);
let garbage = mem::replace(&mut userdata as &mut CB, get_callback(spec));
mem::forget(garbage);
Ok(AudioDevice {
subsystem: a.clone(),
device_id: device_id,
userdata: userdata
})
}
}
}
}
#[inline]
pub fn subsystem(&self) -> &AudioSubsystem { &self.subsystem }
pub fn status(&self) -> AudioStatus {
unsafe {
let status = ll::SDL_GetAudioDeviceStatus(self.device_id.id());
FromPrimitive::from_i32(status as i32).unwrap()
}
}
/// Pauses playback of the audio device.
pub fn pause(&self) {
unsafe { ll::SDL_PauseAudioDevice(self.device_id.id(), 1) }
}
/// Starts playback of the audio device.
pub fn resume(&self) {
unsafe { ll::SDL_PauseAudioDevice(self.device_id.id(), 0) }
}
/// Locks the audio device using `SDL_LockAudioDevice`.
///
/// When the returned lock guard is dropped, `SDL_UnlockAudioDevice` is
/// called.
/// Use this method to read and mutate callback data.
pub fn lock<'a>(&'a mut self) -> AudioDeviceLockGuard<'a, CB> {
unsafe { ll::SDL_LockAudioDevice(self.device_id.id()) };
AudioDeviceLockGuard {
device: self,
_nosend: PhantomData
}
}
/// Closes the audio device and saves the callback data from being dropped.
///
/// Note that simply dropping `AudioDevice` will close the audio device,
/// but the callback data will be dropped.
pub fn close_and_get_callback(self) -> CB {
drop(self.device_id);
*self.userdata
}
}
/// Similar to `std::sync::MutexGuard`, but for use with `AudioDevice::lock()`.
pub struct AudioDeviceLockGuard<'a, CB> where CB: AudioCallback, CB: 'a {
device: &'a mut AudioDevice<CB>,
_nosend: PhantomData<*mut ()>
}
impl<'a, CB: AudioCallback> Deref for AudioDeviceLockGuard<'a, CB> {
type Target = CB;
fn deref(&self) -> &CB { &self.device.userdata }
}
impl<'a, CB: AudioCallback> DerefMut for AudioDeviceLockGuard<'a, CB> {
fn deref_mut(&mut self) -> &mut CB { &mut self.device.userdata }
}
impl<'a, CB: AudioCallback> Drop for AudioDeviceLockGuard<'a, CB> {
fn drop(&mut self) {
unsafe { ll::SDL_UnlockAudioDevice(self.device.device_id.id()) }
}
}
#[derive(Copy, Clone)]
pub struct AudioCVT {
raw: ll::SDL_AudioCVT
}
impl AudioCVT {
pub fn new(src_format: AudioFormat, src_channels: u8, src_rate: i32,
dst_format: AudioFormat, dst_channels: u8, dst_rate: i32) -> Result<AudioCVT, String>
{
use std::mem;
unsafe {
let mut raw: ll::SDL_AudioCVT = mem::uninitialized();
let ret = ll::SDL_BuildAudioCVT(&mut raw,
src_format.to_ll(), src_channels, src_rate as c_int,
dst_format.to_ll(), dst_channels, dst_rate as c_int);
if ret == 1 || ret == 0 {
Ok(AudioCVT { raw: raw })
} else {
Err(get_error())
}
}
}
pub fn convert(&self, mut src: Vec<u8>) -> Vec<u8> {
//! Convert audio data to a desired audio format.
//!
//! The `src` vector is adjusted to the capacity necessary to perform
//! the conversion in place; then it is passed to the SDL library.
//!
//! Certain conversions may cause buffer overflows. See AngryLawyer/rust-sdl2 issue #270.
use num::traits as num;
unsafe {
if self.raw.needed != 0 {
let mut raw = self.raw;
// calculate the size of the dst buffer
raw.len = num::cast(src.len()).expect("Buffer length overflow");
let dst_size = self.capacity(src.len());
let needed = dst_size - src.len();
src.reserve_exact(needed);
// perform the conversion in place
raw.buf = src.as_mut_ptr();
let ret = ll::SDL_ConvertAudio(&mut raw);
// There's no reason for SDL_ConvertAudio to fail.
// The only time it can fail is if buf is NULL, which it never is.
if ret != 0 { panic!(get_error()) }
// return original buffer back to caller
debug_assert!(raw.len_cvt > 0);
debug_assert!(raw.len_cvt as usize <= src.capacity());
src.set_len(raw.len_cvt as usize);
src
} else {
// The buffer remains unmodified
src
}
}
}
/// Checks if any conversion is needed. i.e. if the buffer that goes
/// into `convert()` is unchanged from the result.
pub fn is_conversion_needed(&self) -> bool { self.raw.needed != 0 }
/// Gets the buffer capacity that can contain both the original and
/// converted data.
pub fn capacity(&self, src_len: usize) -> usize {
src_len.checked_mul(self.raw.len_mult as usize).expect("Integer overflow")
}
}
#[cfg(test)]
mod test {
use super::{AudioCVT, AudioFormat};
#[test]
fn test_audio_cvt() {
use std::iter::repeat;
// 0,1,2,3, ...
let buffer: Vec<u8> = (0..255).collect();
// 0,0,1,1,2,2,3,3, ...
let new_buffer_expected: Vec<u8> = (0..255).flat_map(|v| repeat(v).take(2)).collect();
let cvt = AudioCVT::new(AudioFormat::U8, 1, 44100, AudioFormat::U8, 2, 44100).unwrap();
assert!(cvt.is_conversion_needed());
assert_eq!(cvt.capacity(255), 255*2);
let new_buffer = cvt.convert(buffer);
assert_eq!(new_buffer.len(), new_buffer_expected.len());
assert_eq!(new_buffer, new_buffer_expected);
}
}<|fim▁end|>
|
fn from_u64(n: u64) -> Option<AudioStatus> { FromPrimitive::from_i64(n as i64) }
}
|
<|file_name|>LxControlTrackerTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2010-2021 Contributors to the openHAB project<|fim▁hole|> * information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.loxone.internal.controls;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.openhab.core.library.types.StringType;
/**
* Test class for (@link LxControlTracker}
*
* @author Pawel Pieczul - initial contribution
*
*/
public class LxControlTrackerTest extends LxControlTest {
@BeforeEach
public void setup() {
setupControl("132aa43b-01d4-56ea-ffff403fb0c34b9e", "0b734138-037d-034e-ffff403fb0c34b9e",
"0fe650c2-0004-d446-ffff504f9410790f", "Tracker Control");
}
@Test
public void testControlCreation() {
testControlCreation(LxControlTracker.class, 1, 0, 1, 1, 1);
}
@Test
public void testChannels() {
testChannel("String", null, null, null, null, null, true, null);
}
@Test
public void testLoxoneStateChanges() {
for (int i = 0; i < 20; i++) {
String s = new String();
for (int j = 0; j < i; j++) {
for (char c = 'a'; c <= 'a' + j; c++) {
s = s + c;
}
if (j != i - 1) {
s = s + '|';
}
}
changeLoxoneState("entries", s);
testChannelState(new StringType(s));
}
}
}<|fim▁end|>
|
*
* See the NOTICE file(s) distributed with this work for additional
|
<|file_name|>common_usage_steps_fd.hpp<|end_file_name|><|fim▁begin|>namespace boost { namespace logging {
/**
@page common_usage_steps_fd Common steps when using Formatters and destinations
\n<|fim▁hole|>
First, the examples: @ref scenarios_code_mom "example 1", @ref scenarios_code_noo "example 2"
- Step 1: (optional) Specify your @ref BOOST_LOG_FORMAT_MSG "format message class" and/or @ref BOOST_LOG_DESTINATION_MSG "destination message class". By default, it's <tt>std::(w)string</tt>.
You'll use this when you want a @ref optimize "optimize string class". Or, when @ref boost::logging::tag "using tags"
- Step 2: Specify your logging and filter classes
- Step 2A: Typedef your logger as <tt>typedef boost::logging::named_logger<>::type logger_type;</tt> and @ref typedefing_your_filter "typedef your filter class"
- Step 2B: Declare the @ref declare_define "filters and loggers" you'll use (in a header file)
- Step 2C: Define the @ref declare_define "filters and loggers" you'll use (in a source file). We need this separation
(into declaring and defining the logs/filters), in order to @ref macros_compile_time "make compilation times fast".
- Step 3: Define the @ref defining_logger_macros "macros through which you'll do logging"
- Step 4: Initialize the logger.
- Step 4A: Set the @ref boost::logging::writer::named_write "formatters and destinations", as strings.
- Step 4B: @ref boost::logging::logger_base::mark_as_initialized "Mark the logger as initialized"
\n
<b>The manual way</b>
First, the examples: @ref common_your_scenario_code "example 1", @ref common_your_mul_logger_one_filter "example 2"
- Step 1: (optional) Specify your @ref BOOST_LOG_FORMAT_MSG "format message class" and/or @ref BOOST_LOG_DESTINATION_MSG "destination message class". By default, it's <tt>std::(w)string</tt>.
You'll use this when you want a @ref optimize "optimize string class". Or, when @ref boost::logging::tag "using tags"
- Step 2: (optional) Specify your @ref boost::logging::manipulator "formatter & destination base classes"
- Step 3: Specify your logging and filter classes
- Step 3A: @ref typedefing_your_filter "Typedef your filter class(es)" and @ref typedefing_your_logger "Typedef your logger class(es)"
- Step 3B: Declare the @ref declare_define "filters and loggers" you'll use (in a header file)
- Step 3C: Define the @ref declare_define "filters and loggers" you'll use (in a source file). We need this separation
(into declaring and defining the logs/filters), in order to @ref macros_compile_time "make compilation times fast".
- Step 4: Define the @ref defining_logger_macros "macros through which you'll do logging"
- Step 5: Initialize the logger
- Step 5A: Add @ref boost::logging::manipulator "formatters and destinations". That is, how the message is to be formatted...
- Step 5B: @ref boost::logging::logger_base::mark_as_initialized "Mark the logger as initialized"
*/
}}<|fim▁end|>
|
<b>The easy way, use Named Formatters and Destinations</b>
You use a string to specify Formatters, and a string to specify Destinations. Thus, you use the @ref boost::logging::writer::named_write "writer::named_write".
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# GObject-Introspection documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 16 15:34:52 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GObject-Introspection'
copyright = u'2013, Dieter Verfaillie'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some<|fim▁hole|>
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'GObject-Introspectiondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'GObject-Introspection.tex', u'GObject-Introspection Documentation',
u'Dieter Verfaillie', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gobject-introspection', u'GObject-Introspection Documentation',
[u'Dieter Verfaillie'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'GObject-Introspection', u'GObject-Introspection Documentation',
u'Dieter Verfaillie', 'GObject-Introspection', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}<|fim▁end|>
|
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
|
<|file_name|>environment.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
// This file can be replaced during build by using the `fileReplacements` array.
// `ng build --prod` replaces `environment.ts` with `environment.prod.ts`.
// The list of file replacements can be found in `angular.json`.
import { version } from '../../../../package.json';
export const environment = {
production: false,
interactiveVisualizerUrl: `https://storage.googleapis.com/interactive_visualizer/${version}/index.html`,
models: [
{
displayName: 'Birds V1',
description: 'AIY natural world insects classification model',<|fim▁hole|> metadataUrl: 'https://storage.googleapis.com/tfhub-visualizers/google/aiy/vision/classifier/birds_V1/1/metadata.json',
},
{
displayName: 'Insects V1',
description: 'AIY natural world birds quantized classification model',
type: 'image classification',
metadataUrl: 'https://storage.googleapis.com/tfhub-visualizers/google/aiy/vision/classifier/insects_V1/1/metadata.json',
},
{
displayName: 'Mobile Object Localizer V1',
description: 'Mobile model to localize objects in an image',
type: 'object detection',
metadataUrl: 'https://storage.googleapis.com/tfhub-visualizers/google/object_detection/mobile_object_localizer_v1/1/metadata.json',
},
],
};
/*
* For easier debugging in development mode, you can import the following file
* to ignore zone related error stack frames such as `zone.run`, `zoneDelegate.invokeTask`.
*
* This import should be commented out in production mode because it will have a negative impact
* on performance if an error is thrown.
*/
// import 'zone.js/dist/zone-error'; // Included with Angular CLI.<|fim▁end|>
|
type: 'image classification',
|
<|file_name|>yahooWeatherForecast.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
def makeYqlQuery(req):
<|fim▁hole|> parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "') and u='c'"<|fim▁end|>
|
result = req.get("result")
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>module.exports = require('./lib/goldwasher-aws-lambda.js');<|fim▁end|>
| |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2021 Charles University, Faculty of Arts,
* Institute of the Czech National Corpus
* Copyright (c) 2021 Tomas Machalek <[email protected]>
* Copyright (c) 2021 Martin Zimandl <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* dated June, 1991.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import { Bound, IActionDispatcher } from 'kombo';
import * as Kontext from '../../../types/kontext';
import { PqueryResultModel, PqueryResultModelState, SortColumn } from '../../../models/pquery/result';
import { Actions } from '../../../models/pquery/actions';
import * as S from './style';
import { Color, id, List, pipe } from 'cnc-tskit';
import { init as initSaveViews } from './save';
import { PqueryResultsSaveModel } from '../../../models/pquery/save';
import { colorHeatmap } from '../../theme/default';
export interface PqueryFormViewsArgs {
dispatcher:IActionDispatcher;
he:Kontext.ComponentHelpers;
resultModel:PqueryResultModel;
saveModel:PqueryResultsSaveModel;
}
export function init({dispatcher, he, resultModel, saveModel}:PqueryFormViewsArgs):React.ComponentClass<{}> {
const layoutViews = he.getLayoutViews();
// ------------------------ <PageCounter /> --------------------------
const saveViews = initSaveViews(dispatcher, he, saveModel);
const PageCounter:React.FC<{
maxPage:number;
currPage:number;
currPageInput:Kontext.FormValue<string>;
}> = (props) => {
const setPage = (page:string|number) => () => {
dispatcher.dispatch(
Actions.SetPageInput,
{
value: typeof page === 'string' ? page : page + ''
}
);
};
return <S.PageCounter>
<a className={props.currPage === 1 ? "inactive" : null} onClick={props.currPage > 1 ? setPage(props.currPage-1) : null}>
<img src={he.createStaticUrl('img/prev-page.svg')} />
</a>
<span className="num-input">
<input type="text" className={props.currPageInput.isInvalid ? 'error' : null} value={props.currPageInput.value}
onChange={e => setPage(e.target.value)()} /> / {props.maxPage}
</span>
<a className={props.currPage === props.maxPage ? "inactive" : null} onClick={props.currPage < props.maxPage ? setPage(props.currPage+1) : null}>
<img src={he.createStaticUrl('img/next-page.svg')} />
</a>
</S.PageCounter>
};
// ------------------------ <ThSortable /> --------------------------
const ThSortable:React.FC<{
sortColumn:SortColumn;
actualSortColumn:SortColumn;
label:string;
}> = (props) => {
const isSortedByMe = () => {
if (!props.actualSortColumn) {
return false;
}
if (props.sortColumn.type === 'partial_freq' &&
props.actualSortColumn.type === 'partial_freq') {
return props.sortColumn.concId === props.actualSortColumn.concId;
}
return props.sortColumn.type === props.actualSortColumn.type;
}
const renderSortFlag = () => {
if (isSortedByMe()) {
return props.actualSortColumn.reverse ?
<img className="sort-flag" src={he.createStaticUrl('img/sort_desc.svg')} /> :
<img className="sort-flag" src={he.createStaticUrl('img/sort_asc.svg')} />;
} else {
return null;
}
};
const handleSortClick = () => {
dispatcher.dispatch<typeof Actions.SortLines>({
name: Actions.SortLines.name,
payload: {
...props.sortColumn,
reverse: !(isSortedByMe() && props.actualSortColumn.reverse)
}
});
};
const getTitle = () => {
if (isSortedByMe()) {
return he.translate('global__sorted_click_change');
}
return he.translate('global__click_to_sort');
};
return (
<th>
<a onClick={handleSortClick} title={getTitle()}>
{props.label}
{renderSortFlag()}
</a>
</th>
);
};
// ---------------- <PqueryResultSection /> ----------------------------
const PqueryResultSection:React.FC<PqueryResultModelState> = (props) => {
const _handleSaveFormClose = () => {
dispatcher.dispatch<typeof Actions.ResultCloseSaveForm>({
name: Actions.ResultCloseSaveForm.name
})
};
const mapColor = (idx:number) => colorHeatmap[~~Math.floor((idx) * (colorHeatmap.length - 1) / props.concIds.length)];
const _handleFilter = (value:string, concId:string) => (e) => {
dispatcher.dispatch<typeof Actions.ResultApplyQuickFilter>({
name: Actions.ResultApplyQuickFilter.name,
payload: {
value,
concId,
blankWindow: e.ctrlKey
}
});
};
const renderContent = () => {
if (props.numLines === 0) {
return <S.NoResultPar>{he.translate('pquery__no_result')}</S.NoResultPar>;
} else {
return (
<>
<section className="heading">
<div className="controls">
<p>{he.translate('pquery__avail_label')}: {props.numLines}</p>
<PageCounter
maxPage={Math.ceil(props.numLines/props.pageSize)}
currPage={props.page}
currPageInput={props.pageInput} />
</div>
<div className="loader">
{props.isBusy ? <layoutViews.AjaxLoaderImage /> : <div />}
</div>
</section>
<table className={`data${props.isBusy ? ' busy' : ''}`}>
<thead>
<tr>
<th colSpan={2} />
{List.map(
(concId, i) => (
<React.Fragment key={concId}>
<th className="conc-group" colSpan={2}>{`Conc ${i+1}`}</th>
</React.Fragment>
),
props.concIds
)}
<th />
</tr>
<tr>
<th />
<ThSortable sortColumn={{type: 'value', reverse: false}}
actualSortColumn={props.sortColumn} label="Value"/>
{List.map(
(concId, i) => (
<React.Fragment key={concId}>
<ThSortable sortColumn={{type: 'partial_freq', concId, reverse: false}}
actualSortColumn={props.sortColumn} label="Freq" />
<th>Filter</th>
</React.Fragment>
),
props.concIds
)}
<ThSortable sortColumn={{type: 'freq', reverse: false}} actualSortColumn={props.sortColumn}
label={'Freq \u2211'} />
</tr>
</thead>
<tbody>
{List.map(
([word, ...freqs], i) => (
<tr key={`${i}:${word}`}>
<td className="num">{(props.page-1)*props.pageSize+i+1}</td>
<td>{word}</td>
{List.map(
(f, i) => {
const idx = pipe(
freqs,
List.sortedBy(id),
List.findIndex(v => v === f)
);
const bgCol = mapColor(idx);
const textCol = pipe(
bgCol,
Color.importColor(1),
Color.textColorFromBg(),
Color.color2str()
);
const style = {
backgroundColor: bgCol,
color: textCol
};
return (
<React.Fragment key={props.concIds[i]}>
<td style={style} className="num">{f}</td>
<td><a onClick={_handleFilter(word, props.concIds[i])}>p</a></td>
</React.Fragment>
);
},
freqs
)}
<td className="num sum">{List.foldl((acc, curr) => acc + curr, 0, freqs)}</td>
</tr>
),
props.data
)}
</tbody>
</table>
{props.saveFormActive ?<|fim▁hole|> </>
);
}
};
return <S.PqueryResultSection>{renderContent()}</S.PqueryResultSection>;
};
return Bound(PqueryResultSection, resultModel);
}<|fim▁end|>
|
<saveViews.SavePqueryForm onClose={_handleSaveFormClose} /> :
null
}
|
<|file_name|>idl_diff.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import glob
import os
import subprocess
import sys
from idl_option import GetOption, Option, ParseOptions
from idl_outfile import IDLOutFile
#
# IDLDiff
#
# IDLDiff is a tool for comparing sets of IDL generated header files
# with the standard checked in headers. It does this by capturing the
# output of the standard diff tool, parsing it into separate changes, then
# ignoring changes that are know to be safe, such as adding or removing
# blank lines, etc...
#
Option('gen', 'IDL generated files', default='hdir')
Option('src', 'Original ".h" files', default='../c')
Option('halt', 'Stop if a difference is found')
Option('diff', 'Directory holding acceptable diffs', default='diff')
Option('ok', 'Write out the diff file.')
# Change
#
# A Change object contains the previous lines, new news and change type.
#
class Change(object):
  """One parsed diff hunk: the removed lines (was), the added lines (now)
  and the diff mode line (e.g. '5c5') that introduced it."""

  def __init__(self, mode, was, now):
    self.mode = mode
    self.was = was
    self.now = now

  def Dump(self):
    """Print a human-readable description of this change to stdout."""
    if not self.was:
      print('Adding %s' % self.mode)
    elif not self.now:
      print('Missing %s' % self.mode)
    else:
      print('Modifying %s' % self.mode)
    for line in self.was:
      print('src: >>%s<<' % line)
    for line in self.now:
      print('gen: >>%s<<' % line)
    # Bug fix: the original ended with a bare 'print', which is a no-op
    # expression under 'from __future__ import print_function'; call it so
    # the intended trailing blank line is actually emitted.
    print()
#
# IsCopyright
#
# Return True if this change is only a one line change in the copyright notice
# such as non-matching years.
#
def IsCopyright(change):
  """Return True when the change is a one-line copyright-notice diff,
  such as non-matching years in otherwise identical headers."""
  single_line = len(change.was) == 1 and len(change.now) == 1
  if not single_line:
    return False
  return ('Copyright (c)' in change.was[0]
          and 'Copyright (c)' in change.now[0])
#
# IsBlankComment
#
# Return True if this change only removes a blank line from a comment
#
def IsBlankComment(change):
  """Return True when the change merely removes a lone '*' comment line."""
  return (not change.now
          and len(change.was) == 1
          and change.was[0].strip() == '*')
#
# IsBlank
#
# Return True if this change only adds or removes blank lines
#
def IsBlank(change):
  """Return True when every added and every removed line is empty, i.e.
  the change only inserts or deletes blank lines."""
  return not (any(change.now) or any(change.was))
#
# IsCppComment
#
# Return True if this change only going from C++ to C style
#
def IsToCppComment(change):
  """Return True when the change only converts '//' comments to the
  equivalent '/* ... */' form, line for line.

  Args:
    change: a Change whose .was/.now line lists are compared pairwise.
  """
  if not len(change.now) or len(change.now) != len(change.was):
    return False
  for index in range(len(change.now)):
    was = change.was[index].strip()
    if was[:2] != '//':
      return False
    was = was[2:].strip()
    now = change.now[index].strip()
    if now[:2] != '/*':
      return False
    # Strip the '/*' and '*/' delimiters before comparing the comment text.
    now = now[2:-2].strip()
    if now != was:
      return False
  return True
  # Fix: removed a duplicated, unreachable second 'return True' that
  # followed the one above in the original.
def IsMergeComment(change):
  """Return True when a lone '*' comment line was replaced by lines that
  are still comment furniture ('*', a '/*...' opener or a '...*/' closer)."""
  if len(change.was) != 1 or change.was[0].strip() != '*':
    return False
  for line in change.now:
    text = line.strip()
    is_comment_line = (text == '*'
                       or text[:2] == '/*'
                       or text[-2:] == '*/')
    if not is_comment_line:
      return False
  return True
#
# IsSpacing
#
# Return True if this change is only different in the way 'words' are spaced
# such as in an enum:
# ENUM_XXX = 1,
# ENUM_XYY_Y = 2,
# vs
# ENUM_XXX = 1,
# ENUM_XYY_Y = 2,
#
def IsSpacing(change):
  """Return True when old and new lines differ only in the whitespace
  between tokens; trailing //- or /*-style comments on the old lines are
  ignored before comparing."""
  if len(change.now) != len(change.was):
    return False
  for old_line, new_line in zip(change.was, change.now):
    # Chop a right-hand comment off the old line before tokenizing.
    offs = old_line.find('//')
    if offs == -1:
      offs = old_line.find('/*')
    if offs > -1:
      # NOTE(review): preserved from the original -- when the comment
      # starts at column 0 this slices old_line[:-1] rather than removing
      # the whole line; confirm whether that edge case is intended.
      old_line = old_line[:offs - 1]
    if new_line.split() != old_line.split():
      return False
  return True
<|fim▁hole|>#
# Return True if change has extra includes
#
def IsInclude(change):
  """Return True if this change only adds extra '#include' lines."""
  for line in change.was:
    # NOTE(review): str.find returns -1 (truthy) when 'struct' is absent
    # and 0 (falsy) only when the stripped line *starts* with 'struct', so
    # this rejects the change unless every removed line begins with
    # 'struct'. That looks inverted relative to the intent -- confirm
    # whether "find(...) == -1" was meant before changing it.
    if line.strip().find('struct'): return False
  for line in change.now:
    # Every added non-blank line must be an #include.
    if line and '#include' not in line: return False
  return True
#
# IsCppComment
#
# Return True if the change is only missing C++ comments
#
def IsCppComment(change):
  """Return True when the change only deletes '//' comment lines (nothing
  is added and every removed line is a C++-style comment)."""
  if change.now:
    return False
  return all(line.strip()[:2] == '//' for line in change.was)
#
# ValidChange
#
# Return True if none of the changes does not patch an above "bogus" change.
#
def ValidChange(change):
  """Return True unless the change matches one of the known-benign diff
  patterns detected by the Is* predicates above (comment, whitespace and
  include noise). Checks run in the same order as the original."""
  benign_checks = (IsToCppComment, IsCopyright, IsBlankComment,
                   IsMergeComment, IsBlank, IsSpacing, IsInclude,
                   IsCppComment)
  for check in benign_checks:
    if check(change):
      return False
  return True
#
# Swapped
#
# Check if the combination of last + next change signals they are both
# invalid such as swap of line around an invalid block.
#
def Swapped(last, next):
  """Return True if |last| and |next| are inverses of each other: the same
  block of lines was removed in one change and re-added (possibly rotated)
  in the other, so both diffs are noise.

  Args:
    last, next: Change objects; only their .was/.now lists are inspected.
  """
  # Case 1: |last| only removed lines and |next| only added the same count;
  # accept any rotation of the removed block.
  if not last.now and not next.was and len(last.was) == len(next.now):
    cnt = len(last.was)
    for i in range(cnt):
      match = True
      for j in range(cnt):
        if last.was[j] != next.now[(i + j) % cnt]:
          match = False
          break
      if match:
        return True
  # Case 2: the mirror image -- |last| only added, |next| only removed.
  if not last.was and not next.now and len(last.now) == len(next.was):
    cnt = len(last.now)
    for i in range(cnt):
      match = True
      for j in range(cnt):
        # Bug fix: compare element j (not i) against the rotated position,
        # mirroring the loop above; the original compared last.now[i] for
        # every j, so rotations were never matched in this direction.
        if last.now[j] != next.was[(i + j) % cnt]:
          match = False
          break
      if match:
        return True
  return False
def FilterLinesIn(output):
  """Pre-filter raw diff output: drop '<'/'>' line pairs whose payload text
  is identical, since those represent pure moves rather than real edits.

  Args:
    output: list of diff(1) output lines ('< old', '> new', hunk headers).
  Returns:
    The same lines, minus every matched removed/added pair.
  """
  was = []     # (index, text) for each non-blank removed ('<') line
  now = []     # (index, text) for each non-blank added ('>') line
  filter = []  # filter[i] is True when output[i] should be dropped
  for index in range(len(output)):
    filter.append(False)
    line = output[index]
    # Too short to carry a '<'/'>' marker plus payload.
    if len(line) < 2: continue
    if line[0] == '<':
      if line[2:].strip() == '': continue
      was.append((index, line[2:]))
    elif line[0] == '>':
      if line[2:].strip() == '': continue
      now.append((index, line[2:]))
  # Greedily pair up identical removed/added lines; each line is consumed
  # at most once via the filter flags.
  for windex, wline in was:
    for nindex, nline in now:
      if filter[nindex]: continue
      if filter[windex]: continue
      if wline == nline:
        filter[nindex] = True
        filter[windex] = True
        if GetOption('verbose'):
          print("Found %d, %d >>%s<<" % (windex + 1, nindex + 1, wline))
  # Keep only the lines that were never flagged.
  out = []
  for index in range(len(output)):
    if not filter[index]:
      out.append(output[index])
  return out
#
# GetChanges
#
# Parse the output into discrete change blocks.
#
def GetChanges(output):
  """Parse raw diff output into a list of validated Change objects.

  Args:
    output: the stdout text of 'diff src gen'.
  Returns:
    The Change objects that survive ValidChange() and FilterChanges(),
    i.e. the differences that are not known-benign noise.
  """
  # Split on lines, adding an END marker to simply add logic
  lines = output.split('\n')
  lines = FilterLinesIn(lines)
  lines.append('END')
  changes = []
  was = []  # removed lines accumulated for the current hunk
  now = []  # added lines accumulated for the current hunk
  # NOTE(review): 'mode' and 'last' are assigned but never used below.
  mode = ''
  last = None
  for line in lines:
    #print("LINE=%s" % line)
    if not line: continue
    elif line[0] == '<':
      if line[2:].strip() == '': continue
      # Ignore prototypes
      if len(line) > 10:
        words = line[2:].split()
        # A two-word 'struct Foo;' / 'union Foo;' forward declaration.
        if len(words) == 2 and words[1][-1] == ';':
          if words[0] == 'struct' or words[0] == 'union':
            continue
      was.append(line[2:])
    elif line[0] == '>':
      if line[2:].strip() == '': continue
      # Added #include lines are handled elsewhere; skip them here.
      if line[2:10] == '#include': continue
      now.append(line[2:])
    elif line[0] == '-':
      # Hunk separator lines ('---') carry no content.
      continue
    else:
      # Any other line (a hunk header or the END sentinel) closes the
      # current hunk: package it up and reset the accumulators.
      change = Change(line, was, now)
      was = []
      now = []
      if ValidChange(change):
        changes.append(change)
      if line == 'END':
        break
  return FilterChanges(changes)
def FilterChanges(changes):
  """Drop pairs of changes that cancel each other out: when Swapped()
  says two hunks are the same block moved around, both are discarded."""
  if len(changes) < 2:
    return changes
  discarded = [False] * len(changes)
  for first in range(len(changes)):
    for second in range(first + 1, len(changes)):
      if discarded[second]:
        continue
      if Swapped(changes[first], changes[second]):
        discarded[first] = True
        discarded[second] = True
  return [change for change, drop in zip(changes, discarded) if not drop]
def Main(args):
  """Diff each generated header against its checked-in counterpart and
  report the differences that are not known-benign noise.

  Args:
    args: command-line arguments (see the Option declarations above).
  Returns:
    1 when --halt is set and a real difference is found, otherwise None.
  """
  filenames = ParseOptions(args)
  if not filenames:
    # No explicit files given: diff every generated header, and report
    # source headers that have no generated twin.
    gendir = os.path.join(GetOption('gen'), '*.h')
    filenames = sorted(glob.glob(gendir))
    srcdir = os.path.join(GetOption('src'), '*.h')
    srcs = sorted(glob.glob(srcdir))
    for name in srcs:
      name = os.path.split(name)[1]
      name = os.path.join(GetOption('gen'), name)
      if name not in filenames:
        print('Missing: %s' % name)
  for filename in filenames:
    gen = filename
    # Strip the gen-dir prefix to rebuild the src and diff paths.
    filename = filename[len(GetOption('gen')) + 1:]
    src = os.path.join(GetOption('src'), filename)
    diff = os.path.join(GetOption('diff'), filename)
    p = subprocess.Popen(['diff', src, gen], stdout=subprocess.PIPE)
    output, errors = p.communicate()
    # Load the previously accepted diff for this file, if one was recorded.
    try:
      input = open(diff, 'rt').read()
    except:
      input = ''
    # Only parse the diff when it does not match the accepted one.
    if input != output:
      changes = GetChanges(output)
    else:
      changes = []
    if changes:
      print("\n\nDelta between:\n src=%s\n gen=%s\n" % (src, gen))
      for change in changes:
        change.Dump()
      print('Done with %s\n\n' % src)
      if GetOption('ok'):
        # --ok: record this diff as acceptable for future runs.
        open(diff, 'wt').write(output)
      if GetOption('halt'):
        return 1
    else:
      print("\nSAME:\n src=%s\n gen=%s" % (src, gen))
      if input:
        print(' ** Matched expected diff. **')
      print('\n')
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))<|fim▁end|>
|
#
# IsInclude
|
<|file_name|>fpeditor.py<|end_file_name|><|fim▁begin|># vim: set fileencoding=utf-8 :
# GNU Solfege - free ear training software
# Copyright (C) 2009, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import logging
import os
import StringIO
import subprocess
from gi.repository import Gtk
from solfege.esel import SearchView
if __name__ == '__main__':
from solfege import i18n
i18n.setup(".", "C")
import solfege.statistics
solfege.db = solfege.statistics.DB()
import solfege
from solfege import cfg
from solfege import filesystem
from solfege import gu
from solfege import frontpage as pd
from solfege import lessonfile
from solfege import osutils
class LessonFilePreviewWidget(Gtk.VBox):
def __init__(self, model):
Gtk.VBox.__init__(self)
self.m_model = model
self.set_size_request(200, 200)
l = Gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Title:</b>")
self.pack_start(l, False, False, 0)
self.g_title = Gtk.Label()
self.g_title.set_alignment(0.0, 0.5)
self.pack_start(self.g_title, False, False, 0)
l = Gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Module:</b>")
self.pack_start(l, False, False, 0)
self.g_module = Gtk.Label()
self.g_module.set_alignment(0.0, 0.5)
self.pack_start(self.g_module, False, False, 0)
l = Gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup("<b>Used in topcis:</b>")
self.pack_start(l, False, False, 0)
self.g_topic_box = Gtk.VBox()
self.pack_start(self.g_topic_box, False, False, 0)
self.show_all()
def update(self, dlg):
fn = dlg.get_preview_filename()
if fn:
fn = gu.decode_filename(fn)
for child in self.g_topic_box.get_children():
child.destroy()
fn = lessonfile.mk_uri(fn)
try:
self.set_sensitive(True)
self.g_title.set_text(lessonfile.infocache.get(fn, 'title'))
self.g_module.set_text(lessonfile.infocache.get(fn, 'module'))
self.g_ok_button.set_sensitive(True)
for x in self.m_model.iterate_topics_for_file(fn):
l = Gtk.Label(label=x)
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False, False, 0)
if not self.g_topic_box.get_children():
l = Gtk.Label(label=u"-")
l.set_alignment(0.0, 0.5)
self.g_topic_box.pack_start(l, False, False, 0)
except (lessonfile.InfoCache.FileNotFound,
lessonfile.InfoCache.FileNotLessonfile), e:
self.g_title.set_text(u'')
self.g_module.set_text(u'')
self.g_ok_button.set_sensitive(False)
self.set_sensitive(False)
self.show_all()
return True
class SelectLessonFileDialog(Gtk.FileChooserDialog):
    """File chooser for picking lesson files, with a
    LessonFilePreviewWidget preview pane; the OK button is only made
    sensitive when the preview widget recognises a valid lesson file.
    """
    def __init__(self, parent):
        # parent: the Editor window; its m_model feeds the preview widget.
        Gtk.FileChooserDialog.__init__(self, _("Select lesson file"),
            parent=parent,
            buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,))
        self.set_select_multiple(True)
        pv = LessonFilePreviewWidget(parent.m_model)
        # The preview widget toggles this OK button's sensitivity.
        pv.g_ok_button = self.add_button("gtk-ok", Gtk.ResponseType.OK)
        pv.g_ok_button.set_sensitive(False)
        pv.show()
        self.set_preview_widget(pv)
        self.connect('selection-changed', pv.update)
class SelectLessonfileBySearchDialog(Gtk.Dialog):
    """Dialog for finding a lesson file via the exercise SearchView;
    clicking a result stores its filename in self.m_filename and ends the
    dialog with Gtk.ResponseType.OK so the caller can read it.
    """
    def __init__(self):
        Gtk.Dialog.__init__(self, buttons=(Gtk.STOCK_CLOSE, Gtk.ResponseType.ACCEPT))
        view = SearchView(_('Search for exercises. Each exercise you click will be added to the section of the front page.'),
            fields=['link-with-filename-tooltip', 'module'])
        # Route result clicks to this dialog instead of opening exercises.
        view.on_link_clicked = self.on_link_clicked
        self.vbox.pack_start(view, True, True, 0)
        self.show_all()

    def on_link_clicked(self, widget, filename):
        # Remember the chosen file and close the dialog with OK.
        self.m_filename = filename
        self.response(Gtk.ResponseType.OK)
def editor_of(obj):
    """Walk the m_parent chain upward from obj and return the toplevel
    page, i.e. the first ancestor that is an Editor instance."""
    node = obj
    while True:
        if isinstance(node, Editor):
            return node
        node = node.m_parent
def parent_page(obj):
    """Return the nearest ancestor Page of obj, following m_parent links.

    Returns None when obj has no m_parent attribute or the chain ends in
    None before a Page is found.
    """
    node = obj
    while True:
        try:
            node = node.m_parent
        except AttributeError:
            return None
        if isinstance(node, Page):
            return node
        if node is None:
            return None
class Section(Gtk.VBox):
"""
A section consists of a heading and a list of links.
self.g_link_box is a vbox that contains the links.
"""
def __init__(self, model, parent):
Gtk.VBox.__init__(self)
self.m_model = model
self.m_parent = parent
assert isinstance(model, pd.LinkList)
hbox = Gtk.HBox()
hbox.set_spacing(6)
self.pack_start(hbox, False, False, 0)
# This is displayed and used when we edit the heading
self.g_heading_entry = Gtk.Entry()
self.g_heading_entry.set_no_show_all(True)
hbox.pack_start(self.g_heading_entry, True, True, 0)
self.g_heading = Gtk.Label()
self.g_heading.set_alignment(0.0, 0.5)
# FIXME escape m_name
self.g_heading.set_markup("<b>%s</b>" % model.m_name)
hbox.pack_start(self.g_heading, False, False, 0)
###
button_hbox = Gtk.HBox()
button_hbox.set_spacing(0)
hbox.pack_start(button_hbox, False, False, 0)
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_EDIT, Gtk.IconSize.MENU)
button = Gtk.Button()
button.add(im)
button.connect('clicked', self.on_edit_heading)
button_hbox.pack_start(button, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_ADD, Gtk.IconSize.MENU)
button = Gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_add)
button_hbox.pack_start(button, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_REMOVE, Gtk.IconSize.MENU)
button = Gtk.Button()
button.add(im)
button.connect('button-release-event', self.on_remove)
button_hbox.pack_start(button, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_CUT, Gtk.IconSize.MENU)
b = Gtk.Button()
b.add(im)
b.connect('clicked', self.on_cut)
button_hbox.pack_start(b, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_PASTE, Gtk.IconSize.MENU)
b = Gtk.Button()
b.add(im)
b.connect('clicked', self.on_paste, -1)
Editor.clipboard.register_paste_button(b, (pd.LinkList, pd.Page, unicode))
button_hbox.pack_start(b, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_GO_DOWN, Gtk.IconSize.MENU)
self.g_move_down_btn = Gtk.Button()
self.g_move_down_btn.add(im)
self.g_move_down_btn.connect('clicked',
self.m_parent.move_section_down, self)
button_hbox.pack_start(self.g_move_down_btn, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_GO_UP, Gtk.IconSize.MENU)
self.g_move_up_btn = Gtk.Button()
self.g_move_up_btn.add(im)
self.g_move_up_btn.connect('clicked',
self.m_parent.move_section_up, self)
button_hbox.pack_start(self.g_move_up_btn, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_GO_BACK, Gtk.IconSize.MENU)
self.g_move_left_btn = Gtk.Button()
self.g_move_left_btn.add(im)
self.g_move_left_btn.connect('clicked',
parent.m_parent.on_move_section_left, self)
button_hbox.pack_start(self.g_move_left_btn, False, False, 0)
###
im = Gtk.Image()
im.set_from_stock(Gtk.STOCK_GO_FORWARD, Gtk.IconSize.MENU)
self.g_move_right_btn = Gtk.Button()
self.g_move_right_btn.add(im)
self.g_move_right_btn.connect('clicked',
parent.m_parent.on_move_section_right, self)
button_hbox.pack_start(self.g_move_right_btn, False, False, 0)
###
self.g_link_box = Gtk.VBox()
self.pack_start(self.g_link_box, False, False, 0)
for link in self.m_model:
self.g_link_box.pack_start(self.create_linkrow(link), True, True, 0)
# The button to click to add a new link
hbox = Gtk.HBox()
self.pack_start(hbox, True, True, 0)
    def on_edit_heading(self, btn):
        """Swap the section heading label for a text entry so the user can
        rename the section in place.

        Enter or Tab commits the new name; Escape cancels the edit.
        """
        # Seed the entry with the current name and swap the widgets.
        self.g_heading_entry.set_text(self.m_model.m_name)
        self.g_heading_entry.show()
        self.g_heading.hide()
        self.g_heading_entry.grab_focus()
        def finish_edit(entry):
            # Commit: disconnect all three handlers, store the new name in
            # the model, refresh the bold label and swap the widgets back.
            self.g_heading_entry.disconnect(sid)
            self.g_heading_entry.disconnect(keyup_id)
            self.g_heading_entry.disconnect(keydown_sid)
            self.m_model.m_name = entry.get_text()
            self.g_heading.set_markup(u"<b>%s</b>" % entry.get_text())
            self.g_heading_entry.hide()
            self.g_heading.show()
        sid = self.g_heading_entry.connect('activate', finish_edit)
        def keydown(entry, event):
            # Tab commits the edit, just like Enter.
            if event.keyval == Gdk.KEY_Tab:
                finish_edit(entry)
        keydown_sid = self.g_heading_entry.connect('key-press-event', keydown)
        def keyup(entry, event):
            # Escape cancels the edit and restores the old label.
            # NOTE(review): this path disconnects sid and keyup_id but not
            # keydown_sid, so the key-press handler seems to stay
            # connected after a cancel — confirm whether that is intended.
            if event.keyval == Gdk.KEY_Escape:
                self.g_heading_entry.disconnect(sid)
                self.g_heading_entry.disconnect(keyup_id)
                self.g_heading_entry.hide()
                self.g_heading.show()
                return True
        keyup_id = self.g_heading_entry.connect('key-release-event', keyup)
def on_add(self, btn, event):
menu = Gtk.Menu()
item = Gtk.MenuItem(_("Add link to new page"))
item.connect('activate', self.on_add_link_to_new_page)
menu.append(item)
item = Gtk.MenuItem(_("Add link to exercise"))
item.connect('activate', self.on_add_link)
menu.append(item)
item = Gtk.MenuItem(_("Add link by searching for exercises"))
item.connect('activate', self.on_add_link_by_search)
menu.append(item)
menu.show_all()
menu.popup(None, None, None, None, event.button, event.time)
def on_remove(self, btn, event):
self.m_parent.remove_section(self)
def on_add_link_by_search(self, btn):
dlg = SelectLessonfileBySearchDialog()
while True:
ret = dlg.run()
if ret == Gtk.ResponseType.OK:
self._add_filenames([os.path.abspath(lessonfile.uri_expand(dlg.m_filename))])
else:
break
dlg.destroy()
def on_add_link(self, btn):
if editor_of(self).m_filename:
open_dir = os.path.split(editor_of(self).m_filename)[0]
else:
open_dir = filesystem.user_data()
dlg = SelectLessonFileDialog(editor_of(self))
dlg.set_current_folder(open_dir)
while 1:
ret = dlg.run()
if ret in (Gtk.ResponseType.REJECT, Gtk.ResponseType.DELETE_EVENT, Gtk.ResponseType.CANCEL):
break
else:
assert ret == Gtk.ResponseType.OK
self._add_filenames(dlg.get_filenames())
break
dlg.destroy()
def _add_filenames(self, filenames):
for filename in filenames:
fn = gu.decode_filename(filename)
assert os.path.isabs(fn)
# If the file name is a file in a subdirectory below
# lessonfile.exercises_dir in the current working directory,
# then the file is a standard lesson file, and it will be
# converted to a uri scheme with:
fn = lessonfile.mk_uri(fn)
# Small test to check that the file actually is a lesson file.
try:
lessonfile.infocache.get(fn, 'title')
except lessonfile.infocache.FileNotLessonfile:
continue
self.m_model.append(fn)
self.g_link_box.pack_start(self.create_linkrow(fn, True, True, 0), False)
def on_add_link_to_new_page(self, menuitem):
page = pd.Page(_("Untitled%s") % "", [pd.Column()])
self.m_model.append(page)
self.g_link_box.pack_start(self.create_linkrow(page, True, True, 0))
    def create_linkrow(self, link_this):
        """Return a HBox holding a clickable label that represents
        LINK_THIS.

        LINK_THIS is either a pd.Page (a link to a sub-page) or a string
        (a lesson file URI).
        """
        hbox = Gtk.HBox()
        def ff(btn, page):
            # Open the sub-page; the editor Page widget is created lazily
            # the first time the link is followed.
            if id(page) in editor_of(self).m_page_mapping:
                editor_of(self).show_page_id(id(page))
            else:
                if not page[0]:
                    # First column is empty: seed it with a section named
                    # after the link.
                    page[0].append(pd.LinkList(link_this.m_name))
                p = Page(page, parent_page(self))
                p.show()
                editor_of(self).add_page(p)
        if isinstance(link_this, pd.Page):
            linkbutton = gu.ClickableLabel(link_this.m_name)
            linkbutton.connect('clicked', ff, link_this)
        else:
            # Lesson file link: label it with the file's title, or show a
            # warning label when the file cannot be found.
            try:
                linkbutton = gu.ClickableLabel(lessonfile.infocache.get(link_this, 'title'))
                linkbutton.set_tooltip_text(link_this)
            except lessonfile.InfoCache.FileNotFound:
                linkbutton = gu.ClickableLabel(_(u"«%s» was not found") % link_this)
                linkbutton.make_warning()
        hbox.pack_start(linkbutton, True, True, 0)
        linkbutton.connect('button-press-event', self.on_right_click_row, link_this)
        hbox.show_all()
        return hbox
def on_right_click_row(self, button, event, linked):
idx = self.m_model.index(linked)
if event.button == 3:
m = Gtk.Menu()
item = Gtk.ImageMenuItem(Gtk.STOCK_DELETE)
item.connect('activate', self.on_delete_link, linked)
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_CUT)
item.connect('activate', self.on_cut_link, idx)
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_PASTE)
item.set_sensitive(bool(Editor.clipboard))
item.connect('activate', self.on_paste, idx)
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_EDIT)
item.connect('activate', self.on_edit_linktext, linked)
item.set_sensitive(bool(not isinstance(linked, basestring)))
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_GO_UP)
item.connect('activate', self.on_move_link_up, idx)
item.set_sensitive(bool(idx > 0))
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_GO_DOWN)
item.connect('activate', self.on_move_link_down, idx)
item.set_sensitive(bool(idx < len(self.m_model) - 1))
###
m.append(item)
item = Gtk.ImageMenuItem(Gtk.STOCK_EDIT)
item.set_sensitive(isinstance(linked, unicode))
item.connect('activate', self.on_edit_file, idx)
###
m.append(item)
m.show_all()
m.popup(None, None, None, None, event.button, event.time)
return True
def on_delete_link(self, menuitem, linked):
idx = self.m_model.index(linked)
if id(linked) in editor_of(self).m_page_mapping:
editor_of(self).destroy_window(id(linked))
self.g_link_box.get_children()[idx].destroy()
del self.m_model[idx]
    def on_edit_linktext(self, menuitem, linked):
        """Edit the display text of the link LINKED in place by
        temporarily replacing the link button with a text entry.

        Enter or Tab commits the change; Escape cancels it.
        """
        idx = self.m_model.index(linked)
        # row is the hbox containing the linkbutton
        row = self.g_link_box.get_children()[idx]
        linkbutton = row.get_children()[0]
        entry = Gtk.Entry()
        entry.set_text(linkbutton.get_label())
        row.pack_start(entry, True, True, 0)
        linkbutton.hide()
        entry.show()
        entry.grab_focus()
        def finish_edit(entry):
            # Commit: copy the new text to both the button and the model,
            # then drop the entry widget.
            linkbutton.set_label(entry.get_text().decode("utf-8"))
            linkbutton.get_children()[0].set_alignment(0.0, 0.5)
            linkbutton.show()
            self.m_model[idx].m_name = entry.get_text().decode("utf-8")
            entry.destroy()
        sid = entry.connect('activate', finish_edit)
        def keydown(entry, event):
            # Tab commits the edit, just like Enter.
            if event.keyval == Gdk.KEY_Tab:
                finish_edit(entry)
        entry.connect('key-press-event', keydown)
        def keyup(entry, event):
            # Escape cancels: restore the button and destroy the entry.
            if event.keyval == Gdk.KEY_Escape:
                linkbutton.show()
                entry.disconnect(sid)
                entry.destroy()
                return True
        entry.connect('key-release-event', keyup)
    def on_edit_file(self, menuitem, linked):
        """Open the linked lesson file in the configured text editor.

        NOTE(review): despite its name, ``linked`` is used as the integer
        index of the link here (``self.m_model[linked]``), and the
        context menu connects this handler with ``idx`` — confirm and
        consider renaming.
        """
        try:
            try:
                # Spawn the external editor; a missing binary raises
                # OSError, which is rewrapped with a user-readable name.
                subprocess.call((cfg.get_string("programs/text-editor"),
                    lessonfile.uri_expand(self.m_model[linked])))
            except OSError, e:
                raise osutils.BinaryForProgramException("Text editor", cfg.get_string("programs/text-editor"), e)
        except osutils.BinaryForProgramException, e:
            solfege.win.display_error_message2(e.msg1, e.msg2)
def on_cut(self, btn):
self.m_parent.cut_section(self)
def on_cut_link(self, menuitem, idx):
Editor.clipboard.append(self.m_model[idx])
del self.m_model[idx]
self.g_link_box.get_children()[idx].destroy()
def on_paste(self, btn, idx):
assert Editor.clipboard, "Paste buttons should be insensitive when the clipboard is empty."
pobj = Editor.clipboard.pop()
if isinstance(pobj, pd.LinkList):
mobj = pd.Page(pobj.m_name, [pd.Column(pobj)])
else:
mobj = pobj
if idx == -1:
self.m_model.append(mobj)
self.g_link_box.pack_start(self.create_linkrow(mobj, True, True, 0))
else:
self.m_model.insert(idx, mobj)
row = self.create_linkrow(mobj)
self.g_link_box.pack_start(row, True, True, 0)
self.g_link_box.reorder_child(row, idx)
def on_move_link_up(self, btn, idx):
"""
Move the link one row up.
"""
assert idx > 0
self.m_model[idx], self.m_model[idx - 1] = self.m_model[idx - 1], self.m_model[idx]
self.g_link_box.reorder_child(self.g_link_box.get_children()[idx], idx - 1)
def on_move_link_down(self, btn, idx=None):
"""
Move the link one row down.
"""
self.m_model[idx], self.m_model[idx + 1] = self.m_model[idx + 1], self.m_model[idx]
self.g_link_box.reorder_child(self.g_link_box.get_children()[idx], idx + 1)
class Column(Gtk.VBox):
    """One column of a front page: a vertical stack of Section widgets
    mirroring a pd.Column model."""
    def __init__(self, model, parent):
        # model: the pd.Column being edited; parent: the enclosing Page.
        Gtk.VBox.__init__(self)
        self.set_spacing(gu.hig.SPACE_MEDIUM)
        self.m_model = model
        self.m_parent = parent
        assert isinstance(model, pd.Column)
        # One Section widget per pd.LinkList in the model.
        self.g_section_box = Gtk.VBox()
        self.g_section_box.set_spacing(gu.hig.SPACE_MEDIUM)
        self.pack_start(self.g_section_box, False, False, 0)
        for section in model:
            assert isinstance(section, pd.LinkList)
            gui_section = Section(section, self)
            self.g_section_box.pack_start(gui_section, False, False, 0)
        # Button row below the sections: "Add section" and paste.
        hbox = Gtk.HBox()
        self.pack_start(hbox, False, False, 0)
        b = Gtk.Button(_("Add section"))
        hbox.pack_start(b, False, False, 0)
        b.connect('clicked', self.on_add_section)
        b = Gtk.Button(stock=Gtk.STOCK_PASTE)
        b.connect('clicked', self.on_paste)
        Editor.clipboard.register_paste_button(b, pd.LinkList)
        hbox.pack_start(b, False, False, 0)
    def __del__(self):
        logging.debug("Column.__del__")
    def cut_section(self, section):
        # Move SECTION (a Section widget) to the shared clipboard.
        idx = self.g_section_box.get_children().index(section)
        Editor.clipboard.append(self.m_model[idx])
        del self.m_model[idx]
        self.g_section_box.get_children()[idx].destroy()
    def remove_section(self, section):
        # Delete SECTION permanently (it is not put on the clipboard).
        idx = self.g_section_box.get_children().index(section)
        del self.m_model[idx]
        self.g_section_box.get_children()[idx].destroy()
    def on_add_section(self, btn):
        # We write "Untitled%s" % "" instead of just "Untitled" here
        # since "Untitled%s" is already translated in many languages.
        # NOTE(review): here the "%" is applied before _() —
        # on_add_link_to_new_page uses _("Untitled%s") % "" instead;
        # confirm which order is intended.
        section = pd.LinkList(_("Untitled%s" % ""))
        self.m_model.append(section)
        gui_section = Section(section, self)
        self.g_section_box.pack_start(gui_section, False, False, 0)
        gui_section.show_all()
    def move_section_down(self, widget, section):
        # Swap SECTION with the section below it, if there is one.
        idx = self.g_section_box.get_children().index(section)
        if idx < len(self.g_section_box.get_children()) - 1:
            self.g_section_box.reorder_child(section, idx + 1)
            self.m_model[idx], self.m_model[idx + 1] \
                = self.m_model[idx + 1], self.m_model[idx]
            self.m_parent.update_buttons()
    def move_section_up(self, widget, section):
        # Swap SECTION with the section above it, if there is one.
        idx = self.g_section_box.get_children().index(section)
        if idx > 0:
            self.g_section_box.reorder_child(section, idx - 1)
            self.m_model[idx], self.m_model[idx - 1] \
                = self.m_model[idx - 1], self.m_model[idx]
            self.m_parent.update_buttons()
    def on_paste(self, widget):
        """
        Paste the clipboard as a new section to this column.
        """
        assert Editor.clipboard, "Paste buttons should be insensitive when the clipboard is empty."
        assert isinstance(Editor.clipboard[-1], pd.LinkList)
        pobj = Editor.clipboard.pop()
        self.m_model.append(pobj)
        sect = Section(pobj, self)
        sect.show_all()
        self.g_section_box.pack_start(sect, False, False, 0)
class Page(Gtk.VBox):
    """A single front page: a horizontal row of Column widgets inside a
    scrolled window, mirroring a pd.Page model."""
    def __init__(self, model, parent):
        # model: the pd.Page being edited; parent: the Editor window or
        # the Page that linked to this one.
        Gtk.VBox.__init__(self)
        self.m_model = model
        self.m_parent = parent
        sc = Gtk.ScrolledWindow()
        sc.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.pack_start(sc, True, True, 0)
        self.g_column_box = Gtk.HBox()
        self.g_column_box.set_spacing(gu.hig.SPACE_LARGE)
        self.g_column_box.set_border_width(gu.hig.SPACE_SMALL)
        # We pack column into this box
        sc.add_with_viewport(self.g_column_box)
        self.show_all()
        if model:
            self.update_from_model()
    def __del__(self):
        # BUGFIX: the name was passed as a stray positional argument
        # with no %s placeholder, which makes logging report a
        # formatting error instead of the message.
        logging.debug("Page.__del__: %s", self.m_model.m_name)
    def on_add_column(self, *btn):
        """Append a new, empty column to this page."""
        column = pd.Column()
        self.m_model.append(column)
        gcol = Column(column, self)
        gcol.show_all()
        self.g_column_box.pack_start(gcol, True, True, 0)
    def on_move_section_left(self, button, section):
        """Move SECTION to the bottom of the column to its left."""
        column_idx = self.g_column_box.get_children().index(section.m_parent)
        section_idx = section.m_parent.g_section_box.get_children().index(section)
        if column_idx > 0:
            to_column = self.g_column_box.get_children()[column_idx - 1]
            section.reparent(to_column.g_section_box)
            section.m_parent = to_column
            to_column.g_section_box.set_child_packing(section, False, False, 0, Gtk.PACK_START)
            self.m_model[column_idx - 1].append(self.m_model[column_idx][section_idx])
            del self.m_model[column_idx][section_idx]
            # Remove the right-most column if we moved the
            # last section out of it.
            if not self.g_column_box.get_children()[-1].g_section_box.get_children():
                assert len(self.m_model[-1]) == 0
                del self.m_model[-1]
                self.g_column_box.get_children()[-1].destroy()
            self.update_buttons()
    def on_move_section_right(self, button, section):
        """Move SECTION to the bottom of the column to its right,
        creating a new column when it is already in the last one."""
        # the column we move from
        column_idx = self.g_column_box.get_children().index(section.m_parent)
        section_idx = section.m_parent.g_section_box.get_children().index(section)
        if column_idx == len(self.g_column_box.get_children()) - 1:
            self.on_add_column()
        to_column = self.g_column_box.get_children()[column_idx + 1]
        section.reparent(to_column.g_section_box)
        section.m_parent = to_column
        to_column.g_section_box.set_child_packing(section, False, False, 0, Gtk.PACK_START)
        # (removed an unused local that cached the target section index)
        self.m_model[column_idx + 1].append(self.m_model[column_idx][section_idx])
        del self.m_model[column_idx][section_idx]
        self.update_buttons()
    def update_from_model(self):
        """Rebuild every Column widget from self.m_model."""
        for child in self.g_column_box.get_children():
            child.destroy()
        for column in self.m_model:
            self.g_column_box.pack_start(Column(column, self), False, False, 0)
        self.g_column_box.show_all()
        self.update_buttons()
    def update_buttons(self):
        """Refresh the sensitivity of every section's move buttons."""
        num_cols = len(self.g_column_box.get_children())
        for col_idx, column in enumerate(self.g_column_box.get_children()):
            num_sects = len(column.g_section_box.get_children())
            for sect_idx, section in enumerate(column.g_section_box.get_children()):
                section.g_move_up_btn.set_sensitive(sect_idx != 0)
                section.g_move_down_btn.set_sensitive(sect_idx != num_sects - 1)
                section.g_move_left_btn.set_sensitive(col_idx != 0)
                # Disable "move right" in the last column while an empty
                # column already exists somewhere on the page.
                if [col for col in self.g_column_box.get_children() if not col.g_section_box.get_children()] and col_idx == num_cols - 1:
                    section.g_move_right_btn.set_sensitive(False)
                else:
                    section.g_move_right_btn.set_sensitive(True)
class Clipboard(list):
def __init__(self, v=[]):
list.__init__(v)
self.m_paste_buttons = []
def pop(self, i=-1):
ret = list.pop(self, i)
self.update_buttons()
return ret
def append(self, obj):
list.append(self, obj)
self.update_buttons()
def register_paste_button(self, button, accepts_types):
button.set_sensitive(bool(self) and isinstance(self[-1], accepts_types))
self.m_paste_buttons.append((button, accepts_types))
def update_buttons(self):<|fim▁hole|> button.set_sensitive(bool(self) and isinstance(self[-1], types))
class Editor(Gtk.Window, gu.EditorDialogBase):
savedir = os.path.join(filesystem.user_data(), u'exercises', u'user')
# The clipboard will be shared between all Editor instances
clipboard = Clipboard()
def __init__(self, filename=None):
Gtk.Window.__init__(self)
logging.debug("fpeditor.Editor.__init__(%s)", filename)
gu.EditorDialogBase.__init__(self, filename)
self.set_default_size(800, 600)
self.g_main_box = Gtk.VBox()
self.add(self.g_main_box)
self.g_actiongroup.add_actions([
('GoBack', Gtk.STOCK_GO_BACK, None, None, None, self.go_back),
])
self.setup_toolbar()
self.g_title_hbox = Gtk.HBox()
self.g_title_hbox.set_spacing(gu.hig.SPACE_SMALL)
self.g_title_hbox.set_border_width(gu.hig.SPACE_SMALL)
label = Gtk.Label()
label.set_markup(u"<b>%s</b>" % _("Front page title:"))
self.g_title_hbox.pack_start(label, False, False, 0)
self.g_fptitle = Gtk.Entry()
self.g_title_hbox.pack_start(self.g_fptitle, True, True, 0)
self.g_main_box.pack_start(self.g_title_hbox, False, False, 0)
# This dict maps the windows created for all pages belonging to
# the file.
self.m_page_mapping = {}
self.m_model = None
if filename:
self.load_file(filename)
else:
self.m_model = pd.Page(_("Untitled%s") % self.m_instance_number,
pd.Column())
self.set_not_modified()
self.add_page(Page(self.m_model, self))
self.clipboard.update_buttons()
self.show_all()
self.add_to_instance_dict()
self.g_fptitle.set_text(self.m_model.m_name)
self.g_fptitle.connect('changed', self.on_frontpage_title_changed)
def __del__(self):
logging.debug("fpeditor.Editor.__del__, filename=%s", self.m_filename)
def add_page(self, page):
"""
Add and show the page.
"""
editor_of(self).m_page_mapping[id(page.m_model)] = page
self.g_main_box.pack_start(page, True, True, 0)
self.show_page(page)
def show_page_id(self, page_id):
self.show_page(self.m_page_mapping[page_id])
def show_page(self, page):
"""
Hide the currently visible page, and show PAGE instead.
"""
try:
self.g_visible_page.hide()
except AttributeError:
pass
self.g_visible_page = page
page.show()
if isinstance(page.m_parent, Page):
self.g_title_hbox.hide()
else:
self.g_title_hbox.show()
self.g_ui_manager.get_widget("/Toolbar/GoBack").set_sensitive(
not isinstance(self.g_visible_page.m_parent, Editor))
def go_back(self, *action):
self.show_page(self.g_visible_page.m_parent)
def on_frontpage_title_changed(self, widget):
self.m_model.m_name = widget.get_text()
def setup_toolbar(self):
self.g_ui_manager.insert_action_group(self.g_actiongroup, 0)
uixml = """
<ui>
<toolbar name='Toolbar'>
<toolitem action='GoBack'/>
<toolitem action='New'/>
<toolitem action='Open'/>
<toolitem action='Save'/>
<toolitem action='SaveAs'/>
<toolitem action='Close'/>
<toolitem action='Help'/>
</toolbar>
<accelerator action='Close'/>
<accelerator action='New'/>
<accelerator action='Open'/>
<accelerator action='Save'/>
</ui>
"""
self.g_ui_manager.add_ui_from_string(uixml)
toolbar = self.g_ui_manager.get_widget("/Toolbar")
self.g_main_box.pack_start(toolbar, False, False, 0)
self.g_main_box.reorder_child(toolbar, 0)
self.g_ui_manager.get_widget("/Toolbar").set_style(Gtk.ToolbarStyle.BOTH)
def destroy_window(self, window_id):
"""
Destroy the window with the id 'windowid' and all subwindows.
"""
def do_del(wid):
for key in self.m_page_mapping:
parent = parent_page(self.m_page_mapping[key])
if id(parent) == wid:
do_del(key)
editor_of(self).m_page_mapping[wid].destroy()
del editor_of(self).m_page_mapping[wid]
do_del(window_id)
@staticmethod
def edit_file(fn):
if fn in Editor.instance_dict:
Editor.instance_dict[fn].present()
else:
try:
win = Editor(fn)
win.show()
except IOError, e:
gu.dialog_ok(_("Loading file '%(filename)s' failed: %(msg)s") %
{'filename': fn, 'msg': str(e).decode('utf8', 'replace')})
def load_file(self, filename):
"""
Load a file into a empty, newly created Editor object.
"""
assert self.m_model == None
self.m_model = pd.load_tree(filename, C_locale=True)
self.m_filename = filename
#
if not os.path.isabs(filename):
if not os.access(filename, os.W_OK):
m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL, Gtk.MessageType.INFO,
Gtk.ButtonsType.CLOSE, _("The front page file is write protected in your install. This is normal. If you want to edit a front page file, you have to select one of the files stored in .solfege/exercises/*/ in your home directory."))
m.run()
m.destroy()
self.set_not_modified()
self.set_title(self.m_filename)
def set_not_modified(self):
"""
Store the current state of the data in self.m_orig_dump so that
is_modified() will return False until we make new changes.
"""
io = StringIO.StringIO()
self.m_model.dump(io)
self.m_orig_dump = io.getvalue()
def is_modified(self):
"""
Return True if the data has changed since the last call to
set_not_modified()
"""
io = StringIO.StringIO()
self.m_model.dump(io)
s = io.getvalue()
return s != self.m_orig_dump
@property
def m_changed(self):
return self.is_modified()
def save(self, w=None):
assert self.m_filename
save_location = os.path.split(self.m_filename)[0] + os.sep
fh = pd.FileHeader(1, self.m_model)
fh.save_file(self.m_filename)
self.set_not_modified()
# We do test for solfege.win since it is not available during testing
if hasattr(solfege, 'win'):
solfege.win.load_frontpage()
def on_show_help(self, *w):
return
def get_save_as_dialog(self):
dialog = gu.EditorDialogBase.get_save_as_dialog(self)
ev2 = Gtk.EventBox()
ev2.set_name("DIALOGWARNING2")
ev = Gtk.EventBox()
ev.set_border_width(gu.hig.SPACE_SMALL)
ev2.add(ev)
ev.set_name("DIALOGWARNING")
label = Gtk.Label()
label.set_padding(gu.hig.SPACE_MEDIUM, gu.hig.SPACE_MEDIUM)
ev.add(label)
label.set_markup(_("<b>IMPORTANT:</b> Your front page file <b>must</b> be saved in a subdirectory below the directory named exercises. See the user manual for details."))
dialog.set_extra_widget(ev2)
ev2.show_all()
return dialog
if __name__ == '__main__':
Gtk.link_button_set_uri_hook(lambda a, b: None)
e = Editor()
e.load_file("learningtrees/learningtree.txt")
Gtk.main()<|fim▁end|>
|
for button, types in self.m_paste_buttons:
|
<|file_name|>QualysAPI.py<|end_file_name|><|fim▁begin|>import requests
import xml.etree.ElementTree as ET
from time import sleep
class QualysAPI:
"""Class to simplify the making and handling of API calls to the Qualys platform
Class Members
=============
server : String : The FQDN of the API server (with https:// prefix)
user : String : The username of an API user in the subscription
password : String : The password of the API user
proxy : String : The FQDN of the proxy server to be used for connections (with https:// prefix)
debug : Boolean : If True, will output debug information to the console during member function execution
enableProxy : Boolean : If True will force connections via the proxy defined in the 'proxy' class member
callCount : Integer : The number of API calls made during the life of the API object
Class Methods
=============
__init__(svr, usr, passwd, proxy, enableProxy, debug)
Called when an object of type QualysAPI is created
svr : String : The FQDN of the API server (with https:// prefix).
Default value = ""
usr : String : The username of an API user in the subscription.
Default value = ""
passwd : String : The password of the API user.
Default value = ""
proxy : String : The FQDN of the proxy server to be used for connections (with https:// prefix)
Default value = ""
enableProxy : Boolean : If True, will force connections made via the proxy defined in the 'proxy' class
member
Default value = False
debug : Boolean : If True, will output debug information to the console during member function
execution
Default value = False
makeCall(url, payload, headers, retryCount)
Make a Qualys API call and return the response in XML format as an ElementTree.Element object
url : String : The full URL of the API request, including any URL encoded parameters
NO DEFAULT VALUE, REQUIRED PARAMETER
payload : String : The payload (body) of the API request
Default value = ""
headers : Dict : HTTP Request headers to be sent in the API call
Default value = None
retryCount : Integer : The number of times this call has been attempted. Used in rate and concurrency
limit handling, not intended for use by users
Default value = 0
Example :
api = QualysAPI(svr='https://qualysapi.qualys.com',
usr='username',
passwd='password',
proxy='https://proxy.internal',
enableProxy = True,
debug=False)
fullurl = '%s/full/path/to/api/call' % api.url
api.makeCall(url=fullURL, payload='', headers={'X-Requested-With': 'python3'})
"""
server: str
user: str
password: str
proxy: str
debug: bool
enableProxy: bool
callCount: int
headers = {}
sess: requests.Session
def __init__(self, svr="", usr="", passwd="", proxy="", enableProxy=False, debug=False):
# Set all member variables from the values passed in when object is created
self.server = svr
self.user = usr
self.password = passwd
self.proxy = proxy
self.enableProxy = enableProxy
self.debug = debug
self.callCount = 0
# Create a session object with the requests library
self.sess = requests.session()
# Set the authentication credentials for the session to be the (username, password) tuple
self.sess.auth = (self.user, self.password)
# Add a default X-Requested-With header (most API calls require it, it doesn't hurt to have it in all calls)
self.headers = {'X-Requested-With': 'python3/requests'}
def makeCall(self, url, payload="", headers=None, retryCount=0):
# Get the headers from our own session object
rheaders = self.sess.headers
# If there are headers (meaning the __init__ method has been called and the api object was correctly created)
if headers is not None:
# copy each of the headers passed in via the 'headers' variable to the session headers so they are included
# in the request
for h in headers.keys():
rheaders[h] = headers[h]
# Create a Request object using the requests library
r = requests.Request('POST', url, data=payload, headers=rheaders)
# Prepare the request for sending
prepped_req = self.sess.prepare_request(r)
# If the proxy is enabled, send via the proxy
if self.enableProxy:
resp = self.sess.send(prepped_req, proxies={'https': self.proxy})
# Otherwise send direct
else:
resp = self.sess.send(prepped_req)
if self.debug:
print("QualysAPI.makeCall: Headers...")
print("%s" % str(resp.headers))
# Handle concurrency limit failures
if 'X-Concurrency-Limit-Limit' in resp.headers.keys() and 'X-Concurrency-Limit-Running' in resp.headers.keys():
climit = int(resp.headers['X-Concurrency-Limit-Limit'])
crun = int(resp.headers['X-Concurrency-Limit-Running'])
# If crun > climit then we have hit the concurrency limit. We then wait for a number of seconds depending
# on how many retry attempts there have been
if crun > climit:
print("QualysAPI.makeCall: Concurrency limit hit. %s/%s running calls" % (crun,climit))
retryCount = retryCount + 1
if retryCount > 15:
print("QualysAPI.makeCall: Retry count > 15, waiting 60 seconds")
waittime = 60
elif retryCount > 5:
print("QualysAPI.makeCall: Retry count > 5, waiting 30 seconds")<|fim▁hole|> waittime = 15
# Sleep here
sleep(waittime)
print("QualysAPI.makeCall: Retrying (retryCount = %s)" % str(retryCount))
# Make a self-referential call to this same class method, passing in the retry count so the next
# iteration knows how many attempts have been made so far
resp = self.makeCall(url=url, payload=payload,headers=headers, retryCount=retryCount)
# Handle rate limit failures
if 'X-RateLimit-ToWait-Sec' in resp.headers.keys():
if resp.headers['X-RateLimit-ToWait-Sec'] > 0:
# If this response header has a value > 0, we know we have to wait some time so first we increment
# the retryCount
retryCount = retryCount + 1
# Get the number of seconds to wait from the response header. Add to this a number of seconds depending
# on how many times we have already tried this call
waittime = int(resp.headers['X-RateLimit-ToWait-Sec'])
print("QualysAPI.makeCall: Rate limit reached, suggested wait time: %s seconds" % waittime)
if retryCount > 15:
print("QualysAPI.makeCall: Retry Count > 15, adding 60 seconds to wait time")
waittime = waittime + 60
elif retryCount > 5:
print("QualysAPI.makeCall: Retry Count > 5, adding 30 seconds to wait time")
waittime = waittime + 30
# Sleep here
sleep(waittime)
print("QualysAPI.makeCall: Retrying (retryCount = %s)" % str(retryCount))
# Make a self-referential call to this same class method, passing in the retry count so the next
# iteration knows how many attempts have been made so far
resp = self.makeCall(url=url, payload=payload, headers=headers, retryCount=retryCount)
# Increment the API call count (failed calls are not included in the count)
self.callCount = self.callCount + 1
# Return the response as an ElementTree XML object
return ET.fromstring(resp.text)<|fim▁end|>
|
waittime = 30
else:
print("QualysAPI.makeCall: Waiting 15 seconds")
|
<|file_name|>db.py<|end_file_name|><|fim▁begin|>__author__ = 'fatihka'
from sqlalchemy import Column, Integer, String, Unicode, Float, Boolean, create_engine, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# db_name = 'ww.db'
db_name = ':memory:'
tanimlar = {'company': 'Fatih Ka.', 'optional': 'NO'}
periodss = list()
len_periods = 0
Hesaplar = None
session = None
Base = declarative_base()
__all__ = ['Hesaplar', 'Lead', 'session', 'tanimlar']
class Lead(Base):
__tablename__ = 'ana_hesaplar'
id = Column(Integer, primary_key=True)
name = Column(String, nullable=True)<|fim▁hole|>
def make_hesaplar():
    """Build and return the dynamic ``Hesaplar`` declarative model.

    One Float column is generated per entry in the module-level
    ``periodss`` list, so this must be called only after ``periodss``
    has been populated.  NOTE(review): each call re-registers the
    'hesaplar' table on ``Base.metadata`` — presumably it is called at
    most once per process; confirm.
    """
    class Hesaplar(Base):
        __table__ = Table('hesaplar', Base.metadata,
                          Column('id', Integer, primary_key=True),
                          Column('number', String, nullable=True),
                          Column('ana_hesap', String, nullable=True),
                          Column('name', Unicode, nullable=True),
                          Column('lead_code', String, default='Unmapped', nullable=True),
                          Column('len', Integer, nullable=True),
                          Column('bd', Boolean, nullable=True, default=False),
                          Column('optional', Unicode, nullable=True),
                          # One value column per reporting period.
                          *[Column('%s' % i, Float, nullable=True, default=0) for i in periodss]
                          )
    return Hesaplar
def create_db():
    """Create all tables on a fresh SQLite engine and return a session.

    The session is also stored in the module-level ``session`` global.
    """
    global session
    engine = create_engine("sqlite:///%s" % db_name, echo=False)
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    return session
|
lead_code = Column(String, nullable=True)
account = Column(String, nullable=True)
account_name = Column(String, nullable=True)
|
<|file_name|>SBTCVM-asm2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import VMSYSTEM.libSBTCVM as libSBTCVM
import VMSYSTEM.libbaltcalc as libbaltcalc
import sys
import os
# ---- global assembler state ----------------------------------------
assmoverrun=19683  # address-space limit: 3**9
instcnt=0  # instructions counted so far (incremented by the prescan pass)
txtblk=0  # flag: 1 while inside a textstart/textstop block
VMSYSROMS=os.path.join("VMSYSTEM", "ROMS")  # search directory for system ROMs
critcomperr=0  # NOTE(review): presumably a fatal-compile-error flag; set elsewhere
compvers="v2.2.0"  # assembler version string
outfile="assmout.trom"  # default output ROM file name
#define IOmaps
# Maps of symbolic I/O names to 9-trit balanced-ternary addresses.
IOmapread={"random": "--0------"}
IOmapwrite={}
#populate IOmaps with memory pointers
# Generate scratch-memory entries mem1..memN covering the address range
# scratchstart..scratchstop, stepping by +1 in balanced ternary.
scratchmap={}
scratchstart="---------"
shortsccnt=1
scratchstop="---++++++"
IOgen=scratchstart
while IOgen!=scratchstop:
    #scratchmap[("mem" + str(shortsccnt))] = IOgen
    IOmapread[("mem" + str(shortsccnt))] = IOgen
    IOmapwrite[("mem" + str(shortsccnt))] = IOgen
    IOgen=libSBTCVM.trunkto6(libbaltcalc.btadd(IOgen, "+"))
    shortsccnt += 1
#scratchmap[("mem" + str(shortsccnt))] = scratchstop
# The loop exits before handling scratchstop itself, so add the final
# address explicitly.
IOmapread[("mem" + str(shortsccnt))] = scratchstop
IOmapwrite[("mem" + str(shortsccnt))] = scratchstop
def getlinetern(line):
    """Return LINE (a decimal line number) encoded as a 6-trit balanced
    ternary string, offset so that line 9841 maps to zero."""
    offset = line - 9841
    return libSBTCVM.trunkto6(libbaltcalc.DECTOBT(offset))
tracecomp=0  # flag: 1 when compiling in tracelog mode (-t/--tracecompile)
#used to write to the compiler log if the compiler is in tracelog mode
def complog(textis):
    # Write TEXTIS to the compiler log, but only in tracelog mode.
    # (compilerlog is opened at startup when -t/--tracecompile is used.)
    if tracecomp==1:
        compilerlog.write(textis)
#class used by the goto refrence system
class gotoref:
    """A named goto target: remembers a source line both as its decimal
    number and as its balanced-ternary encoding."""
    def __init__(self, line, gtname):
        self.gtname = gtname
        self.line = line
        self.tline = getlinetern(line)
#begin by reading command line arguments
try:
cmd=sys.argv[1]
except:
cmd=None
if "GLOBASMFLG" in globals():
cmd=GLOBASMFLG
if cmd=="-h" or cmd=="--help" or cmd=="help":
print '''This is SBTCVM-asm2.py, SBTCVM Mark 2's assembler.
commands:
SBTCVM-asm2.py -h (--help) (help): this text
SBTCVM-asm2.py -v (--version)
SBTCVM-asm2.py -a (--about): about SBTCVM-asm2.py
SBTCVM-asm2.py -c (--compile) [sourcefile]: build a tasm source into a trom
SBTCVM-asm2.py -t (--tracecompile) [sourcefile]: same as -c but logs the compiling process in detail in the CAP directory.
SBTCVM-asm2.py [sourcefile]: build a tasm source into a trom
'''
elif cmd=="-v" or cmd=="--version":
print ("SBTCVM Assember" + compvers)
elif cmd=="-a" or cmd=="--about":
print '''SBTCVM Assembler 2
''' + compvers + '''
(c)2016-2017 Thomas Leathers and Contributors
SBTCVM Assembler 2 is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SBTCVM Assembler 2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SBTCVM Assembler 2. If not, see <http://www.gnu.org/licenses/>
'''
elif cmd==None:
print "tip: use SBTCVM-asm2.py -h for help."
elif cmd=="-c" or cmd=="--compile" or cmd[0]!="-" or cmd=="-t" or cmd=="--tracecompile":
print("SBTCVM-asm " + compvers + " starting")
if "GLOBASMFLG" in globals():
arg=GLOBASMFLG
else:
if cmd[0]!="-":
arg=sys.argv[1]
else:
arg=sys.argv[2]
print arg
lowarg=arg.lower()
argisfile=0
argistasm=0
for extq in ["", ".tasm", ".TASM"]:
qarg=(arg + extq)
qlowarg=(lowarg + extq.lower())
print "searching for: \"" + qarg + "\"..."
argisfile
if os.path.isfile(qarg):
argisfile=1
print "found: " + qarg
elif os.path.isfile(os.path.join("VMSYSTEM", qarg)):
qarg=os.path.join("VMSYSTEM", qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join(VMSYSROMS, qarg)):
qarg=os.path.join(VMSYSROMS, qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join("VMUSER", qarg)):
qarg=os.path.join("VMUSER", qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join("ROMS", qarg)):
qarg=os.path.join("ROMS", qarg)
print "found: " + qarg
argisfile=1
if argisfile==1:
if qlowarg.endswith(".tasm") and os.path.isfile(qarg):
print "tasm source found."
arg=qarg
argistasm=1
break
else:
print "Not valid."
argisfile=0
if argisfile==0 or argistasm==0:
#print "ERROR: file not found, or is not a tasm file STOP"
sys.exit("ERROR: SBTCVM assembler was unable to load the specified filename. STOP")
#generate a name for logs in case its needed
#logsub=arg.replace("/", "-")
#logsub=logsub.replace("~", "")
#logsub=logsub.split(".")
logsub=libSBTCVM.namecrunch(arg, "-tasm-comp.log")
#detect if command line options specify tracelog compile mode:
if cmd=="-t" or cmd=="--tracecompile":
tracecomp=1
compilerlog=open(os.path.join('CAP', logsub), "w")
else:
tracecomp=0
#arg=arg.replace("./", "")
#print arg
complog("starting up compiler...\n")
complog("TASM VERSION: SBTCVM-asm " + compvers + "\n")
complog("source: " + arg + "\n")
complog("---------\n\n")
#open 2 instances of source. one per pass.
sourcefile=open(arg, 'r')
sourcefileB=open(arg, 'r')
#open(arg, 'r') as sourcefile
gotoreflist=list()
print "preforming prescan & prep pass"
complog("preforming prescan & prep pass\n")
srcline=0
for linen in sourcefile:
srcline += 1
lined=linen
linen=linen.replace("\n", "")
linen=linen.replace(" ", "")
linenraw=linen
linen=(linen.split("#"))[0]
linelist=linen.split("|")
if (len(linelist))==2:
instword=(linelist[0])
instdat=(linelist[1])
else:
instword=(linelist[0])
instdat="000000000"
if instword=="textstop":
txtblk=0
complog("TEXTBLOCK END\n")
gtflag=1
if txtblk==1:
for f in lined:
instcnt += 1
elif instword=="textstart":
txtblk=1
complog("TEXTBLOCK START\n")
#raw class
elif instword=="romread1":
instcnt += 1
elif instword=="romread2":
instcnt += 1
elif instword=="IOread1":
instcnt += 1
elif instword=="IOread2":
instcnt += 1
elif instword=="IOwrite1":
instcnt += 1
elif instword=="IOwrite2":
instcnt += 1
elif instword=="regswap":
instcnt += 1
elif instword=="copy1to2":
instcnt += 1
elif instword=="copy2to1":
instcnt += 1
elif instword=="invert1":
instcnt += 1
elif instword=="invert2":
instcnt += 1
elif instword=="add":
instcnt += 1
elif instword=="subtract":
instcnt += 1
elif instword=="multiply":
instcnt += 1
elif instword=="divide":
instcnt += 1
elif instword=="setreg1":
instcnt += 1
elif instword=="setreg2":
instcnt += 1
elif instword=="setinst":
instcnt += 1
elif instword=="setdata":
instcnt += 1
#----jump in used opcodes----
#color drawing
elif instword=="continue":
instcnt += 1
elif instword=="colorpixel":
instcnt += 1
elif instword=="setcolorreg":
instcnt += 1
elif instword=="colorfill":
instcnt += 1
elif instword=="setcolorvect":
instcnt += 1
elif instword=="colorline":
instcnt += 1
elif instword=="colorrect":
instcnt += 1
#mono drawing
elif instword=="monopixel":
instcnt += 1
elif instword=="monofill":
instcnt += 1
elif instword=="setmonovect":
instcnt += 1
elif instword=="monoline":
instcnt += 1
elif instword=="monorect":
instcnt += 1
#----opcode --00-+ unused----
elif instword=="stop":
instcnt += 1
elif instword=="null":
instcnt += 1
elif instword=="gotodata":
instcnt += 1
elif instword=="gotoreg1":
instcnt += 1
elif instword=="gotodataif":
instcnt += 1
elif instword=="wait":
instcnt += 1
elif instword=="YNgoto":
instcnt += 1
elif instword=="userwait":
instcnt += 1
elif instword=="TTYclear":
instcnt += 1
#----gap in used opcodes----
elif instword=="gotoA":
instcnt += 1
autostpflg=1
elif instword=="gotoAif":
instcnt += 1
elif instword=="gotoB":
instcnt += 1
autostpflg=1
elif instword=="gotoBif":
instcnt += 1
elif instword=="gotoC":
instcnt += 1
elif instword=="gotoCif":
instcnt += 1
elif instword=="gotoD":
instcnt += 1
elif instword=="gotoDif":
instcnt += 1
elif instword=="gotoE":
instcnt += 1
elif instword=="gotoEif":
instcnt += 1
elif instword=="gotoF":
instcnt += 1
elif instword=="gotoFif":
instcnt += 1
#----gap in used opcodes----
elif instword=="dumpreg1":
instcnt += 1
elif instword=="dumpreg2":
instcnt += 1
elif instword=="TTYwrite":
instcnt += 1
elif instword=="buzzer":
instcnt += 1
elif instword=="setregset":
instcnt += 1
elif instword=="regset":
instcnt += 1
elif instword=="setkeyint":
instcnt += 1
elif instword=="keyint":
instcnt += 1
elif instword=="offsetlen":
instcnt += 1
elif instword=="clearkeyint":
instcnt += 1
elif instword=="gotoifgreater":
instcnt += 1
elif instword=="TTYbg":
instcnt += 2
elif instword=="TTYlinedraw":
instcnt += 2
elif instword=="TTYmode":
instcnt += 2
elif instword=="threadref":
instcnt += 1
elif instword=="threadstart":
instcnt += 1
elif instword=="threadstop":
instcnt += 1
elif instword=="threadkill":
instcnt += 1
else:
gtflag=0
if gtflag==1 and (txtblk==0 or linenraw=="textstart"):
complog("pass 1: srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " inst:" + instword + " instdat:" + instdat + "\n")
elif gtflag==1 and txtblk==1:
complog("TEXTBLOCK: pass 1 : srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " textline: \"" + linenraw + "\"\n")
if (len(linelist))==3 and gtflag==1 and txtblk==0 and instword[0]!="#":
if instword=="textstart":
instcnt += 1
gtox=gotoref((instcnt - 1), linelist[2])
gotoreflist.extend([gtox])
print ("found gotoref: \"" + linelist[2] + "\", at instruction:\"" + str((instcnt - 1)) + "\", Source line:\"" + str(srcline) + "\"")
complog("found gotoref: \"" + linelist[2] + "\", at instruction:\"" + str((instcnt - 1)) + "\", Source line:\"" + str(srcline) + "\"\n")
if instword=="textstart":
instcnt -= 1
#print gotoreflist
instcnt=0
firstloop=1
srcline=0
for linen in sourcefileB:
srcline += 1
if firstloop==1:
print "preforming compileloop startup..."
complog("\n\npreforming compileloop startup...\n")
assmflename=arg
complog("source file: \"" + assmflename + "\"\n")
assmnamelst=assmflename.rsplit('.', 1)
outfile=(assmnamelst[0] + (".trom"))
complog("output file: \"" + outfile + "\"\n")
outn = open(outfile, 'w')
firstloop=0
print "done. begin compile."
complog("done. begin compile.\n")
lined=linen
linen=linen.replace("\n", "")
linen=linen.replace(" ", "")
linenraw=linen
linen=(linen.split("#"))[0]
linelist=linen.split("|")
autostpflg=0
gtflag=1
if (len(linelist))==2 or (len(linelist))==3:
instword=(linelist[0])
instdat=(linelist[1])
else:
instword=(linelist[0])
instdat="000000000"
if instdat=="":
instdat="000000000"
print "NOTICE: data portion at source line:\"" + str(srcline) + "\" blank, defaulting to ground..."
complog("NOTICE: data portion at source line:\"" + str(srcline) + "\" blank, defaulting to ground...\n")
#if len(instdat)==6 and instdat[0]!=">" and instdat[0]!=":":
# print "Mark 1.x legacy NOTICE: instruction \"" + instword + "\" at \"" + str(srcline) + "\" did not have 9 trits data. it has been padded far from radix. please pad any legacy instructions manually."
# complog("Mark 1.x legacy NOTICE: instruction \"" + instword + "\" at \"" + str(srcline) + "\" did not have 9 trits data. it has been padded far from radix. please pad any legacy instructions manually.\n")<|fim▁hole|> if txtblk==1:
for f in lined:
texchout=libSBTCVM.charlook(f)
texchout=("000" + texchout)
outn.write("--+++0" + (texchout) + "\n")
instcnt += 1
elif instword=="textstart":
txtblk=1
complog("TEXTBLOCK START\n")
#raw class
elif instword=="romread1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("------" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("------" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="romread2":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-----0" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("-----0" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="IOread1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-----+" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapread[instgpe[1]]
outn.write("-----+" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#outn.write("-----+" + instdat + "\n")
#instcnt += 1
elif instword=="IOread2":
#outn.write("----0-" + instdat + "\n")
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----0-" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapread[instgpe[1]]
outn.write("----0-" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#instcnt += 1
elif instword=="IOwrite1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----00" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapwrite[instgpe[1]]
outn.write("----00" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#instcnt += 1
elif instword=="IOwrite2":
#outn.write("----0+" + instdat + "\n")
#instcnt += 1
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----0+" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapwrite[instgpe[1]]
outn.write("----0+" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
elif instword=="regswap":
outn.write("----+-" + instdat + "\n")
instcnt += 1
elif instword=="copy1to2":
outn.write("----+0" + instdat + "\n")
instcnt += 1
elif instword=="copy2to1":
outn.write("----++" + instdat + "\n")
instcnt += 1
elif instword=="invert1":
outn.write("---0--" + instdat + "\n")
instcnt += 1
elif instword=="invert2":
outn.write("---0-0" + instdat + "\n")
instcnt += 1
elif instword=="add":
outn.write("---0-+" + instdat + "\n")
instcnt += 1
elif instword=="subtract":
outn.write("---00-" + instdat + "\n")
instcnt += 1
elif instword=="multiply":
outn.write("---000" + instdat + "\n")
instcnt += 1
elif instword=="divide":
outn.write("---00+" + instdat + "\n")
instcnt += 1
elif instword=="setreg1":
outn.write("---0+-" + instdat + "\n")
instcnt += 1
elif instword=="setreg2":
outn.write("---0+0" + instdat + "\n")
instcnt += 1
elif instword=="setinst":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("---0++" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("---0++" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="setdata":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("---+--" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("---+--" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
#----jump in used opcodes----
elif instword=="continue":
outn.write("---+++" + instdat + "\n")
instcnt += 1
#color drawing
elif instword=="colorpixel":
outn.write("--0---" + instdat + "\n")
instcnt += 1
elif instword=="setcolorreg":
instclst=instdat.split(',')
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("--0--0" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("--0--0" + instdat + "\n")
instcnt += 1
elif instword=="colorfill":
instclst=instdat.split(',')
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("--0--+" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("--0--+" + instdat + "\n")
instcnt += 1
elif instword=="setcolorvect":
outn.write("--0-0-" + instdat + "\n")
instcnt += 1
elif instword=="colorline":
outn.write("--0-00" + instdat + "\n")
instcnt += 1
elif instword=="colorrect":
outn.write("--0-0+" + instdat + "\n")
instcnt += 1
#mono drawing
elif instword=="monopixel":
outn.write("--0-+-" + instdat + "\n")
instcnt += 1
elif instword=="monofill":
outn.write("--0-+0" + instdat + "\n")
instcnt += 1
elif instword=="setmonovect":
outn.write("--0-++" + instdat + "\n")
instcnt += 1
elif instword=="monoline":
outn.write("--00--" + instdat + "\n")
instcnt += 1
elif instword=="monorect":
outn.write("--00-0" + instdat + "\n")
instcnt += 1
#----opcode --00-+ unused----
elif instword=="stop":
outn.write("--000-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="null":
outn.write("000000" + instdat + "\n")
instcnt += 1
elif instword=="gotodata":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--000+" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--000+" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="gotoreg1":
outn.write("--00+-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotodataif":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--00+0" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--00+0" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="gotoifgreater":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--0+0-" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--0+0-" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
#instcnt += 1
elif instword=="wait":
outn.write("--00++" + instdat + "\n")
instcnt += 1
elif instword=="YNgoto":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("--0+--" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--0+--" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="userwait":
outn.write("--0+-0" + instdat + "\n")
instcnt += 1
elif instword=="TTYclear":
outn.write("--0+-+" + instdat + "\n")
instcnt += 1
#----gap in used opcodes----
elif instword=="gotoA":
outn.write("--+---" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoAif":
outn.write("--+--0" + instdat + "\n")
instcnt += 1
elif instword=="gotoB":
outn.write("--+--+" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoBif":
outn.write("--+-0-" + instdat + "\n")
instcnt += 1
elif instword=="gotoC":
outn.write("--+-00" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoCif":
outn.write("--+-0+" + instdat + "\n")
instcnt += 1
elif instword=="gotoD":
outn.write("--+-+-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoDif":
outn.write("--+-+0" + instdat + "\n")
instcnt += 1
elif instword=="gotoE":
outn.write("--+-++" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoEif":
outn.write("--+0--" + instdat + "\n")
instcnt += 1
elif instword=="gotoF":
outn.write("--+0-0" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoFif":
outn.write("--+0-+" + instdat + "\n")
instcnt += 1
#----gap in used opcodes----
elif instword=="dumpreg1":
outn.write("--++0+" + instdat + "\n")
instcnt += 1
elif instword=="dumpreg2":
outn.write("--+++-" + instdat + "\n")
instcnt += 1
elif instword=="TTYwrite":
#outn.write("--+++0" + instdat + "\n")
#instcnt += 1
instgpe=instdat.split(":")
if (len(instgpe))==1:
outn.write("--+++0" + instdat + "\n")
instcnt += 1
else:
if instgpe[1]=="enter":
ksc=" "
elif instgpe[1]=="space":
ksc="\n"
else:
ksc=(instgpe[1])[0]
outn.write("--+++0" + "000" + (libSBTCVM.charlook(ksc)) + "\n")
instcnt += 1
elif instword=="buzzer":
outn.write("--++++" + instdat + "\n")
instcnt += 1
elif instword=="setregset":
outn.write("-0-000" + instdat + "\n")
instcnt += 1
elif instword=="regset":
outn.write("-0-00+" + instdat + "\n")
instcnt += 1
elif instword=="setkeyint":
instgpe=instdat.split(":")
if (len(instgpe))==1:
outn.write("-0-+++" + instdat + "\n")
instcnt += 1
else:
if instgpe[1]=="space":
ksc=" "
elif instgpe[1]=="enter":
ksc="\n"
else:
ksc=(instgpe[1])[0]
outn.write("-0-+++" + "00000" + (libSBTCVM.texttoscan[ksc]) + "\n")
instcnt += 1
elif instword=="keyint":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-00---" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("-00---" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="clearkeyint":
outn.write("-00--0" + instdat + "\n")
instcnt += 1
elif instword=="offsetlen":
instclst=instdat.split(",")
if len(instclst)==3:
tritgnd=instclst[0]
tritoffset=int(instclst[1])
tritlen=int(instclst[2])
if tritgnd=="on":
tritgndpar="+"
else:
tritgndpar="0"
if tritoffset==0:
tritoffsetpar="--"
elif tritoffset==1:
tritoffsetpar="-0"
elif tritoffset==2:
tritoffsetpar="-+"
elif tritoffset==3:
tritoffsetpar="0-"
elif tritoffset==4:
tritoffsetpar="00"
elif tritoffset==5:
tritoffsetpar="0+"
elif tritoffset==6:
tritoffsetpar="+-"
elif tritoffset==7:
tritoffsetpar="+0"
elif tritoffset==8:
tritoffsetpar="++"
else:
tritoffsetpar="--"
if tritlen==1:
tritlenpar="--"
elif tritlen==2:
tritlenpar="-0"
elif tritlen==3:
tritlenpar="-+"
elif tritlen==4:
tritlenpar="0-"
elif tritlen==5:
tritlenpar="00"
elif tritlen==6:
tritlenpar="0+"
elif tritlen==7:
tritlenpar="+-"
elif tritlen==8:
tritlenpar="+0"
elif tritlen==9:
tritlenpar="++"
else:
tritlenpar="++"
outn.write("-0-++0" + "0000" + tritgndpar + tritoffsetpar + tritlenpar + "\n")
else:
outn.write("-0-++0" + instdat + "\n")
instcnt += 1
#special regset shortcut commands
elif instword=="TTYbg":
instclst=instdat.split(",")
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("-0-000" + "---------" + "\n")
outn.write("-0-00+" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("-0-000" + "---------" + "\n")
outn.write("-0-00+" + instdat + "\n")
instcnt += 2
elif instword=="TTYlinedraw":
if instdat=="on":
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
elif instdat=="off":
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
else:
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
instcnt += 2
elif instword=="TTYmode":
if instdat=="27":
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
elif instdat=="54":
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
else:
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
instcnt += 2
elif instword=="threadref":
instcnt += 1
if len(instdat)==2:
outn.write("--+00-" + "0000000" + instdat + "\n")
else:
outn.write("--+00-" + instdat + "\n")
elif instword=="threadstart":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("--+000" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--+000" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="threadstop":
instcnt += 1
outn.write("--+00+" + instdat + "\n")
elif instword=="threadkill":
instcnt += 1
outn.write("--+0+-" + instdat + "\n")
else:
gtflag=0
if gtflag==1 and (txtblk==0 or linenraw=="textstart"):
complog("pass 2: srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " inst:" + instword + " instdat:" + instdat + "\n")
elif gtflag==1 and txtblk==1:
complog("TEXTBLOCK: pass 2 : srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " textline: \"" + linenraw + "\"\n")
if instcnt>assmoverrun:
#print("ERROR!: assembler has exceded rom size limit of 19683!")
complog("ERROR!: assembler has exceded rom size limit of 19683! \n")
sys.exit("ERROR!: assembler has exceded rom size limit of 19683!")
if txtblk==1:
print "WARNING: unclosed Text block!"
complog("WARNING: unclosed Text block!\n")
if instcnt==0:
#print "ERROR: No instructions found. nothing to compile."
complog("ERROR: No instructions found. nothing to compile. /n")
sys.exit("ERROR: No instructions found. nothing to compile.")
if autostpflg==0 and instcnt<19683:
print "NOTICE: no explicit goto or stop instruction at end of program. SBTCVM-asm will add a stop automatically."
complog("NOTICE: no explicit goto or stop instruction at end of program. SBTCVM-asm will add a stop automatically.\n")
outn.write("--000-" + "000000000" + "\n")
instcnt += 1
instpad=instcnt
while instpad!=19683 and instcnt<19684:
outn.write("000000" + "000000000" + "\n")
instpad += 1
outn.close()
instextra=(instpad - instcnt)
print ("SBTCVM Mk 2 assembly file \"" + assmflename + "\" has been compiled into: \"" + outfile + "\"")
complog("SBTCVM Mk 2 assembly file \"" + assmflename + "\" has been compiled into: \"" + outfile + "\"\n")
if tracecomp==1:
print "tracelog enabled. log file: \"" + (os.path.join('CAP', logsub)) + "\""
print ("total instructions: " + str(instcnt))
complog("total instructions: " + str(instcnt) + "\n")
print ("extra space: " + str(instextra))
complog ("extra space: " + str(instextra) + "\n")
else:
print "tip: use SBTCVM-asm2.py -h for help."<|fim▁end|>
|
# instdat=("000" + instdat)
if instword=="textstop":
txtblk=0
complog("TEXTBLOCK END\n")
|
<|file_name|>gtk_wrapper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 24 16:25:41 2016
@author: pavel
"""
from gi.repository import Gtk
import parameter_types as ptypes
from logger import Logger
logger = Logger.get_logger()
#
import gobject
gobject.threads_init()
#decorator is used to update gtk objects from another thread
def idle_add_decorator(func):
    """Wrap ``func`` so every call is scheduled on the GTK main loop.

    GTK widgets may only be touched from the main thread; routing the call
    through ``gobject.idle_add`` makes the wrapped function safe to invoke
    from worker threads.
    """
    def scheduled(*args):
        gobject.idle_add(func, *args)
    return scheduled
class GTK_Wrapper(object):
    """Abstract base class for GTK views of parameter objects."""

    def get_gui_object(self):
        """Return the top-level Gtk widget representing the wrapped object."""
        raise NotImplementedError()

    @staticmethod
    def get_wrapper(obj):
        """Instantiate the wrapper class registered for ``type(obj)``.

        Unknown types fall back to a read-only display.
        """
        wrapper_cls = TYPE_MATCHES.get(type(obj), GTK_ReadOnlyWrapper)
        return wrapper_cls(obj)
class GTK_ReadOnlyWrapper(GTK_Wrapper):
    """Fallback wrapper: renders any object as a non-editable ``repr`` label."""

    def __init__(self, obj):
        text = repr(obj)
        self.label = Gtk.Label()
        self.label.set_text(text)

    def get_gui_object(self):
        return self.label
class GTK_ParamWrapper(GTK_Wrapper):
    """Base wrapper for editable parameters.

    Lays out a horizontal box containing the parameter's name label followed
    by an editor widget. Subclasses provide the editor by overriding
    ``_set_gui_obj`` (assigning ``self.param_gui_obj``) and react to edits by
    overriding ``_on_update``.
    """

    def __init__(self, parameter):
        self.parameter = parameter
        self.container = Gtk.Box(spacing=2)
        self.container.set_homogeneous(False)
        self._set_name(parameter.get_name())
        # Subclasses replace None with a concrete editor widget.
        self.param_gui_obj = None
        self._set_gui_obj()
        self._append_gui_obj()

    def get_gui_object(self):
        return self.container

    def _set_name(self, name):
        # Name label is packed first, so it appears left of the editor.
        label = Gtk.Label()
        label.set_text(name)
        self.container.pack_start(label, True, True, 0)

    def _append_gui_obj(self):
        # Only pack an editor if the subclass actually created one.
        if self.param_gui_obj is not None:
            self.container.pack_start(self.param_gui_obj, True, True, 0)
            self.param_gui_obj.set_hexpand(True)

    def _set_gui_obj(self):
        # Override: build the editor widget and register an update callback.
        self.param_gui_obj = None

    def _on_update(self, widget, parameter_obj):
        # Override: propagate a GUI edit back into the parameter object.
        logger.to_log(widget, parameter_obj)
class GTK_ParamCheckBtn(GTK_ParamWrapper):
    """Renders a boolean parameter as a toggleable check button."""

    def _set_gui_obj(self):
        btn = Gtk.CheckButton()
        btn.set_active(self.parameter.get_val())
        btn.connect("toggled", self._on_update, self.parameter)
        self.param_gui_obj = btn

    def _on_update(self, widget, param):
        # Push the new toggle state straight into the parameter.
        param.set_val(widget.get_active())
class GTK_ParamTextBox(GTK_ParamWrapper):
    """Renders a free-form parameter as a single-line text entry."""

    def _set_gui_obj(self):
        entry = Gtk.Entry()
        entry.set_text(str(self.parameter.get_val()))
        entry.connect("changed", self._on_update, self.parameter)
        self.param_gui_obj = entry

    def _on_update(self, widget, param):
        new_val = widget.get_text()
        if not param.set_val(new_val):
            # The parameter rejected the input: roll the entry back to the
            # last accepted value.
            widget.set_text(str(param.get_val()))
        logger.to_log(new_val, widget)
class GTK_ParamList(GTK_ParamWrapper):
    """Renders a parameter with a fixed set of allowed values as a combo box."""

    def _set_gui_obj(self):
        current = self.parameter.get_val()
        store = Gtk.ListStore(str)
        active_ind = 0
        # Fill the model, remembering the row of the currently selected value.
        for idx, val in enumerate(self.parameter.allowed_vals()):
            store.append([str(val)])
            if val == current:
                active_ind = idx
        combo = Gtk.ComboBox.new_with_model_and_entry(store)
        combo.set_entry_text_column(0)
        combo.set_active(active_ind)
        combo.connect("changed", self._on_update, self.parameter)
        self.param_gui_obj = combo

    def _on_update(self, combobox, param):
        model = combobox.get_model()
        active = combobox.get_active()
        # get_active() returns -1 when nothing is selected.
        if active is not None and active >= 0:
            new_val = model[active][0]
            param.set_val(new_val)
            logger.to_log(new_val, combobox)
class GTK_ParamSlider(GTK_ParamWrapper):
    """Renders a range-bounded parameter as a horizontal slider."""
    from math import log10
    # The slider range is divided into this many key/page increments.
    NUM_STEPS = 100
    ORIENTATION = Gtk.Orientation.HORIZONTAL

    def _set_gui_obj(self):
        # Gtk.Adjustment(initial value, min, max,
        #                step increment (cursor keys),
        #                page increment (clicks around the handle), page size)
        init_val = self.parameter.get_val()
        min_val, max_val = self.parameter.get_range()
        step = float(max_val - min_val) / GTK_ParamSlider.NUM_STEPS
        adj = Gtk.Adjustment(init_val, min_val, max_val, step, step, 0)
        self.param_gui_obj = Gtk.Scale(orientation=GTK_ParamSlider.ORIENTATION,
                                       adjustment=adj)
        self.param_gui_obj.connect("value-changed", self._on_update, self.parameter)
        self.param_gui_obj.set_digits(self._num_digits(step))

    def _on_update(self, widget, param):
        new_val = widget.get_value()
        param.set_val(new_val)
        logger.to_log(new_val, widget)

    def _num_digits(self, step):
        """Return the number of decimal places to display, based on ``step``.

        Guards against ``step`` being integer-valued: the fractional
        remainder is then 0 and ``log10(0)`` would raise a math domain error.
        """
        remainder = abs(step - round(step))
        if remainder == 0:
            # Whole-number step: one decimal place is enough.
            return 1
        return max(1, int(-GTK_ParamSlider.log10(remainder)))
# Registry mapping each parameter type to the wrapper class that renders it;
# consulted by GTK_Wrapper.get_wrapper().
TYPE_MATCHES = {
    ptypes.Parameter: GTK_ParamTextBox,
    ptypes.BoolParameter: GTK_ParamCheckBtn,
    ptypes.ListParameter: GTK_ParamList,
    ptypes.RangeParameter: GTK_ParamSlider,
    ptypes.RangeParameterFit: GTK_ParamSlider,
}
| |
<|file_name|>test_blas.rs<|end_file_name|><|fim▁begin|>/// OpenBLAS testing
#[macro_use]
extern crate rustsci;
use rustsci::array;
use rustsci::matrix;
use rustsci::openblas;
////////////////////////
// BLAS Level 1 Tests //
////////////////////////
/// BLAS ddot must agree with the crate's generic `*` dot product (f64).
#[test]
fn test_openblas_ddot() {
    let x = arr![1f64, 2f64, 3f64];
    let y = arr![4f64, 5f64, 6f64];
    let via_blas = openblas::openblas_ddot(&x, &y);
    let via_operator = x * y; // generic dot product via operator overload
    assert_eq!(via_operator, via_blas);
}
/// BLAS sdot must agree with the crate's generic `*` dot product (f32).
#[test]
fn test_openblas_sdot() {
    let x = arr![1f32, 2f32, 3f32];
    let y = arr![4f32, 5f32, 6f32];
    let via_blas = openblas::openblas_sdot(&x, &y);
    let via_operator = x * y; // generic dot product via operator overload
    assert_eq!(via_operator, via_blas);
}
/// sasum returns the sum of absolute element values (f32).
#[test]
fn test_openblas_sasum() {
    let v = arr![1f32, 2f32, 3f32];
    let expected = 1f32 + 2f32 + 3f32;
    assert_eq!(expected, openblas::openblas_sasum(&v));
}
/// dasum returns the sum of absolute element values (f64).
#[test]
fn test_openblas_dasum() {
    let v = arr![1f64, 2f64, 3f64];
    let expected = 1f64 + 2f64 + 3f64;
    assert_eq!(expected, openblas::openblas_dasum(&v));
}
// daxpy computes y := alpha*x + y in place; with y = 0 the result is
// alpha*x = 5 * [1, 2, 3].
#[test]
fn test_openblas_daxpy()
{
    let a1 = arr![1f64, 2f64, 3f64];
    let mut a2 = arr![0f64, 0f64, 0f64];
    let alpha = 5f64;
    openblas::openblas_daxpy(&a1, alpha, &mut a2);
    let result_arr = arr![5f64, 10f64, 15f64];
    assert_eq!(result_arr, a2);
}

// saxpy with a non-zero y: 5 * [1, 2, 3] + [1, 2, 3] = [6, 12, 18].
#[test]
fn test_openblas_saxpy()
{
    let a1 = arr![1f32, 2f32, 3f32];
    let mut a2 = arr![1f32, 2f32, 3f32];
    let alpha = 5f32;
    openblas::openblas_saxpy(&a1, alpha, &mut a2);
    let result_arr = arr![6f32, 12f32, 18f32];
    assert_eq!(result_arr, a2);
}

// snrm2: Euclidean norm; sqrt(64 + 16 + 1 + 0) = 9 exactly.
#[test]
fn test_openblas_snrm2()
{
    let arr = arr![8f32, 4f32, 1f32, 0f32];
    let norm : f32 = openblas::openblas_snrm2(&arr);
    assert_eq!(norm, 9f32);
}

// dnrm2: f64 variant of the norm test above.
#[test]
fn test_openblas_dnrm2()
{
    let arr = arr![8f64, 4f64, 1f64, 0f64];
    let norm : f64 = openblas::openblas_dnrm2(&arr);
    assert_eq!(norm, 9f64);
}
////////////////////////
// BLAS Level 2 Tests //
////////////////////////
// Sanity check for the matrix constructor used by the Level-2 tests:
// new_filled(0, 3, 3) must equal an explicitly written 3x3 zero matrix.
#[test]
fn test_filled_new()
{
    let m1 = matrix::Matrix::<f32>::new_filled(0f32, 3, 3);
    let m2 = mat![[0f32, 0f32, 0f32],
                  [0f32, 0f32, 0f32],
                  [0f32, 0f32, 0f32]];
    assert_eq!(m1, m2);
}

// sgemv computes y := alpha*A*x + beta*y; with y = 0 and alpha = beta = 1
// the expected result is just A*x = [255, 192, -29].
#[test]
fn test_openblas_sgemv()
{
    let mat_a = mat![[25f32, 15f32, -5f32],
                     [15f32, 18f32, 0f32],
                     [-5f32, 0f32, 11f32]];
    let arr_x = arr![8f32, 4f32, 1f32];
    let arr_y = array::Array::<f32>::new_filled(0f32, 3, array::Order::Row);
    let alpha = 1f32;
    let beta = 1f32;
    let ymat = openblas::openblas_sgemv(&mat_a, &arr_x, &arr_y, alpha, beta);
    let resultm = arr![255f32, 192f32, -29f32];
    assert_eq!(ymat, resultm);
}
////////////////////////
// BLAS Level 3 Tests //
////////////////////////
#[test]
fn test_openblas_sgemm()
{
let mat_a = mat![[25f32, 15f32, -5f32],
[15f32, 18f32, 0f32],
[-5f32, 0f32, 11f32]];
let mat_b = mat![[7f32, 15f32, -12f32],
[1f32, -14f32, 1f32],
[-5f32, 1f32, -11f32]];
let mat_c = mat![[1f32, 0f32, 0f32],
[0f32, 1f32, 0f32],
[0f32, 0f32, 1f32]];
let alpha = 1f32;
let beta = 1f32;
let c_mat = openblas::openblas_sgemm(&mat_a, &mat_b, &mat_c, alpha, beta);
let result_mat = mat![[216f32, 160f32, -230f32],<|fim▁hole|>
assert_eq!(c_mat, result_mat);
}<|fim▁end|>
|
[123f32, -26f32, -162f32],
[-90f32, -64f32, -60f32]];
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {grpc, Code, Metadata} from "grpc-web-client";
import {DummyService} from "../_proto/dummy/dummy_service_pb_service";
import {CreateGameRequest, CreateGameResponse} from "../_proto/dummy/dummy_service_pb";
<|fim▁hole|>
// Fires a unary CreateGame RPC at the configured host and logs the reply.
function createGame() {
  const createGameRequest = new CreateGameRequest();
  grpc.unary(DummyService.CreateGame, {
    request: createGameRequest,
    host: host,
    onEnd: r => {
      const {status, message} = r;
      // NOTE(review): `wtf` only extracts the id and its result is
      // discarded below — looks like leftover debugging; confirm & remove.
      var wtf = (m: CreateGameResponse) => { return m.getId(); }
      if (status === Code.OK && message) {
        var response = message as CreateGameResponse;
        console.log("getBook.onEnd.message", message.toObject());
        wtf(response);
      }
    }
  });
}
createGame();<|fim▁end|>
|
declare const USE_TLS: boolean;
const host = USE_TLS ? "https://web.ae.28k.ch:9091" : "http://web.ae.28k.ch:9090";
|
<|file_name|>provisioner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Intel Corp.
#
"""
Interface for all resource control plugins.
"""
from abc import ABCMeta, abstractmethod
from ..plugin import DeclareFramework
@DeclareFramework('provisioner')
class Provisioner(object, metaclass=ABCMeta):
    # Keys under which provisioner-related fields are stored on a device
    # record (the set_* methods below document saving them to the DataStore).
    PROVISIONER_KEY = "provisioner"
    PROVISIONER_IMAGE_KEY = "image"
    PROVISIONER_BOOTSTRAP_KEY = "provisioner_bootstrap"
    PROVISIONER_FILE_KEY = "provisioner_files"
    PROVISIONER_KARGS_KEY = "provisioner_kernel_args"
    # Sentinel value meaning "field not set".
    PROVISIONER_UNSET_KEY = "UNDEF"
    @abstractmethod
    def add(self, device):
        """
        Attempts to add a device to the provisioner. Does nothing if the device is already added.

        :param device: device record to register with the provisioner
        :return: Updated device with the new fields applied
        """
        pass

    @abstractmethod
    def delete(self, device):
        """
        Attempts to remove a device from the provisioner. Does nothing if the device isn't already there.

        :param device: device previously registered via add()
        :return: Updated device with the correct fields removed
        """
        pass
    @abstractmethod
    def set_ip_address(self, device, ip_address, interface="eth0"):
        """
        Mutate the device to include this ip_address.
        Save it to the DataStore
        And set it in the provisioner

        :param device: device record to mutate
        :param ip_address: IP address to assign
        :param interface: network interface the address applies to
        :return: Updated device with the new fields applied
        """
        pass

    @abstractmethod
    def set_hardware_address(self, device, hardware_address, interface="eth0"):
        """
        Same as Provisioner.set_ip_address

        :param device: device record to mutate
        :param hardware_address: MAC (hardware) address to assign
        :param interface: network interface the address applies to
        :return: Updated device with the new fields applied
        """
        pass
    @abstractmethod
    def set_image(self, device, image):
        """
        Set an image (already known by the provisioner) to a given device.

        :param device: device record to mutate
        :param image: name of an image already known to the provisioner
        :return: Updated device with the new fields applied
        :raise: ProvisionException, the image specified is not known to the provisioner
        """
        pass
    @abstractmethod
    def set_bootstrap(self, device, bootstrap):
        """
        :param device: device record to mutate
        :param bootstrap: name of a bootstrap known to the provisioner
        :return: Updated device with the new fields applied
        :raise: ProvisionException, the bootstrap specified is not known to the provisioner
        """
        pass

    @abstractmethod
    def set_files(self, device, files):
        """
        :param device: device record to mutate
        :param files: file name(s) known to the provisioner
        :return: Updated device with the new fields applied
        :raise: ProvisionException, the file(s) specified is not known to the provisioner
        """
        pass

    @abstractmethod
    def set_kernel_args(self, device, args):
        """
        :param device: device record to mutate
        :param args: kernel argument string to associate with the device
        :return: Updated device with the new fields applied
        """
        pass
@abstractmethod<|fim▁hole|> def list(self):
"""
List all devices that the provisioner knows about.
does this come the DataStore or Warewulf?
:return: return the list of device names
"""
pass
@abstractmethod
def list_images(self):
"""
List all the images this provisioner knows about.
:return: list of known images (names only)
"""
pass
class ProvisionerException(Exception):
"""
A staple Exception thrown by the Provisioner
"""
def __init__(self, msg, command_output=None):
super(ProvisionerException, self).__init__()
self.msg = msg
if command_output is not None:
self.cmd_stdout = command_output.stdout
self.cmd_stderr = command_output.stderr
self.cmd_return_code = command_output.return_code
def __str__(self):
return repr(self.msg)<|fim▁end|>
| |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""
Switches on Zigbee Home Automation networks.
For more details on this platform, please refer to the documentation
at https://home-assistant.io/components/switch.zha/
"""
import logging<|fim▁hole|>from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core.const import (
DATA_ZHA, DATA_ZHA_DISPATCHERS, ZHA_DISCOVERY_NEW, ON_OFF_CHANNEL,
SIGNAL_ATTR_UPDATED
)
from .entity import ZhaEntity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['zha']
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Old way of setting up Zigbee Home Automation switches."""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation switch from config entry."""
async def async_discover(discovery_info):
await _async_setup_entities(hass, config_entry, async_add_entities,
[discovery_info])
unsub = async_dispatcher_connect(
hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
switches = hass.data.get(DATA_ZHA, {}).get(DOMAIN)
if switches is not None:
await _async_setup_entities(hass, config_entry, async_add_entities,
switches.values())
del hass.data[DATA_ZHA][DOMAIN]
async def _async_setup_entities(hass, config_entry, async_add_entities,
discovery_infos):
"""Set up the ZHA switches."""
entities = []
for discovery_info in discovery_infos:
entities.append(Switch(**discovery_info))
async_add_entities(entities, update_before_add=True)
class Switch(ZhaEntity, SwitchDevice):
"""ZHA switch."""
_domain = DOMAIN
def __init__(self, **kwargs):
"""Initialize the ZHA switch."""
super().__init__(**kwargs)
self._on_off_channel = self.cluster_channels.get(ON_OFF_CHANNEL)
@property
def is_on(self) -> bool:
"""Return if the switch is on based on the statemachine."""
if self._state is None:
return False
return self._state
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
success = await self._on_off_channel.on()
if not success:
return
self._state = True
self.async_schedule_update_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
success = await self._on_off_channel.off()
if not success:
return
self._state = False
self.async_schedule_update_ha_state()
def async_set_state(self, state):
"""Handle state update from channel."""
self._state = bool(state)
self.async_schedule_update_ha_state()
@property
def device_state_attributes(self):
"""Return state attributes."""
return self.state_attributes
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
await self.async_accept_signal(
self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._state = last_state.state == STATE_ON
async def async_update(self):
"""Attempt to retrieve on off state from the switch."""
await super().async_update()
if self._on_off_channel:
self._state = await self._on_off_channel.get_attribute_value(
'on_off')<|fim▁end|>
|
from homeassistant.components.switch import DOMAIN, SwitchDevice
from homeassistant.const import STATE_ON
from homeassistant.core import callback
|
<|file_name|>missing.py<|end_file_name|><|fim▁begin|>class MethodMissing(object):
    def __getattr__(self, name):
        # Called only after normal attribute lookup has already failed.
        # NOTE(review): __getattribute__(name) therefore raises
        # AttributeError again here, so the except branch always runs and
        # every unknown attribute becomes a method_missing dispatcher.
        try:
            return self.__getattribute__(name)
        except AttributeError:
            # Return a callable that forwards name and arguments to
            # method_missing, Ruby-style.
            def method(*args, **kw):
                return self.method_missing(name, *args, **kw)
            return method
def method_missing(self, name, *args, **kw):<|fim▁hole|>
class ValMissing(object):
    def __getattr__(self, name):
        # Fallback for missing attributes: delegate to val_missing(name)
        # instead of raising immediately, so subclasses can synthesize
        # attribute values.
        try:
            return self.__getattribute__(name)
        except AttributeError:
            return self.val_missing(name)
def val_missing(self, name):
raise AttributeError("%r object has no attribute %r" %
(self.__class__, name))<|fim▁end|>
|
raise AttributeError("%r object has no attribute %r" %
(self.__class__, name))
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from . import advancetools<|fim▁hole|>from flask import render_template,redirect,Response,url_for
@advancetools.route('/')
def index():
return redirect(url_for('auto_decode_base'))
@advancetools.route('/auto_decode_base')
def auto_decode_base():
return render_template('advancetool/auto_decode_base.html')<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>mod system;
mod math;
mod renderer;
mod canvas;
use renderer::context::*;
use renderer::mesh;
use renderer::shader::{Program, Shader, ShaderType};
use renderer::texture;
use math::mat4::*;
use math::transform;
use math::vec3::*;
use canvas::Canvas;
extern crate rand;
extern crate time;
extern crate gl;
extern crate specs;
use specs::Join;
use gl::types::*;
use std::mem;
use std::ptr;
use std::ffi::CString;
use rand::Rng;
// Vertex data for a single triangle.
// Positions (x, y, z); the 100-300 range suggests 2D screen coordinates
// (the program uses proj_matrix_2d) — z = 0.11 for all three vertices.
static VERTEX_DATA: [GLfloat; 9] = [
    200.0, 100.5, 0.11,
    100.5, 200.5, 0.11,
    300.5, 200.5, 0.11
];

// Texture coordinates (u, v), one pair per vertex above.
static VERTEX_TEX_DATA: [GLfloat; 6] = [
    0.5, 0.0,
    0.0, 1.0,
    1.0, 1.0
];

// Index buffer: the one triangle.
static INDEX_DATA: [u32; 3] = [
    0, 1, 2
];
// 2D position component (x, y), stored densely per entity.
#[derive(Clone, Debug)]
struct CompPos(f32,f32);
impl specs::Component for CompPos{
    type Storage = specs::VecStorage<CompPos>;
}

// 2D velocity component (dx, dy).
#[derive(Clone, Debug)]
struct CompVel(f32,f32);
impl specs::Component for CompVel{
    type Storage = specs::VecStorage<CompVel>;
}

// Renderable component wrapping a renderer mesh.
struct CompMesh(mesh::Mesh);
impl specs::Component for CompMesh{
    type Storage = specs::VecStorage<CompMesh>;
}
fn main() {
let mut VERTEX_COL_DATA: [GLfloat; 12] = [
1.0, 1.0, 1.0, 1.0,
1.0, 0.0, 1.0, 1.0,
0.0, 1.0, 1.0, 1.0
];
let mut ctx = Context::new("data/config.json");
let t = texture::Texture::from_image("data/rust.png");
let mut program = Program::new();<|fim▁hole|> let vs = Shader::new(ShaderType::VERTEX,"data/shaders/test.vs".to_string());
let fs = Shader::new(ShaderType::FRAGMENT,"data/shaders/test.frag".to_string());
program.attach(&vs);
program.attach(&fs);
program.link();
program.register_uniform("ProjMatrix");
program.register_uniform("ModelMatrix");
program.register_uniform("diffuseTexture");
let mut m0 = mesh::Mesh::new(&VERTEX_DATA, &INDEX_DATA, Some(&VERTEX_TEX_DATA), Some(&VERTEX_COL_DATA));
program.set_uniform_matrix4fv("ProjMatrix", &ctx.proj_matrix_2d);
program.set_uniform_matrix4fv("ModelMatrix", &Mat4::identity());
program.set_uniform_1i("diffuseTexture", 0);
let mut canvas_program = Program::new();
let canvas_vs = Shader::new(ShaderType::VERTEX, "data/shaders/canvas.vs".to_string());
let canvas_fs = Shader::new(ShaderType::FRAGMENT, "data/shaders/canvas.frag".to_string());
canvas_program.attach(&canvas_vs);
canvas_program.attach(&canvas_fs);
canvas_program.link();
canvas_program.register_uniform("ProjMatrix");
canvas_program.register_uniform("ModelMatrix");
canvas_program.register_uniform("backColor");
canvas_program.register_uniform("diffuseTexture");
canvas_program.register_uniform("canvasPosition");
canvas_program.register_uniform("canvasSize");
canvas_program.set_uniform_matrix4fv("ProjMatrix", &ctx.proj_matrix_2d);
canvas_program.set_uniform_1i("diffuseTexture", 0);
let mut canvas1 = Canvas::new((400, 200), (200, 100), &canvas_program);
let mut rng = rand::thread_rng();
let mut start_time = time::now();
let mut accum = 0.0;
while ctx.is_running() {
ctx.start_frame();
let frame_time = time::now();
let elapsed_duration = frame_time - start_time;
start_time = frame_time;
let mut elapsed = elapsed_duration.num_seconds() as f64;
elapsed += (elapsed_duration.num_milliseconds() as f64) / 1_000.0;
accum += elapsed;
if accum >= 1.0 {
accum -= 1.0;
// try modifying tri color
for i in 0..12 {
if i % 4 != 0 {
VERTEX_COL_DATA[i] = rng.gen::<f32>();
}
}
m0.update_buffer(mesh::MeshAttrib::Color, &VERTEX_COL_DATA);
}
//now we update the systems
program.bind();
t.bind();
m0.render();
canvas_program.bind();
canvas1.update(elapsed);
canvas1.render();
ctx.end_frame();
}
}<|fim▁end|>
| |
<|file_name|>try-macro.rs<|end_file_name|><|fim▁begin|>// Test that `try!` macros are rewritten.
// run-rustfix
// compile-pass<|fim▁hole|>#![allow(unused_variables)]
#![allow(dead_code)]
fn foo() -> Result<usize, ()> {
let x: Result<usize, ()> = Ok(22);
try!(x);
Ok(44)
}
fn main() { }<|fim▁end|>
|
#![warn(rust_2018_compatibility)]
|
<|file_name|>echem_paperplots.py<|end_file_name|><|fim▁begin|>import time, copy
import os, os.path
import sys
import numpy
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from scipy import optimize
from echem_plate_ui import *
from echem_plate_math import *
import pickle
p1='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1066.dat'
p2='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1662.dat'
pill='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9FeCoNiTi_500C_CAill_plate1_dlist_1164.dat'
os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots')
vshift=-.24
imult=1.e6<|fim▁hole|>
# Load the two CV measurements (d1, d2) and the illuminated-CA measurement
# (dill) pickled by the analysis pipeline.
# NOTE(review): files are opened in text mode ('r'); this script uses
# Python 2 print statements, so text-mode (protocol 0) pickles are assumed
# — use 'rb' if the files were written with a binary protocol.
f=open(p1, mode='r')
d1=pickle.load(f)
f.close()
f=open(p2, mode='r')
d2=pickle.load(f)
f.close()
f=open(pill, mode='r')
dill=pickle.load(f)
f.close()
segd1up, segd1dn=d1['segprops_dlist']
i1up=d1['I(A)'][segd1up['inds']][4:]
lin1up=i1up-d1['I(A)_LinSub'][segd1up['inds']][4:]
v1up=d1['Ewe(V)'][segd1up['inds']][4:]+vshift
i1dn=d1['I(A)'][segd1dn['inds']]
v1dn=d1['Ewe(V)'][segd1dn['inds']]+vshift
i1up*=imult
i1dn*=imult
lin1up*=imult
segd2up, segd2dn=d2['segprops_dlist']
i2up=d2['I(A)'][segd2up['inds']][4:]
lin2up=i2up-d2['I(A)_LinSub'][segd2up['inds']][4:]
v2up=d2['Ewe(V)'][segd2up['inds']][4:]+vshift
i2dn=d2['I(A)'][segd2dn['inds']]
v2dn=d2['Ewe(V)'][segd2dn['inds']]+vshift
i2up*=imult
i2dn*=imult
lin2up*=imult
ica=dill['I(A)_SG'][cai0:cai1]*imult
icadiff=dill['Idiff_time'][cai0:cai1]*imult
tca=dill['t(s)'][cai0:cai1]
tca_cycs=dill['till_cycs']
cycinds=numpy.where((tca_cycs>=tca.min())&(tca_cycs<=tca.max()))[0]
tca_cycs=tca_cycs[cycinds]
iphoto_cycs=dill['Photocurrent_cycs(A)'][cycinds]*imult
pylab.rc('font', family='serif', serif='Times New Roman', size=11)
fig=pylab.figure(figsize=(3.5, 4.5))
#ax1=pylab.subplot(211)
#ax2=pylab.subplot(212)
ax1=fig.add_axes((.2, .6, .74, .35))
ax2=fig.add_axes((.2, .11, .6, .35))
ax3=ax2.twinx()
ax1.plot(v1up, i1up, 'g-', linewidth=1.)
ax1.plot(v1up, lin1up, 'g:', linewidth=1.)
ax1.plot(v1dn, i1dn, 'g--', linewidth=1.)
ax1.plot(v2up, i2up, 'b-', linewidth=1.)
ax1.plot(v2up, lin2up, 'b:', linewidth=1.)
ax1.plot(v2dn, i2dn, 'b--', linewidth=1.)
ax1.set_xlim((-.1, .62))
ax1.set_ylim((-40, 130))
ax1.set_xlabel('Potential (V vs H$_2$O/O$_2$)', fontsize=12)
ax1.set_ylabel('Current ($\mu$A)', fontsize=12)
ax2.plot(tca, ica, 'k-')
ax2.plot(tca, icadiff, 'b--', linewidth=2)
ax2.set_xlim((0, 6.5))
ax2.set_ylim((0, 0.4))
ax3.plot(tca_cycs, iphoto_cycs, 'ro-')
ax3.set_ylim((0, 0.1))
ax2.set_xlabel('Elapsed time (s)', fontsize=12)
ax2.set_ylabel('Current ($\mu$A)', fontsize=12)
ax3.set_ylabel('Photocurrent ($\mu$A)', fontsize=12)
pylab.show()
print ''.join(['%s%.3f' %tup for tup in zip(dill['elements'], dill['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d1['elements'], d1['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d2['elements'], d2['compositions'])])<|fim▁end|>
|
cai0, cai1=(0, 6500)
|
<|file_name|>convertito_2012_test.py<|end_file_name|><|fim▁begin|># The Hazard Library
# Copyright (C) 2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openquake.hazardlib.gsim.convertito_2012 import (<|fim▁hole|>from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
class ConvertitoEtAl2012TestCase(BaseGSIMTestCase):
GSIM_CLASS = ConvertitoEtAl2012Geysers
def test_mean(self):
self.check('CONV2012/CONV_2012_MEAN.csv',
max_discrep_percentage=0.1)
def test_std_total(self):
self.check('CONV2012/CONV_2012_STDDEV.csv',
max_discrep_percentage=0.1)<|fim▁end|>
|
ConvertitoEtAl2012Geysers
)
|
<|file_name|>CombDiacritMarks.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:7191d4054034f882d0333e067f484abf20e9d176d5c0297c8824f5e0cafc8e12<|fim▁hole|><|fim▁end|>
|
size 2389
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
var msb = require('msb');
var app = exports;
app.config = require('./lib/config');
// Boots the http2bus service: optionally starts channel monitoring, loads
// the configured routes into a router wrapper, then begins listening on the
// configured port. `cb` (optional) is invoked once the server is listening.
app.start = function(cb) {
  if (app.config.channelMonitorEnabled) msb.channelMonitorAgent.start();

  var RouterWrapper = require('./lib/routerWrapper').RouterWrapper;
  app.router = new RouterWrapper();
  app.router.load(app.config.routes);

  app.createServer()
  .listen(app.config.port)
  .once('listening', function() {
    // Record the actual bound port (relevant when config.port is 0).
    app.config.port = this.address().port;
    if (cb) { cb(); }
    console.log('http2bus listening on ' + app.config.port);
  });
};
app.createServer = function() {
var http = require('http');
var finalhandler = require('finalhandler');
return http.createServer(function(req, res) {
app.router.middleware(req, res, finalhandler(req, res));
});<|fim▁hole|>};
app.routesAgent = require('./lib/routesProvider/agent');<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
<|fim▁hole|> name = "nip",
version = "0.1a1",
py_modules = [
"nip",
],
scripts = [
"bin/nip",
],
author = "Brian Rosner",
author_email = "[email protected]",
description = "nip is environment isolation and installation for Node.js",
long_description = open("README.rst").read(),
license = "MIT",
classifiers = [
"Development Status :: 2 - Pre-Alpha",
],
)<|fim▁end|>
|
setup(
|
<|file_name|>global_settings.py<|end_file_name|><|fim▁begin|>"""
Global settings file.
Everything in here is imported *before* everything in settings.py.
This means that this file is used for default, fixed and global varibles, and
then settings.py is used to overwrite anything here as well as adding settings
particular to the install.
Note that there are no tuples here, as they are immutable. Please use lists, so
that in settings.py we can do list.append()
"""
import os
from os.path import exists, join
# This shouldn't be needed, however in some cases the buildout version of
# django (in bin/django) may not make the paths correctly
import sys
sys.path.append('web')
# Django settings for scraperwiki project.
DEBUG = True
TIME_ZONE = 'Europe/London'
LANGUAGE_CODE = 'en_GB'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
HOME_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # the parent directory of SCRAPERWIKI_DIR
SCRAPERWIKI_DIR = HOME_DIR + '/web/'
MEDIA_DIR = SCRAPERWIKI_DIR + 'media'
MEDIA_URL = 'http://media.scraperwiki.com/'
MEDIA_ADMIN_DIR = SCRAPERWIKI_DIR + '/media-admin'
LOGIN_URL = '/login/'
HOME_DIR = ""
# MySQL default overdue scraper query
OVERDUE_SQL = "(DATE_ADD(last_run, INTERVAL run_interval SECOND) < NOW() or last_run is null)"
OVERDUE_SQL_PARAMS = []
# URL that handles the media served from MEDIA_ROOT. Make sure to use a trailing slash.
URL_ROOT = ""
MEDIA_ROOT = URL_ROOT + 'media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a trailing slash.
ADMIN_MEDIA_PREFIX = URL_ROOT + '/media-admin/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'x*#sb54li2y_+b-ibgyl!lnd^*#=bzv7bj_ypr2jvon9mwii@z'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
MIDDLEWARE_CLASSES = [
'middleware.exception_logging.ExceptionLoggingMiddleware',
'middleware.improved_gzip.ImprovedGZipMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django_notify.middleware.NotificationsMiddleware',
'pagination.middleware.PaginationMiddleware',
'middleware.csrfcookie.CsrfAlwaysSetCookieMiddleware',
'api.middleware.CORSMiddleware'
]
AUTHENTICATION_BACKENDS = [
'frontend.email_auth.EmailOrUsernameModelBackend',
'django.contrib.auth.backends.ModelBackend'
]
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = [
join(SCRAPERWIKI_DIR, 'templates'),
]
TEMPLATE_CONTEXT_PROCESSORS = [
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
'django_notify.context_processors.notifications',
'frontend.context_processors.site',
'frontend.context_processors.template_settings',
'frontend.context_processors.vault_info',
# 'frontend.context_processors.site_messages', # disabled as not used since design revamp April 2011
]
SCRAPERWIKI_APPS = [
# the following are scraperwiki apps
'frontend',
'codewiki',
'api',
'cropper',
'kpi',
'documentation',
#'devserver',
]
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.comments',
'django.contrib.markup',
'registration',
'south',
'profiles',
'django.contrib.humanize',
'django.contrib.messages',
'django_notify',
'tagging',
'contact_form',
'captcha',
<|fim▁hole|>
TEST_RUNNER = 'scraperwiki_tests.run_tests'
ACCOUNT_ACTIVATION_DAYS = 3650 # If you haven't activated in 10 years then tough luck!
# tell Django that the frontent user_profile model is to be attached to the
# user model in the admin side.
AUTH_PROFILE_MODULE = 'frontend.UserProfile'
INTERNAL_IPS = ['127.0.0.1',]
NOTIFICATIONS_STORAGE = 'session.SessionStorage'
REGISTRATION_BACKEND = "frontend.backends.UserWithNameBackend"
#tagging
FORCE_LOWERCASE_TAGS = True
# define default directories needed for paths to run scrapers
SCRAPER_LIBS_DIR = join(HOME_DIR, "scraperlibs")
#send broken link emails
SEND_BROKEN_LINK_EMAILS = DEBUG == False
#pagingation
SCRAPERS_PER_PAGE = 50
#API
MAX_API_ITEMS = 500
DEFAULT_API_ITEMS = 100
# Make "view on site" work for user models
# https://docs.djangoproject.com/en/dev/ref/settings/?#absolute-url-overrides
ABSOLUTE_URL_OVERRIDES = {
'auth.user': lambda o: o.get_profile().get_absolute_url()
}
# Required for the template_settings context processor. Each variable listed
# here will be made available in all templates that are passed the
# RequestContext. Be careful of listing database and other private settings
# here.
TEMPLATE_SETTINGS = [
'API_URL',
'ORBITED_URL',
'MAX_DATA_POINTS',
'MAX_MAP_POINTS',
'REVISION',
'VIEW_URL',
'CODEMIRROR_URL'
]
# Source-control revision of the deployed code (exposed via
# TEMPLATE_SETTINGS above); read once at import time. A missing or
# unreadable revision.txt simply means no revision string.
try:
    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           'revision.txt')) as _revision_file:
        # rstrip('\n') instead of the previous [:-1] so a file without a
        # trailing newline does not lose its last character; the `with`
        # block also closes the file handle that was previously leaked.
        REVISION = _revision_file.read().rstrip('\n')
except Exception:
    # `except Exception` instead of a bare except so SystemExit and
    # KeyboardInterrupt are not swallowed during import.
    REVISION = ""
MAX_DATA_POINTS = 500
BLOG_FEED = 'http://blog.scraperwiki.com/feed/atom'
DATA_TABLE_ROWS = 10
RSS_ITEMS = 50
VIEW_SCREENSHOT_SIZES = {'small': (110, 73), 'medium': (220, 145), 'large': (800, 600)}
SCRAPER_SCREENSHOT_SIZES = {'small': (110, 73), 'medium': (220, 145) }
CODEMIRROR_VERSION = "0.94"
CODEMIRROR_URL = "CodeMirror-%s/" % CODEMIRROR_VERSION
APPROXLENOUTPUTLIMIT = 3000
CONFIGFILE = "/var/www/scraperwiki/uml/uml.cfg"
HTTPPROXYURL = "http://localhost:9005"
DISPATCHERURL = "http://localhost:9000"
PAGINATION_DEFAULT_PAGINATION=20
# tell south to do migrations when doing tests
SOUTH_TESTS_MIGRATE = True
# To be overridden in actual settings files
SESSION_COOKIE_SECURE = False
# Enable logging of errors to text file, taken from:
# http://stackoverflow.com/questions/238081/how-do-you-log-server-errors-on-django-sites
import logging
from middleware import exception_logging
logging.custom_handlers = exception_logging
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'simple': {
'format' : '%(asctime)s %(name)s %(filename)s:%(lineno)s %(levelname)s: %(message)s'
}
},
'handlers': {
# Include the default Django email handler for errors
# This is what you'd get without configuring logging at all.
'mail_admins': {
'class': 'django.utils.log.AdminEmailHandler',
'level': 'ERROR',
# But the emails are plain text by default - HTML is nicer
'include_html': True,
},
# Log to a text file that can be rotated by logrotate
'logfile': {
'class': 'logging.custom_handlers.WorldWriteRotatingFileHandler',
'filename': '/var/log/scraperwiki/django-www.log',
'mode': 'a',
'maxBytes': 100000,
'backupCount': 5,
'formatter': 'simple'
},
},
'loggers': {
# Again, default Django configuration to email unhandled exceptions
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
# Might as well log any errors anywhere else in Django
# (so use empty string for name here to catch anything)
'': {
'handlers': ['logfile'],
'level': DEBUG and 'DEBUG' or 'ERROR',
'propagate': False,
},
# Your own app - this assumes all your logger names start with "myapp."
#'myapp': {
# 'handlers': ['logfile'],
# 'level': 'WARNING', # Or maybe INFO or DEBUG
# 'propagate': False
#},
},
}
# Javascript templating
INSTALLED_APPS += ['icanhaz']
ICANHAZ_DIRS = [SCRAPERWIKI_DIR + 'templates/codewiki/js/']<|fim▁end|>
|
'pagination',
'compressor',
] + SCRAPERWIKI_APPS
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># (c) Copyright 2017-2019 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import abort
from flask import Blueprint
from flask import jsonify
from flask import request
from keystoneauth1 import exceptions as exc
from keystoneauth1 import session as ks_session
from keystoneclient.auth.identity import v3
from keystoneclient.v3 import client as ks_client
import logging
import os
from oslo_config import cfg
import pbr.version
import pwd
import threading
import time
from .util import ping
from . import config
from . import policy
bp = Blueprint('admin', __name__)
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
USER_AGENT = 'Installer UI'
@bp.route("/api/v2/version")
def version():
"""Returns the version of the service
.. :quickref: Admin; Returns the version of the service
**Example valid response**:
<|fim▁hole|>
HTTP/1.1 200 OK
0.0.1.dev16
"""
version_info = pbr.version.VersionInfo('ardana-service')
return version_info.version_string_with_vcs()
@bp.route("/api/v2/heartbeat")
def heartbeat():
"""Returns the epoch time
Simple API to verify that the service is up and responding. Returns
the number of seconds since 1970-01-01 00:00:00 GMT.
.. :quickref: Admin; Returns the epoch time
**Example valid response**:
.. sourcecode:: http
HTTP/1.1 200 OK
1502745650
"""
return jsonify(int(time.time()))
@bp.route("/api/v2/user")
@policy.enforce('lifecycle:get_user')
def user():
"""Returns the username the service is running under
.. :quickref: Admin; Returns the username the service is running under
**Example valid response**:
.. sourcecode:: http
HTTP/1.1 200 OK
{"username": "myusername"}
"""
user_dict = {'username': pwd.getpwuid(os.getuid()).pw_name}
return jsonify(user_dict)
def update_trigger_file():
    # Overwrite the trigger file with a timestamped line. NOTE(review):
    # presumably something external watches this file to perform the
    # restart requested via /api/v2/restart — confirm the watcher.
    trigger_file = os.path.join(CONF.paths.log_dir, 'trigger.txt')
    with open(trigger_file, 'w') as f:
        f.write("Triggered restart at %s\n" % time.asctime())
@bp.route("/api/v2/restart", methods=['POST'])
@policy.enforce('lifecycle:restart')
def restart():
"""Requests the service to restart after a specified delay, in seconds
.. :quickref: Admin; Requests a service restart after a delay
**Example Request**:
.. sourcecode:: http
POST /api/v2/user HTTP/1.1
Content-Type: application/json
{
"delay": 60
}
"""
info = request.get_json() or {}
delay_secs = int(info.get('delay', 0))
t = threading.Timer(delay_secs, update_trigger_file)
t.start()
return jsonify('Success')
@bp.route("/api/v2/login", methods=['POST'])
def login():
"""Authenticates with keystone and returns a token
.. :quickref: Admin; Authenticates with keystone
**Example Request**:
.. sourcecode:: http
POST /api/v2/login HTTP/1.1
Content-Type: application/json
{
"username": "admin",
"password": "secret"
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"token": "gAAAAABbEaruZDQGIH5KmKWHlDZIw7CLq",
"expires": "2018-06-01T21:22:06+00:00"
}
:status 200: successful authentication
:status 401: invalid credentials
:status 403: authentication not permitted, or user not authorized for any
projects
"""
if not config.requires_auth():
abort(403,
"authentication not permitted since service is in insecure mode")
info = request.get_json() or {}
username = info.get('username')
password = info.get('password')
user_domain_name = info.get('user_domain_name', 'Default')
token = _authenticate(CONF.keystone_authtoken.auth_url,
username,
password,
user_domain_name)
return jsonify(token)
def _authenticate(auth_url, username=None, password=None,
                  user_domain_name='Default'):
    """Authenticate with keystone

    Creates an unscoped token using the given credentials (which validates
    them), and then uses that token to get a project-scoped token.

    :param auth_url: keystone endpoint to authenticate against
    :param username: user name to validate
    :param password: password to validate
    :param user_domain_name: keystone domain the user belongs to
    :returns: dict with the scoped ``token`` and its ``expires`` timestamp
    """
    unscoped_auth = v3.Password(auth_url,
                                username=username,
                                password=password,
                                user_domain_name=user_domain_name,
                                unscoped=True)
    session = ks_session.Session(user_agent=USER_AGENT,
                                 verify=not CONF.keystone_authtoken.insecure)
    # Handlers are ordered most-specific first; reordering them would change
    # which HTTP status a given keystoneauth failure maps to.
    try:
        # Trigger keystone to verify the credentials
        unscoped_auth_ref = unscoped_auth.get_access(session)
    except exc.connection.ConnectFailure as e:
        # Keystone unreachable: surface as service unavailable.
        abort(503, str(e))
    except exc.http.HttpError as e:
        # Propagate keystone's own HTTP status (e.g. 401 on bad password).
        abort(e.http_status, e.message)
    except exc.ClientException as e:
        abort(401, str(e))
    except Exception as e:
        LOG.exception(e)
        abort(500, "Unable to authenticate")
    client = ks_client.Client(session=session,
                              auth=unscoped_auth,
                              user_agent=USER_AGENT)
    auth_url = unscoped_auth.auth_url
    projects = client.projects.list(user=unscoped_auth_ref.user_id)
    # Filter out disabled projects
    projects = [project for project in projects if project.enabled]
    # Prioritize the admin project by putting it at the beginning of the list
    for pos, project in enumerate(projects):
        if project.name == 'admin':
            projects.pop(pos)
            projects.insert(0, project)
            break
    # Return the first project token that we have the admin role on, otherwise
    # return the first project token we have any role on.
    fallback_auth_ref = None
    for project in projects:
        auth = v3.Token(auth_url=auth_url,
                        token=unscoped_auth_ref.auth_token,
                        project_id=project.id,
                        reauthenticate=False)
        try:
            auth_ref = auth.get_access(session)
            if 'admin' in auth_ref.role_names:
                return {'token': auth_ref.auth_token,
                        'expires': auth_ref.expires.isoformat()}
            elif not fallback_auth_ref:
                fallback_auth_ref = auth_ref
        except Exception as e:
            # Best effort: silently skip projects we cannot scope a token to.
            pass
    if fallback_auth_ref:
        return {'token': fallback_auth_ref.auth_token,
                'expires': fallback_auth_ref.expires.isoformat()}
    # TODO(gary): Consider as a secondary fallback to return a domain-scoped
    # token
    abort(403, "Not authorized for any project")
@bp.route("/api/v2/is_secured")
def get_secured():
    """Report whether the service enforces authentication

    Returns a json object indicating whether the service is configured to
    require authenticated requests.

    .. :quickref: Model; Returns whether authentication is required

    **Example Response**:

    .. sourcecode:: http

       HTTP/1.1 200 OK
       Content-Type: application/json

       {
           "isSecured": false
       }

    :status 200: success
    """
    secured = config.requires_auth()
    return jsonify({'isSecured': secured})
@bp.route("/api/v2/connection_test", methods=['POST'])
def connection_test():
    """Tests TCP connectivity to a host on port 22

    .. :quickref: Admin; Tests connectivity to a host on port 22

    :<json string host: name or address of the host to probe
    :status 200: host is reachable
    :status 400: no host supplied in the request body
    :status 404: host could not be reached
    """
    body = request.get_json() or {}
    # Validate explicitly: previously a missing 'host' key raised KeyError
    # and surfaced as an unhelpful 500.
    host = body.get('host')
    if not host:
        abort(400, "'host' is required")
    try:
        ping(host, 22)
        return jsonify('Success')
    except Exception as e:
        # Best-effort probe: report any failure as "not found".
        return jsonify(error=str(e)), 404
|
.. sourcecode:: http
|
<|file_name|>storm_cluster_test.py<|end_file_name|><|fim▁begin|>import os
import pytest
from pyleus.cli.storm_cluster import _get_storm_cmd_env<|fim▁hole|>from pyleus.cli.storm_cluster import STORM_JAR_JVM_OPTS
from pyleus.cli.storm_cluster import StormCluster
from pyleus.cli.storm_cluster import TOPOLOGY_BUILDER_CLASS
from pyleus.testing import mock
class TestGetStormCmdEnd(object):
    """Tests for the _get_storm_cmd_env helper."""

    @pytest.fixture(autouse=True)
    def mock_os_environ(self, monkeypatch):
        # Start each test from an empty environment so values from the real
        # os.environ cannot leak into the assertions.
        monkeypatch.setattr(os, 'environ', {})

    def test_jvm_opts_unset(self):
        # No JVM options -> no custom environment is returned.
        assert _get_storm_cmd_env(None) is None

    def test_jvm_opts_set(self):
        jvm_opts = "-Dfoo=bar"
        env = _get_storm_cmd_env(jvm_opts)
        # The options must be exported under Storm's STORM_JAR_JVM_OPTS key.
        assert env[STORM_JAR_JVM_OPTS] == jvm_opts
class TestStormCluster(object):
    """Tests for StormCluster command-line construction."""

    @pytest.fixture
    def cluster(self):
        # Sentinels stand in for every constructor argument; these tests only
        # check how the values are threaded into the storm command line.
        return StormCluster(
            mock.sentinel.storm_cmd_path,
            mock.sentinel.nimbus_host,
            mock.sentinel.nimbus_port,
            mock.sentinel.verbose,
            mock.sentinel.jvm_opts,
        )

    def test__build_storm_cmd_no_port(self, cluster):
        cluster.nimbus_host = "test-host"
        cluster.nimbus_port = None
        storm_cmd = cluster._build_storm_cmd(["a", "cmd"])
        # Without a port only the nimbus.host override is appended.
        assert storm_cmd == [mock.sentinel.storm_cmd_path, "a", "cmd", "-c",
                             "nimbus.host=test-host"]

    def test__build_storm_cmd_with_port(self, cluster):
        cluster.nimbus_host = "test-host"
        cluster.nimbus_port = 4321
        storm_cmd = cluster._build_storm_cmd(["another", "cmd"])
        # An explicit port adds a nimbus.thrift.port override as well.
        assert storm_cmd == [mock.sentinel.storm_cmd_path, "another", "cmd", "-c",
                             "nimbus.host=test-host", "-c",
                             "nimbus.thrift.port=4321"]
def test_submit(self, cluster):
with mock.patch.object(cluster, '_exec_storm_cmd', autospec=True) as mock_exec:
cluster.submit(mock.sentinel.jar_path)
mock_exec.assert_called_once_with(["jar", mock.sentinel.jar_path, TOPOLOGY_BUILDER_CLASS])<|fim▁end|>
| |
<|file_name|>bug2175.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012-2014 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
<|fim▁hole|>const {
assertSame
} = Assert;
// 22.2.1.2 %TypedArray%: Different [[Prototype]] for copied ArrayBuffer depending on element types
// https://bugs.ecmascript.org/show_bug.cgi?id=2175

// An ArrayBuffer subclass lets us observe which constructor/prototype is
// used when a typed-array copy allocates its backing buffer.
class MyArrayBuffer extends ArrayBuffer {}

let source = new Int8Array(new MyArrayBuffer(10));

// Same element type: the copy's buffer must keep the subclass.
let copySameType = new Int8Array(source);
assertSame(copySameType.buffer.constructor, MyArrayBuffer);
assertSame(Object.getPrototypeOf(copySameType.buffer), MyArrayBuffer.prototype);

// Different element type: the subclass must be preserved here as well.
let copyDifferentType = new Uint8Array(source);
assertSame(copyDifferentType.buffer.constructor, MyArrayBuffer);
assertSame(Object.getPrototypeOf(copyDifferentType.buffer), MyArrayBuffer.prototype);
| |
<|file_name|>SchemaUpdater.java<|end_file_name|><|fim▁begin|>package com.bagri.server.hazelcast.task.schema;
import static com.bagri.core.Constants.pn_schema_password;
import static com.bagri.server.hazelcast.serialize.TaskSerializationFactory.cli_UpdateSchemaTask;
import static com.bagri.support.security.Encryptor.encrypt;
import java.io.IOException;
import java.util.Properties;
import java.util.Map.Entry;
import com.bagri.core.system.Schema;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
public class SchemaUpdater extends SchemaProcessor implements IdentifiedDataSerializable {
    // When true, replace the schema's whole property set; otherwise merge
    // the given entries into the existing properties.
    private boolean override;
    // Property values to apply; the schema password is encrypted on use.
    private Properties properties;

    public SchemaUpdater() {
        // no-arg constructor for serialization (IdentifiedDataSerializable)
    }

    public SchemaUpdater(int version, String admin, boolean override, Properties properties) {
        super(version, admin);
        this.override = override;
        this.properties = properties;
    }
@Override
public Object process(Entry<String, Schema> entry) {
logger.debug("process.enter; entry: {}", entry);
if (entry.getValue() != null) {
Schema schema = entry.getValue();
if (schema.getVersion() == getVersion()) {
//if (schema.isActive()) {
// if (denitSchemaInCluster(schema) > 0) {
// don't go further
<|fim▁hole|> // return null;
// }
//}
if (override) {
String pwd = properties.getProperty(pn_schema_password);
if (pwd != null) {
properties.setProperty(pn_schema_password, encrypt(pwd));
}
schema.setProperties(properties);
} else {
for (String name: properties.stringPropertyNames()) {
String value = properties.getProperty(name);
if (pn_schema_password.equals(name)) {
value = encrypt(value);
}
schema.setProperty(name, value);
}
}
//if (schema.isActive()) {
// if (initSchemaInCluster(schema) == 0) {
// schema.setActive(false);
// }
//}
schema.updateVersion(getAdmin());
entry.setValue(schema);
auditEntity(AuditType.update, schema);
return schema;
}
}
return null;
}
    @Override
    public int getId() {
        // Type id used by the serialization factory to recreate this task.
        return cli_UpdateSchemaTask;
    }
    @Override
    public void readData(ObjectDataInput in) throws IOException {
        // Field order must mirror writeData() exactly.
        super.readData(in);
        override = in.readBoolean();
        properties = in.readObject();
    }
    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        // Field order must mirror readData() exactly.
        super.writeData(out);
        out.writeBoolean(override);
        out.writeObject(properties);
    }
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.filters
~~~~~~~~~~~~~~~~
Module containing filter lookup functions and default
filters.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
get_choice_opt, ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters
def find_filter_class(filtername):
    """
    Resolve a filter name to its class, or return None if unknown.

    Built-in filters take precedence over plugin-provided ones.
    """
    try:
        return FILTERS[filtername]
    except KeyError:
        pass
    for plugin_name, plugin_cls in find_plugin_filters():
        if plugin_name == filtername:
            return plugin_cls
    return None
def get_filter_by_name(filtername, **options):
    """
    Instantiate the filter named `filtername`.

    Keyword options are forwarded to the filter initializer.  A
    ClassNotFound error is raised for unknown filter names.
    """
    cls = find_filter_class(filtername)
    if cls is None:
        raise ClassNotFound('filter %r not found' % filtername)
    return cls(**options)
def get_all_filters():
    """
    Yield the names of all known filters, built-in ones first,
    then plugin-provided ones.
    """
    for builtin_name in FILTERS:
        yield builtin_name
    for plugin_name, _ in find_plugin_filters():
        yield plugin_name
def _replace_special(ttype, value, regex, specialttype,
replacefunc=lambda x: x):
last = 0
for match in regex.finditer(value):
start, end = match.start(), match.end()
if start != last:
yield ttype, value[last:start]
yield specialttype, replacefunc(value[start:end])
last = end
if last != len(value):
yield ttype, value[last:]
class CodeTagFilter(Filter):
"""
Highlight special code tags in comments and docstrings.
Options accepted:
`codetags` : list of strings
A list of strings that are flagged as code tags. The default is to
highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags',
['XXX', 'TODO', 'BUG', 'NOTE'])
self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
re.escape(tag) for tag in tags if tag
]))
def filter(self, lexer, stream):
regex = self.tag_re
for ttype, value in stream:
if ttype in String.Doc or \
ttype in Comment and \
ttype not in Comment.Preproc:
for sttype, svalue in _replace_special(ttype, value, regex,
Comment.Special):<|fim▁hole|> yield sttype, svalue
else:
yield ttype, value
class KeywordCaseFilter(Filter):
    """
    Convert keywords to lowercase or uppercase or capitalize them, which
    means first letter uppercase, rest lowercase.

    Useful e.g. to adapt highlighted Pascal code to a styleguide.

    Options accepted:

    `case` : string
        One of ``'lower'``, ``'upper'`` or ``'capitalize'``
        (the default is ``'lower'``).
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        case = get_choice_opt(options, 'case',
                              ['lower', 'upper', 'capitalize'], 'lower')
        # Bind the chosen unicode string method once, up front.
        self.convert = getattr(unicode, case)

    def filter(self, lexer, stream):
        convert = self.convert
        for ttype, value in stream:
            if ttype in Keyword:
                value = convert(value)
            yield ttype, value
class NameHighlightFilter(Filter):
    """
    Highlight a normal Name token with a different token type.

    Example::

        filter = NameHighlightFilter(
            names=['foo', 'bar', 'baz'],
            tokentype=Name.Function,
        )

    This would highlight the names "foo", "bar" and "baz"
    as functions. `Name.Function` is the default token type.

    Options accepted:

    `names` : list of strings
        Names that should be given the different token type.
        There is no default.
    `tokentype` : TokenType or string
        Token type (or a string naming one) used for the strings
        in `names`; defaults to `Name.Function`.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.names = set(get_list_opt(options, 'names', []))
        tokentype = options.get('tokentype')
        # Accept either a token type object or its dotted name.
        self.tokentype = (string_to_tokentype(tokentype)
                          if tokentype else Name.Function)

    def filter(self, lexer, stream):
        names = self.names
        highlight = self.tokentype
        for ttype, value in stream:
            if ttype is Name and value in names:
                yield highlight, value
            else:
                yield ttype, value
class ErrorToken(Exception):
    """Default exception raised by RaiseOnErrorTokenFilter."""
    pass
class RaiseOnErrorTokenFilter(Filter):
    """
    Raise an exception when the lexer generates an error token.

    Options accepted:

    `excclass` : Exception class
        The exception class to raise.
        The default is `pygments.filters.ErrorToken`.

    *New in Pygments 0.8.*
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.exception = options.get('excclass', ErrorToken)
        try:
            # issubclass() will raise TypeError if first argument is not a class
            if not issubclass(self.exception, Exception):
                raise TypeError
        except TypeError:
            # Either not a class at all, or not an Exception subclass.
            raise OptionError('excclass option is not an exception class')

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Error:
                # Abort lexing on the first error token.
                raise self.exception(value)
            yield ttype, value
class VisibleWhitespaceFilter(Filter):
    """
    Convert tabs, newlines and/or spaces to visible characters.

    Options accepted:

    `spaces` : string or bool
        If this is a one-character string, spaces will be replaced by this string.
        If it is another true value, spaces will be replaced by ``·`` (unicode
        MIDDLE DOT). If it is a false value, spaces will not be replaced. The
        default is ``False``.
    `tabs` : string or bool
        The same as for `spaces`, but the default replacement character is ``»``
        (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
        is ``False``. Note: this will not work if the `tabsize` option for the
        lexer is nonzero, as tabs will already have been expanded then.
    `tabsize` : int
        If tabs are to be replaced by this filter (see the `tabs` option), this
        is the total number of characters that a tab should be expanded to.
        The default is ``8``.
    `newlines` : string or bool
        The same as for `spaces`, but the default replacement character is ``¶``
        (unicode PILCROW SIGN). The default value is ``False``.
    `wstokentype` : bool
        If true, give whitespace the special `Whitespace` token type. This allows
        styling the visible whitespace differently (e.g. greyed out), but it can
        disrupt background colors. The default is ``True``.

    *New in Pygments 0.8.*
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        # Each of spaces/tabs/newlines may be a literal one-char replacement,
        # any other truthy value (-> use the default glyph), or falsy (-> off,
        # stored as the empty string).
        for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
            opt = options.get(name, False)
            if isinstance(opt, basestring) and len(opt) == 1:
                setattr(self, name, opt)
            else:
                setattr(self, name, (opt and default or ''))
        tabsize = get_int_opt(options, 'tabsize', 8)
        if self.tabs:
            # Pad so a replaced tab still occupies `tabsize` columns.
            self.tabs += ' '*(tabsize-1)
        if self.newlines:
            # Keep a real newline after the marker so lines still break.
            self.newlines += '\n'
        self.wstt = get_bool_opt(options, 'wstokentype', True)

    def filter(self, lexer, stream):
        if self.wstt:
            # Re-tag whitespace as Whitespace tokens, substituting the
            # configured glyph (or the original character when disabled).
            spaces = self.spaces or ' '
            tabs = self.tabs or '\t'
            newlines = self.newlines or '\n'
            regex = re.compile(r'\s')
            def replacefunc(wschar):
                if wschar == ' ':
                    return spaces
                elif wschar == '\t':
                    return tabs
                elif wschar == '\n':
                    return newlines
                return wschar

            for ttype, value in stream:
                for sttype, svalue in _replace_special(ttype, value, regex,
                                                       Whitespace, replacefunc):
                    yield sttype, svalue
        else:
            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
            # simpler processing
            for ttype, value in stream:
                if spaces:
                    value = value.replace(' ', spaces)
                if tabs:
                    value = value.replace('\t', tabs)
                if newlines:
                    value = value.replace('\n', newlines)
                yield ttype, value
class GobbleFilter(Filter):
    """
    Gobbles source code lines (eats initial characters).

    This filter drops the first ``n`` characters off every line of code. This
    may be useful when the source code fed to the lexer is indented by a fixed
    amount of space that isn't desired in the output.

    Options accepted:

    `n` : int
        The number of characters to gobble.

    *New in Pygments 1.2.*
    """
    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.n = get_int_opt(options, 'n', 0)

    def gobble(self, value, left):
        # Drop up to `left` chars from `value`; return the remainder and how
        # many chars are still owed (a token may cover only part of a line).
        if left < len(value):
            return value[left:], 0
        else:
            return '', left - len(value)

    def filter(self, lexer, stream):
        n = self.n
        left = n # How many characters left to gobble.
        for ttype, value in stream:
            # Remove ``left`` tokens from first line, ``n`` from all others.
            parts = value.split('\n')
            (parts[0], left) = self.gobble(parts[0], left)
            for i in range(1, len(parts)):
                (parts[i], left) = self.gobble(parts[i], n)
            value = '\n'.join(parts)
            if value != '':
                yield ttype, value
class TokenMergeFilter(Filter):
    """
    Merge runs of consecutive tokens that share the same token type into a
    single token in the output stream of a lexer.

    *New in Pygments 1.2.*
    """
    def __init__(self, **options):
        Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        pending_type = None
        pending_value = None
        for ttype, value in stream:
            if ttype is pending_type:
                # Same type as the run in progress: keep accumulating.
                pending_value += value
                continue
            if pending_type is not None:
                yield pending_type, pending_value
            pending_type = ttype
            pending_value = value
        # Flush the final run, if any.
        if pending_type is not None:
            yield pending_type, pending_value
FILTERS = {
'codetagify': CodeTagFilter,
'keywordcase': KeywordCaseFilter,
'highlight': NameHighlightFilter,
'raiseonerror': RaiseOnErrorTokenFilter,
'whitespace': VisibleWhitespaceFilter,
'gobble': GobbleFilter,
'tokenmerge': TokenMergeFilter,
}<|fim▁end|>
| |
<|file_name|>tail_boom_flex.py<|end_file_name|><|fim▁begin|>" tail boom flexibility "
from numpy import pi
from gpkit import Model, parse_variables, SignomialsEnabled
class TailBoomFlexibility(Model):
""" Tail Boom Flexibility Model
Variables
---------
Fne [-] tail boom flexibility factor
deda [-] wing downwash derivative
SMcorr 0.55 [-] corrected static margin<|fim▁hole|> -------------
Fne F_{\mathrm{NE}}
deda d\\epsilon/d\\alpha
SMcorr SM_{\\mathrm{corr}}
"""
    @parse_variables(__doc__, globals())
    def setup(self, htail, hbending, wing):
        # NOTE(review): Fne, deda, SMcorr, sph1 and sph2 are injected into
        # this scope by parse_variables from the class docstring above.
        mh = htail.mh
        mw = wing.mw
        Vh = htail.Vh
        th = hbending.th
        CLhmin = htail.CLhmin
        CLwmax = wing.planform.CLmax
        Sw = wing.planform.S
        bw = wing.planform.b
        lh = htail.lh
        CM = wing.planform.CM

        constraints = [
            Fne >= 1 + mh*th,
            sph1*(mw*Fne/mh/Vh) + deda <= 1,
            sph2 <= Vh*CLhmin/CLwmax,
            # (sph1 + sph2).mono_lower_bound({"sph1": .48, "sph2": .52}) >= (
            #  SMcorr + wing["C_M"]/wing["C_{L_{max}}"]),
            deda >= mw*Sw/bw/4/pi/lh]

        # This difference-of-posynomials (signomial) constraint is only
        # valid inside the SignomialsEnabled context.
        with SignomialsEnabled():
            constraints.extend([sph1 + sph2 >= SMcorr + CM/CLwmax])

        return constraints
|
sph1 [-] flexibility helper variable 1
sph2 [-] flexibility helper variable 2
LaTex Strings
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate num;
use num::{BigInt, Signed};
// Dumbest iterative approach
/// Raises `base` to the power `exp` by repeated multiplication.
///
/// # Panics
/// Panics if `exp` is negative.
fn big_pow(base: &BigInt, exp: BigInt) -> BigInt {
    if exp.is_negative() {
        panic!("Negative exponent won't compute!")
    }
    // x^0 == 1. The multiply loop below starts from one copy of `base`,
    // so without this guard an exponent of zero wrongly returned `base`.
    if exp == BigInt::from(0) {
        return BigInt::from(1);
    }
    let mut acc = base.clone();
    for _ in num::range(BigInt::from(1), exp) {
        acc *= base;
    }
    acc
}
// 5^4^3^2
fn main() {
    // The exponent 4^(3^2) fits comfortably in a machine integer,
    // so only the final power needs BigInt arithmetic.
    let exponent = BigInt::from(num::pow(4, num::pow(3, 2)));
    let digits = big_pow(&BigInt::from(5), exponent).to_string();
    let digit_count = digits.len();
    println!("{}", digits);
    println!("Number has {} digits.", digit_count);
    // Spot-check the leading and trailing digits of the known answer.
    assert!(digits.starts_with("62060698786608744707"));
    assert!(digits.ends_with("92256259918212890625"));
}
#[cfg(test)]
mod tests {
use super::big_pow;
use num::BigInt;
#[test]
#[should_panic]
fn negative_exp_test() {
let num = BigInt::from(100);
let exp = BigInt::from(-100);
big_pow(&num, exp);
}
#[test]
fn big_powas() {<|fim▁hole|> assert_eq!(
big_pow(&BigInt::from(100), BigInt::from(100)).to_string(),
"10000000000000000000000000000000000000000000000000000000000000\
000000000000000000000000000000000000000000000000000000000000000\
000000000000000000000000000000000000000000000000000000000000000\
0000000000000"
);
assert_eq!(
big_pow(&BigInt::from(2), BigInt::from(89)).to_string(),
"618970019642690137449562112"
);
assert_eq!(
big_pow(&BigInt::from(2), BigInt::from(107)).to_string(),
"162259276829213363391578010288128"
);
assert_eq!(
big_pow(&BigInt::from(2), BigInt::from(127)).to_string(),
"170141183460469231731687303715884105728"
);
assert_eq!(
big_pow(&BigInt::from(2), BigInt::from(521)).to_string(),
"6864797660130609714981900799081393217269435300143305409394\
46345918554318339765605212255964066145455497729631139148085\
8037121987999716643812574028291115057152"
);
}
}<|fim▁end|>
| |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>import mock
from django_images.models import Thumbnail
from taggit.models import Tag
from tastypie.exceptions import Unauthorized
from tastypie.test import ResourceTestCase
from .helpers import ImageFactory, PinFactory, UserFactory<|fim▁hole|>
__all__ = ['ImageResourceTest', 'PinResourceTest']
def filter_generator_for(size):
    """Return a callable that produces (and caches) a thumbnail of `size`."""
    def resize(obj):
        return Thumbnail.objects.get_or_create_at_size(obj.pk, size)
    return resize
def mock_requests_get(url):
    """Stand-in for requests.get() that serves the local logo.png file.

    The `url` argument is accepted for signature compatibility but ignored.
    """
    # Read via a context manager so the file handle is closed promptly;
    # the original left the handle open for the garbage collector.
    with open('logo.png', 'rb') as f:
        content = f.read()
    return mock.Mock(content=content)
class ImageResourceTest(ResourceTestCase):
    """API tests for the read-only image resource."""

    def test_post_create_unsupported(self):
        """Make sure that new images can't be created using API"""
        response = self.api_client.post('/api/v1/image/', format='json', data={})
        self.assertHttpUnauthorized(response)

    def test_list_detail(self):
        image = ImageFactory()
        # Materialize every thumbnail size up front so the serialized list
        # payload is fully predictable.
        thumbnail = filter_generator_for('thumbnail')(image)
        standard = filter_generator_for('standard')(image)
        square = filter_generator_for('square')(image)
        response = self.api_client.get('/api/v1/image/', format='json')
        self.assertDictEqual(self.deserialize(response)['objects'][0], {
            u'image': unicode(image.image.url),
            u'height': image.height,
            u'width': image.width,
            u'standard': {
                u'image': unicode(standard.image.url),
                u'width': standard.width,
                u'height': standard.height,
            },
            u'thumbnail': {
                u'image': unicode(thumbnail.image.url),
                u'width': thumbnail.width,
                u'height': thumbnail.height,
            },
            u'square': {
                u'image': unicode(square.image.url),
                u'width': square.width,
                u'height': square.height,
            },
        })
class PinResourceTest(ResourceTestCase):
    def setUp(self):
        super(PinResourceTest, self).setUp()
        # All pin endpoints below act as this authenticated user.
        self.user = UserFactory(password='password')
        self.api_client.client.login(username=self.user.username, password='password')
@mock.patch('requests.get', mock_requests_get)
def test_post_create_url(self):
url = 'http://testserver/mocked/logo.png'
post_data = {
'submitter': '/api/v1/user/{}/'.format(self.user.pk),
'url': url,
'description': 'That\'s an Apple!'
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertHttpCreated(response)
self.assertEqual(Pin.objects.count(), 1)
self.assertEqual(Image.objects.count(), 1)
# submitter is optional, current user will be used by default
post_data = {
'url': url,
'description': 'That\'s an Apple!',
'origin': None
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertHttpCreated(response)
@mock.patch('requests.get', mock_requests_get)
def test_post_create_url_with_empty_tags(self):
url = 'http://testserver/mocked/logo.png'
post_data = {
'submitter': '/api/v1/user/{}/'.format(self.user.pk),
'url': url,
'description': 'That\'s an Apple!',
'tags': []
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertHttpCreated(response)
self.assertEqual(Pin.objects.count(), 1)
self.assertEqual(Image.objects.count(), 1)
pin = Pin.objects.get(url=url)
self.assertEqual(pin.tags.count(), 0)
@mock.patch('requests.get', mock_requests_get)
def test_post_create_url_unauthorized(self):
url = 'http://testserver/mocked/logo.png'
post_data = {
'submitter': '/api/v1/user/2/',
'url': url,
'description': 'That\'s an Apple!',
'tags': []
}
with self.assertRaises(Unauthorized):
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertEqual(Pin.objects.count(), 0)
self.assertEqual(Image.objects.count(), 0)
@mock.patch('requests.get', mock_requests_get)
def test_post_create_url_with_empty_origin(self):
url = 'http://testserver/mocked/logo.png'
post_data = {
'submitter': '/api/v1/user/{}/'.format(self.user.pk),
'url': url,
'description': 'That\'s an Apple!',
'origin': None
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertHttpCreated(response)
self.assertEqual(Pin.objects.count(), 1)
self.assertEqual(Image.objects.count(), 1)
self.assertEqual(Pin.objects.get(url=url).origin, None)
@mock.patch('requests.get', mock_requests_get)
def test_post_create_url_with_origin(self):
origin = 'http://testserver/mocked/'
url = origin + 'logo.png'
post_data = {
'submitter': '/api/v1/user/{}/'.format(self.user.pk),
'url': url,
'description': 'That\'s an Apple!',
'origin': origin
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertHttpCreated(response)
self.assertEqual(Pin.objects.count(), 1)
self.assertEqual(Image.objects.count(), 1)
self.assertEqual(Pin.objects.get(url=url).origin, origin)
def test_post_create_obj(self):
image = ImageFactory()
post_data = {
'submitter': '/api/v1/user/{}/'.format(self.user.pk),
'image': '/api/v1/image/{}/'.format(image.pk),
'description': 'That\'s something else (probably a CC logo)!',
'tags': ['random', 'tags'],
}
response = self.api_client.post('/api/v1/pin/', data=post_data)
self.assertEqual(
self.deserialize(response)['description'],
'That\'s something else (probably a CC logo)!'
)
self.assertHttpCreated(response)
# A number of Image objects should stay the same as we are using an existing image
self.assertEqual(Image.objects.count(), 1)
self.assertEqual(Pin.objects.count(), 1)
self.assertEquals(Tag.objects.count(), 2)
def test_put_detail_unauthenticated(self):
self.api_client.client.logout()
uri = '/api/v1/pin/{}/'.format(PinFactory().pk)
response = self.api_client.put(uri, format='json', data={})
self.assertHttpUnauthorized(response)
def test_put_detail_unauthorized(self):
uri = '/api/v1/pin/{}/'.format(PinFactory(submitter=self.user).pk)
user = UserFactory(password='password')
self.api_client.client.login(username=user.username, password='password')
response = self.api_client.put(uri, format='json', data={})
self.assertHttpUnauthorized(response)
    def test_put_detail(self):
        # The submitter may update their own pin.
        pin = PinFactory(submitter=self.user)
        uri = '/api/v1/pin/{}/'.format(pin.pk)
        new = {'description': 'Updated description'}
        response = self.api_client.put(uri, format='json', data=new)
        self.assertHttpAccepted(response)
        # The update is persisted without creating extra pins.
        self.assertEqual(Pin.objects.count(), 1)
        self.assertEqual(Pin.objects.get(pk=pin.pk).description, new['description'])
def test_delete_detail_unauthenticated(self):
uri = '/api/v1/pin/{}/'.format(PinFactory(submitter=self.user).pk)
self.api_client.client.logout()
self.assertHttpUnauthorized(self.api_client.delete(uri))
def test_delete_detail_unauthorized(self):
uri = '/api/v1/pin/{}/'.format(PinFactory(submitter=self.user).pk)
User.objects.create_user('test', '[email protected]', 'test')
self.api_client.client.login(username='test', password='test')
self.assertHttpUnauthorized(self.api_client.delete(uri))
    def test_delete_detail(self):
        # The submitter may delete their own pin, removing it from the DB.
        uri = '/api/v1/pin/{}/'.format(PinFactory(submitter=self.user).pk)
        self.assertHttpAccepted(self.api_client.delete(uri))
        self.assertEqual(Pin.objects.count(), 0)
def test_get_list_json_ordered(self):
_, pin = PinFactory(), PinFactory()
response = self.api_client.get('/api/v1/pin/', format='json', data={'order_by': '-id'})
self.assertValidJSONResponse(response)
self.assertEqual(self.deserialize(response)['objects'][0]['id'], pin.id)
def test_get_list_json_filtered_by_tags(self):
pin = PinFactory()
response = self.api_client.get('/api/v1/pin/', format='json', data={'tag': pin.tags.all()[0]})
self.assertValidJSONResponse(response)
self.assertEqual(self.deserialize(response)['objects'][0]['id'], pin.pk)
def test_get_list_json_filtered_by_submitter(self):
pin = PinFactory(submitter=self.user)
response = self.api_client.get('/api/v1/pin/', format='json', data={'submitter__username': self.user.username})
self.assertValidJSONResponse(response)
self.assertEqual(self.deserialize(response)['objects'][0]['id'], pin.pk)
def test_get_list_json(self):
image = ImageFactory()
pin = PinFactory(**{
'submitter': self.user,
'image': image,
'url': 'http://testserver/mocked/logo.png',
'description': u'Mocked Description',
'origin': None
})
standard = filter_generator_for('standard')(image)
thumbnail = filter_generator_for('thumbnail')(image)
square = filter_generator_for('square')(image)
response = self.api_client.get('/api/v1/pin/', format='json')
self.assertValidJSONResponse(response)
self.assertDictEqual(self.deserialize(response)['objects'][0], {
u'id': pin.id,
u'submitter': {
u'username': unicode(self.user.username),
u'gravatar': unicode(self.user.gravatar)
},
u'image': {
u'image': unicode(image.image.url),
u'width': image.width,
u'height': image.height,
u'standard': {
u'image': unicode(standard.image.url),
u'width': standard.width,
u'height': standard.height,
},
u'thumbnail': {
u'image': unicode(thumbnail.image.url),
u'width': thumbnail.width,
u'height': thumbnail.height,
},
u'square': {
u'image': unicode(square.image.url),
u'width': square.width,
u'height': square.height,
},
},
u'url': pin.url,
u'origin': pin.origin,
u'description': pin.description,
u'tags': [tag.name for tag in pin.tags.all()]
})<|fim▁end|>
|
from ..models import Pin, Image
from ...users.models import User
|
<|file_name|>CtSaving_ZBuffer.cpp<|end_file_name|><|fim▁begin|>//###########################################################################
// This file is part of LImA, a Library for Image Acquisition
//
// Copyright (C) : 2009-2017
// European Synchrotron Radiation Facility
// CS40220 38043 Grenoble Cedex 9
// FRANCE
//
// Contact: [email protected]
//
// This is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.
//
// This software is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>
#include <string.h> // For memcpy
#include <stdlib.h> // For posix_memalign
#include <malloc.h> // For _aligned_malloc
using namespace lima;
// Allocate a page-aligned (4 KiB) buffer of `buffer_size` bytes.
// Throws on a zero size or on allocation failure.
void ZBuffer::_alloc(int buffer_size)
{
	DEB_MEMBER_FUNCT();
	DEB_PARAM() << DEB_VAR1(buffer_size);

	if (buffer_size == 0)
		THROW_CTL_ERROR(InvalidValue) << "Invalid NULL buffer_size";

	used_size = 0;
	// Allocation API differs per platform; both paths request 4 KiB alignment.
#ifdef __unix
	if(posix_memalign(&buffer,4*1024,buffer_size))
#else
	buffer = _aligned_malloc(buffer_size,4*1024);
	if(!buffer)
#endif
		THROW_CTL_ERROR(Error) << "Can't allocate buffer";

	// Only record the capacity once the allocation has succeeded.
	alloc_size = buffer_size;
}
// Allocate a fresh buffer of the same capacity as `o` and copy its payload.
void ZBuffer::_deep_copy(const ZBuffer& o)
{
	DEB_MEMBER_FUNCT();
	DEB_PARAM() << DEB_VAR2(o.alloc_size, o.used_size);

	if (o.used_size > o.alloc_size)
		THROW_CTL_ERROR(Error) << "Invalid " << DEB_VAR2(o.used_size, o.alloc_size);

	_alloc(o.alloc_size);
	// Copy the source's payload. Note: _alloc() resets this->used_size to 0,
	// so the byte count must come from the source object -- the previous
	// code passed this->used_size and therefore copied nothing.
	memcpy(buffer, o.buffer, o.used_size);
	used_size = o.used_size;
}
// Release the buffer with the deallocator matching _alloc()'s platform
// branch, then reset the object's state.
void ZBuffer::_free()
{
	DEB_MEMBER_FUNCT();
	DEB_PARAM() << DEB_VAR2(alloc_size, used_size);

#ifdef __unix
	free(buffer);
#else
	_aligned_free(buffer);
#endif
	// Mark the buffer invalid (presumably clears pointer/sizes -- see header).
	_setInvalid();
}
|
//###########################################################################
#include "lima/CtSaving_ZBuffer.h"
|
<|file_name|>ArrangeSwapCommand.js<|end_file_name|><|fim▁begin|>ArrangeSwapCommand = new Class({
Implements: ICommand,
beginDepths: [],
initialize: function(){},
// Move the target to the new depth.
execute: function() {
this.canvas.swapChildren(this.beginDepths[0], this.beginDepths[1]);
},
// Place the target object back in its original depth.
revert: function(target) {
this.canvas.swapChildren(this.beginDepths[1], this.beginDepths[0]);
},
setBeginDepths: function(a) {
this.beginDepths = a;
},<|fim▁hole|>
toString: function() {
return '{name: ArrangeSwap, ptA: ' + this.beginDepths[0].toString() +
', ptB: ' + this.beginDepths[1].toString() + '}';
}
});<|fim▁end|>
| |
<|file_name|>Route.tsx<|end_file_name|><|fim▁begin|>/**
* Taken from @taion as per example of how they actually use found-relay and
* have default setup for each route.
*/
import { RouteSpinner } from "Artsy/Relay/renderWithLoadProgress"
import { HttpError } from "found"
import BaseRoute from "found/Route"
import React from "react"
// How data-fetching progress is shown while route props are pending.
type FetchIndicator = "spinner" | "overlay"

interface CreateRenderProps {
  fetchIndicator?: FetchIndicator
  // Optional custom renderer; receives the router's render args.
  render?: (props) => React.ReactNode
}

interface RenderArgProps {
  Component: React.ComponentType
  // Resolved route props; undefined while data is still loading.
  props?: object
  error?: Error
}
<|fim▁hole|>}: CreateRenderProps) {
return (renderArgs: RenderArgProps) => {
const { Component, props, error } = renderArgs
if (error) {
if (error instanceof HttpError) {
throw error
}
console.error(
"[Artsy/Router/Route] Non HttpError rendering route:",
error
)
return null
}
if (render) {
return render(renderArgs)
}
if (Component === undefined) {
return undefined
}
// This should only ever show when doing client-side routing.
if (!props) {
if (fetchIndicator === "spinner") {
return <RouteSpinner />
// TODO: At some point we might want to make this a little fancier. If
// undefined is returned here, then we defer to `RenderStatus` component.
} else if (fetchIndicator === "overlay") {
/*
In attempting to avoid the use of <StaticContainer> in RenderStatus.tsx,
which freezes the component tree with `shouldComponentUpdate = false`,
we stored the previously-rendered component and props in a variable and
instead of returning undefined here, we returned <PrevComponent {...prevProps} />.
However, when the component is rendered by react, it errors out because
the data in prevProps has seemingly been garbage collected.
Relay has the ability to `retain` data in the store. We should investigate,
which would give us greater control over our component tree when top-level
route transitions occur.
See: https://graphql.slack.com/archives/C0BEXJLKG/p1561741782163900
export const setLocal = (query: GraphQLTaggedNode, localData: object) => {
const request = getRequest(query);
const operation = createOperationDescriptor(request, {});
env.commitPayload(operation, localData);
env.retain(operation.root); // <== here @en_js magic :wink:
};
*/
/**
* Its an odd requirement, but the way in which one triggers RenderStatus
* component updates is to return undefined.
*/
return undefined
// If for some reason something else is passed, fall back to the spinner
} else {
return <RouteSpinner />
}
}
return <Component {...props} />
}
}
export class Route extends BaseRoute {
constructor(props) {
if (!(props.query || props.getQuery)) {
super(props)
return
}
super({
...props,
render: createRender(props),
})
}
}<|fim▁end|>
|
function createRender({
fetchIndicator = "overlay",
render,
|
<|file_name|>google.py<|end_file_name|><|fim▁begin|># coding: utf-8
# pylint: disable=missing-docstring, invalid-name
from __future__ import absolute_import
from google.appengine.api import users
import flask
import auth
import model
import util
from main import app
<|fim▁hole|> google_url = users.create_login_url(flask.url_for('google_authorized'))
return flask.redirect(google_url)
@app.route('/_s/callback/google/authorized/')
def google_authorized():
    """OAuth callback: sign the Google user in, or bounce back to index.

    If App Engine reports no current user the request was denied; flash a
    message and redirect home. Otherwise resolve (or create) our user record
    and complete the social sign-in.
    """
    current_user = users.get_current_user()
    if current_user is not None:
        return auth.signin_via_social(retrieve_user_from_google(current_user))
    flask.flash('You denied the request to sign in.')
    return flask.redirect(flask.url_for('index'))
def retrieve_user_from_google(google_user):
    """Return the local user record for a Google account, creating it if new.

    An existing user is promoted to admin when App Engine says the current
    user is an application admin. New users are created verified, with name
    and username derived from the Google email address.
    """
    auth_id = 'federated_%s' % google_user.user_id()
    existing = model.User.get_by('auth_ids', auth_id)
    if not existing:
        email = google_user.email()
        return auth.create_or_get_user_db(
            auth_id=auth_id,
            name=util.create_name_from_email(email),
            username=email,
            email=email,
            verified=True,
            admin=users.is_current_user_admin(),
        )
    # Promote to admin lazily, persisting only when the flag actually flips.
    if not existing.admin and users.is_current_user_admin():
        existing.admin = True
        existing.put()
    return existing
|
@app.route('/signin/google/')
def signin_google():
auth.save_request_params()
|
<|file_name|>index.spec.ts<|end_file_name|><|fim▁begin|>import { Subject } from "rxjs";
import { take, toArray } from "rxjs/operators";
import { v4 as uuid } from "uuid";
import {
createMainChannelFromSockets,
createSocket,
getUsername,
JupyterConnectionInfo,
verifiedConnect,
ZMQType
} from "../src";
import { EventEmitter } from "events";
import { Socket as _Socket } from "jmp";
import * as zmq from "zeromq";
// A jmp Socket that also behaves like a zeromq socket and an EventEmitter,
// plus the `unmonitor` method used once a connection is established.
type Socket = typeof _Socket &
  zmq.Socket &
  EventEmitter & { unmonitor: Function };

import { JupyterMessage, MessageType } from "@nteract/messaging";
import { Message } from "../__mocks__/jmp";

// Map of channel name (e.g. "shell", "iopub") to its socket.
interface Sockets {
  [id: string]: Socket;
}
// Mock a jmp socket: a minimal stand-in that records outbound messages.
class HokeySocket extends _Socket {
  // NOTE(review): `send` is initialised twice — the class-field initialiser
  // runs after super(), then the constructor immediately replaces it. Only
  // the constructor's jest.fn() survives; the field exists for typing.
  send = jest.fn();
  constructor() {
    super("hokey");
    this.send = jest.fn();
  }
}
describe("createSocket", () => {
  // FIX: the original callback was both `async` and took the `done`
  // callback; Jest >= 27 rejects that combination ("Test functions cannot
  // both take a 'done' callback and return something"). The returned
  // promise alone is sufficient.
  test("creates a JMP socket on the channel with identity", async () => {
    const config = {
      signature_scheme: "hmac-sha256",
      key: "5ca1ab1e-c0da-aced-cafe-c0ffeefacade",
      ip: "127.0.0.1",
      transport: "tcp",
      iopub_port: 9009
    } as JupyterConnectionInfo;
    const identity = uuid();

    const socket = await createSocket("iopub", identity, config);
    expect(socket).not.toBeNull();
    expect(socket.identity).toBe(identity);
    expect(socket.type).toBe(ZMQType.frontend.iopub);
    socket.close();
  });
});
describe("verifiedConnect", () => {
  // FIX: both tests mixed `async` with the `done` callback, which Jest >= 27
  // rejects. The first test now simply awaits; the second is fully
  // synchronous so the callback is a plain function.
  test("verifiedConnect monitors the socket", async () => {
    const emitter = new EventEmitter();
    const socket = {
      type: "hokey",
      monitor: jest.fn(),
      unmonitor: jest.fn(),
      close: jest.fn(),
      on: jest.fn(emitter.on.bind(emitter)),
      emit: jest.fn(emitter.emit.bind(emitter)),
      connect: jest.fn(() => {})
    };
    const p = verifiedConnect(
      (socket as unknown) as _Socket,
      "tcp://127.0.0.1:8945"
    );
    expect(socket.monitor).toHaveBeenCalledTimes(1);
    expect(socket.connect).toHaveBeenCalledTimes(1);
    expect(socket.connect).toHaveBeenCalledWith("tcp://127.0.0.1:8945");
    expect(socket.unmonitor).toHaveBeenCalledTimes(0);

    // Test that we unmonitor after connected
    socket.emit("connect");
    await p;
    expect(socket.unmonitor).toHaveBeenCalledTimes(1);
  });

  test("verifiedConnect monitors the socket properly even on fast connect", () => {
    const emitter = new EventEmitter();
    const socket = {
      type: "hokey",
      monitor: jest.fn(),
      unmonitor: jest.fn(),
      close: jest.fn(),
      on: jest.fn(emitter.on.bind(emitter)),
      emit: jest.fn(emitter.emit.bind(emitter)),
      // Emits "connect" synchronously, simulating an immediate connection.
      connect: jest.fn(() => {
        emitter.emit("connect");
      })
    };
    verifiedConnect((socket as unknown) as _Socket, "tcp://127.0.0.1:8945");
    expect(socket.monitor).toHaveBeenCalledTimes(1);
    expect(socket.connect).toHaveBeenCalledTimes(1);
    expect(socket.unmonitor).toHaveBeenCalledTimes(1);
    expect(socket.connect).toHaveBeenCalledWith("tcp://127.0.0.1:8945");
  });
});
describe("getUsername", () => {
  test("relies on environment variables for username with a specific ordering", () => {
    // With all four vars cleared (see hooks below) we fall back to the
    // literal "username".
    expect(getUsername()).toEqual("username");

    // Precedence, lowest to highest: USERNAME < LNAME < USER < LOGNAME.
    process.env.USERNAME = "TEST1";
    expect(getUsername()).toEqual("TEST1");

    process.env.LNAME = "TEST2";
    expect(getUsername()).toEqual("TEST2");

    process.env.USER = "TEST3";
    expect(getUsername()).toEqual("TEST3");

    process.env.LOGNAME = "TEST4";
    expect(getUsername()).toEqual("TEST4");
  });

  test(`when no environment variables are set, use literally 'username', which
  comes from the classic jupyter notebook`, () => {
    expect(getUsername()).toEqual("username");
  });

  // Although these hooks are registered after the tests above, Jest collects
  // the whole describe block first, so they still run around every test.
  beforeEach(() => {
    delete process.env.LOGNAME;
    delete process.env.USER;
    delete process.env.LNAME;
    delete process.env.USERNAME;
  });

  afterEach(() => {
    delete process.env.LOGNAME;
    delete process.env.USER;
    delete process.env.LNAME;
    delete process.env.USERNAME;
  });
});
describe("createMainChannelFromSockets", () => {
  test("basic creation", () => {
    const sockets = {
      hokey: {} as any
    };
    // TODO: This shouldn't work silently if the socket doesn't actually behave
    //       like an actual socket
    // NOTE: RxJS doesn't error with the fromEvent until there is at least one
    //       subscriber, which also tells me we might have the wrong behavior
    //       here as it should go ahead and subscribe unconditionally...
    const channels = createMainChannelFromSockets(sockets);
    expect(channels).toBeInstanceOf(Subject);
  });

  test("simple one channel message passing from 'socket' to channels", () => {
    const hokeySocket = new HokeySocket();
    const sockets = {
      shell: hokeySocket
    };
    const channels = createMainChannelFromSockets(sockets);
    expect(channels).toBeInstanceOf(Subject);

    const messages = [{ a: 1 }, { a: 2 }, { b: 3 }];
    // Collect exactly `messages.length` emissions before asserting.
    const p = channels.pipe(take(messages.length), toArray()).toPromise();
    for (const message of messages) {
      hokeySocket.emit("message", message);
    }
    return p.then((modifiedMessages: any) => {
      // Each emission should be stamped with the channel it arrived on.
      expect(modifiedMessages).toEqual(
        messages.map(msg => ({ ...msg, channel: "shell" }))
      );
    });
  });

  test("handles multiple socket routing underneath", () => {
    const shellSocket = new HokeySocket();
    const iopubSocket = new HokeySocket();
    const sockets = {
      shell: shellSocket,
      iopub: iopubSocket
    };
    const channels = createMainChannelFromSockets(sockets);
    const p = channels.pipe(take(2), toArray()).toPromise();
    shellSocket.emit("message", { yolo: false });
    iopubSocket.emit("message", { yolo: true });
    return p.then((modifiedMessages: any) => {
      expect(modifiedMessages).toEqual([
        { channel: "shell", yolo: false },
        { channel: "iopub", yolo: true }
      ]);
    });
  });
test("propagates header information through", async done => {
const shellSocket = new HokeySocket();
const iopubSocket = new HokeySocket();
const sockets = {
shell: shellSocket,
iopub: iopubSocket
};
const channels = createMainChannelFromSockets(sockets, {
session: "spinning",
username: "dj"
});
const responses = channels.pipe(take(2), toArray()).toPromise();
channels.next({ channel: "shell" } as JupyterMessage<any>);
expect(shellSocket.send).toHaveBeenCalledWith(
new Message({
buffers: new Uint8Array(),
content: {},
header: {
session: "spinning",
username: "dj"
},
idents: [],
metadata: {},
parent_header: {}
})
);
channels.next({
channel: "shell",
content: {
applesauce: "mcgee"
},
header: {
version: "3",<|fim▁hole|> // NOTE: we'll be checking that we use the set username for the
// channels, no overrides
username: "kitty"
}
} as JupyterMessage);
expect(shellSocket.send).toHaveBeenLastCalledWith(
new Message({
buffers: new Uint8Array(),
content: {
applesauce: "mcgee"
},
header: {
msg_type: "random",
session: "spinning",
username: "dj",
msg_id: "XYZ",
date: expect.any(String),
version: "3"
},
idents: [],
metadata: {},
parent_header: {}
})
);
shellSocket.emit("message", { yolo: false });
iopubSocket.emit("message", { yolo: true });
const modifiedMessages = await responses;
expect(modifiedMessages).toEqual([
{ channel: "shell", yolo: false },
{ channel: "iopub", yolo: true }
]);
done();
});
});<|fim▁end|>
|
msg_type: "random" as MessageType,
date: new Date().toISOString(),
msg_id: "XYZ",
|
<|file_name|>pdu.py<|end_file_name|><|fim▁begin|>"""DICOM Upper Layer Protocol Data Units (PDUs).
There are seven different PDUs:
- A_ASSOCIATE_RQ
- A_ASSOCIATE_AC
- A_ASSOCIATE_RJ
- P_DATA_TF
- A_RELEASE_RQ
- A_RELEASE_RP
- A_ABORT_RQ
::
from_primitive encode
+----------------+ ------> +------------+ -----> +-------------+
| DUL Primitive | | PDU | | Peer AE |
+----------------+ <------ +------------+ <----- +-------------+
to_primitive decode
"""
import codecs
import logging
from struct import Struct
from pynetdicom.pdu_items import (
ApplicationContextItem,
PresentationContextItemRQ,
PresentationContextItemAC,
UserInformationItem,
PresentationDataValueItem,
PDU_ITEM_TYPES
)
from pynetdicom.utils import validate_ae_title
LOGGER = logging.getLogger('pynetdicom.pdu')

# Predefine some structs to make decoding and encoding faster.
# PDUs are encoded big-endian ('>'): B = unsigned char, H = 16-bit unsigned
# int, I = 32-bit unsigned int.
UCHAR = Struct('B')
UINT2 = Struct('>H')
UINT4 = Struct('>I')

# Bound pack/unpack methods cached at module level so the hot encode/decode
# paths avoid repeated attribute lookups.
UNPACK_UCHAR = UCHAR.unpack
UNPACK_UINT2 = UINT2.unpack
UNPACK_UINT4 = UINT4.unpack

PACK_UCHAR = UCHAR.pack
PACK_UINT2 = UINT2.pack
PACK_UINT4 = UINT4.pack
class PDU(object):
    """Base class for PDUs.

    Protocol Data Units (PDUs) are the message formats exchanged between peer
    entities within a layer. A PDU consists of protocol control information
    and user data. PDUs are constructed by mandatory fixed fields followed by
    optional variable fields that contain one or more items and/or sub-items.

    References
    ----------
    DICOM Standard, Part 8, :dcm:`Section 9.3 <part08/sect_9.3.html>`
    """
    def decode(self, bytestream):
        """Decode `bytestream` and use the result to set the field values of
        the PDU.

        Parameters
        ----------
        bytestream : bytes
            The PDU data to be decoded.
        """
        for (offset, length), attr_name, func, args in self._decoders:
            # Allow us to use None as a `length`; a falsey length means
            # "slice from `offset` to the end of the data".
            if length:
                sl = slice(offset, offset + length)
            else:
                sl = slice(offset, None)

            setattr(
                self, attr_name, func(bytestream[sl], *args)
            )

    @property
    def _decoders(self):
        """Return an iterable of tuples that contain field decoders."""
        raise NotImplementedError

    def encode(self):
        """Return the encoded PDU as :class:`bytes`.

        Returns
        -------
        bytes
            The encoded PDU.
        """
        # Collect the encoded fields and join once at the end; the original
        # repeatedly concatenated immutable ``bytes`` objects, copying the
        # accumulated buffer on every `+=`.
        pieces = []
        for attr_name, func, args in self._encoders:
            # If attr_name is None then the field is usually reserved
            if attr_name:
                pieces.append(func(getattr(self, attr_name), *args))
            else:
                pieces.append(func(*args))

        return b''.join(pieces)

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders."""
        raise NotImplementedError

    def __eq__(self, other):
        """Return ``True`` if `self` equals `other`."""
        if other is self:
            return True

        # pylint: disable=protected-access
        # Equality is defined over the encoder-visible fields only (the
        # attributes named in `_encoders`); reserved fields are ignored.
        if isinstance(other, self.__class__):
            self_dict = {
                enc[0]: getattr(self, enc[0])
                for enc in self._encoders if enc[0]
            }
            other_dict = {
                enc[0]: getattr(other, enc[0])
                for enc in other._encoders if enc[0]
            }
            return self_dict == other_dict

        return NotImplemented

    @staticmethod
    def _generate_items(bytestream):
        """Yield PDU item data from `bytestream`.

        Parameters
        ----------
        bytestream : bytes
            The encoded PDU variable item data.

        Yields
        ------
        int, bytes
            The variable item's 'Item Type' parameter as int, and the item's
            entire encoded data as bytes.

        Notes
        -----
        Can be used with the following PDU items/sub-items:

        - Application Context Item
        - Presentation Context Item (RQ/AC)

          - Abstract Syntax Sub-item
          - Transfer Syntax Sub-item
        - User Information Item

          - Implementation Class UID Sub-item (RQ/AC)
          - Implementation Version Name Sub-item (RQ/AC)
          - Asynchronous Operations Window Sub-item (RQ/AC)
          - SCP/SCU Role Selection Sub-item (RQ/AC)
          - SOP Class Extended Negotiation Sub-item (RQ/AC)
          - SOP Class Common Extended Negotiation Sub-item (RQ/AC)
          - User Identity Sub-item (RQ/AC)

        **Encoding**

        When encoded, PDU item and sub-item data for the above has the
        following structure, taken from various tables in (offsets shown
        with Python indexing). Items are always encoded using Big Endian.

        +--------+-------------+-------------+
        | Offset | Length      | Description |
        +========+=============+=============+
        | 0      | 1           | Item type   |
        +--------+-------------+-------------+
        | 1      | 1           | Reserved    |
        +--------+-------------+-------------+
        | 2      | 2           | Item length |
        +--------+-------------+-------------+
        | 4      | Item length | Item data   |
        +--------+-------------+-------------+

        References
        ----------
        * DICOM Standard, Part 8, :dcm:`Section 9.3 <part08/sect_9.3.html>`
        * DICOM Standard, Part 8,
          :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
        """
        offset = 0
        while bytestream[offset:offset + 1]:
            item_type = UNPACK_UCHAR(bytestream[offset:offset + 1])[0]
            item_length = UNPACK_UINT2(bytestream[offset + 2:offset + 4])[0]
            item_data = bytestream[offset:offset + 4 + item_length]
            assert len(item_data) == 4 + item_length
            yield item_type, item_data
            # Move `offset` to the start of the next item
            offset += 4 + item_length

    def __len__(self):
        """Return the total length of the encoded PDU as :class:`int`."""
        # 6 = 1 byte type + 1 byte reserved + 4 byte PDU length field
        return 6 + self.pdu_length

    def __ne__(self, other):
        """Return ``True`` if `self` does not equal `other`."""
        return not self == other

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as :class:`int`."""
        raise NotImplementedError

    @property
    def pdu_type(self):
        """Return the *PDU Type* field value as :class:`int`."""
        # PDU_TYPES maps each concrete PDU class to its type byte.
        return PDU_TYPES[self.__class__]

    @staticmethod
    def _wrap_bytes(bytestream):
        """Return `bytestream` without changing it."""
        return bytestream

    @staticmethod
    def _wrap_encode_items(items):
        """Return `items` encoded as bytes.

        Parameters
        ----------
        items : list of PDU items
            The items to encode.

        Returns
        -------
        bytes
            The encoded items.
        """
        return b''.join(item.encode() for item in items)

    @staticmethod
    def _wrap_encode_uid(uid):
        """Return `uid` as bytes encoded using ASCII.

        Each component of Application Context, Abstract Syntax and Transfer
        Syntax UIDs should be encoded as a ISO 646:1990-Basic G0 Set Numeric
        String (characters 0-9), with each component separated by '.' (0x2e)
        .

        'ascii' is chosen because this is the codec Python uses for ISO 646
        [3]_.

        Parameters
        ----------
        uid : pydicom.uid.UID
            The UID to encode using ASCII.

        Returns
        -------
        bytes
            The encoded `uid`.

        References
        ----------
        * DICOM Standard, Part 8, :dcm:`Annex F <part08/chapter_F.html>`
        * `Python 3 codecs module
          <https://docs.python.org/2/library/codecs.html#standard-encodings>`_
        """
        return codecs.encode(uid, 'ascii')

    def _wrap_generate_items(self, bytestream):
        """Return a list of encoded PDU items generated from `bytestream`."""
        item_list = []
        for item_type, item_bytes in self._generate_items(bytestream):
            item = PDU_ITEM_TYPES[item_type]()
            item.decode(item_bytes)
            item_list.append(item)

        return item_list

    @staticmethod
    def _wrap_pack(value, packer):
        """Return `value` encoded as bytes using `packer`.

        Parameters
        ----------
        value
            The value to encode.
        packer : callable
            A callable function to use to pack the data as bytes. The
            `packer` should return the packed bytes. Example:
            struct.Struct('>I').pack

        Returns
        -------
        bytes
        """
        return packer(value)

    @staticmethod
    def _wrap_unpack(bytestream, unpacker):
        """Return the first value when `unpacker` is run on `bytestream`.

        Parameters
        ----------
        bytestream : bytes
            The encoded data to unpack.
        unpacker : callable
            A callable function to use to unpack the data in `bytestream`. The
            `unpacker` should return a tuple containing unpacked values.
            Example: struct.Struct('>I').unpack.
        """
        return unpacker(bytestream)[0]
class A_ASSOCIATE_RQ(PDU):
    """An A-ASSOCIATE-RQ PDU.

    An A-ASSOCIATE-RQ PDU is sent by an association requestor to initiate
    association negotiation with an acceptor.

    Attributes
    ----------
    application_context_name : pydicom.uid.UID or None
        The Application Context Item's *Application Context Name* field value
        (if available).
    called_ae_title : bytes
        The *Called AE Title* field value, which is the destination DICOM
        application name as a fixed length 16-byte value (padded with trailing
        spaces ``0x20``). Leading and trailing spaces are non-significant and a
        value of 16 spaces is not allowed.
    calling_ae_title : bytes
        The *Calling AE Title* field value, which is the destination DICOM
        application name as a fixed length 16-byte value (padded with trailing
        spaces ``0x20``). Leading and trailing spaces are non-significant and a
        value of 16 spaces is not allowed.
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU.
    pdu_type : int
        The *PDU Type* field value (``0x01``).
    presentation_context : list of pdu_items.PresentationContextItemRQ
        The *Presentation Context Item(s)*.
    protocol_version : int
        The *Protocol Version* field value (``0x01``).
    user_information : pdu_items.UserInformationItem
        The *User Information Item* (if available).
    variable_items : list
        A list containing the A-ASSOCIATE-RQ's *Variable Items*. Contains
        one Application Context item, one or more Presentation Context items
        and one User Information item. The order of the items is not
        guaranteed.

    Notes
    -----
    An A-ASSOCIATE-RQ PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x01``)
    * PDU length (1)
    * Protocol version (1, default value, ``0x01``)
    * Called AE title (1)
    * Calling AE title (1)
    * Variable items (1)

      * Application Context Item (1)

        * Item type (1, fixed value, ``0x10``)
        * Item length (1)
        * Application Context Name (1, fixed in an application)
      * Presentation Context Item(s) (1 or more)

        * Item type (1, fixed value, ``0x21``)
        * Item length (1)
        * Context ID (1)
        * Abstract/Transfer Syntax Sub-items (1)

          * Abstract Syntax Sub-item (1)

            * Item type (1, fixed, ``0x30``)
            * Item length (1)
            * Abstract syntax name (1)
          * Transfer Syntax Sub-items (1 or more)

            * Item type (1, fixed, ``0x40``)
            * Item length (1)
            * Transfer syntax name(s) (1 or more)
      * User Information Item (1)

        * Item type (1, fixed, ``0x50``)
        * Item length (1)
        * User data Sub-items (2 or more)

          * Maximum Length Received Sub-item (1)
          * Implementation Class UID Sub-item (1)
          * Optional User Data Sub-items (0 or more)

    **Encoding**

    When encoded, an A-ASSOCIATE-RQ PDU has the following structure, taken
    from `Table 9-11<part08/sect_9.3.2.html>` (offsets shown with Python
    indexing). PDUs are always encoded using Big Endian.

    +--------+-------------+------------------+
    | Offset | Length      | Description      |
    +========+=============+==================+
    | 0      | 1           | PDU type         |
    +--------+-------------+------------------+
    | 1      | 1           | Reserved         |
    +--------+-------------+------------------+
    | 2      | 4           | PDU length       |
    +--------+-------------+------------------+
    | 6      | 2           | Protocol version |
    +--------+-------------+------------------+
    | 8      | 2           | Reserved         |
    +--------+-------------+------------------+
    | 10     | 16          | Called AE title  |
    +--------+-------------+------------------+
    | 26     | 16          | Calling AE title |
    +--------+-------------+------------------+
    | 42     | 32          | Reserved         |
    +--------+-------------+------------------+
    | 74     | Variable    | Variable items   |
    +--------+-------------+------------------+

    References
    ----------
    * DICOM Standard, Part 8, Sections :dcm:`9.3.2<part08/sect_9.3.2.html>`
      and :dcm:`9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new A-ASSOCIATE-RQ PDU."""
        # We allow the user to modify the protocol version if so desired
        self.protocol_version = 0x01
        # Set some default values; the property setters normalise these via
        # validate_ae_title().
        self.called_ae_title = "Default"
        self.calling_ae_title = "Default"

        # `variable_items` is a list containing the following:
        #   1 ApplicationContextItem
        #   1 or more PresentationContextItemRQ
        #   1 UserInformationItem
        # The order of the items in the list may not be as given above
        self.variable_items = []

    def from_primitive(self, primitive):
        """Setup the current PDU using an A-ASSOCIATE (request) primitive.

        Parameters
        ----------
        primitive : pdu_primitives.A_ASSOCIATE
            The primitive to use to set the current PDU field values.
        """
        self.calling_ae_title = primitive.calling_ae_title
        self.called_ae_title = primitive.called_ae_title

        # Add Application Context
        application_context = ApplicationContextItem()
        application_context.application_context_name = (
            primitive.application_context_name
        )
        self.variable_items.append(application_context)

        # Add Presentation Context(s)
        for contexts in primitive.presentation_context_definition_list:
            presentation_context = PresentationContextItemRQ()
            presentation_context.from_primitive(contexts)
            self.variable_items.append(presentation_context)

        # Add User Information
        user_information = UserInformationItem()
        user_information.from_primitive(primitive.user_information)
        self.variable_items.append(user_information)

    def to_primitive(self):
        """Return an A-ASSOCIATE (request) primitive from the current PDU.

        Returns
        -------
        pdu_primitives.A_ASSOCIATE
            The primitive representation of the current PDU.
        """
        from pynetdicom.pdu_primitives import A_ASSOCIATE

        primitive = A_ASSOCIATE()
        primitive.calling_ae_title = self.calling_ae_title
        primitive.called_ae_title = self.called_ae_title
        primitive.application_context_name = self.application_context_name

        for item in self.variable_items:
            # Add presentation contexts
            if isinstance(item, PresentationContextItemRQ):
                primitive.presentation_context_definition_list.append(
                    item.to_primitive())
            # Add user information
            elif isinstance(item, UserInformationItem):
                primitive.user_information = item.to_primitive()

        return primitive

    @property
    def application_context_name(self):
        """Return the *Application Context Name*, if available.

        Returns
        -------
        pydicom.uid.UID or None
            The requestor's *Application Context Name* or None if not
            available.
        """
        for item in self.variable_items:
            if isinstance(item, ApplicationContextItem):
                return item.application_context_name

        return None

    @property
    def called_ae_title(self):
        """Return the *Called AE Title* field value as :class:`bytes`."""
        return self._called_aet

    @called_ae_title.setter
    def called_ae_title(self, ae_title):
        """Set the *Called AE Title* field value.

        Will be converted to a fixed length 16-byte value (padded with trailing
        spaces ``0x20``). Leading and trailing spaces are non-significant and a
        value of 16 spaces is not allowed.

        Parameters
        ----------
        ae_title : str or bytes
            The value you wish to set. A value consisting of spaces is not
            allowed and values longer than 16 characters will be truncated.
        """
        # pylint: disable=attribute-defined-outside-init
        if isinstance(ae_title, str):
            ae_title = codecs.encode(ae_title, 'ascii')

        self._called_aet = validate_ae_title(ae_title)

    @property
    def calling_ae_title(self):
        """Return the *Calling AE Title* field value as :class:`bytes`."""
        return self._calling_aet

    @calling_ae_title.setter
    def calling_ae_title(self, ae_title):
        """Set the *Calling AE Title* field value.

        Will be converted to a fixed length 16-byte value (padded with trailing
        spaces ``0x20``). Leading and trailing spaces are non-significant and a
        value of 16 spaces is not allowed.

        Parameters
        ----------
        ae_title : str or bytes
            The value you wish to set. A value consisting of spaces is not
            allowed and values longer than 16 characters will be truncated.
        """
        # pylint: disable=attribute-defined-outside-init
        if isinstance(ae_title, str):
            ae_title = codecs.encode(ae_title, 'ascii')

        self._calling_aet = validate_ae_title(ae_title)

    @property
    def _decoders(self):
        """Return an iterable of tuples that contain field decoders.

        Returns
        -------
        list of tuple
            A list of ``((offset, length), attr_name, callable, [args])``,
            where:

            - ``offset`` is the byte offset to start at
            - ``length`` is how many bytes to slice (if None then will slice
              to the end of the data),
            - ``attr_name`` is the name of the attribute corresponding to the
              field
            - ``callable`` is a decoding function that returns the decoded
              value
            - ``args`` is a list of arguments to pass ``callable``
        """
        # Offsets match the encoding table in the class docstring.
        return [
            ((6, 2), 'protocol_version', self._wrap_unpack, [UNPACK_UINT2]),
            ((10, 16), 'called_ae_title', self._wrap_bytes, []),
            ((26, 16), 'calling_ae_title', self._wrap_bytes, []),
            ((74, None), 'variable_items', self._wrap_generate_items, [])
        ]

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field decoders.

        Returns
        -------
        list of tuple
            A list of ``(attr_name, callable, [args])``, where:

            - ``attr_name`` is the name of the attribute corresponding to the
              field
            - ``callable`` is an encoding function that returns :class:`bytes`
            - ``args`` is a :class:`list` of arguments to pass ``callable``.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            ('protocol_version', PACK_UINT2, []),
            (None, self._wrap_pack, [0x0000, PACK_UINT2]),
            ('called_ae_title', self._wrap_bytes, []),
            ('calling_ae_title', self._wrap_bytes, []),
            # The next eight 4-byte zero fields form the 32-byte reserved
            # block at offset 42 of the encoded PDU.
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
            ('variable_items', self._wrap_encode_items, [])
        ]

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as :class:`int`."""
        # 68 = the fixed fields after the PDU Length field: 2 (protocol
        # version) + 2 (reserved) + 16 + 16 (called/calling AE title) +
        # 32 (reserved).
        length = 68
        for item in self.variable_items:
            length += len(item)

        return length

    @property
    def presentation_context(self):
        """Return a list of the Presentation Context items.

        Returns
        -------
        list of pdu_items.PresentationContextItemRQ
            The Presentation Context items.
        """
        return [item for item in self.variable_items if
                isinstance(item, PresentationContextItemRQ)]

    def __str__(self):
        """Return a string representation of the PDU."""
        s = 'A-ASSOCIATE-RQ PDU\n'
        s += '==================\n'
        s += f' PDU type: 0x{self.pdu_type:02x}\n'
        s += f' PDU length: {self.pdu_length:d} bytes\n'
        s += f' Protocol version: {self.protocol_version:d}\n'
        s += f' Called AET: {self.called_ae_title}\n'
        s += f' Calling AET: {self.calling_ae_title}\n'
        s += '\n'

        s += ' Variable Items:\n'
        s += ' ---------------\n'
        s += ' * Application Context Item\n'
        s += f' - Context name: ={self.application_context_name}\n'
        s += ' * Presentation Context Item(s):\n'

        for ii in self.presentation_context:
            item_str = f'{ii}'
            item_str_list = item_str.split('\n')
            s += f' - {item_str_list[0]}\n'
            for jj in item_str_list[1:-1]:
                s += f' {jj}\n'

        s += ' * User Information Item(s):\n'
        for ii in self.user_information.user_data:
            item_str = f'{ii}'
            item_str_list = item_str.split('\n')
            s += f' - {item_str_list[0]}\n'
            for jj in item_str_list[1:-1]:
                s += f' {jj}\n'

        return s

    @property
    def user_information(self):
        """Return the User Information Item, if available.

        Returns
        -------
        pdu_items.UserInformationItem or None
            The requestor's User Information object or ``None``, if not
            available.
        """
        for item in self.variable_items:
            if isinstance(item, UserInformationItem):
                return item

        return None
class A_ASSOCIATE_AC(PDU):
"""An A-ASSOCIATE-AC PDU.
An A-ASSOCIATE-AC PDU is sent by an association acceptor to indicate that
association negotiation has been successful.
Attributes
----------
application_context_name : pydicom.uid.UID<|fim▁hole|> The requestor's *Called AE Title* field value, which is the destination
DICOM application name as a 16-byte value. The value is not
guaranteed to be the actual title and shall not be tested.
calling_ae_title : bytes
The requestor's *Calling AE Title* field value, which is the source
DICOM application name as a 16-byte value. The value is not
guaranteed to be the actual title and shall not be tested.
pdu_length : int
The number of bytes from the first byte following the *PDU Length*
field to the last byte of the PDU.
pdu_type : int
The *PDU Type* field value (``0x02``).
presentation_context : list of pdu_items.PresentationContextItemAC
The *Presentation Context Item(s)*.
protocol_version : int
The *Protocol Version* field value (default ``0x01``).
user_information : pdu_items.UserInformationItem
The *User Information Item* (if available).
variable_items : list
A list containing the A-ASSOCIATE-AC's 'Variable Items'. Contains
one Application Context item, one or more Presentation Context items
and one User Information item. The order of the items is not
guaranteed.
Notes
-----
An A-ASSOCIATE-AC PDU requires the following parameters:
* PDU type (1, fixed value, ``0x02``)
* PDU length (1)
* Protocol version (1, default value, ``0x01``)
* Variable items (1)
* Application Context Item (1)
* Item type (1, fixed value, ``0x10``)
* Item length (1)
* Application Context Name (1, fixed in an application)
* Presentation Context Item(s) (1 or more)
* Item type (1, fixed value, ``0x21``)
* Item length (1)
* Context ID (1)
* Result/reason (1)
* Transfer Syntax Sub-items (1)
* Item type (1, fixed, ``0x40``)
* Item length (1)
* Transfer syntax name(s) (1)
* User Information Item (1)
* Item type (1, fixed, ``0x50``)
* Item length (1)
* User data Sub-items (2 or more)
* Maximum Length Received Sub-item (1)
* Implementation Class UID Sub-item (1)
* Optional User Data Sub-items (0 or more)
**Encoding**
When encoded, an A-ASSOCIATE-AC PDU has the following structure, taken
from Table 9-17 (offsets shown with Python indexing). PDUs are always
encoded using Big Endian.
+--------+-------------+------------------+
| Offset | Length | Description |
+========+=============+==================+
| 0 | 1 | PDU type |
+--------+-------------+------------------+
| 1 | 1 | Reserved |
+--------+-------------+------------------+
| 2 | 4 | PDU length |
+--------+-------------+------------------+
| 6 | 2 | Protocol version |
+--------+-------------+------------------+
| 8 | 2 | Reserved |
+--------+-------------+------------------+
| 10 | 16 | Reserved^ |
+--------+-------------+------------------+
| 26 | 16 | Reserved^ |
+--------+-------------+------------------+
| 42 | 32 | Reserved |
+--------+-------------+------------------+
| 74 | Variable | Variable items |
+--------+-------------+------------------+
^ The reserved fields shall be sent with a value identical to the value
received in the A-ASSOCIATE-RQ but their values shall not be tested.
References
----------
* DICOM Standard, Part 8,
:dcm:`Section 9.3.3 <part08/sect_9.3.3.html>`
* DICOM Standard, Part 8,
:dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
"""
def __init__(self):
"""Initialise a new A-ASSOCIATE-AC PDU."""
# We allow the user to modify the protocol version if so desired
self.protocol_version = 0x01
# Called AE Title, should be present, but no guarantees
self._reserved_aet = None
# Calling AE Title, should be present, but no guarantees
self._reserved_aec = None
# `variable_items` is a list containing the following:
# 1 ApplicationContextItem
# 1 or more PresentationContextItemAC
# 1 UserInformationItem
# The order of the items in the list may not be as given above
self.variable_items = []
def from_primitive(self, primitive):
"""Setup the current PDU using an A-ASSOCIATE (accept) primitive.
Parameters
----------
primitive : pdu_primitives.A_ASSOCIATE
The primitive to use to set the current PDU field values.
"""
self._reserved_aet = primitive.called_ae_title
self._reserved_aec = primitive.calling_ae_title
# Make application context
application_context = ApplicationContextItem()
application_context.application_context_name = (
primitive.application_context_name
)
self.variable_items.append(application_context)
# Make presentation contexts
for ii in primitive.presentation_context_definition_results_list:
presentation_context = PresentationContextItemAC()
presentation_context.from_primitive(ii)
self.variable_items.append(presentation_context)
# Make user information
user_information = UserInformationItem()
user_information.from_primitive(primitive.user_information)
self.variable_items.append(user_information)
def to_primitive(self):
"""Return an A-ASSOCIATE (accept) primitive from the current PDU.
Returns
-------
pdu_primitives.A_ASSOCIATE
The primitive representation of the current PDU.
"""
from pynetdicom.pdu_primitives import A_ASSOCIATE
primitive = A_ASSOCIATE()
# The two reserved parameters at byte offsets 11 and 27 shall be set
# to called and calling AET byte the value shall not be
# tested when received (PS3.8 Table 9-17)
primitive.called_ae_title = self._reserved_aet
primitive.calling_ae_title = self._reserved_aec
for item in self.variable_items:
# Add application context
if isinstance(item, ApplicationContextItem):
primitive.application_context_name = (
item.application_context_name
)
# Add presentation contexts
elif isinstance(item, PresentationContextItemAC):
primitive.presentation_context_definition_results_list.append(
item.to_primitive()
)
# Add user information
elif isinstance(item, UserInformationItem):
primitive.user_information = item.to_primitive()
# 0x00 = Accepted
primitive.result = 0x00
return primitive
@property
def application_context_name(self):
"""Return the *Application Context Name*, if available.
Returns
-------
pydicom.uid.UID or None
The acceptor's *Application Context Name* or None if not available.
"""
for item in self.variable_items:
if isinstance(item, ApplicationContextItem):
return item.application_context_name
return None
@property
def called_ae_title(self):
"""Return the value sent in the *Called AE Title* reserved space.
While the standard says this value should match the A-ASSOCIATE-RQ
value there is no guarantee and this should not be used as a check
value.
Returns
-------
bytes
The value the A-ASSOCIATE-AC sent in the *Called AE Title* reserved
space.
"""
return self._reserved_aet
@property
def calling_ae_title(self):
"""Return the value sent in the *Calling AE Title* reserved space.
While the standard says this value should match the A-ASSOCIATE-RQ
value there is no guarantee and this should not be used as a check
value.
Returns
-------
bytes
The value the A-ASSOCIATE-AC sent in the *Calling AE Title*
reserved space.
"""
return self._reserved_aec
@property
def _decoders(self):
"""Return an iterable of tuples that contain field decoders.
Returns
-------
list of tuple
A list of ((offset, length), attr_name, callable, [args]), where
- offset is the byte offset to start at
- length is how many bytes to slice (if None then will slice to the
end of the data),
- attr_name is the name of the attribute corresponding to the field
- callable is a decoding function that returns the decoded value,
- args is a list of arguments to pass callable.
"""
return [
((6, 2), 'protocol_version', self._wrap_unpack, [UNPACK_UINT2]),
((10, 16), '_reserved_aet', self._wrap_bytes, []),
((26, 16), '_reserved_aec', self._wrap_bytes, []),
((74, None), 'variable_items', self._wrap_generate_items, [])
]
@property
def _encoders(self):
"""Return an iterable of tuples that contain field decoders.
Returns
-------
list of tuple
A list of (attr_name, callable, [args]), where
- attr_name is the name of the attribute corresponding to the field
- callable is an encoding function that returns bytes
- args is a list of arguments to pass callable.
"""
return [
('pdu_type', PACK_UCHAR, []),
(None, self._wrap_pack, [0x00, PACK_UCHAR]),
('pdu_length', PACK_UINT4, []),
('protocol_version', PACK_UINT2, []),
(None, self._wrap_pack, [0x0000, PACK_UINT2]),
('_reserved_aet', self._wrap_bytes, []),
('_reserved_aec', self._wrap_bytes, []),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
(None, self._wrap_pack, [0x00, PACK_UINT4]),
('variable_items', self._wrap_encode_items, [])
]
@property
def pdu_length(self):
"""Return the *PDU Length* field value as an int."""
length = 68
for item in self.variable_items:
length += len(item)
return length
@property
def presentation_context(self):
"""Return a list of the Presentation Context Items.
Returns
-------
list of pdu_items.PresentationContextItemAC
The Presentation Context Items.
"""
return [item for item in self.variable_items if
isinstance(item, PresentationContextItemAC)]
def __str__(self):
"""Return a string representation of the PDU."""
s = 'A-ASSOCIATE-AC PDU\n'
s += '==================\n'
s += f' PDU type: 0x{self.pdu_type:02x}\n'
s += f' PDU length: {self.pdu_length:d} bytes\n'
s += f' Protocol version: {self.protocol_version:d}\n'
s += f' Reserved (Called AET): {self._reserved_aet}\n'
s += f' Reserved (Calling AET): {self._reserved_aec}\n'
s += '\n'
s += ' Variable Items:\n'
s += ' ---------------\n'
s += ' * Application Context Item\n'
s += f' - Context name: ={self.application_context_name}\n'
s += ' * Presentation Context Item(s):\n'
for ii in self.presentation_context:
item_str = f'{ii}'
item_str_list = item_str.split('\n')
s += f' - {item_str_list[0]}\n'
for jj in item_str_list[1:-1]:
s += f' {jj}\n'
s += ' * User Information Item(s):\n'
for item in self.user_information.user_data:
item_str = f'{item}'
item_str_list = item_str.split('\n')
s += f' - {item_str_list[0]}\n'
for jj in item_str_list[1:-1]:
s += f' {jj}\n'
return s
@property
def user_information(self):
"""Return the User Information Item, if available.
Returns
-------
pdu_items.UserInformationItem or None
The acceptor's User Information object or None, if not available.
"""
for item in self.variable_items:
if isinstance(item, UserInformationItem):
return item
return None
class A_ASSOCIATE_RJ(PDU):
    """An A-ASSOCIATE-RJ PDU.

    An A-ASSOCIATE-RJ PDU is sent by an association acceptor to indicate
    that association negotiation has been unsuccessful.

    Attributes
    ----------
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU.
    pdu_type : int
        The *PDU Type* field value (``0x03``).
    reason_diagnostic : int
        The *Reason/Diagnostic* field value.
    result : int
        The *Result* field value.
    source : int
        The *Source* field value.

    Notes
    -----
    An A-ASSOCIATE-RJ PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x03``)
    * PDU length (1)
    * Result (1)
    * Source (1)
    * Reason/Diagnostic (1)

    **Encoding**

    When encoded, an A-ASSOCIATE-RJ PDU has the following structure, taken
    from Table 9-21 (offsets shown with Python indexing). PDUs are always
    encoded using Big Endian.

    +--------+-------------+-------------------+
    | Offset | Length      | Description       |
    +========+=============+===================+
    | 0      | 1           | PDU type          |
    +--------+-------------+-------------------+
    | 1      | 1           | Reserved          |
    +--------+-------------+-------------------+
    | 2      | 4           | PDU length        |
    +--------+-------------+-------------------+
    | 6      | 1           | Reserved          |
    +--------+-------------+-------------------+
    | 7      | 1           | Result            |
    +--------+-------------+-------------------+
    | 8      | 1           | Source            |
    +--------+-------------+-------------------+
    | 9      | 1           | Reason/diagnostic |
    +--------+-------------+-------------------+

    References
    ----------
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.4 <part08/sect_9.3.4.html>`
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new A-ASSOCIATE-RJ PDU."""
        # All three fields are 1-byte ints once set/decoded
        self.result = None
        self.source = None
        self.reason_diagnostic = None

    @staticmethod
    def _invalid_field(field):
        """Log then raise ValueError for an out-of-range field value.

        Parameters
        ----------
        field : str
            The name of the invalid field; ``'Result'``, ``'Source'`` or
            ``'Reason'``.
        """
        # Single place for the repeated log-and-raise used by the
        # *_str properties; the message text is unchanged
        msg = f'Invalid value in {field} field in A-ASSOCIATE-RJ PDU'
        LOGGER.error(msg)
        raise ValueError(msg)

    def from_primitive(self, primitive):
        """Setup the current PDU using an A-ASSOCIATE (reject) primitive.

        Parameters
        ----------
        primitive : pdu_primitives.A_ASSOCIATE
            The primitive to use to set the current PDU field values.
        """
        self.result = primitive.result
        self.source = primitive.result_source
        self.reason_diagnostic = primitive.diagnostic

    def to_primitive(self):
        """Return an A-ASSOCIATE (reject) primitive from the current PDU.

        Returns
        -------
        pdu_primitives.A_ASSOCIATE
            The primitive representation of the current PDU.
        """
        from pynetdicom.pdu_primitives import A_ASSOCIATE

        primitive = A_ASSOCIATE()
        primitive.result = self.result
        primitive.result_source = self.source
        primitive.diagnostic = self.reason_diagnostic

        return primitive

    @property
    def _decoders(self):
        """Return an iterable of tuples that contain field decoders.

        Returns
        -------
        list of tuple
            A list of ((offset, length), attr_name, callable, [args]), where

            - offset is the byte offset to start at
            - length is how many bytes to slice (if None then will slice to
              the end of the data),
            - attr_name is the name of the attribute corresponding to the
              field
            - callable is a decoding function that returns the decoded
              value,
            - args is a list of arguments to pass callable.
        """
        return [
            ((7, 1), 'result', self._wrap_unpack, [UNPACK_UCHAR]),
            ((8, 1), 'source', self._wrap_unpack, [UNPACK_UCHAR]),
            ((9, 1), 'reason_diagnostic', self._wrap_unpack, [UNPACK_UCHAR]),
        ]

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders.

        Returns
        -------
        list of tuple
            A list of (attr_name, callable, [args]), where

            - attr_name is the name of the attribute corresponding to the
              field (None for fixed/reserved fields)
            - callable is an encoding function that returns bytes
            - args is a list of arguments to pass callable.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('result', PACK_UCHAR, []),
            ('source', PACK_UCHAR, []),
            ('reason_diagnostic', PACK_UCHAR, []),
        ]

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as an int."""
        # Fixed: result + source + reason/diagnostic + 1 reserved byte
        return 4

    @property
    def reason_str(self):
        """Return a str describing the *Reason/Diagnostic* field value.

        Raises
        ------
        ValueError
            If either the *Source* or *Reason/Diagnostic* field value is
            out of range.
        """
        # The valid reasons depend on the Source field value (PS3.8
        # Section 9.3.4)
        _reasons = {
            1: {
                1: "No reason given",
                2: "Application context name not supported",
                3: "Calling AE title not recognised",
                4: "Reserved",
                5: "Reserved",
                6: "Reserved",
                7: "Called AE title not recognised",
                8: "Reserved",
                9: "Reserved",
                10: "Reserved",
            },
            2: {
                1: "No reason given",
                2: "Protocol version not supported",
            },
            3: {
                0: "Reserved",
                1: "Temporary congestion",
                2: "Local limit exceeded",
                3: "Reserved",
                4: "Reserved",
                5: "Reserved",
                6: "Reserved",
                7: "Reserved",
            },
        }
        if self.source not in _reasons:
            self._invalid_field('Source')

        if self.reason_diagnostic not in _reasons[self.source]:
            self._invalid_field('Reason')

        return _reasons[self.source][self.reason_diagnostic]

    @property
    def result_str(self):
        """Return a str describing the *Result* field value.

        Raises
        ------
        ValueError
            If the *Result* field value is out of range.
        """
        _results = {
            1: 'Rejected (Permanent)',
            2: 'Rejected (Transient)',
        }
        if self.result not in _results:
            self._invalid_field('Result')

        return _results[self.result]

    @property
    def source_str(self):
        """Return a str describing the *Source* field value.

        Raises
        ------
        ValueError
            If the *Source* field value is out of range.
        """
        _sources = {
            1: 'DUL service-user',
            2: 'DUL service-provider (ACSE related)',
            3: 'DUL service-provider (presentation related)',
        }
        if self.source not in _sources:
            self._invalid_field('Source')

        return _sources[self.source]

    def __str__(self):
        """Return a string representation of the PDU."""
        s = 'A-ASSOCIATE-RJ PDU\n'
        s += '==================\n'
        s += f' PDU type: 0x{self.pdu_type:02x}\n'
        s += f' PDU length: {self.pdu_length:d} bytes\n'
        s += f' Result: {self.result_str}\n'
        s += f' Source: {self.source_str}\n'
        s += f' Reason/Diagnostic: {self.reason_str}\n'
        return s
# Overridden _generate_items and _wrap_generate_items
class P_DATA_TF(PDU):
    """A P-DATA-TF PDU.

    A P-DATA-TF PDU is used once an association has been established to
    send DIMSE message data.

    Attributes
    ----------
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU.
    pdu_type : int
        The *PDU Type* field value (``0x04``).
    presentation_data_value_items : list of pdu.PresentationDataValueItem
        The *Presentation Data Value Item(s)* field value.

    Notes
    -----
    A P-DATA-TF PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x04``)
    * PDU length (1)
    * Presentation data value Item(s) (1 or more)

    **Encoding**

    When encoded, a P-DATA-TF PDU has the following structure, taken
    from Table 9-22 (offsets shown with Python indexing). PDUs are always
    encoded using Big Endian.

    +--------+-------------+-------------------------------+
    | Offset | Length      | Description                   |
    +========+=============+===============================+
    | 0      | 1           | PDU type                      |
    +--------+-------------+-------------------------------+
    | 1      | 1           | Reserved                      |
    +--------+-------------+-------------------------------+
    | 2      | 4           | PDU length                    |
    +--------+-------------+-------------------------------+
    | 6      | Variable    | Presentation data value items |
    +--------+-------------+-------------------------------+

    References
    ----------
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.5 <part08/sect_9.3.5.html>`
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new P-DATA-TF PDU."""
        # list of PresentationDataValueItem
        self.presentation_data_value_items = []

    def from_primitive(self, primitive):
        """Setup the current PDU using a P-DATA primitive.

        Parameters
        ----------
        primitive : pdu_primitives.P_DATA
            The primitive to use to set the current PDU field values.
        """
        # Each entry is a [context ID, encoded data] pair
        for item in primitive.presentation_data_value_list:
            presentation_data_value = PresentationDataValueItem()
            presentation_data_value.presentation_context_id = item[0]
            presentation_data_value.presentation_data_value = item[1]
            self.presentation_data_value_items.append(presentation_data_value)

    def to_primitive(self):
        """Return a P-DATA primitive from the current PDU.

        Returns
        -------
        pdu_primitives.P_DATA
            The primitive representation of the current PDU.
        """
        from pynetdicom.pdu_primitives import P_DATA

        primitive = P_DATA()

        primitive.presentation_data_value_list = []
        for item in self.presentation_data_value_items:
            primitive.presentation_data_value_list.append(
                [item.presentation_context_id, item.presentation_data_value]
            )
        return primitive

    @property
    def _decoders(self):
        """Return an iterable of tuples that contain field decoders.

        Returns
        -------
        list of tuple
            A list of ((offset, length), attr_name, callable, [args]), where

            - offset is the byte offset to start at
            - length is how many bytes to slice (if None then will slice to
              the end of the data),
            - attr_name is the name of the attribute corresponding to the
              field
            - callable is a decoding function that returns the decoded
              value,
            - args is a list of arguments to pass callable.
        """
        return [
            ((6, None),
             'presentation_data_value_items',
             self._wrap_generate_items,
             [])
        ]

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders.

        Returns
        -------
        list of tuple
            A list of (attr_name, callable, [args]), where

            - attr_name is the name of the attribute corresponding to the
              field (None for fixed/reserved fields)
            - callable is an encoding function that returns bytes
            - args is a list of arguments to pass callable.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            ('presentation_data_value_items', self._wrap_encode_items, [])
        ]

    @staticmethod
    def _generate_items(bytestream):
        """Yield the variable PDV item data from `bytestream`.

        Parameters
        ----------
        bytestream : bytes
            The encoded PDU variable item data.

        Yields
        ------
        int, bytes
            The PDV's Presentation Context ID as int, and the PDV item's
            encoded data as bytes.

        Raises
        ------
        ValueError
            If a PDV item's actual data length doesn't match its encoded
            *Item Length* (i.e. the item is truncated).

        Notes
        -----
        **Encoding**

        When encoded, a Presentation Data Value Item has the following
        structure, taken from Table 9-23 (offset shown with Python
        indexing). The item is encoded using Big Endian, but the encoding
        of the presentation data message fragments is dependent on the
        negotiated transfer syntax.

        +--------+-------------+-------------------------+
        | Offset | Length      | Description             |
        +========+=============+=========================+
        | 0      | 4           | Item length             |
        +--------+-------------+-------------------------+
        | 4      | 1           | Context ID              |
        +--------+-------------+-------------------------+
        | 5      | NN          | Presentation data value |
        +--------+-------------+-------------------------+

        References
        ----------
        * DICOM Standard, Part 8, :dcm:`Section
          9.3.5.1 <part08/sect_9.3.5.html#sect_9.3.5.1>`
        * DICOM Standard, Part 8,
          :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
        """
        offset = 0
        while bytestream[offset:offset + 1]:
            item_length = UNPACK_UINT4(bytestream[offset:offset + 4])[0]
            context_id = UNPACK_UCHAR(bytestream[offset + 4:offset + 5])[0]
            data = bytestream[offset + 5:offset + 4 + item_length]
            # `item_length` covers the Context ID (1 byte) plus the data; a
            # short slice means the item was truncated. This is network
            # data, so validate explicitly rather than with `assert`, which
            # is silently skipped when running under ``python -O``.
            if len(data) != item_length - 1:
                raise ValueError(
                    'The length of the presentation data value does not '
                    'match the encoded Item Length'
                )
            yield context_id, data
            # Change `offset` to the start of the next PDV item
            offset += 4 + item_length

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as an int."""
        return sum(len(item) for item in self.presentation_data_value_items)

    def __str__(self):
        """Return a string representation of the PDU."""
        s = 'P-DATA-TF PDU\n'
        s += '=============\n'
        s += f' PDU type: 0x{self.pdu_type:02x}\n'
        s += f' PDU length: {self.pdu_length:d} bytes\n'
        s += '\n'
        s += ' Presentation Data Value Item(s):\n'
        s += ' --------------------------------\n'
        for ii in self.presentation_data_value_items:
            item_str = f'{ii}'
            item_str_list = item_str.split('\n')
            s += f' * {item_str_list[0]}\n'
            for jj in item_str_list[1:-1]:
                s += f' {jj}\n'
        return s

    def _wrap_generate_items(self, bytestream):
        """Return a list of PDV Items generated from `bytestream`.

        Parameters
        ----------
        bytestream : bytes
            The encoded presentation data value items.

        Returns
        -------
        list of pdu_items.PresentationDataValueItem
            The presentation data value items contained in `bytestream`.
        """
        item_list = []
        for context_id, data in self._generate_items(bytestream):
            pdv_item = PresentationDataValueItem()
            pdv_item.presentation_context_id = context_id
            pdv_item.presentation_data_value = data
            item_list.append(pdv_item)

        return item_list
class A_RELEASE_RQ(PDU):
    """An A-RELEASE-RQ PDU.

    Sent over an established association to request that the association
    be released.

    Attributes
    ----------
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU (always 4).
    pdu_type : int
        The *PDU Type* field value (``0x05``).

    Notes
    -----
    An A-RELEASE-RQ PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x05``)
    * PDU length (1, fixed value, 4)

    **Encoding**

    An encoded A-RELEASE-RQ PDU is always Big Endian and has the fixed
    structure below, from Table 9-24 (offsets use Python indexing):

    +--------+-------------+---------------+
    | Offset | Length      | Description   |
    +========+=============+===============+
    | 0      | 1           | PDU type      |
    +--------+-------------+---------------+
    | 1      | 1           | Reserved      |
    +--------+-------------+---------------+
    | 2      | 4           | PDU length    |
    +--------+-------------+---------------+
    | 6      | 4           | Reserved      |
    +--------+-------------+---------------+

    References
    ----------
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.6 <part08/sect_9.3.6.html>`
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new A-RELEASE-RQ PDU."""

    @staticmethod
    def from_primitive(primitive):
        """No-op: an A-RELEASE-RQ PDU has no primitive-derived fields.

        Parameters
        ----------
        primitive : pdu_primitives.A_RELEASE
            Accepted for API symmetry with the other PDU classes but
            otherwise unused.
        """

    @staticmethod
    def to_primitive():
        """Return an A-RELEASE (request) primitive from the current PDU.

        Returns
        -------
        pdu_primitives.A_RELEASE
            The primitive representation of the current PDU.
        """
        from pynetdicom.pdu_primitives import A_RELEASE

        return A_RELEASE()

    @property
    def _decoders(self):
        """Return the field decoders.

        Returns
        -------
        list of tuple
            Always empty; every field of an A-RELEASE-RQ PDU is fixed.
        """
        return []

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders.

        Returns
        -------
        list of tuple
            A list of (attr_name, callable, [args]) encoding directives,
            with ``attr_name`` of None for fixed/reserved fields.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
        ]

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as an int (always 4)."""
        return 4

    def __str__(self):
        """Return a string representation of the PDU."""
        return (
            'A-RELEASE-RQ PDU\n'
            '================\n'
            f' PDU type: 0x{self.pdu_type:02x}\n'
            f' PDU length: {self.pdu_length:d} bytes\n'
        )
class A_RELEASE_RP(PDU):
    """An A-RELEASE-RP PDU.

    Sent over an established association to confirm that the association
    has been released.

    Attributes
    ----------
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU (always 4).
    pdu_type : int
        The *PDU Type* field value (``0x06``).

    Notes
    -----
    An A-RELEASE-RP PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x06``)
    * PDU length (1, fixed value, ``0x00000004``)

    **Encoding**

    An encoded A-RELEASE-RP PDU is always Big Endian and has the fixed
    structure below, from Table 9-25 (offsets use Python indexing):

    +--------+-------------+---------------+
    | Offset | Length      | Description   |
    +========+=============+===============+
    | 0      | 1           | PDU type      |
    +--------+-------------+---------------+
    | 1      | 1           | Reserved      |
    +--------+-------------+---------------+
    | 2      | 4           | PDU length    |
    +--------+-------------+---------------+
    | 6      | 4           | Reserved      |
    +--------+-------------+---------------+

    References
    ----------
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.7 <part08/sect_9.3.7.html>`
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new A-RELEASE-RP PDU."""

    @staticmethod
    def from_primitive(primitive):
        """No-op: an A-RELEASE-RP PDU has no primitive-derived fields.

        Parameters
        ----------
        primitive : pdu_primitives.A_RELEASE
            Accepted for API symmetry with the other PDU classes but
            otherwise unused.
        """

    @staticmethod
    def to_primitive():
        """Return an A-RELEASE (response) primitive from the current PDU.

        Returns
        -------
        pdu_primitives.A_RELEASE
            The primitive representation of the current PDU, with its
            `result` set to ``'affirmative'``.
        """
        from pynetdicom.pdu_primitives import A_RELEASE

        primitive = A_RELEASE()
        primitive.result = 'affirmative'

        return primitive

    @property
    def _decoders(self):
        """Return the field decoders.

        Returns
        -------
        list of tuple
            Always empty; every field of an A-RELEASE-RP PDU is fixed.
        """
        return []

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders.

        Returns
        -------
        list of tuple
            A list of (attr_name, callable, [args]) encoding directives,
            with ``attr_name`` of None for fixed/reserved fields.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            (None, self._wrap_pack, [0x00, PACK_UINT4]),
        ]

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as an int (always 4)."""
        return 4

    def __str__(self):
        """Return a string representation of the PDU."""
        return (
            'A-RELEASE-RP PDU\n'
            '================\n'
            f' PDU type: 0x{self.pdu_type:02x}\n'
            f' PDU length: {self.pdu_length:d} bytes\n'
        )
class A_ABORT_RQ(PDU):
    """An A-ABORT-RQ PDU.

    An A-ABORT-RQ PDU is used to abort the association.

    Attributes
    ----------
    pdu_length : int
        The number of bytes from the first byte following the *PDU Length*
        field to the last byte of the PDU.
    pdu_type : int
        The *PDU Type* field value (``0x07``).
    reason_diagnostic : int
        The *Reason/Diagnostic* field value.
    source : int
        The *Source* field value.

    Notes
    -----
    An A-ABORT-RQ PDU requires the following parameters:

    * PDU type (1, fixed value, ``0x07``)
    * PDU length (1, fixed value, 4)
    * Source (1)
    * Reason/Diagnostic (1)

    **Encoding**

    When encoded, an A-ABORT-RQ PDU has the following structure, taken
    from Table 9-26 (offsets shown with Python indexing). PDUs are always
    encoded using Big Endian.

    +--------+-------------+-------------------+
    | Offset | Length      | Description       |
    +========+=============+===================+
    | 0      | 1           | PDU type          |
    +--------+-------------+-------------------+
    | 1      | 1           | Reserved          |
    +--------+-------------+-------------------+
    | 2      | 4           | PDU length        |
    +--------+-------------+-------------------+
    | 6      | 1           | Reserved          |
    +--------+-------------+-------------------+
    | 7      | 1           | Reserved          |
    +--------+-------------+-------------------+
    | 8      | 1           | Source            |
    +--------+-------------+-------------------+
    | 9      | 1           | Reason/Diagnostic |
    +--------+-------------+-------------------+

    References
    ----------
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.8 <part08/sect_9.3.8.html>`
    * DICOM Standard, Part 8,
      :dcm:`Section 9.3.1<part08/sect_9.3.html#sect_9.3.1>`
    """
    def __init__(self):
        """Initialise a new A-ABORT-RQ PDU."""
        # Both are 1-byte ints once set/decoded
        self.source = None
        self.reason_diagnostic = None

    def from_primitive(self, primitive):
        """Setup the current PDU using an A-ABORT or A-P-ABORT primitive.

        Parameters
        ----------
        primitive : pdu_primitives.A_ABORT or pdu_primitives.A_P_ABORT
            The primitive to use to set the current PDU field values.
        """
        from pynetdicom.pdu_primitives import A_ABORT, A_P_ABORT

        # User initiated abort
        if primitive.__class__ == A_ABORT:
            # The reason field shall be 0x00 when the source is DUL
            # service-user
            self.reason_diagnostic = 0
            self.source = primitive.abort_source
        # User provider primitive abort
        elif primitive.__class__ == A_P_ABORT:
            self.reason_diagnostic = primitive.provider_reason
            self.source = 2

    def to_primitive(self):
        """Return an A-ABORT or A-P-ABORT primitive from the current PDU.

        Returns
        -------
        pdu_primitives.A_ABORT or pdu_primitives.A_P_ABORT
            The primitive representation of the current PDU.

        Raises
        ------
        ValueError
            If the *Source* field value is neither 0 (DUL service-user)
            nor 2 (DUL service-provider).
        """
        from pynetdicom.pdu_primitives import A_ABORT, A_P_ABORT

        # User initiated abort
        if self.source == 0x00:
            primitive = A_ABORT()
            primitive.abort_source = self.source
        # User provider primitive abort
        elif self.source == 0x02:
            primitive = A_P_ABORT()
            primitive.provider_reason = self.reason_diagnostic
        else:
            # Previously any other value (e.g. the reserved 0x01) fell
            # through and raised UnboundLocalError; fail clearly instead
            raise ValueError(
                f"Invalid 'source' value {self.source} in the A-ABORT-RQ PDU"
            )

        return primitive

    @property
    def _decoders(self):
        """Return an iterable of tuples that contain field decoders.

        Returns
        -------
        list of tuple
            A list of ((offset, length), attr_name, callable, [args]), where

            - offset is the byte offset to start at
            - length is how many bytes to slice (if None then will slice to
              the end of the data),
            - attr_name is the name of the attribute corresponding to the
              field
            - callable is a decoding function that returns the decoded
              value,
            - args is a list of arguments to pass callable.
        """
        return [
            ((8, 1), 'source', self._wrap_unpack, [UNPACK_UCHAR]),
            ((9, 1), 'reason_diagnostic', self._wrap_unpack, [UNPACK_UCHAR]),
        ]

    @property
    def _encoders(self):
        """Return an iterable of tuples that contain field encoders.

        Returns
        -------
        list of tuple
            A list of (attr_name, callable, [args]), where

            - attr_name is the name of the attribute corresponding to the
              field (None for fixed/reserved fields)
            - callable is an encoding function that returns bytes
            - args is a list of arguments to pass callable.
        """
        return [
            ('pdu_type', PACK_UCHAR, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('pdu_length', PACK_UINT4, []),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            (None, self._wrap_pack, [0x00, PACK_UCHAR]),
            ('source', PACK_UCHAR, []),
            ('reason_diagnostic', PACK_UCHAR, []),
        ]

    @property
    def pdu_length(self):
        """Return the *PDU Length* field value as an int."""
        # Fixed: source + reason/diagnostic + 2 reserved bytes
        return 4

    def __str__(self):
        """Return a string representation of the PDU."""
        s = "A-ABORT PDU\n"
        s += "===========\n"
        s += f" PDU type: 0x{self.pdu_type:02x}\n"
        s += f" PDU length: {self.pdu_length:d} bytes\n"
        s += f" Abort Source: {self.source_str}\n"
        s += f" Reason/Diagnostic: {self.reason_str}\n"
        return s

    @property
    def source_str(self):
        """Return a str description of the *Source* field value.

        Raises a KeyError for values outside 0, 1 and 2.
        """
        _sources = {
            0: 'DUL service-user',
            1: 'Reserved',
            2: 'DUL service-provider',
        }
        return _sources[self.source]

    @property
    def reason_str(self):
        """Return a str description of the *Reason/Diagnostic* field value.

        The reason is only meaningful when the source is the DUL
        service-provider (2); for any other source ``'No reason given'``
        is returned.
        """
        if self.source == 2:
            _reason_str = {
                0: "No reason given",
                1: "Unrecognised PDU",
                2: "Unexpected PDU",
                3: "Reserved",
                4: "Unrecognised PDU parameter",
                5: "Unexpected PDU parameter",
                6: "Invalid PDU parameter value",
            }
            return _reason_str[self.reason_diagnostic]

        return 'No reason given'
# PDUs indexed by their class; the value is each PDU's *PDU Type* field
# (stray text-extraction token removed from the closing line)
PDU_TYPES = {
    A_ASSOCIATE_RQ: 0x01,
    A_ASSOCIATE_AC: 0x02,
    A_ASSOCIATE_RJ: 0x03,
    P_DATA_TF: 0x04,
    A_RELEASE_RQ: 0x05,
    A_RELEASE_RP: 0x06,
    A_ABORT_RQ: 0x07,
}
|
The Application Context Item's *Application Context Name* field value
(if available).
called_ae_title : bytes
|
<|file_name|>securitycontextconstraints.go<|end_file_name|><|fim▁begin|>// Code generated by informer-gen. DO NOT EDIT.
package v1
import (
"context"
time "time"
securityv1 "github.com/openshift/api/security/v1"
versioned "github.com/openshift/client-go/security/clientset/versioned"
internalinterfaces "github.com/openshift/client-go/security/informers/externalversions/internalinterfaces"
v1 "github.com/openshift/client-go/security/listers/security/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
watch "k8s.io/apimachinery/pkg/watch"
cache "k8s.io/client-go/tools/cache"
)
// SecurityContextConstraintsInformer provides access to a shared informer and lister for
// SecurityContextConstraints.
type SecurityContextConstraintsInformer interface {
	// Informer returns the shared index informer for SecurityContextConstraints.
	Informer() cache.SharedIndexInformer
	// Lister returns a lister backed by the informer's cache.
	Lister() v1.SecurityContextConstraintsLister
}
type securityContextConstraintsInformer struct {
factory internalinterfaces.SharedInformerFactory
tweakListOptions internalinterfaces.TweakListOptionsFunc
}
// NewSecurityContextConstraintsInformer constructs a new informer for SecurityContextConstraints type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func NewSecurityContextConstraintsInformer(client versioned.Interface, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer {
return NewFilteredSecurityContextConstraintsInformer(client, resyncPeriod, indexers, nil)
}
// NewFilteredSecurityContextConstraintsInformer constructs a new informer for SecurityContextConstraints type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func NewFilteredSecurityContextConstraintsInformer(client versioned.Interface, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer {<|fim▁hole|> tweakListOptions(&options)
}
return client.SecurityV1().SecurityContextConstraints().List(context.TODO(), options)
},
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
if tweakListOptions != nil {
tweakListOptions(&options)
}
return client.SecurityV1().SecurityContextConstraints().Watch(context.TODO(), options)
},
},
&securityv1.SecurityContextConstraints{},
resyncPeriod,
indexers,
)
}
func (f *securityContextConstraintsInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer {
return NewFilteredSecurityContextConstraintsInformer(client, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions)
}
func (f *securityContextConstraintsInformer) Informer() cache.SharedIndexInformer {
return f.factory.InformerFor(&securityv1.SecurityContextConstraints{}, f.defaultInformer)
}
func (f *securityContextConstraintsInformer) Lister() v1.SecurityContextConstraintsLister {
return v1.NewSecurityContextConstraintsLister(f.Informer().GetIndexer())
}<|fim▁end|>
|
return cache.NewSharedIndexInformer(
&cache.ListWatch{
ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
if tweakListOptions != nil {
|
<|file_name|>ampi_base.cpp<|end_file_name|><|fim▁begin|>#include "ampi_base.h"
#include <sstream>
#include <ostream>
#include "ampi.h"
#include "mpi.h"
/**
* @brief AMPI_base::AMPI_base Initilize the class
*/
AMPI_base::AMPI_base(){
rParam=false;
valid=true;
}
/**
* @brief AMPI_base::AMPI_typeName This function is to be used by AMPI for
* transmitting, receiving, and handling type names. It serves as a unique
* identify for the class, and is required to be rewriten for inherited classes
* @return The name of the class
*/
char* AMPI_base::AMPI_typeName(){
return "AMPI_base";
}
/**
* @brief AMPI_base::AMPI_locked this optional function is called when the class
* has been transmitted and it will be modified by another machine.
*/
void AMPI_base::AMPI_locked() {
return;
}
/**
* @brief AMPI_base::AMPI_unlocked this optional function is called when the
* class has been returned from a remote function call.
*/
void AMPI_base::AMPI_unlocked() {
return;
}
/**
* @brief AMPI_base::AMPI_send This is called by AMPI to send data from
* the class to new copy on another machine. This can be left alone if using
* AMPI_base::AMPI_input, however it is left virtual so advaced users may use
* this function and MPI_Send to send the data more efficietly
* @param dest The destination to be sent to
* @param tag The MPI tag to send with
* @param comm the MPI communicant
* @return
*/
int AMPI_base::AMPI_send(int dest, int tag, MPI_Comm comm){
char *buf;
int size;
buf = AMPI_output(&size);
MPI_Send(&size,1,MPI_INT,dest,AMPI::AMPI_TAG_SIZE, comm);
MPI_Send(buf,size,MPI_CHAR,dest,tag,comm);
return 0;
}
/**
* @brief AMPI_base::AMPI_recv This is called by AMPI to receive new data from
* another class on another machine. this can be left alone if using
* AMPI_base::AMPI_input, however it is left virtual so advanced users my use<|fim▁hole|> * @param tag The MPI tag that to receive from
* @param comm The MPI communicant
* @param status Pointer to an external status variable
* @return
*/
int AMPI_base::AMPI_recv(int source, int tag, MPI_Comm comm,
MPI_Status *status){
int size;
MPI_Recv(&size,1,MPI_INT,source,AMPI::AMPI_TAG_SIZE,comm,status);
char *buf = new char[size];
MPI_Recv(buf,size,MPI_CHAR,source,tag,comm,status);
AMPI_input(buf,size);
return 0;
}
/**
* @brief AMPI_base::AMPI_input This function is called by the default
* AMPI_base::AMPI_recv function to convert the character array received into
* the inherited class's data format. Rewriting this function in the inherited
* class is required unless using AMPI_base::AMPI_recv, however it should be
* nearly identical to a function to read the class froma file.
* @param buf The character array to read from
* @param size the size of the array
*/
void AMPI_base::AMPI_input(char *buf, int size){
return;
}
/**
* @brief AMPI_base::AMPI_output This function is called by the default
* AMPI_base::AMPI_send function to convert the inherted class's data format to
* a character array. Rewirting this function in the inherited class is
* required unless using AMPI_base::AMPI_recv, however it should be nearly
* identicle to a function to write the class to a file.
* @param size pointer to and integer to store the size of the character array
* @return the character array
*/
char* AMPI_base::AMPI_output(int *size){
return "NULL";
}
/**
* @brief AMPI_base::AMPI_returnParameter setting rP to true indecates to AMPI
* that the class will be modified during a remote call and need to be sent back
* @param rP
* @return
*/
bool AMPI_base::AMPI_returnParameter(bool rP){
rParam=rP;
return AMPI_returnParameter();
}
/**
* @brief AMPI_base::AMPI_returnParameter this indecates weather or not the
* class will be returned after a remote call
* @return true if the class will be returned
*/
bool AMPI_base::AMPI_returnParameter(){
return rParam;
}
void AMPI_base::Validate(){
valid=false;
AMPI_locked();
}
void AMPI_base::deValidate(){
valid=true;
AMPI_unlocked();
}
/**
* @brief AMPI_base::AMPI_debug this function is used for debuging AMPI
* it is not need for applications.
*/
void AMPI_base::AMPI_debug(){
std::cerr << AMPI_typeName()
<< ": "
<< rParam << valid << "\n";
}<|fim▁end|>
|
* this function and MPI_Recv to recive the data more efficently
* @param source The source of the data
|
<|file_name|>ImmutabilityTest.java<|end_file_name|><|fim▁begin|>/*
* ome.server.itests.ImmutabilityTest
*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.server.itests;
// Java imports
// Third-party libraries
import org.testng.annotations.Test;
// Application-internal dependencies
import ome.model.core.Image;
import ome.model.meta.Event;
import ome.parameters.Filter;
import ome.parameters.Parameters;
/**
*
* @author Josh Moore <a
* href="mailto:[email protected]">[email protected]</a>
* @version 1.0 <small> (<b>Internal version:</b> $Rev$ $Date$) </small>
* @since 1.0
*/
public class ImmutabilityTest extends AbstractManagedContextTest {
@Test
public void testCreationEventWillBeSilentlyUnchanged() throws Exception {
loginRoot();
Image i = new_Image("immutable creation");
i = iUpdate.saveAndReturnObject(i);
Event oldEvent = i.getDetails().getCreationEvent();
Event newEvent = iQuery.findByQuery(
"select e from Event e where id != :id", new Parameters(<|fim▁hole|>
i.getDetails().setCreationEvent(newEvent);
// This fails because it gets silently copied to our new instance. See:
// http://trac.openmicroscopy.org.uk/ome/ticket/346
// i = iUpdate.saveAndReturnObject(i);
// assertEquals( i.getDetails().getCreationEvent().getId(),
// oldEvent.getId());
// Saving and reacquiring to be sure.
iUpdate.saveObject(i);
// unfortunately still not working properly i = iQuery.refresh(i);
i = iQuery.get(i.getClass(), i.getId());
assertEquals(i.getDetails().getCreationEvent().getId(), oldEvent
.getId());
}
}<|fim▁end|>
|
new Filter().page(0, 1)).addId(oldEvent.getId()));
|
<|file_name|>collection.js<|end_file_name|><|fim▁begin|>'use strict';
const Assert = require('chai').assert;
const request = require('../support/request');
const { Before, Given, Then, When, After } = require('cucumber');
// Timeout of 15 seconds
const TIMEOUT = 15 * 1000;
/**
* Helper function to create a collection
* @method createCollection
* @param {Object} body The body of the request
* @returns {Promise}
*/
function createCollection(body) {
return this.getJwt(this.apiToken)
.then((response) => {
this.jwt = response.body.token;
return request({
uri: `${this.instance}/${this.namespace}/collections`,
method: 'POST',
auth: {
bearer: this.jwt
},
body,
json: true
});
});
}
/**
* Helper function to delete a collection
* @method deleteCollection
* @param {Number} [id] Id of the collection to delete
* @returns {Promise}
*/
function deleteCollection(id) {
if (!id) {
return Promise.resolve();
}
return request({
uri: `${this.instance}/${this.namespace}/collections/${id}`,
method: 'DELETE',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 204);
});
}
Before('@collections', function hook() {
this.repoOrg = this.testOrg;
this.repoName = 'functional-collections';
this.pipelineId = null;
this.firstCollectionId = null;
this.secondCollectionId = null;
});
When(/^they check the default collection$/, { timeout: TIMEOUT }, function step() {
return this.ensurePipelineExists({ repoName: this.repoName })
.then(() => request({
uri: `${this.instance}/${this.namespace}/collections`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
})
.then((response) => {
Assert.strictEqual(response.statusCode, 200);
this.defaultCollectionId = response.body.find(collection =>
collection.type === 'default'
).id;
Assert.notEqual(this.defaultCollectionId, undefined);
})
);
});
Then(/^they can see the default collection contains that pipeline$/, { timeout: TIMEOUT },
function step() {
const pipelineId = parseInt(this.pipelineId, 10);
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.defaultCollectionId}`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
// TODO: May need to change back
// Assert.deepEqual(response.body.pipelineIds, [pipelineId]);
Assert.include(response.body.pipelineIds, pipelineId);
});
});
When(/^they create a new collection "myCollection" with that pipeline$/,
{ timeout: TIMEOUT }, function step() {
return this.ensurePipelineExists({ repoName: this.repoName })
.then(() => {
const requestBody = {
name: 'myCollection',
pipelineIds: [this.pipelineId]
};
return createCollection.call(this, requestBody);
})
.then((response) => {
Assert.strictEqual(response.statusCode, 201);
this.firstCollectionId = response.body.id;
});
});
Then(/^they can see that collection$/, { timeout: TIMEOUT }, function step() {
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.firstCollectionId}`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
Assert.strictEqual(response.body.name, 'myCollection');
});
});
Then(/^the collection contains that pipeline$/, { timeout: TIMEOUT }, function step() {
const pipelineId = parseInt(this.pipelineId, 10);
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.firstCollectionId}`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
Assert.deepEqual(response.body.pipelineIds, [pipelineId]);
});
});
When(/^they create a new collection "myCollection"$/, { timeout: TIMEOUT }, function step() {
return createCollection.call(this, { name: 'myCollection' })
.then((response) => {
Assert.strictEqual(response.statusCode, 201);
this.firstCollectionId = response.body.id;
});
});
Then(/^the collection is empty$/, { timeout: TIMEOUT }, function step() {
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.firstCollectionId}`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
Assert.deepEqual(response.body.pipelineIds, []);
});
});
When(/^they update the collection "myCollection" with that pipeline$/,
{ timeout: TIMEOUT }, function step() {
return this.ensurePipelineExists({ repoName: this.repoName })
.then(() => {
const pipelineId = parseInt(this.pipelineId, 10);
return request({
uri: `${this.instance}/${this.namespace}/collections/` +
`${this.firstCollectionId}`,
method: 'PUT',
auth: {
bearer: this.jwt
},
body: {
pipelineIds: [pipelineId]
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
});
});
});
Given(/^they have a collection "myCollection"$/, { timeout: TIMEOUT }, function step() {
return createCollection.call(this, { name: 'myCollection' })
.then((response) => {
Assert.oneOf(response.statusCode, [409, 201]);
if (response.statusCode === 201) {
this.firstCollectionId = response.body.id;
} else {
const str = response.body.message;
[, this.firstCollectionId] = str.split(': ');
}
});
});
Given(/^they have a collection "anotherCollection"$/, { timeout: TIMEOUT }, function step() {
return createCollection.call(this, { name: 'anotherCollection' })
.then((response) => {
Assert.oneOf(response.statusCode, [409, 201]);
if (response.statusCode === 201) {
this.secondCollectionId = response.body.id;
} else {
const str = response.body.message;
[, this.secondCollectionId] = str.split(': ');
}
});
});
When(/^they fetch all their collections$/, { timeout: TIMEOUT }, function step() {
return request({
uri: `${this.instance}/${this.namespace}/collections`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 200);
this.collections = response.body;
});
});
Then(/^they can see those collections and the default collection$/, function step() {
const normalCollectionNames = this.collections.filter(c => c.type === 'normal')
.map(c => c.name);
const defaultCollection = this.collections.filter(c => c.type === 'default');
Assert.strictEqual(normalCollectionNames.length, 2);
Assert.strictEqual(defaultCollection.length, 1);
Assert.ok(normalCollectionNames.includes('myCollection'));
Assert.ok(normalCollectionNames.includes('anotherCollection'));
});
When(/^they delete that collection$/, { timeout: TIMEOUT }, function step() {
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.firstCollectionId}`,
method: 'DELETE',
auth: {
bearer: this.jwt
},
json: true
})
.then((response) => {
Assert.strictEqual(response.statusCode, 204);
});
});
Then(/^that collection no longer exists$/, { timeout: TIMEOUT }, function step() {
return request({
uri: `${this.instance}/${this.namespace}/collections/${this.firstCollectionId}`,
method: 'GET',
auth: {
bearer: this.jwt
},
json: true
}).then((response) => {
Assert.strictEqual(response.statusCode, 404);
this.firstCollectionId = null;
});
});
When(/^they create another collection with the same name "myCollection"$/,
{ timeout: TIMEOUT }, function step() {<|fim▁hole|> return createCollection.call(this, { name: 'myCollection' })
.then((response) => {
Assert.ok(response);
this.lastResponse = response;
});
});
Then(/^they receive an error regarding unique collections$/, function step() {
Assert.strictEqual(this.lastResponse.statusCode, 409);
Assert.strictEqual(this.lastResponse.body.message,
`Collection already exists with the ID: ${this.firstCollectionId}`);
});
After('@collections', function hook() {
// Delete the collections created in the functional tests if they exist
return Promise.all([
deleteCollection.call(this, this.firstCollectionId),
deleteCollection.call(this, this.secondCollectionId)
]);
});<|fim▁end|>
| |
<|file_name|>ng_switch.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { TemplateRef, ViewContainerRef } from '@angular/core';
export declare class SwitchView {
private _viewContainerRef;
private _templateRef;
constructor(_viewContainerRef: ViewContainerRef, _templateRef: TemplateRef<Object>);
create(): void;
destroy(): void;
}
/**
* Adds or removes DOM sub-trees when their match expressions match the switch expression.
*
* Elements within `NgSwitch` but without `NgSwitchCase` or `NgSwitchDefault` directives will be
* preserved at the location as specified in the template.
*
* `NgSwitch` simply inserts nested elements based on which match expression matches the value
* obtained from the evaluated switch expression. In other words, you define a container element
* (where you place the directive with a switch expression on the
* `[ngSwitch]="..."` attribute), define any inner elements inside of the directive and<|fim▁hole|> *
* The `ngSwitchCase` property is used to inform `NgSwitch` which element to display when the
* expression is evaluated. If a matching expression is not found via a `ngSwitchCase` property
* then an element with the `ngSwitchDefault` attribute is displayed.
*
* ### Example ([live demo](http://plnkr.co/edit/DQMTII95CbuqWrl3lYAs?p=preview))
*
* ```typescript
* @Component({
* selector: 'app',
* template: `
* <p>Value = {{value}}</p>
* <button (click)="inc()">Increment</button>
*
* <div [ngSwitch]="value">
* <p *ngSwitchCase="'init'">increment to start</p>
* <p *ngSwitchCase="0">0, increment again</p>
* <p *ngSwitchCase="1">1, increment again</p>
* <p *ngSwitchCase="2">2, stop incrementing</p>
* <p *ngSwitchDefault>> 2, STOP!</p>
* </div>
*
* <!-- alternate syntax -->
*
* <p [ngSwitch]="value">
* <template ngSwitchCase="init">increment to start</template>
* <template [ngSwitchCase]="0">0, increment again</template>
* <template [ngSwitchCase]="1">1, increment again</template>
* <template [ngSwitchCase]="2">2, stop incrementing</template>
* <template ngSwitchDefault>> 2, STOP!</template>
* </p>
* `,
* directives: [NgSwitch, NgSwitchCase, NgSwitchDefault]
* })
* export class App {
* value = 'init';
*
* inc() {
* this.value = this.value === 'init' ? 0 : this.value + 1;
* }
* }
* ```
*
* @stable
*/
export declare class NgSwitch {
private _switchValue;
private _useDefault;
private _valueViews;
private _activeViews;
ngSwitch: any;
}
/**
* Insert the sub-tree when the `ngSwitchCase` expression evaluates to the same value as the
* enclosing switch expression.
*
* If multiple match expression match the switch expression value, all of them are displayed.
*
* See {@link NgSwitch} for more details and example.
*
* @stable
*/
export declare class NgSwitchCase {
private _switch;
constructor(viewContainer: ViewContainerRef, templateRef: TemplateRef<Object>, ngSwitch: NgSwitch);
ngSwitchCase: any;
}
/**
* Default case statements are displayed when no match expression matches the switch expression
* value.
*
* See {@link NgSwitch} for more details and example.
*
* @stable
*/
export declare class NgSwitchDefault {
constructor(viewContainer: ViewContainerRef, templateRef: TemplateRef<Object>, sswitch: NgSwitch);
}<|fim▁end|>
|
* place a `[ngSwitchCase]` attribute per element.
|
<|file_name|>startup.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
"""
product initialization stuff
"""
import os
import featuremonkey
from .composer import get_composer
from django_productline import compare_version
_product_selected = False
def select_product():
"""
binds the frozen context the selected features
should be called only once - calls after the first call have
no effect
"""
global _product_selected
if _product_selected:
# tss already bound ... ignore
return
_product_selected = True
from django_productline import context, template<|fim▁hole|> os.environ['DJANGO_SETTINGS_MODULE'] = 'django_productline.settings'
contextfile = os.environ['PRODUCT_CONTEXT_FILENAME']
equationfile = os.environ['PRODUCT_EQUATION_FILENAME']
#bind context and compose features
context.bind_context(contextfile)
get_composer().select_equation(equationfile)
# after composition we are now able to bind composed template settings
template.bind_settings()
featuremonkey.remove_import_guard('django.conf')
featuremonkey.remove_import_guard('django.db')
import django
if compare_version(django.get_version(), '1.7') >= 0:
django.setup()
# force import of settings and urls
# better fail during initialization than on the first request
from django.conf import settings
from django.core.urlresolvers import get_resolver
# eager creation of URLResolver
get_resolver(None)
# make sure overextends tag is registered
from django.template.loader import get_template
from overextends import models
def get_wsgi_application():
"""
returns the wsgi application for the selected product
this function is called by featuredjango.wsgi to get the wsgi
application object
if you need to refine the wsgi application object e.g. to add
wsgi middleware please refine django.core.wsgi.get_wsgi_application directly.
"""
# make sure the product is selected before importing and constructing wsgi app
select_product()
# return (possibly refined) wsgi application
from django.core.wsgi import get_wsgi_application
return get_wsgi_application()<|fim▁end|>
|
featuremonkey.add_import_guard('django.conf')
featuremonkey.add_import_guard('django.db')
|
<|file_name|>notification-center.ts<|end_file_name|><|fim▁begin|>import {CORE_DIRECTIVES} from 'angular2/common';
import {Component} from 'angular2/core';
import {OnInit} from 'angular2/core';
import {NotificationService} from '../../service/notification-service';
import {Alert} from 'ng2-bootstrap/ng2-bootstrap';
@Component({
selector: 'notification-center',
template: require('./notification-center.html'),
styles: [require('./notification-center.css')],
directives: [Alert, CORE_DIRECTIVES]
})
export class NotificationCenter implements OnInit {
constructor(private _notificationService: NotificationService) {
}
notifications: Array<Object> = [];
closeNotification(i: number) {
this.notifications.splice(i, 1);
}
ngOnInit() {
this._notificationService.eventBus.subscribe(notification =>
this.onNotificationReceived(notification));
}
<|fim▁hole|> private onNotificationReceived(notification) {
this.notifications.push(notification);
}
}<|fim▁end|>
| |
<|file_name|>issue526.rs<|end_file_name|><|fim▁begin|>fn main() {<|fim▁hole|> foo::<i32> ();
foo.foo::<i32> ();
}<|fim▁end|>
| |
<|file_name|>GroupQueryTreeRequest.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class GroupQueryTreeRequest(Request):
r'''A /g_queryTree request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.GroupQueryTreeRequest(
... node_id=0,
... include_controls=True,
... )
>>> request
GroupQueryTreeRequest(
include_controls=True,
node_id=0
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(57, 0, 1)
::
>>> message.address == requesttools.RequestId.GROUP_QUERY_TREE
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_include_controls',
'_node_id',
)
### INITIALIZER ###
def __init__(
self,
include_controls=False,
node_id=None,
):
Request.__init__(self)
self._node_id = node_id
self._include_controls = bool(include_controls)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
node_id = int(self.node_id)
include_controls = int(self.include_controls)
message = osctools.OscMessage(
request_id,
node_id,
include_controls,
)
return message
### PUBLIC PROPERTIES ###
@property
def include_controls(self):
return self._include_controls
<|fim▁hole|>
@property
def response_specification(self):
from supriya.tools import responsetools
return {
responsetools.QueryTreeResponse: None,
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.GROUP_QUERY_TREE<|fim▁end|>
|
@property
def node_id(self):
return self._node_id
|
<|file_name|>debugger.py<|end_file_name|><|fim▁begin|>"""Module to debug python programs"""
import sys
import traceback
def getAllStacks():
code = []
for threadId, stack in sys._current_frames().iteritems():
code.append("\n# ThreadID: %s" % threadId)
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename,
lineno, name))<|fim▁hole|>def strStacks():
out = "\n*** STACKTRACE - START ***\n"
out += "\n".join(getAllStacks())
out += "\n*** STACKTRACE - END ***\n"
return out<|fim▁end|>
|
if line:
code.append(" %s" % (line.strip()))
return code
|
<|file_name|>MeshAlgoTest.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
from IECore import *
class MeshAlgoTangentTest( unittest.TestCase ) :
def makeSingleTriangleMesh( self ):
verticesPerFace = IntVectorData( [ 3 ] )
vertexIds = IntVectorData( [ 0, 1, 2 ] )
p = V3fVectorData( [ V3f( 0, 0, 0 ), V3f( 1, 0, 0 ), V3f( 0, 1, 0 ) ] )
s = FloatVectorData( [ 0, 1, 0 ] )
t = FloatVectorData( [ 0, 0, 1 ] )
mesh = MeshPrimitive( verticesPerFace, vertexIds, "linear", p )
mesh["s"] = PrimitiveVariable( PrimitiveVariable.Interpolation.FaceVarying, s )
mesh["t"] = PrimitiveVariable( PrimitiveVariable.Interpolation.FaceVarying, t )
mesh["foo_s"] = PrimitiveVariable( PrimitiveVariable.Interpolation.FaceVarying, FloatVectorData( [0, 0, 1] ) )
mesh["foo_t"] = PrimitiveVariable( PrimitiveVariable.Interpolation.FaceVarying, FloatVectorData( [0, 1, 0] ) )
prefData = V3fVectorData( [V3f( 0, 0, 0 ), V3f( 0, -1, 0 ), V3f( 1, 0, 0 )] )
mesh["Pref"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, prefData )
return mesh
def makeSingleBadUVTriangleMesh( self ) :
verticesPerFace = IntVectorData( [3] )
vertexIds = IntVectorData( [0, 1, 2] )
p = V3fVectorData( [V3f( 0, 0, 0 ), V3f( 1, 0, 0 ), V3f( 0, 1, 0 )] )
s = FloatVectorData( [0] )
t = FloatVectorData( [0] )
mesh = MeshPrimitive( verticesPerFace, vertexIds, "linear", p )
mesh["s"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Uniform, s )
mesh["t"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Uniform, t )
return mesh
def testSingleTriangleGeneratesCorrectTangents( self ) :
triangle = self.makeSingleTriangleMesh()
tangentPrimVar, bitangentPrimVar = MeshAlgo.calculateTangents( triangle )
self.assertEqual(tangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying)
self.assertEqual(bitangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying)
tangents = V3fVectorData( tangentPrimVar.data )
bitangent = V3fVectorData( bitangentPrimVar.data )
self.assertEqual( len( tangents ), 3 )
self.assertEqual( len( bitangent ), 3 )
for t in tangents :
self.assertAlmostEqual( t[0], 1.0 )
self.assertAlmostEqual( t[1], 0.0 )
self.assertAlmostEqual( t[2], 0.0 )
for b in bitangent :
self.assertAlmostEqual( b[0], 0.0 )
self.assertAlmostEqual( b[1], 1.0 )
self.assertAlmostEqual( b[2], 0.0 )
def testJoinedUVEdges( self ) :
mesh = ObjectReader( "test/IECore/data/cobFiles/twoTrianglesWithSharedUVs.cob" ).read()
self.assert_( mesh.arePrimitiveVariablesValid() )
tangentPrimVar, bitangentPrimVar = MeshAlgo.calculateTangents( mesh )
self.assertEqual( tangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( bitangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
for v in tangentPrimVar.data :
self.failUnless( v.equalWithAbsError( V3f( 1, 0, 0 ), 0.000001 ) )
for v in bitangentPrimVar.data :
self.failUnless( v.equalWithAbsError( V3f( 0, 0, -1 ), 0.000001 ) )
def testSplitAndOpposedUVEdges( self ) :
mesh = ObjectReader( "test/IECore/data/cobFiles/twoTrianglesWithSplitAndOpposedUVs.cob" ).read()
tangentPrimVar, bitangentPrimVar = MeshAlgo.calculateTangents( mesh )
self.assertEqual( tangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( bitangentPrimVar.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
for v in tangentPrimVar.data[:3] :
self.failUnless( v.equalWithAbsError( V3f( -1, 0, 0 ), 0.000001 ) )
for v in tangentPrimVar.data[3:] :
self.failUnless( v.equalWithAbsError( V3f( 1, 0, 0 ), 0.000001 ) )
for v in bitangentPrimVar.data[:3] :
self.failUnless( v.equalWithAbsError( V3f( 0, 0, 1 ), 0.000001 ) )
for v in bitangentPrimVar.data[3:] :
self.failUnless( v.equalWithAbsError( V3f( 0, 0, -1 ), 0.000001 ) )
def testNonTriangulatedMeshRaisesException( self ):
plane = MeshPrimitive.createPlane( Box2f( V2f( -0.1 ), V2f( 0.1 ) ) )
self.assertRaises( RuntimeError, lambda : MeshAlgo.calculateTangents( plane ) )
def testInvalidPositionPrimVarRaisesException( self ) :
triangle = self.makeSingleTriangleMesh()
self.assertRaises( RuntimeError, lambda : MeshAlgo.calculateTangents( triangle, position = "foo" ) )
def testMissingUVsetPrimVarsRaisesException ( self ):
triangle = self.makeSingleTriangleMesh()
self.assertRaises( RuntimeError, lambda : MeshAlgo.calculateTangents( triangle, uvSet = "bar") )
def testIncorrectUVPrimVarInterpolationRaisesException ( self ):
triangle = self.makeSingleBadUVTriangleMesh()
self.assertRaises( RuntimeError, lambda : MeshAlgo.calculateTangents( triangle ) )
def testCanUseSecondUVSet( self ) :
triangle = self.makeSingleTriangleMesh()
uTangent, vTangent = MeshAlgo.calculateTangents( triangle , uvSet = "foo" )
self.assertEqual( len( uTangent.data ), 3 )
self.assertEqual( len( vTangent.data ), 3 )
for v in uTangent.data :
self.failUnless( v.equalWithAbsError( V3f( 0, 1, 0 ), 0.000001 ) )
# really I'd expect the naive answer to the vTangent to be V3f( 1, 0, 0 )
# but the code forces the triple of n, uT, vT to flip the direction of vT if we don't have a correctly handed set of basis vectors
for v in vTangent.data :
self.failUnless( v.equalWithAbsError( V3f( -1, 0, 0 ), 0.000001 ) )
def testCanUsePref( self ) :
triangle = self.makeSingleTriangleMesh()
uTangent, vTangent = MeshAlgo.calculateTangents( triangle , position = "Pref")
self.assertEqual( len( uTangent.data ), 3 )
self.assertEqual( len( vTangent.data ), 3 )
for v in uTangent.data :
self.failUnless( v.equalWithAbsError( V3f( 0, -1, 0 ), 0.000001 ) )
for v in vTangent.data :
self.failUnless( v.equalWithAbsError( V3f( 1, 0, 0 ), 0.000001 ) )
class MeshAlgoPrimitiveVariableTest( unittest.TestCase ) :
@classmethod
def makeMesh( cls ) :
testObject = MeshPrimitive.createPlane( Box2f( V2f( 0 ), V2f( 10 ) ), V2i( 2 ) )
testObject["a"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Constant, FloatData( 0.5 ) )
testObject["b"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, FloatVectorData( range( 0, 9 ) ) )
testObject["c"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Uniform, FloatVectorData( range( 0, 4 ) ) )
testObject["d"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Varying, FloatVectorData( range( 0, 9 ) ) )
testObject["e"] = PrimitiveVariable( PrimitiveVariable.Interpolation.FaceVarying, FloatVectorData( range( 0, 16 ) ) )
return testObject
@classmethod
def setUpClass(cls):
cls.mesh = cls.makeMesh()
def testMeshConstantToVertex( self ) :
p = self.mesh["a"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Vertex );
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.data, FloatVectorData( [ 0.5 ] * 9 ) )
def testMeshConstantToUniform( self ) :
p = self.mesh["a"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.data, FloatVectorData( [ 0.5 ] * 4 ) )
def testMeshConstantToVarying( self ) :
p = self.mesh["a"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.data, FloatVectorData( [ 0.5 ] * 9 ) )
def testMeshConstantToFaceVarying( self ) :
p = self.mesh["a"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( p.data, FloatVectorData( [ 0.5 ] * 16 ) )
def testMeshVertexToConstant( self ) :
p = self.mesh["b"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.data, FloatData( sum(range(0,9))/9. ) )
def testMeshVertexToUniform( self ) :
p = self.mesh["b"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.data, FloatVectorData( [ 2, 3, 5, 6 ] ) )
def testMeshVertexToVarying( self ) :
p = self.mesh["b"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.data, FloatVectorData( range( 0, 9 ) ) )
def testMeshVertexToFaceVarying( self ) :
p = self.mesh["b"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
orig = range( 0, 9 )
self.assertEqual( p.data, FloatVectorData( [ orig[x] for x in self.mesh.vertexIds ] ) )
def testMeshUniformToConstant( self ) :
p = self.mesh["c"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.data, FloatData( sum(range(0,4))/4. ) )
def testMeshUniformToVertex( self ) :
p = self.mesh["c"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.data, FloatVectorData( [ 0, 0.5, 1, 1, 1.5, 2, 2, 2.5, 3 ] ) )
def testMeshUniformToVarying( self ) :
p = self.mesh["c"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.data, FloatVectorData( [ 0, 0.5, 1, 1, 1.5, 2, 2, 2.5, 3 ] ) )
<|fim▁hole|> self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( p.data, FloatVectorData( ( [ 0 ] * 4 ) + ( [ 1 ] * 4 ) + ( [ 2 ] * 4 ) + ( [ 3 ] * 4 ) ) )
def testMeshVaryingToConstant( self ) :
p = self.mesh["d"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.data, FloatData( sum(range(0,9))/9. ) )
def testMeshVaryingToVertex( self ) :
p = self.mesh["d"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.data, FloatVectorData( range( 0, 9 ) ) )
def testMeshVaryingToUniform( self ) :
p = self.mesh["d"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.data, FloatVectorData( [ 2, 3, 5, 6 ] ) )
def testMeshVaryingToFaceVarying( self ) :
p = self.mesh["d"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.FaceVarying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.FaceVarying )
orig = range( 0, 9 )
self.assertEqual( p.data, FloatVectorData( [ orig[x] for x in self.mesh.vertexIds ] ) )
def testMeshFaceVaryingToConstant( self ) :
p = self.mesh["e"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Constant )
self.assertEqual( p.data, FloatData( sum(range(0,16))/16. ) )
def testMeshFaceVaryingToVertex( self ) :
p = self.mesh["e"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Vertex )
self.assertEqual( p.data, FloatVectorData( [ 0, 2.5, 5, 5.5, 7.5, 9.5, 11, 12.5, 14 ] ) )
def testMeshFaceVaryingToUniform( self ) :
p = self.mesh["e"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Uniform )
self.assertEqual( p.data, FloatVectorData( [ 1.5, 5.5, 9.5, 13.5 ] ) )
def testMeshFaceVaryingToVarying( self ) :
p = self.mesh["e"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.interpolation, PrimitiveVariable.Interpolation.Varying )
self.assertEqual( p.data, FloatVectorData( [ 0, 2.5, 5, 5.5, 7.5, 9.5, 11, 12.5, 14 ] ) )
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
def testMeshUniformToFaceVarying( self ) :
p = self.mesh["c"]
MeshAlgo.resamplePrimitiveVariable( self.mesh, p, PrimitiveVariable.Interpolation.FaceVarying )
|
<|file_name|>ns3server.py<|end_file_name|><|fim▁begin|>#
# NEPI, a framework to manage network experiments
# Copyright (C) 2014 INRIA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Alina Quereilhac <[email protected]>
import base64<|fim▁hole|>import socket
import sys
from optparse import OptionParser, SUPPRESS_HELP
from ns3wrapper import NS3Wrapper
class NS3WrapperMessage:
CREATE = "CREATE"
FACTORY = "FACTORY"
INVOKE = "INVOKE"
SET = "SET"
GET = "GET"
FLUSH = "FLUSH"
START = "START"
STOP = "STOP"
SHUTDOWN = "SHUTDOWN"
def handle_message(ns3_wrapper, msg_type, args, kwargs):
if msg_type == NS3WrapperMessage.SHUTDOWN:
ns3_wrapper.shutdown()
return "BYEBYE"
if msg_type == NS3WrapperMessage.STOP:
time = kwargs.get("time")
ns3_wrapper.stop(time=time)
return "STOPPED"
if msg_type == NS3WrapperMessage.START:
ns3_wrapper.start()
return "STARTED"
if msg_type == NS3WrapperMessage.CREATE:
clazzname = args.pop(0)
return ns3_wrapper.create(clazzname, *args)
if msg_type == NS3WrapperMessage.FACTORY:
type_name = args.pop(0)
return ns3_wrapper.factory(type_name, **kwargs)
if msg_type == NS3WrapperMessage.INVOKE:
uuid = args.pop(0)
operation = args.pop(0)
return ns3_wrapper.invoke(uuid, operation, *args, **kwargs)
if msg_type == NS3WrapperMessage.GET:
uuid = args.pop(0)
name = args.pop(0)
return ns3_wrapper.get(uuid, name)
if msg_type == NS3WrapperMessage.SET:
uuid = args.pop(0)
name = args.pop(0)
value = args.pop(0)
return ns3_wrapper.set(uuid, name, value)
if msg_type == NS3WrapperMessage.FLUSH:
# Forces flushing output and error streams.
# NS-3 output will stay unflushed until the program exits or
# explicit invocation flush is done
sys.stdout.flush()
sys.stderr.flush()
ns3_wrapper.logger.debug("FLUSHED")
return "FLUSHED"
def open_socket(socket_name):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_name)
return sock
def close_socket(sock):
try:
sock.close()
except:
pass
def recv_msg(conn):
msg = []
chunk = ''
while '\n' not in chunk:
try:
chunk = conn.recv(1024)
except (OSError, socket.error), e:
if e[0] != errno.EINTR:
raise
# Ignore eintr errors
continue
if chunk:
msg.append(chunk)
else:
# empty chunk = EOF
break
msg = ''.join(msg).strip()
# The message is formatted as follows:
# MESSAGE_TYPE|args|kwargs
#
# where MESSAGE_TYPE, args and kwargs are pickld and enoded in base64
def decode(item):
item = base64.b64decode(item).rstrip()
return cPickle.loads(item)
decoded = map(decode, msg.split("|"))
# decoded message
dmsg_type = decoded.pop(0)
dargs = list(decoded.pop(0)) # transforming touple into list
dkwargs = decoded.pop(0)
return (dmsg_type, dargs, dkwargs)
def send_reply(conn, reply):
encoded = base64.b64encode(cPickle.dumps(reply))
conn.send("%s\n" % encoded)
def get_options():
usage = ("usage: %prog -S <socket-name> -L <ns-log> -D <enable-dump> -v ")
parser = OptionParser(usage = usage)
parser.add_option("-S", "--socket-name", dest="socket_name",
help = "Name for the unix socket used to interact with this process",
default = "tap.sock", type="str")
parser.add_option("-L", "--ns-log", dest="ns_log",
help = "NS_LOG environmental variable to be set",
default = "", type="str")
parser.add_option("-D", "--enable-dump", dest="enable_dump",
help = "Enable dumping the remote executed ns-3 commands to a script "
"in order to later reproduce and debug the experiment",
action = "store_true",
default = False)
parser.add_option("-v", "--verbose",
help="Print debug output",
action="store_true",
dest="verbose", default=False)
(options, args) = parser.parse_args()
return (options.socket_name, options.verbose, options.ns_log,
options.enable_dump)
def run_server(socket_name, level = logging.INFO, ns_log = None,
enable_dump = False):
# Sets NS_LOG environmental variable for NS debugging
if ns_log:
os.environ["NS_LOG"] = ns_log
###### ns-3 wrapper instantiation
ns3_wrapper = NS3Wrapper(loglevel=level, enable_dump = enable_dump)
ns3_wrapper.logger.info("STARTING...")
# create unix socket to receive instructions
sock = open_socket(socket_name)
sock.listen(0)
# wait for messages to arrive and process them
stop = False
while not stop:
conn, addr = sock.accept()
conn.settimeout(5)
try:
(msg_type, args, kwargs) = recv_msg(conn)
except socket.timeout, e:
# Ingore time-out
close_socket(conn)
continue
if not msg_type:
# Ignore - connection lost
close_socket(conn)
continue
if msg_type == NS3WrapperMessage.SHUTDOWN:
stop = True
try:
reply = handle_message(ns3_wrapper, msg_type, args, kwargs)
except:
import traceback
err = traceback.format_exc()
ns3_wrapper.logger.error(err)
close_socket(conn)
raise
try:
send_reply(conn, reply)
except socket.error:
import traceback
err = traceback.format_exc()
ns3_wrapper.logger.error(err)
close_socket(conn)
raise
close_socket(conn)
close_socket(sock)
ns3_wrapper.logger.info("EXITING...")
if __name__ == '__main__':
(socket_name, verbose, ns_log, enable_dump) = get_options()
## configure logging
FORMAT = "%(asctime)s %(name)s %(levelname)-4s %(message)s"
level = logging.DEBUG if verbose else logging.INFO
logging.basicConfig(format = FORMAT, level = level)
## Run the server
run_server(socket_name, level, ns_log, enable_dump)<|fim▁end|>
|
import cPickle
import errno
import logging
import os
|
<|file_name|>Category.js<|end_file_name|><|fim▁begin|>import * as API from '../utils/Api'
export const SET_CATEGORIES = "SET_CATEGORIES"
export function setCategories(categories) {
return {
type: SET_CATEGORIES,
categories
}<|fim▁hole|> dispatch(setCategories(data.categories))
})
);<|fim▁end|>
|
}
export const fetchGetCategories = () => dispatch => (
API.getCategories().then(data => {
|
<|file_name|>qgshttptransaction.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgshttptransaction.cpp - Tracks a HTTP request with its response,
with particular attention to tracking
HTTP redirect responses
-------------------
begin : 17 Mar, 2005
copyright : (C) 2005 by Brendan Morley
email : morb at ozemail dot com dot au
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include <fstream>
#include "qgshttptransaction.h"
#include "qgslogger.h"
#include "qgsconfig.h"
#include <QApplication>
#include <QUrl>
#include <QSettings>
#include <QTimer>
static int HTTP_PORT_DEFAULT = 80;
//XXX Set the connection name when creating the provider instance
//XXX in qgswmsprovider. When creating a QgsHttpTransaction, pass
//XXX the user/pass combination to the constructor. Then set the
//XXX username and password using QHttp::setUser.
QgsHttpTransaction::QgsHttpTransaction( const QString& uri,
const QString& proxyHost,
int proxyPort,
const QString& proxyUser,
const QString& proxyPass,
QNetworkProxy::ProxyType proxyType,
const QString& userName,
const QString& password )
: http( NULL )
, httpid( 0 )
, httpactive( false )
, httpurl( uri )
, httphost( proxyHost )
, httpredirections( 0 )
, mWatchdogTimer( NULL )
{
Q_UNUSED( proxyPort );
Q_UNUSED( proxyUser );
Q_UNUSED( proxyPass );
Q_UNUSED( proxyType );
Q_UNUSED( userName );
Q_UNUSED( password );
QSettings s;
mNetworkTimeoutMsec = s.value( "/qgis/networkAndProxy/networkTimeout", "20000" ).toInt();
}
QgsHttpTransaction::QgsHttpTransaction()
: http( NULL )
, httpid( 0 )
, httpactive( false )
, httpredirections( 0 )
, mWatchdogTimer( NULL )
{
QSettings s;
mNetworkTimeoutMsec = s.value( "/qgis/networkAndProxy/networkTimeout", "20000" ).toInt();
}
QgsHttpTransaction::~QgsHttpTransaction()
{
QgsDebugMsg( "deconstructing." );
}
void QgsHttpTransaction::setCredentials( const QString& username, const QString& password )
{
mUserName = username;
mPassword = password;
}
void QgsHttpTransaction::getAsynchronously()
{
//TODO
}
bool QgsHttpTransaction::getSynchronously( QByteArray &respondedContent, int redirections, const QByteArray* postData )
{
httpredirections = redirections;
QgsDebugMsg( "Entered." );
QgsDebugMsg( "Using '" + httpurl + "'." );
QgsDebugMsg( "Creds: " + mUserName + "/" + mPassword );
int httpport;
QUrl qurl( httpurl );
http = new QHttp();
// Create a header so we can set the user agent (Per WMS RFC).
QHttpRequestHeader header( "GET", qurl.host() );
// Set host in the header
if ( qurl.port( HTTP_PORT_DEFAULT ) == HTTP_PORT_DEFAULT )
{
header.setValue( "Host", qurl.host() );
}
else
{
header.setValue( "Host", QString( "%1:%2" ).arg( qurl.host() ).arg( qurl.port() ) );
}
// Set the user agent to QGIS plus the version name
header.setValue( "User-agent", QString( "QGIS - " ) + VERSION );
// Set the host in the QHttp object
http->setHost( qurl.host(), qurl.port( HTTP_PORT_DEFAULT ) );
// Set the username and password if supplied for this connection
// If we have username and password set in header
if ( !mUserName.isEmpty() && !mPassword.isEmpty() )
{
http->setUser( mUserName, mPassword );
}
if ( !QgsHttpTransaction::applyProxySettings( *http, httpurl ) )
{
httphost = qurl.host();
httpport = qurl.port( HTTP_PORT_DEFAULT );
}
else
{
//proxy enabled, read httphost and httpport from settings
QSettings settings;
httphost = settings.value( "proxy/proxyHost", "" ).toString();
httpport = settings.value( "proxy/proxyPort", "" ).toString().toInt();
}
// int httpid1 = http->setHost( qurl.host(), qurl.port() );
mWatchdogTimer = new QTimer( this );
QgsDebugMsg( "qurl.host() is '" + qurl.host() + "'." );
httpresponse.truncate( 0 );
// Some WMS servers don't like receiving a http request that
// includes the scheme, host and port (the
// http://www.address.bit:80), so remove that from the url before
// executing an http GET.
//Path could be just '/' so we remove the 'http://' first
QString pathAndQuery = httpurl.remove( 0, httpurl.indexOf( qurl.host() ) );
pathAndQuery = httpurl.remove( 0, pathAndQuery.indexOf( qurl.path() ) );
if ( !postData ) //do request with HTTP GET
{
header.setRequest( "GET", pathAndQuery );
// do GET using header containing user-agent
httpid = http->request( header );
}
else //do request with HTTP POST
{
header.setRequest( "POST", pathAndQuery );
// do POST using header containing user-agent
httpid = http->request( header, *postData );
}
connect( http, SIGNAL( requestStarted( int ) ),
this, SLOT( dataStarted( int ) ) );
connect( http, SIGNAL( responseHeaderReceived( const QHttpResponseHeader& ) ),
this, SLOT( dataHeaderReceived( const QHttpResponseHeader& ) ) );
connect( http, SIGNAL( readyRead( const QHttpResponseHeader& ) ),
this, SLOT( dataReceived( const QHttpResponseHeader& ) ) );
connect( http, SIGNAL( dataReadProgress( int, int ) ),
this, SLOT( dataProgress( int, int ) ) );
connect( http, SIGNAL( requestFinished( int, bool ) ),
this, SLOT( dataFinished( int, bool ) ) );
connect( http, SIGNAL( done( bool ) ),
this, SLOT( transactionFinished( bool ) ) );
connect( http, SIGNAL( stateChanged( int ) ),
this, SLOT( dataStateChanged( int ) ) );
// Set up the watchdog timer
connect( mWatchdogTimer, SIGNAL( timeout() ),
this, SLOT( networkTimedOut() ) );
mWatchdogTimer->setSingleShot( true );
mWatchdogTimer->start( mNetworkTimeoutMsec );
QgsDebugMsg( "Starting get with id " + QString::number( httpid ) + "." );
QgsDebugMsg( "Setting httpactive = true" );
httpactive = true;
// A little trick to make this function blocking
while ( httpactive )
{
// Do something else, maybe even network processing events
qApp->processEvents();
}
QgsDebugMsg( "Response received." );
#ifdef QGISDEBUG
// QString httpresponsestring(httpresponse);
// QgsDebugMsg("Response received; being '" + httpresponsestring + "'.");
#endif
delete http;
http = 0;
// Did we get an error? If so, bail early
if ( !mError.isEmpty() )
{
QgsDebugMsg( "Processing an error '" + mError + "'." );
return false;
}
// Do one level of redirection
// TODO make this recursable<|fim▁hole|> QgsDebugMsg( "Starting get of '" + httpredirecturl + "'." );
QgsHttpTransaction httprecurse( httpredirecturl, httphost, httpport );
httprecurse.setCredentials( mUserName, mPassword );
// Do a passthrough for the status bar text
connect(
&httprecurse, SIGNAL( statusChanged( QString ) ),
this, SIGNAL( statusChanged( QString ) )
);
httprecurse.getSynchronously( respondedContent, ( redirections + 1 ) );
return true;
}
respondedContent = httpresponse;
return true;
}
QString QgsHttpTransaction::responseContentType()
{
return httpresponsecontenttype;
}
void QgsHttpTransaction::dataStarted( int id )
{
Q_UNUSED( id );
QgsDebugMsg( "ID=" + QString::number( id ) + "." );
}
void QgsHttpTransaction::dataHeaderReceived( const QHttpResponseHeader& resp )
{
QgsDebugMsg( "statuscode " +
QString::number( resp.statusCode() ) + ", reason '" + resp.reasonPhrase() + "', content type: '" +
resp.value( "Content-Type" ) + "'." );
// We saw something come back, therefore restart the watchdog timer
mWatchdogTimer->start( mNetworkTimeoutMsec );
if ( resp.statusCode() == 302 ) // Redirect
{
// Grab the alternative URL
// (ref: "http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html")
httpredirecturl = resp.value( "Location" );
}
else if ( resp.statusCode() == 200 ) // OK
{
// NOOP
}
else
{
mError = tr( "WMS Server responded unexpectedly with HTTP Status Code %1 (%2)" )
.arg( resp.statusCode() )
.arg( resp.reasonPhrase() );
}
httpresponsecontenttype = resp.value( "Content-Type" );
}
void QgsHttpTransaction::dataReceived( const QHttpResponseHeader& resp )
{
Q_UNUSED( resp );
// TODO: Match 'resp' with 'http' if we move to multiple http connections
#if 0
// Comment this out for now - leave the coding of progressive rendering to another day.
char* temp;
if ( 0 < http->readBlock( temp, http->bytesAvailable() ) )
{
httpresponse.append( temp );
}
#endif
// QgsDebugMsg("received '" + data + "'.");
}
void QgsHttpTransaction::dataProgress( int done, int total )
{
// QgsDebugMsg("got " + QString::number(done) + " of " + QString::number(total));
// We saw something come back, therefore restart the watchdog timer
mWatchdogTimer->start( mNetworkTimeoutMsec );
emit dataReadProgress( done );
emit totalSteps( total );
QString status;
if ( total )
{
status = tr( "Received %1 of %2 bytes" ).arg( done ).arg( total );
}
else
{
status = tr( "Received %1 bytes (total unknown)" ).arg( done );
}
emit statusChanged( status );
}
void QgsHttpTransaction::dataFinished( int id, bool error )
{
#ifdef QGISDEBUG
QgsDebugMsg( "ID=" + QString::number( id ) + "." );
// The signal that this slot is connected to, QHttp::requestFinished,
// appears to get called at the destruction of the QHttp if it is
// still working at the time of the destruction.
//
// This situation may occur when we've detected a timeout and
// we already set httpactive = false.
//
// We have to detect this special case so that the last known error string is
// not overwritten (it should rightfully refer to the timeout event).
if ( !httpactive )
{
QgsDebugMsg( "http activity loop already false." );
return;
}
if ( error )
{
QgsDebugMsg( "however there was an error." );
QgsDebugMsg( "error: " + http->errorString() );
mError = tr( "HTTP response completed, however there was an error: %1" ).arg( http->errorString() );
}
else
{
QgsDebugMsg( "no error." );
}
#else
Q_UNUSED( id );
Q_UNUSED( error );
#endif
// Don't do this here as the request could have simply been
// to set the hostname - see transactionFinished() instead
#if 0
// TODO
httpresponse = http->readAll();
// QgsDebugMsg("Setting httpactive = false");
httpactive = false;
#endif
}
void QgsHttpTransaction::transactionFinished( bool error )
{
#ifdef QGISDEBUG
QgsDebugMsg( "entered." );
#if 0
// The signal that this slot is connected to, QHttp::requestFinished,
// appears to get called at the destruction of the QHttp if it is
// still working at the time of the destruction.
//
// This situation may occur when we've detected a timeout and
// we already set httpactive = false.
//
// We have to detect this special case so that the last known error string is
// not overwritten (it should rightfully refer to the timeout event).
if ( !httpactive )
{
// QgsDebugMsg("http activity loop already false.");
return;
}
#endif
if ( error )
{
QgsDebugMsg( "however there was an error." );
QgsDebugMsg( "error: " + http->errorString() );
mError = tr( "HTTP transaction completed, however there was an error: %1" ).arg( http->errorString() );
}
else
{
QgsDebugMsg( "no error." );
}
#else
Q_UNUSED( error );
#endif
// TODO
httpresponse = http->readAll();
QgsDebugMsg( "Setting httpactive = false" );
httpactive = false;
}
void QgsHttpTransaction::dataStateChanged( int state )
{
QgsDebugMsg( "state " + QString::number( state ) + "." );
// We saw something come back, therefore restart the watchdog timer
mWatchdogTimer->start( mNetworkTimeoutMsec );
switch ( state )
{
case QHttp::Unconnected:
QgsDebugMsg( "There is no connection to the host." );
emit statusChanged( tr( "Not connected" ) );
break;
case QHttp::HostLookup:
QgsDebugMsg( "A host name lookup is in progress." );
emit statusChanged( tr( "Looking up '%1'" ).arg( httphost ) );
break;
case QHttp::Connecting:
QgsDebugMsg( "An attempt to connect to the host is in progress." );
emit statusChanged( tr( "Connecting to '%1'" ).arg( httphost ) );
break;
case QHttp::Sending:
QgsDebugMsg( "The client is sending its request to the server." );
emit statusChanged( tr( "Sending request '%1'" ).arg( httpurl ) );
break;
case QHttp::Reading:
QgsDebugMsg( "The client's request has been sent and the client is reading the server's response." );
emit statusChanged( tr( "Receiving reply" ) );
break;
case QHttp::Connected:
QgsDebugMsg( "The connection to the host is open, but the client is neither sending a request, nor waiting for a response." );
emit statusChanged( tr( "Response is complete" ) );
break;
case QHttp::Closing:
QgsDebugMsg( "The connection is closing down, but is not yet closed. (The state will be Unconnected when the connection is closed.)" );
emit statusChanged( tr( "Closing down connection" ) );
break;
}
}
void QgsHttpTransaction::networkTimedOut()
{
QgsDebugMsg( "entering." );
mError = tr( "Network timed out after %n second(s) of inactivity.\n"
"This may be a problem in your network connection or at the WMS server.", "inactivity timeout", mNetworkTimeoutMsec / 1000 );
QgsDebugMsg( "Setting httpactive = false" );
httpactive = false;
QgsDebugMsg( "exiting." );
}
QString QgsHttpTransaction::errorString()
{
return mError;
}
bool QgsHttpTransaction::applyProxySettings( QHttp& http, const QString& url )
{
QSettings settings;
//check if proxy is enabled
bool proxyEnabled = settings.value( "proxy/proxyEnabled", false ).toBool();
if ( !proxyEnabled )
{
return false;
}
//check if the url should go through proxy
QString proxyExcludedURLs = settings.value( "proxy/proxyExcludedUrls", "" ).toString();
if ( !proxyExcludedURLs.isEmpty() )
{
QStringList excludedURLs = proxyExcludedURLs.split( "|" );
QStringList::const_iterator exclIt = excludedURLs.constBegin();
for ( ; exclIt != excludedURLs.constEnd(); ++exclIt )
{
if ( url.startsWith( *exclIt ) )
{
return false; //url does not go through proxy
}
}
}
//read type, host, port, user, passw from settings
QString proxyHost = settings.value( "proxy/proxyHost", "" ).toString();
int proxyPort = settings.value( "proxy/proxyPort", "" ).toString().toInt();
QString proxyUser = settings.value( "proxy/proxyUser", "" ).toString();
QString proxyPassword = settings.value( "proxy/proxyPassword", "" ).toString();
QString proxyTypeString = settings.value( "proxy/proxyType", "" ).toString();
QNetworkProxy::ProxyType proxyType = QNetworkProxy::NoProxy;
if ( proxyTypeString == "DefaultProxy" )
{
proxyType = QNetworkProxy::DefaultProxy;
}
else if ( proxyTypeString == "Socks5Proxy" )
{
proxyType = QNetworkProxy::Socks5Proxy;
}
else if ( proxyTypeString == "HttpProxy" )
{
proxyType = QNetworkProxy::HttpProxy;
}
else if ( proxyTypeString == "HttpCachingProxy" )
{
proxyType = QNetworkProxy::HttpCachingProxy;
}
else if ( proxyTypeString == "FtpCachingProxy" )
{
proxyType = QNetworkProxy::FtpCachingProxy;
}
http.setProxy( QNetworkProxy( proxyType, proxyHost, proxyPort, proxyUser, proxyPassword ) );
return true;
}
void QgsHttpTransaction::abort()
{
if ( http )
{
http->abort();
}
}
// ENDS<|fim▁end|>
|
// TODO detect any redirection loops
if ( !httpredirecturl.isEmpty() )
{
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
Package memory is a storage manager that just keeps the games and storage in
memory, which means that when the program exits the storage evaporates.
Useful in cases where you don't want a persistent store (e.g. testing or
fast iteration). Implements both boardgame.StorageManager and
boardgame/server.StorageManager.
*/
package memory
import (
"errors"
"sync"
"github.com/jkomoros/boardgame"
"github.com/jkomoros/boardgame/server/api/extendedgame"
"github.com/jkomoros/boardgame/server/api/listing"
"github.com/jkomoros/boardgame/storage/internal/helpers"
)
//StorageManager is the primary type of this package. Get a new one with
//NewStorageManager.
type StorageManager struct {
states map[string]map[int]boardgame.StateStorageRecord
moves map[string]map[int]*boardgame.MoveStorageRecord
games map[string]*boardgame.GameStorageRecord
statesLock sync.RWMutex
movesLock sync.RWMutex
gamesLock sync.RWMutex
*helpers.ExtendedMemoryStorageManager
}
//NewStorageManager is the way to get a new StorageManager.
func NewStorageManager() *StorageManager {
//InMemoryStorageManager is an extremely simple StorageManager that just keeps
//track of the objects in memory.
result := &StorageManager{
states: make(map[string]map[int]boardgame.StateStorageRecord),
moves: make(map[string]map[int]*boardgame.MoveStorageRecord),
games: make(map[string]*boardgame.GameStorageRecord),
}
result.ExtendedMemoryStorageManager = helpers.NewExtendedMemoryStorageManager(result)
return result
}
//Name returns 'memory'
func (s *StorageManager) Name() string {
return "memory"
}
//State implements that part of the core storage interface
func (s *StorageManager) State(gameID string, version int) (boardgame.StateStorageRecord, error) {
if gameID == "" {
return nil, errors.New("No game provided")
}
if version < 0 {
return nil, errors.New("Invalid version")
}
s.statesLock.RLock()
versionMap, ok := s.states[gameID]
s.statesLock.RUnlock()
if !ok {
return nil, errors.New("No such game")
}
s.statesLock.RLock()
record, ok := versionMap[version]
s.statesLock.RUnlock()
if !ok {
return nil, errors.New("No such version for that game")<|fim▁hole|>
}
//Moves implements that part of the core storage interface
func (s *StorageManager) Moves(gameID string, fromVersion, toVersion int) ([]*boardgame.MoveStorageRecord, error) {
return helpers.MovesHelper(s, gameID, fromVersion, toVersion)
}
//Move implements that part of the core storage interface
func (s *StorageManager) Move(gameID string, version int) (*boardgame.MoveStorageRecord, error) {
if gameID == "" {
return nil, errors.New("No game provided")
}
if version < 0 {
return nil, errors.New("Invalid version")
}
s.movesLock.RLock()
versionMap, ok := s.moves[gameID]
s.movesLock.RUnlock()
if !ok {
return nil, errors.New("No such game")
}
s.movesLock.RLock()
record, ok := versionMap[version]
s.movesLock.RUnlock()
if !ok {
return nil, errors.New("No such version for that game")
}
return record, nil
}
//Game implements that part of the core storage interface
func (s *StorageManager) Game(id string) (*boardgame.GameStorageRecord, error) {
s.gamesLock.RLock()
record := s.games[id]
s.gamesLock.RUnlock()
if record == nil {
return nil, errors.New("No such game")
}
return record, nil
}
//SaveGameAndCurrentState implements that part of the core storage interface.
//It records the game, the state for game.Version, and (if non-nil) the move
//for that version, erroring if that version was already stored.
func (s *StorageManager) SaveGameAndCurrentState(game *boardgame.GameStorageRecord, state boardgame.StateStorageRecord, move *boardgame.MoveStorageRecord) error {
	if game == nil {
		return errors.New("No game provided")
	}

	version := game.Version

	//Hold both write locks for the entire check-and-insert. The original
	//released the locks between the existence check and the write, so two
	//concurrent saves of the same version could both pass the check; it also
	//wrote the state maps under a lock acquired after the get-or-create,
	//racing the map creation.
	s.statesLock.Lock()
	defer s.statesLock.Unlock()
	s.movesLock.Lock()
	defer s.movesLock.Unlock()

	versionMap, ok := s.states[game.ID]
	if !ok {
		versionMap = make(map[int]boardgame.StateStorageRecord)
		s.states[game.ID] = versionMap
	}

	moveMap, ok := s.moves[game.ID]
	if !ok {
		moveMap = make(map[int]*boardgame.MoveStorageRecord)
		s.moves[game.ID] = moveMap
	}

	//Check both maps before writing either, so a duplicate version leaves no
	//partial write behind (same as the original's behavior).
	if _, ok := versionMap[version]; ok {
		return errors.New("There was already a version for that game stored")
	}
	if _, ok := moveMap[version]; ok {
		return errors.New("There was already a version for that game stored")
	}

	versionMap[version] = state
	if move != nil {
		moveMap[version] = move
	}

	s.gamesLock.Lock()
	s.games[game.ID] = game
	s.gamesLock.Unlock()

	return nil
}
//AllGames implements the extra method that storage/internal/helpers needs,
//returning every stored game record in unspecified order.
func (s *StorageManager) AllGames() []*boardgame.GameStorageRecord {
	s.gamesLock.RLock()
	defer s.gamesLock.RUnlock()

	var result []*boardgame.GameStorageRecord
	for _, game := range s.games {
		result = append(result, game)
	}

	return result
}
//ListGames will return game objects for up to max number of games
func (s *StorageManager) ListGames(max int, list listing.Type, userID string, gameType string) []*extendedgame.CombinedStorageRecord {
return helpers.ListGamesHelper(s, max, list, userID, gameType)
}<|fim▁end|>
|
}
return record, nil
|
<|file_name|>RoundJTextField.java<|end_file_name|><|fim▁begin|>package view.rendes;
import java.awt.Graphics;
import java.awt.Shape;
import java.awt.geom.RoundRectangle2D;
/**
 * A {@link JTextField} whose background, border, and mouse hit-area are all
 * drawn as a rounded rectangle (15px corner arcs).
 *
 * Fix: removed stray extraction tokens fused to the class declaration and
 * closing brace, and added the missing {@code @Override} annotations.
 */
public class RoundJTextField extends JTextField {

    /** Cached hit-test shape; rebuilt whenever the component bounds change. */
    private Shape shape;

    public RoundJTextField(int size) {
        super(size);
        setOpaque(false); // As suggested by @AVD in comment.
    }

    @Override
    protected void paintComponent(Graphics g) {
        // Paint the rounded background ourselves, then let the superclass
        // paint the text on top.
        g.setColor(getBackground());
        g.fillRoundRect(0, 0, getWidth() - 1, getHeight() - 1, 15, 15);
        super.paintComponent(g);
    }

    @Override
    protected void paintBorder(Graphics g) {
        g.setColor(getForeground());
        g.drawRoundRect(0, 0, getWidth() - 1, getHeight() - 1, 15, 15);
    }

    @Override
    public boolean contains(int x, int y) {
        // Rebuild the cached shape lazily when bounds have changed, so mouse
        // events outside the rounded corners are ignored.
        if (shape == null || !shape.getBounds().equals(getBounds())) {
            shape = new RoundRectangle2D.Float(0, 0, getWidth() - 1, getHeight() - 1, 15, 15);
        }
        return shape.contains(x, y);
    }
}
|
import javax.swing.JTextField;
|
<|file_name|>integ_test.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
//go:build go1.16 && integration
// +build go1.16,integration
package codebuild_test
import (
"context"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/awstesting/integration"
"github.com/aws/aws-sdk-go/service/codebuild"
)
var _ aws.Config
var _ awserr.Error
var _ request.Request
func TestInteg_00_ListBuilds(t *testing.T) {<|fim▁hole|> ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
sess := integration.SessionWithDefaultRegion("us-west-2")
svc := codebuild.New(sess)
params := &codebuild.ListBuildsInput{}
_, err := svc.ListBuildsWithContext(ctx, params, func(r *request.Request) {
r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
})
if err != nil {
t.Errorf("expect no error, got %v", err)
}
}<|fim▁end|>
| |
<|file_name|>image_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes and utilities for image datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import io
import os
import numpy as np
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.layers import common_layers
from tensor2tensor.layers import modalities
from tensor2tensor.utils import contrib
from tensor2tensor.utils import metrics
import tensorflow.compat.v1 as tf
def matplotlib_pyplot():
  """Imports and returns pyplot with the non-interactive Agg backend.

  The backend must be selected before `matplotlib.pyplot` is first imported,
  which is why both imports are deferred into this helper.
  """
  import matplotlib  # pylint: disable=g-import-not-at-top
  matplotlib.use("agg")
  import matplotlib.pyplot as plt  # pylint: disable=g-import-not-at-top
  return plt
def image_to_tf_summary_value(image, tag):
  """Converts a NumPy image to a tf.Summary.Value object.

  Args:
    image: 3-D NumPy array.
    tag: name for tf.Summary.Value for display in tensorboard.

  Returns:
    image_summary: A tf.Summary.Value object.
  """
  pixels = np.asarray(image, dtype=np.uint8)
  height, width, n_channels = pixels.shape
  # Monochrome images are encoded from a 2-D [height, width] array.
  if n_channels == 1:
    pixels = np.reshape(pixels, [height, width])
  buf = io.BytesIO()
  matplotlib_pyplot().imsave(buf, pixels, format="png")
  img_sum = tf.Summary.Image(encoded_image_string=buf.getvalue(),
                             height=height, width=width,
                             colorspace=n_channels)
  return tf.Summary.Value(tag=tag, image=img_sum)
def convert_predictions_to_image_summaries(hook_args):
  """Optionally converts images from hooks_args to image summaries.

  Args:
    hook_args: DecodeHookArgs namedtuple
  Returns:
    summaries: list of tf.Summary values if
      hook_args.decode_hparams.display_decoded_images is set; an empty list
      otherwise.
  """
  decode_hparams = hook_args.decode_hparams
  if not decode_hparams.display_decoded_images:
    return []
  predictions = hook_args.predictions[0]

  # Display ten random inputs and outputs so that tensorboard does not hang.
  all_summaries = []
  rand_predictions = np.random.choice(predictions, size=10)
  for ind, prediction in enumerate(rand_predictions):
    output_summary = image_to_tf_summary_value(
        prediction["outputs"], tag="%d_output" % ind)
    input_summary = image_to_tf_summary_value(
        prediction["inputs"], tag="%d_input" % ind)
    all_summaries.append(input_summary)
    all_summaries.append(output_summary)
  return all_summaries
def resize_by_area(img, size):
  """image resize function used by quite a few image problems."""
  # Square resize with AREA interpolation, cast back to int64 pixel values.
  return tf.to_int64(
      tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA))
def make_multiscale(image, resolutions,
                    resize_method=tf.image.ResizeMethod.BICUBIC,
                    num_channels=3):
  """Returns list of scaled images, one for each resolution.

  Args:
    image: Tensor of shape [height, height, num_channels].
    resolutions: List of heights that image's height is resized to.
    resize_method: tf.image.ResizeMethod.
    num_channels: Number of channels in image.

  Returns:
    List of Tensors, one per resolution, each with shape
    [resolutions[i], resolutions[i], num_channels].
  """
  def _rescale(side):
    # Square resize (height == width assumed), cast back to int64 pixels, and
    # pin the static shape so downstream ops see the target resolution.
    resized = tf.image.resize_images(
        image, size=[side, side], method=resize_method)
    resized = tf.to_int64(resized)
    resized.set_shape([side, side, num_channels])
    return resized

  return [_rescale(side) for side in resolutions]
def make_multiscale_dilated(image, resolutions, num_channels=3):
  """Returns list of scaled images, one for each resolution.

  Resizes by skipping every nth pixel.

  Args:
    image: Tensor of shape [height, height, num_channels].
    resolutions: List of heights that image's height is resized to. The function
      assumes VALID padding, so the original image's height must be divisible
      by each resolution's height to return the exact resolution size.
    num_channels: Number of channels in image.

  Returns:
    List of Tensors, one for each resolution with shape given by
    [resolutions[i], resolutions[i], num_channels] if resolutions properly
    divide the original image's height; otherwise shape height and width is up
    to valid skips.
  """
  image_height = common_layers.shape_list(image)[0]
  scaled_images = []
  for height in resolutions:
    dilation_rate = image_height // height  # assuming height = width
    # Subsample every dilation_rate-th pixel along both spatial dimensions.
    scaled_image = image[::dilation_rate, ::dilation_rate]
    scaled_image = tf.to_int64(scaled_image)
    # Exact spatial size is only guaranteed when dilation_rate divides
    # image_height evenly, hence the unknown static dims.
    scaled_image.set_shape([None, None, num_channels])
    scaled_images.append(scaled_image)
  return scaled_images
class ImageProblem(problem.Problem):
  """Base class for problems with images."""

  @property
  def num_channels(self):
    """Number of color channels."""
    return 3

  @property
  def vocab_size(self):
    """Number of pixel values."""
    return 256

  def example_reading_spec(self):
    """Decodes the PNG/JPEG bytes under image/encoded into "inputs"."""
    data_fields = {
        "image/encoded": tf.FixedLenFeature((), tf.string),
        "image/format": tf.FixedLenFeature((), tf.string),
    }

    data_items_to_decoders = {
        "inputs":
            contrib.slim().tfexample_decoder.Image(
                image_key="image/encoded",
                format_key="image/format",
                channels=self.num_channels),
    }

    return data_fields, data_items_to_decoders

  def preprocess_example(self, example, mode, hparams):
    # Standardization only applies when the problem was not reversed —
    # presumably because reversal swaps inputs/targets (TODO confirm).
    if not self._was_reversed:
      example["inputs"] = tf.image.per_image_standardization(example["inputs"])
    return example

  def eval_metrics(self):
    eval_metrics = [
        metrics.Metrics.ACC, metrics.Metrics.ACC_TOP5,
        metrics.Metrics.ACC_PER_SEQ, metrics.Metrics.NEG_LOG_PERPLEXITY
    ]
    # Reversed problems additionally log generated images as summaries.
    if self._was_reversed:
      eval_metrics += [metrics.Metrics.IMAGE_SUMMARY]
    return eval_metrics

  @property
  def decode_hooks(self):
    # Hook that renders decoded predictions into tensorboard image summaries.
    return [convert_predictions_to_image_summaries]
class Image2ClassProblem(ImageProblem):
  """Base class for image classification problems."""

  @property
  def is_small(self):
    """Whether the dataset is small; selects batch/loss multipliers below."""
    raise NotImplementedError()

  @property
  def num_classes(self):
    """Number of target classes."""
    raise NotImplementedError()

  @property
  def train_shards(self):
    """Number of training-data output shards."""
    raise NotImplementedError()

  @property
  def dev_shards(self):
    """Number of dev-data output shards."""
    return 1

  @property
  def class_labels(self):
    """Human-readable class names; defaults to synthetic ID_<n> labels."""
    return ["ID_%d" % i for i in range(self.num_classes)]

  def feature_encoders(self, data_dir):
    del data_dir
    return {
        "inputs": text_encoder.ImageEncoder(channels=self.num_channels),
        "targets": text_encoder.ClassLabelEncoder(self.class_labels)
    }

  def generator(self, data_dir, tmp_dir, is_training):
    """Yields example dicts; must be provided by subclasses."""
    raise NotImplementedError()

  def example_reading_spec(self):
    # Extend the base image spec with a single int64 class label.
    label_key = "image/class/label"
    data_fields, data_items_to_decoders = (
        super(Image2ClassProblem, self).example_reading_spec())
    data_fields[label_key] = tf.FixedLenFeature((1,), tf.int64)

    data_items_to_decoders["targets"] = contrib.slim().tfexample_decoder.Tensor(
        label_key)
    return data_fields, data_items_to_decoders

  def hparams(self, defaults, unused_model_hparams):
    p = defaults
    p.modality = {"inputs": modalities.ModalityType.IMAGE,
                  "targets": modalities.ModalityType.CLASS_LABEL}
    p.vocab_size = {"inputs": 256,
                    "targets": self.num_classes}
    p.batch_size_multiplier = 4 if self.is_small else 256
    p.loss_multiplier = 3.0 if self.is_small else 1.0
    if self._was_reversed:
      p.loss_multiplier = 1.0
    p.input_space_id = problem.SpaceID.IMAGE
    p.target_space_id = problem.SpaceID.IMAGE_LABEL

  def generate_data(self, data_dir, tmp_dir, task_id=-1):
    generator_utils.generate_dataset_and_shuffle(
        self.generator(data_dir, tmp_dir, True),
        self.training_filepaths(data_dir, self.train_shards, shuffled=False),
        self.generator(data_dir, tmp_dir, False),
        self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def encode_images_as_png(images):
  """Yield images encoded as pngs."""
  if tf.executing_eagerly():
    # Eager mode: encode each image directly.
    for image in images:
      yield tf.image.encode_png(image).numpy()
  else:
    # Graph mode: build one encode op and feed each image through a session.
    # NOTE(review): the placeholder uses the first image's shape, so this
    # assumes all images share that shape — TODO confirm callers guarantee it.
    (height, width, channels) = images[0].shape
    with tf.Graph().as_default():
      image_t = tf.placeholder(dtype=tf.uint8, shape=(height, width, channels))
      encoded_image_t = tf.image.encode_png(image_t)
      with tf.Session() as sess:
        for image in images:
          enc_string = sess.run(encoded_image_t, feed_dict={image_t: image})
          yield enc_string
def image_generator(images, labels):
  """Generator for images that takes image and labels lists and creates pngs.

  Args:
    images: list of images given as [width x height x channels] numpy arrays.
    labels: list of ints, same length as images.

  Yields:
    A dictionary representing the images with the following fields:
    * image/encoded: the string encoding the image as PNG,
    * image/format: the string "png" representing image format,
    * image/class/label: an integer representing the label,
    * image/height: an integer representing the height,
    * image/width: an integer representing the width.
    Every field is actually a singleton list of the corresponding type.

  Raises:
    ValueError: if images is an empty list.
  """
  if not images:
    raise ValueError("Must provide some images for the generator.")
  # NOTE(review): numpy arrays are conventionally (height, width, channels);
  # this unpacking follows the docstring's [width x height x channels] claim
  # instead — confirm which convention callers actually use.
  width, height, _ = images[0].shape
  for (enc_image, label) in zip(encode_images_as_png(images), labels):
    yield {
        "image/encoded": [enc_image],
        "image/format": ["png"],
        "image/class/label": [int(label)],
        "image/height": [height],
        "image/width": [width]
    }
class Image2TextProblem(ImageProblem):
  """Base class for image-to-text problems.

  Fix: removed a stray extraction token that was fused into the middle of
  hparams(), which broke the class's syntax.
  """

  @property
  def is_character_level(self):
    """Whether targets are encoded at the byte level (vs. subwords)."""
    raise NotImplementedError()

  @property
  def vocab_problem(self):
    """Problem whose subword vocab file is reused for target encoding."""
    raise NotImplementedError()  # Not needed if self.is_character_level.

  @property
  def target_space_id(self):
    """problem.SpaceID for the target text."""
    raise NotImplementedError()

  @property
  def train_shards(self):
    """Number of training-data output shards."""
    raise NotImplementedError()

  @property
  def dev_shards(self):
    """Number of dev-data output shards."""
    raise NotImplementedError()

  def generator(self, data_dir, tmp_dir, is_training):
    """Yields example dicts; must be provided by subclasses."""
    raise NotImplementedError()

  def example_reading_spec(self):
    # Extend the base image spec with a variable-length token-id target.
    label_key = "image/class/label"
    data_fields, data_items_to_decoders = (
        super(Image2TextProblem, self).example_reading_spec())
    data_fields[label_key] = tf.VarLenFeature(tf.int64)
    data_items_to_decoders["targets"] = contrib.slim().tfexample_decoder.Tensor(
        label_key)
    return data_fields, data_items_to_decoders

  def feature_encoders(self, data_dir):
    if self.is_character_level:
      encoder = text_encoder.ByteTextEncoder()
    else:
      vocab_filename = os.path.join(
          data_dir, self.vocab_problem.vocab_filename)
      encoder = text_encoder.SubwordTextEncoder(vocab_filename)
    input_encoder = text_encoder.ImageEncoder(channels=self.num_channels)
    return {"inputs": input_encoder, "targets": encoder}

  def hparams(self, defaults, unused_model_hparams):
    p = defaults
    p.modality = {"inputs": modalities.ModalityType.IMAGE,
                  "targets": modalities.ModalityType.SYMBOL}
    p.vocab_size = {"inputs": 256,
                    "targets": self._encoders["targets"].vocab_size}
    p.batch_size_multiplier = 256
    p.loss_multiplier = 1.0
    p.input_space_id = problem.SpaceID.IMAGE
    p.target_space_id = self.target_space_id

  def generate_data(self, data_dir, tmp_dir, task_id=-1):
    generator_utils.generate_dataset_and_shuffle(
        self.generator(data_dir, tmp_dir, True),
        self.training_filepaths(data_dir, self.train_shards, shuffled=False),
        self.generator(data_dir, tmp_dir, False),
        self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def image_augmentation(images, do_colors=False, crop_size=None):
  """Image augmentation: cropping, flipping, and color transforms."""
  # Default resolved in the body rather than the signature to avoid a shared
  # mutable default argument.
  if crop_size is None:
    crop_size = [299, 299]
  images = tf.random_crop(images, crop_size + [3])
  images = tf.image.random_flip_left_right(images)
  if do_colors:  # More augmentation, but might be slow.
    images = tf.image.random_brightness(images, max_delta=32. / 255.)
    images = tf.image.random_saturation(images, lower=0.5, upper=1.5)
    images = tf.image.random_hue(images, max_delta=0.2)
    images = tf.image.random_contrast(images, lower=0.5, upper=1.5)
  return images
def cifar_image_augmentation(images):
  """Image augmentation suitable for CIFAR-10/100.

  As described in https://arxiv.org/pdf/1608.06993v3.pdf (page 5).

  Args:
    images: a Tensor.
  Returns:
    Tensor of the same shape as images.
  """
  # Pad to 40x40, then crop a random 32x32 window (i.e. random shift of up to
  # 4 pixels in each direction), then random horizontal flip.
  images = tf.image.resize_image_with_crop_or_pad(images, 40, 40)
  images = tf.random_crop(images, [32, 32, 3])
  images = tf.image.random_flip_left_right(images)
  return images
def random_shift(image, wsr=0.1, hsr=0.1):
  """Apply random horizontal and vertical shift to images.

  This is the default data-augmentation strategy used on CIFAR in Glow.

  Args:
    image: a 3-D Tensor
    wsr: Width shift range, as a float fraction of the width.
    hsr: Height shift range, as a float fraction of the height.
  Returns:
    images: images translated by the provided wsr and hsr.
  """
  height, width, _ = common_layers.shape_list(image)
  width_range, height_range = wsr*width, hsr*height
  height_translations = tf.random_uniform((1,), -height_range, height_range)
  width_translations = tf.random_uniform((1,), -width_range, width_range)
  translations = tf.concat((height_translations, width_translations), axis=0)
  return contrib.image().translate(image, translations=translations)
| |
<|file_name|>rest.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package endpoint
import (
"fmt"
endptspkg "github.com/GoogleCloudPlatform/kubernetes/pkg/api/endpoints"
"github.com/GoogleCloudPlatform/kubernetes/pkg/fields"
"github.com/GoogleCloudPlatform/kubernetes/pkg/labels"
"github.com/GoogleCloudPlatform/kubernetes/pkg/registry/generic"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util/fielderrors"
"github.com/cnaize/kubernetes/pkg/api"
"github.com/cnaize/kubernetes/pkg/api/validation"
)
// endpointsStrategy implements behavior for Endpoints objects during create
// and update, embedding the scheme's typing and name generation.
type endpointsStrategy struct {
	runtime.ObjectTyper
	api.NameGenerator
}

// Strategy is the default logic that applies when creating and updating Endpoint
// objects via the REST API.
var Strategy = endpointsStrategy{api.Scheme, api.SimpleNameGenerator}
// NamespaceScoped is true for endpoints.
func (endpointsStrategy) NamespaceScoped() bool {
return true
}
// PrepareForCreate clears fields that are not allowed to be set by end users on creation.
func (endpointsStrategy) PrepareForCreate(obj runtime.Object) {
endpoints := obj.(*api.Endpoints)
endpoints.Subsets = endptspkg.RepackSubsets(endpoints.Subsets)
}
// PrepareForUpdate clears fields that are not allowed to be set by end users on update.
func (endpointsStrategy) PrepareForUpdate(obj, old runtime.Object) {
newEndpoints := obj.(*api.Endpoints)
_ = old.(*api.Endpoints)
newEndpoints.Subsets = endptspkg.RepackSubsets(newEndpoints.Subsets)
}<|fim▁hole|>
// Validate validates a new endpoints object.
func (endpointsStrategy) Validate(obj runtime.Object) fielderrors.ValidationErrorList {
	return validation.ValidateEndpoints(obj.(*api.Endpoints))
}

// AllowCreateOnUpdate is true for endpoints: a PUT to a nonexistent object
// creates it.
func (endpointsStrategy) AllowCreateOnUpdate() bool {
	return true
}

// ValidateUpdate is the default update validation for an end user.
func (endpointsStrategy) ValidateUpdate(obj, old runtime.Object) fielderrors.ValidationErrorList {
	return validation.ValidateEndpointsUpdate(old.(*api.Endpoints), obj.(*api.Endpoints))
}
// MatchEndpoints returns a generic matcher for a given label and field selector.
func MatchEndpoints(label labels.Selector, field fields.Selector) generic.Matcher {
	return generic.MatcherFunc(func(obj runtime.Object) (bool, error) {
		endpoints, ok := obj.(*api.Endpoints)
		if !ok {
			return false, fmt.Errorf("not an Endpoints")
		}
		// Renamed from "fields" to stop shadowing the imported fields package.
		fieldSet := EndpointsToSelectableFields(endpoints)
		return label.Matches(labels.Set(endpoints.Labels)) && field.Matches(fieldSet), nil
	})
}
// EndpointsToSelectableFields returns a label set that represents the object
// TODO: fields are not labels, and the validation rules for them do not apply.
func EndpointsToSelectableFields(endpoints *api.Endpoints) labels.Set {
return labels.Set{
"name": endpoints.Name,
}
}<|fim▁end|>
| |
<|file_name|>git.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: git
author:
- "Ansible Core Team"
- "Michael DeHaan"
version_added: "0.0.1"
short_description: Deploy software (or files) from git checkouts
description:
- Manage I(git) checkouts of repositories to deploy files or software.
options:
repo:
required: true
aliases: [ name ]
description:
- git, SSH, or HTTP(S) protocol address of the git repository.
dest:
required: true
description:
- Absolute path of where the repository should be checked out to.
This parameter is required, unless C(clone) is set to C(no)
This change was made in version 1.8.3. Prior to this version,
the C(dest) parameter was always required.
version:
required: false
default: "HEAD"
description:
- What version of the repository to check out. This can be the
the literal string C(HEAD), a branch name, a tag name.
It can also be a I(SHA-1) hash, in which case C(refspec) needs
to be specified if the given revision is not already available.
accept_hostkey:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.5"
description:
- if C(yes), ensure that "-o StrictHostKeyChecking=no" is
present as an ssh options.
ssh_opts:<|fim▁hole|> version_added: "1.5"
description:
- Creates a wrapper script and exports the path as GIT_SSH
which git then automatically uses to override ssh arguments.
An example value could be "-o StrictHostKeyChecking=no"
key_file:
required: false
default: None
version_added: "1.5"
description:
- Specify an optional private key file to use for the checkout.
reference:
required: false
default: null
version_added: "1.4"
description:
- Reference repository (see "git clone --reference ...")
remote:
required: false
default: "origin"
description:
- Name of the remote.
refspec:
required: false
default: null
version_added: "1.9"
description:
- Add an additional refspec to be fetched.
If version is set to a I(SHA-1) not reachable from any branch
or tag, this option may be necessary to specify the ref containing
the I(SHA-1).
Uses the same syntax as the 'git fetch' command.
An example value could be "refs/meta/config".
force:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "0.7"
description:
- If C(yes), any modified files in the working
repository will be discarded. Prior to 0.7, this was always
'yes' and could not be disabled. Prior to 1.9, the default was
`yes`
depth:
required: false
default: null
version_added: "1.2"
description:
- Create a shallow clone with a history truncated to the specified
number or revisions. The minimum possible value is C(1), otherwise
ignored. Needs I(git>=1.9.1) to work correctly.
clone:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "1.9"
description:
- If C(no), do not clone the repository if it does not exist locally
update:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "1.2"
description:
- If C(no), do not retrieve new revisions from the origin repository
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to git executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
bare:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.4"
description:
- if C(yes), repository will be created as a bare repo, otherwise
it will be a standard repo with a workspace.
umask:
required: false
default: null
version_added: "2.2"
description:
- The umask to set before doing any checkouts, or any other
repository maintenance.
recursive:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "1.6"
description:
- if C(no), repository will be cloned without the --recursive
option, skipping sub-modules.
track_submodules:
required: false
default: "no"
choices: ["yes", "no"]
version_added: "1.8"
description:
- if C(yes), submodules will track the latest commit on their
master branch (or other branch specified in .gitmodules). If
C(no), submodules will be kept at the revision specified by the
main project. This is equivalent to specifying the --remote flag
to git submodule update.
verify_commit:
required: false
default: "no"
choices: ["yes", "no"]
version_added: "2.0"
description:
- if C(yes), when cloning or checking out a C(version) verify the
signature of a GPG signed commit. This requires C(git) version>=2.1.0
to be installed. The commit MUST be signed and the public key MUST
be present in the GPG keyring.
archive:
required: false
version_added: "2.4"
description:
- Specify archive file path with extension. If specified, creates an
archive file of the specified format containing the tree structure
for the source tree.
Allowed archive formats ["zip", "tar.gz", "tar", "tgz"]
requirements:
- git>=1.7.1 (the command line tool)
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to use the option accept_hostkey. Another solution is to
add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the git module, with the following command: ssh-keyscan -H remote_host.com >> /etc/ssh/ssh_known_hosts."
'''
EXAMPLES = '''
# Example git checkout from Ansible Playbooks
- git:
repo: 'https://foosball.example.org/path/to/repo.git'
dest: /srv/checkout
version: release-0.22
# Example read-write git checkout from github
- git:
repo: ssh://[email protected]/mylogin/hello.git
dest: /home/mylogin/hello
# Example just ensuring the repo checkout exists
- git:
repo: 'https://foosball.example.org/path/to/repo.git'
dest: /srv/checkout
update: no
# Example just get information about the repository whether or not it has
# already been cloned locally.
- git:
repo: 'https://foosball.example.org/path/to/repo.git'
dest: /srv/checkout
clone: no
update: no
# Example checkout a github repo and use refspec to fetch all pull requests
- git:
repo: https://github.com/ansible/ansible-examples.git
dest: /src/ansible-examples
refspec: '+refs/pull/*:refs/heads/*'
# Example Create git archive from repo
- git:
repo: https://github.com/ansible/ansible-examples.git
dest: /src/ansible-examples
archive: /tmp/ansible-examples.zip
'''
RETURN = '''
after:
description: last commit revision of the repository retrieved during the update
returned: success
type: string
sample: 4c020102a9cd6fe908c9a4a326a38f972f63a903
before:
description: commit revision before the repository was updated, "null" for new repository
returned: success
type: string
sample: 67c04ebe40a003bda0efb34eacfb93b0cafdf628
remote_url_changed:
description: Contains True or False whether or not the remote URL was changed.
returned: success
type: boolean
sample: True
warnings:
description: List of warnings if requested features were not available due to a too old git version.
returned: error
type: string
sample: Your git version is too old to fully support the depth argument. Falling back to full checkouts.
'''
import filecmp
import os
import re
import shlex
import stat
import sys
import shutil
import tempfile
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, get_module_path
from ansible.module_utils.basic import get_exception
from ansible.module_utils.six import b, string_types
from ansible.module_utils._text import to_native
def head_splitter(headfile, remote, module=None, fail_on_error=False):
    '''Extract the head reference

    Reads the first line of headfile (e.g. refs/remotes/<remote>/HEAD), strips
    the "refs/remotes/<remote>" prefix, and returns the trailing ref name, or
    None on failure. If fail_on_error is set and a module is supplied, failures
    call module.fail_json instead.
    '''
    # https://github.com/ansible/ansible-modules-core/pull/907
    res = None
    if os.path.exists(headfile):
        rawdata = None
        try:
            # Use a context manager so the file is closed even on read errors
            # (the original leaked the handle on exception).
            with open(headfile, 'r') as f:
                rawdata = f.readline()
        except Exception:
            if fail_on_error and module:
                module.fail_json(msg="Unable to read %s" % headfile)
        if rawdata:
            try:
                rawdata = rawdata.replace('refs/remotes/%s' % remote, '', 1)
                refparts = rawdata.split(' ')
                newref = refparts[-1]
                nrefparts = newref.split('/', 2)
                res = nrefparts[-1].rstrip('\n')
            except Exception:
                if fail_on_error and module:
                    module.fail_json(msg="Unable to split head from '%s'" % rawdata)
    return res
def unfrackgitpath(path):
    """Fully expand and normalize a filesystem path; None passes through."""
    if path is None:
        return None

    # copied from ansible.utils.path
    expanded = os.path.expandvars(os.path.expanduser(path))
    return os.path.normpath(os.path.realpath(expanded))
def get_submodule_update_params(module, git_path, cwd):
    """Scrape `git submodule update --help` for the long options it supports.

    Returns a list of option names with the leading dashes stripped, e.g.
    ['quiet', 'init', 'no-fetch'].
    """
    # or: git submodule [--quiet] update [--init] [-N|--no-fetch]
    # [-f|--force] [--rebase] [--reference <repository>] [--merge]
    # [--recursive] [--] [<path>...]

    # Run a bad submodule command so git prints its usage line to stderr.
    cmd = "%s submodule update --help" % (git_path)
    rc, stdout, stderr = module.run_command(cmd, cwd=cwd)

    usage_line = None
    for line in stderr.split('\n'):
        if 'git submodule [--quiet] update ' in line:
            usage_line = line

    params = []
    if usage_line:
        # Strip the bracket/alternation markup, then keep only --long options.
        cleaned = usage_line.replace('[', '').replace(']', '').replace('|', ' ')
        for token in shlex.split(cleaned):
            if token.startswith('--'):
                params.append(token.replace('--', ''))

    return params
def write_ssh_wrapper():
    """Create an executable shell wrapper to be exported later as GIT_SSH.

    The wrapper injects GIT_SSH_OPTS and (optionally) GIT_KEY into the ssh
    invocation git makes, and forces BatchMode so ssh fails rather than
    prompting. Returns the path to the generated script.
    """
    module_dir = get_module_path()
    try:
        # make sure we have full permission to the module_dir, which
        # may not be the case if we're sudo'ing to a non-root user
        if os.access(module_dir, os.W_OK | os.R_OK | os.X_OK):
            fd, wrapper_path = tempfile.mkstemp(prefix=module_dir + '/')
        else:
            raise OSError
    except (IOError, OSError):
        # Fall back to the system temp dir when module_dir is not writable.
        fd, wrapper_path = tempfile.mkstemp()
    # Reuse the mkstemp file descriptor directly; binary mode matches the
    # bytes template below.
    fh = os.fdopen(fd, 'w+b')
    template = b("""#!/bin/sh
if [ -z "$GIT_SSH_OPTS" ]; then
    BASEOPTS=""
else
    BASEOPTS=$GIT_SSH_OPTS
fi
# Let ssh fail rather than prompt
BASEOPTS="$BASEOPTS -o BatchMode=yes"
if [ -z "$GIT_KEY" ]; then
    ssh $BASEOPTS "$@"
else
    ssh -i "$GIT_KEY" -o IdentitiesOnly=yes $BASEOPTS "$@"
fi
""")
    fh.write(template)
    fh.close()
    st = os.stat(wrapper_path)
    # Mark the wrapper executable so git can invoke it.
    os.chmod(wrapper_path, st.st_mode | stat.S_IEXEC)
    return wrapper_path
def set_git_ssh(ssh_wrapper, key_file, ssh_opts):
    """Export the GIT_SSH wrapper and its key/options environment variables.

    GIT_SSH always points at ssh_wrapper; GIT_KEY and GIT_SSH_OPTS are set
    only when key_file / ssh_opts are provided, and any stale values are
    removed otherwise. (The original's check-then-del was redundant before an
    unconditional set, and left a stale empty-string GIT_KEY in place.)
    """
    os.environ["GIT_SSH"] = ssh_wrapper

    os.environ.pop("GIT_KEY", None)
    if key_file:
        os.environ["GIT_KEY"] = key_file

    os.environ.pop("GIT_SSH_OPTS", None)
    if ssh_opts:
        os.environ["GIT_SSH_OPTS"] = ssh_opts
def get_version(module, git_path, dest, ref="HEAD"):
    ''' samples the version of the git repo '''
    rc, stdout, stderr = module.run_command(
        "%s rev-parse %s" % (git_path, ref), cwd=dest)
    # rev-parse prints the SHA followed by a newline; strip the newline.
    return to_native(stdout).rstrip('\n')
def get_submodule_versions(git_path, module, dest, version='HEAD'):
    # Return a dict mapping each submodule path to the sha1 that 'version'
    # resolves to inside it, by running rev-parse within every submodule.
    cmd = [git_path, 'submodule', 'foreach', git_path, 'rev-parse', version]
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(
            msg='Unable to determine hashes of submodules',
            stdout=out,
            stderr=err,
            rc=rc)
    submodules = {}
    subm_name = None
    # 'git submodule foreach' prints "Entering '<path>'" before the output
    # of each per-submodule command; pair each such header with the
    # 40-character sha1 line that follows it.
    for line in out.splitlines():
        if line.startswith("Entering '"):
            subm_name = line[10:-1]
        elif len(line.strip()) == 40:
            # a sha1 with no preceding "Entering" header is a parse error
            if subm_name is None:
                module.fail_json()
            submodules[subm_name] = line.strip()
            subm_name = None
        else:
            module.fail_json(msg='Unable to parse submodule hash line: %s' % line.strip())
    if subm_name is not None:
        # saw an "Entering" header that was never followed by a sha1
        module.fail_json(msg='Unable to find hash for submodule: %s' % subm_name)
    return submodules
def clone(git_path, module, repo, dest, remote, depth, version, bare,
          reference, refspec, verify_commit):
    ''' makes a new git repo if it does not already exist '''
    dest_dirname = os.path.dirname(dest)
    try:
        os.makedirs(dest_dirname)
    except OSError:
        # best effort: the directory may already exist; if creation failed
        # for another reason, git clone below will report a clearer error.
        # (Narrowed from a bare 'except:' which also swallowed things like
        # KeyboardInterrupt.)
        pass
    cmd = [git_path, 'clone']
    if bare:
        cmd.append('--bare')
    else:
        cmd.extend(['--origin', remote])
    if depth:
        if version == 'HEAD' or refspec:
            cmd.extend(['--depth', str(depth)])
        elif is_remote_branch(git_path, module, dest, repo, version) \
                or is_remote_tag(git_path, module, dest, repo, version):
            cmd.extend(['--depth', str(depth)])
            cmd.extend(['--branch', version])
        else:
            # only use depth if the remote object is branch or tag (i.e. fetchable)
            module.warn("Ignoring depth argument. "
                        "Shallow clones are only available for "
                        "HEAD, branches, tags or in combination with refspec.")
    if reference:
        cmd.extend(['--reference', str(reference)])
    cmd.extend([repo, dest])
    module.run_command(cmd, check_rc=True, cwd=dest_dirname)
    if bare:
        # a bare clone does not set up the named remote automatically
        if remote != 'origin':
            module.run_command([git_path, 'remote', 'add', remote, repo], check_rc=True, cwd=dest)
    if refspec:
        # fetch the extra refspec that a plain clone would not bring in
        cmd = [git_path, 'fetch']
        if depth:
            cmd.extend(['--depth', str(depth)])
        cmd.extend([remote, refspec])
        module.run_command(cmd, check_rc=True, cwd=dest)
    if verify_commit:
        verify_commit_sign(git_path, module, dest, version)
def has_local_mods(module, git_path, dest, bare):
    """Return True when the working tree at dest has uncommitted changes.

    Untracked files ('??' porcelain status) do not count as modifications,
    and a bare repository has no working tree to modify.
    """
    if bare:
        return False
    rc, stdout, stderr = module.run_command("%s status --porcelain" % (git_path), cwd=dest)
    # keep only status lines that describe tracked files
    tracked_changes = [line for line in stdout.splitlines()
                       if not re.search('^\\?\\?.*$', line)]
    return len(tracked_changes) > 0
def reset(git_path, module, dest):
    '''
    Hard-reset the index and working tree to HEAD, throwing away any
    changes made to tracked files since that commit.
    '''
    return module.run_command("%s reset --hard HEAD" % (git_path,),
                              check_rc=True, cwd=dest)
def get_diff(module, git_path, dest, repo, remote, depth, bare, before, after):
    ''' Return the difference between 2 versions '''
    if before is None:
        # fresh checkout: there is no old version to diff against
        return {'prepared': '>> Newly checked out %s' % after}
    elif before != after:
        # Ensure we have the object we are referring to during git diff !
        git_version_used = git_version(git_path, module)
        fetch(git_path, module, repo, dest, after, remote, depth, bare, '', git_version_used)
        cmd = '%s diff %s %s' % (git_path, before, after)
        (rc, out, err) = module.run_command(cmd, cwd=dest)
        if rc == 0 and out:
            return {'prepared': out}
        elif rc == 0:
            # exit 0 with empty output: trees are identical
            return {'prepared': '>> No visual differences between %s and %s' % (before, after)}
        elif err:
            return {'prepared': '>> Failed to get proper diff between %s and %s:\n>> %s' % (before, after, err)}
        else:
            return {'prepared': '>> Failed to get proper diff between %s and %s' % (before, after)}
    # before == after: nothing changed, nothing to report
    return {}
def get_remote_head(git_path, module, dest, version, remote, bare):
    # Resolve 'version' to a sha1 on the remote without touching the local
    # checkout.  'remote' may be the raw repo URL (when cloning) or a
    # configured remote name.
    cloning = False
    cwd = None
    tag = False
    if remote == module.params['repo']:
        # remote is the repo URL itself: no local repo yet, so ls-remote
        # must run without a working directory
        cloning = True
    else:
        cwd = dest
    if version == 'HEAD':
        if cloning:
            # cloning the repo, just get the remote's HEAD version
            cmd = '%s ls-remote %s -h HEAD' % (git_path, remote)
        else:
            head_branch = get_head_branch(git_path, module, dest, remote, bare)
            cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, head_branch)
    elif is_remote_branch(git_path, module, dest, remote, version):
        cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
    elif is_remote_tag(git_path, module, dest, remote, version):
        tag = True
        # the trailing * also matches the '^{}' dereferenced entry that
        # annotated tags produce
        cmd = '%s ls-remote %s -t refs/tags/%s*' % (git_path, remote, version)
    else:
        # appears to be a sha1. return as-is since it appears
        # cannot check for a specific sha1 on remote
        return version
    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=cwd)
    if len(out) < 1:
        module.fail_json(msg="Could not determine remote revision for %s" % version, stdout=out, stderr=err, rc=rc)
    out = to_native(out)
    if tag:
        # Find the dereferenced tag if this is an annotated tag.
        # NOTE(review): the loop variable shadows the boolean 'tag' flag;
        # harmless here only because the flag is never read afterwards.
        for tag in out.split('\n'):
            if tag.endswith(version + '^{}'):
                out = tag
                break
            elif tag.endswith(version):
                out = tag
    # first whitespace-separated field of an ls-remote line is the sha1
    rev = out.split()[0]
    return rev
def is_remote_tag(git_path, module, dest, remote, version):
    """Return True if version names a tag on the given remote."""
    cmd = '%s ls-remote %s -t refs/tags/%s' % (git_path, remote, version)
    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
    # ls-remote prints nothing when the tag does not exist, so a simple
    # substring check on the output is sufficient (replaces the verbose
    # if/return-True/return-False form).
    return to_native(version, errors='surrogate_or_strict') in out
def get_branches(git_path, module, dest):
    """Return all local and remote-tracking branch lines for the repo at dest."""
    cmd = '%s branch --no-color -a' % (git_path,)
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Could not determine branch data - received %s" % out, stdout=out, stderr=err)
    # keep every non-empty line, stripped of its leading marker/indentation
    return [line.strip() for line in out.split('\n') if line.strip()]
def get_annotated_tags(git_path, module, dest):
    """Return the names of all annotated tags (objecttype 'tag') in dest."""
    cmd = [git_path, 'for-each-ref', 'refs/tags/', '--format', '%(objecttype):%(refname:short)']
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Could not determine tag data - received %s" % out, stdout=out, stderr=err)
    tags = []
    for line in to_native(out).split('\n'):
        stripped = line.strip()
        if not stripped:
            continue
        tagtype, tagname = stripped.split(':')
        # lightweight tags report objecttype 'commit'; only annotated
        # tags carry their own 'tag' object
        if tagtype == 'tag':
            tags.append(tagname)
    return tags
def is_remote_branch(git_path, module, dest, remote, version):
    """Return True if version names a branch head on the given remote."""
    cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
    # ls-remote prints nothing when the branch does not exist, so the
    # substring test is the whole answer (replaces if/return-True/False).
    return to_native(version, errors='surrogate_or_strict') in out
def is_local_branch(git_path, module, dest, branch):
    """Return True if branch exists locally, whether or not it is checked out."""
    branches = get_branches(git_path, module, dest)
    # The currently checked-out branch is listed with a '* ' prefix;
    # (drops the pointless `lbranch = '%s' % branch` copy and collapses
    # the three-way if/elif/else into one boolean expression).
    return branch in branches or '* %s' % branch in branches
def is_not_a_branch(git_path, module, dest):
    """Return True when HEAD is detached (not on any named branch)."""
    # git marks the current position with '* '; a detached HEAD shows up
    # as '(no branch)' or '(detached from ...)' depending on git version
    return any(
        branch.startswith('* ') and ('no branch' in branch or 'detached from' in branch)
        for branch in get_branches(git_path, module, dest)
    )
def get_head_branch(git_path, module, dest, remote, bare=False):
    '''
    Determine what branch HEAD is associated with. This is partly
    taken from lib/ansible/utils/__init__.py. It finds the correct
    path to .git/HEAD and reads from that file the branch that HEAD is
    associated with. In the case of a detached HEAD, this will look
    up the branch in .git/refs/remotes/<remote>/HEAD.
    '''
    if bare:
        repo_path = dest
    else:
        repo_path = os.path.join(dest, '.git')
    # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
    if os.path.isfile(repo_path):
        try:
            # NOTE(review): git_conf is never explicitly closed; the handle
            # is released only by garbage collection.
            git_conf = open(repo_path, 'rb')
            for line in git_conf:
                config_val = line.split(b(':'), 1)
                if config_val[0].strip() == b('gitdir'):
                    gitdir = to_native(config_val[1].strip(), errors='surrogate_or_strict')
                    break
            else:
                # for/else: runs only when no 'gitdir:' line was found
                # No repo path found
                return ''
            # There is a possibility the .git file to have an absolute path.
            if os.path.isabs(gitdir):
                repo_path = gitdir
            else:
                # resolve the gitdir relative to the directory holding .git
                repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
        except (IOError, AttributeError):
            # No repo path found
            return ''
    # Read .git/HEAD for the name of the branch.
    # If we're in a detached HEAD state, look up the branch associated with
    # the remote HEAD in .git/refs/remotes/<remote>/HEAD
    headfile = os.path.join(repo_path, "HEAD")
    if is_not_a_branch(git_path, module, dest):
        headfile = os.path.join(repo_path, 'refs', 'remotes', remote, 'HEAD')
    branch = head_splitter(headfile, remote, module=module, fail_on_error=True)
    return branch
def get_remote_url(git_path, module, dest, remote):
    '''Return URL of remote source for repo.'''
    rc, out, err = module.run_command([git_path, 'ls-remote', '--get-url', remote], cwd=dest)
    if rc != 0:
        # There was an issue getting remote URL, most likely
        # command is not available in this version of Git.
        return None
    return to_native(out).rstrip('\n')
def set_remote_url(git_path, module, repo, dest, remote):
    '''Point the named remote at repo if its URL differs.

    Returns True when the URL was actually updated, False when it already
    matched (or could not be read at all on very old git versions).
    '''
    # Return if remote URL isn't changing.
    remote_url = get_remote_url(git_path, module, dest, remote)
    if remote_url == repo or unfrackgitpath(remote_url) == unfrackgitpath(repo):
        return False
    command = [git_path, 'remote', 'set-url', remote, repo]
    (rc, out, err) = module.run_command(command, cwd=dest)
    if rc != 0:
        label = "set a new url %s for %s" % (repo, remote)
        module.fail_json(msg="Failed to %s: %s %s" % (label, out, err))
    # Return False if remote_url is None to maintain previous behavior
    # for Git versions prior to 1.7.5 that lack required functionality.
    return remote_url is not None
def fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used):
    '''Update the repo at dest from the remote, fetching only what is
    needed for 'version' when a shallow (depth-limited) fetch is possible,
    otherwise everything.  The remote URL is (re)pointed at repo first.'''
    set_remote_url(git_path, module, repo, dest, remote)
    commands = []
    fetch_str = 'download remote objects and refs'
    fetch_cmd = [git_path, 'fetch']
    refspecs = []
    if depth:
        # try to find the minimal set of refs we need to fetch to get a
        # successful checkout
        currenthead = get_head_branch(git_path, module, dest, remote)
        if refspec:
            refspecs.append(refspec)
        elif version == 'HEAD':
            refspecs.append(currenthead)
        elif is_remote_branch(git_path, module, dest, repo, version):
            if currenthead != version:
                # this workaround is only needed for older git versions
                # 1.8.3 is broken, 1.9.x works
                # ensure that remote branch is available as both local and remote ref
                refspecs.append('+refs/heads/%s:refs/heads/%s' % (version, version))
                refspecs.append('+refs/heads/%s:refs/remotes/%s/%s' % (version, remote, version))
            else:
                refspecs.append(version)
        elif is_remote_tag(git_path, module, dest, repo, version):
            refspecs.append('+refs/tags/' + version + ':refs/tags/' + version)
        if refspecs:
            # if refspecs is empty, i.e. version is neither heads nor tags
            # assume it is a version hash
            # fall back to a full clone, otherwise we might not be able to checkout
            # version
            fetch_cmd.extend(['--depth', str(depth)])
    if not depth or not refspecs:
        # don't try to be minimalistic but do a full clone
        # also do this if depth is given, but version is something that can't be fetched directly
        if bare:
            refspecs = ['+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*']
        else:
            # ensure all tags are fetched
            if git_version_used >= LooseVersion('1.9'):
                fetch_cmd.append('--tags')
            else:
                # old git versions have a bug in --tags that prevents updating existing tags
                # so queue a first plain fetch, then fetch tags explicitly below
                commands.append((fetch_str, fetch_cmd + [remote]))
                refspecs = ['+refs/tags/*:refs/tags/*']
        if refspec:
            refspecs.append(refspec)
    fetch_cmd.extend([remote])
    commands.append((fetch_str, fetch_cmd + refspecs))
    # run the one or two queued fetch commands, failing loudly on error
    for (label, command) in commands:
        (rc, out, err) = module.run_command(command, cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to %s: %s %s" % (label, out, err), cmd=command)
def submodules_fetch(git_path, module, remote, track_submodules, dest):
    """Fetch submodule refs and report whether any submodule needs updating.

    Returns True when a new submodule appeared in .gitmodules or an
    existing one's recorded/tracked commit differs from what is checked out.
    """
    changed = False
    if not os.path.exists(os.path.join(dest, '.gitmodules')):
        # no submodules
        return changed
    # context manager guarantees the handle is closed (it was leaked before)
    with open(os.path.join(dest, '.gitmodules'), 'r') as gitmodules_file:
        for line in gitmodules_file:
            # Check for new submodules
            if not changed and line.strip().startswith('path'):
                path = line.split('=', 1)[1].strip()
                # Check that dest/path/.git exists
                if not os.path.exists(os.path.join(dest, path, '.git')):
                    changed = True
    # Check for updates to existing modules
    if not changed:
        # Fetch updates
        begin = get_submodule_versions(git_path, module, dest)
        cmd = [git_path, 'submodule', 'foreach', git_path, 'fetch']
        (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to fetch submodules: %s" % out + err)
        if track_submodules:
            # Compare against submodule HEAD
            # FIXME: determine this from .gitmodules
            version = 'master'
            after = get_submodule_versions(git_path, module, dest, '%s/%s' % (remote, version))
            if begin != after:
                changed = True
        else:
            # Compare against the superproject's expectation
            cmd = [git_path, 'submodule', 'status']
            (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
            if rc != 0:
                module.fail_json(msg='Failed to retrieve submodule status: %s' % out + err)
            for line in out.splitlines():
                # a leading space means the submodule is at the recorded commit
                if line[0] != ' ':
                    changed = True
                    break
    return changed
def submodule_update(git_path, module, dest, track_submodules, force=False):
    ''' init and update any submodules '''
    # get the valid submodule params
    params = get_submodule_update_params(module, git_path, dest)
    # skip submodule commands if .gitmodules is not present
    if not os.path.exists(os.path.join(dest, '.gitmodules')):
        return (0, '', '')
    # sync submodule URLs from .gitmodules into .git/config first
    cmd = [git_path, 'submodule', 'sync']
    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
    # '--remote' tracks the branch configured in .gitmodules; only pass it
    # when the installed git advertises support for it
    if 'remote' in params and track_submodules:
        cmd = [git_path, 'submodule', 'update', '--init', '--recursive', '--remote']
    else:
        cmd = [git_path, 'submodule', 'update', '--init', '--recursive']
    if force:
        cmd.append('--force')
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Failed to init/update submodules: %s" % out + err)
    return (rc, out, err)
def set_remote_branch(git_path, module, dest, remote, version, depth):
    """set refs for the remote branch version

    This assumes the branch does not yet exist locally and is therefore also not checked out.
    Can't use git remote set-branches, as it is not available in git 1.7.1 (centos6)
    """
    # fetch the branch into both the local head and the remote-tracking ref
    refspecs = '+refs/heads/%s:refs/heads/%s +refs/heads/%s:refs/remotes/%s/%s' % (
        version, version, version, remote, version)
    cmd = "%s fetch --depth=%s %s %s" % (git_path, depth, remote, refspecs)
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Failed to fetch branch from remote: %s" % version, stdout=out, stderr=err, rc=rc)
def switch_version(git_path, module, dest, remote, version, verify_commit, depth):
    # Check out the requested version (branch name, tag or sha1).  For
    # branches the local ref is additionally hard-reset to the remote's
    # state so the working tree matches what was just fetched.
    cmd = ''
    if version == 'HEAD':
        branch = get_head_branch(git_path, module, dest, remote)
        (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, branch), cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to checkout branch %s" % branch,
                             stdout=out, stderr=err, rc=rc)
        cmd = "%s reset --hard %s/%s --" % (git_path, remote, branch)
    else:
        # FIXME check for local_branch first, should have been fetched already
        if is_remote_branch(git_path, module, dest, remote, version):
            if depth and not is_local_branch(git_path, module, dest, version):
                # git clone --depth implies --single-branch, which makes
                # the checkout fail if the version changes
                # fetch the remote branch, to be able to check it out next
                set_remote_branch(git_path, module, dest, remote, version, depth)
            if not is_local_branch(git_path, module, dest, version):
                # create a local tracking branch in one step
                cmd = "%s checkout --track -b %s %s/%s" % (git_path, version, remote, version)
            else:
                (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, version), cwd=dest)
                if rc != 0:
                    module.fail_json(msg="Failed to checkout branch %s" % version, stdout=out, stderr=err, rc=rc)
                cmd = "%s reset --hard %s/%s" % (git_path, remote, version)
        else:
            # tag or sha1: a plain forced checkout (detached HEAD) suffices
            cmd = "%s checkout --force %s" % (git_path, version)
    (rc, out1, err1) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        if version != 'HEAD':
            module.fail_json(msg="Failed to checkout %s" % (version),
                             stdout=out1, stderr=err1, rc=rc, cmd=cmd)
        else:
            # 'branch' is always bound here because version == 'HEAD'
            module.fail_json(msg="Failed to checkout branch %s" % (branch),
                             stdout=out1, stderr=err1, rc=rc, cmd=cmd)
    if verify_commit:
        verify_commit_sign(git_path, module, dest, version)
    return (rc, out1, err1)
def verify_commit_sign(git_path, module, dest, version):
    """Verify the GPG signature on version; fail the module if it is invalid."""
    # annotated tags carry their own signature and need 'verify-tag';
    # anything else is verified as a commit
    subcommand = "verify-tag" if version in get_annotated_tags(git_path, module, dest) else "verify-commit"
    cmd = "%s %s %s" % (git_path, subcommand, version)
    (rc, out, err) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg='Failed to verify GPG signature of commit/tag "%s"' % version, stdout=out, stderr=err, rc=rc)
    return (rc, out, err)
def git_version(git_path, module):
    """return the installed version of git"""
    (rc, out, err) = module.run_command("%s --version" % git_path)
    if rc != 0:
        # one could fail_json here, but the version info is not that important,
        # so let's try to fail only on actual git commands
        return None
    match = re.search('git version (.*)$', to_native(out))
    if match is None:
        return None
    return LooseVersion(match.groups()[0])
def git_archive(git_path, module, dest, archive, archive_fmt, version):
    """ Create git archive in given source directory """
    archive_cmd = "%s archive --format=%s --output=%s %s" % (
        git_path, archive_fmt, archive, version)
    rc, out, err = module.run_command(archive_cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Failed to perform archive operation",
                         details="Git archive command failed to create "
                                 "archive %s using %s directory."
                                 "Error: %s" % (archive, dest, err))
    return rc, out, err
def create_archive(git_path, module, dest, archive, version, repo, result):
    """ Helper function for creating archive using git_archive """
    # map the requested file extension onto a git archive format
    all_archive_fmt = {'.zip': 'zip', '.gz': 'tar.gz', '.tar': 'tar',
                       '.tgz': 'tgz'}
    _, archive_ext = os.path.splitext(archive)
    archive_fmt = all_archive_fmt.get(archive_ext, None)
    if archive_fmt is None:
        module.fail_json(msg="Unable to get file extension from "
                             "archive file name : %s" % archive,
                         details="Please specify archive as filename with "
                                 "extension. File extension can be one "
                                 "of ['tar', 'tar.gz', 'zip', 'tgz']")
    repo_name = repo.split("/")[-1].replace(".git", "")
    if os.path.exists(archive):
        # If git archive file exists, then compare it with new git archive file.
        # if match, do nothing
        # if does not match, then replace existing with temp archive file.
        tempdir = tempfile.mkdtemp()
        new_archive_dest = os.path.join(tempdir, repo_name)
        new_archive = new_archive_dest + '.' + archive_fmt
        git_archive(git_path, module, dest, new_archive, archive_fmt, version)
        # filecmp is supposed to be more efficient than an md5sum checksum
        if filecmp.cmp(new_archive, archive):
            result.update(changed=False)
            # Cleanup before exiting
            try:
                # BUGFIX: shutil has no 'remove' attribute; the original
                # shutil.remove(tempdir) raised AttributeError (which the
                # OSError handler did not catch). rmtree deletes the
                # temporary directory tree correctly.
                shutil.rmtree(tempdir)
            except OSError:
                pass
        else:
            try:
                shutil.move(new_archive, archive)
                shutil.rmtree(tempdir)  # BUGFIX: was shutil.remove (nonexistent)
                result.update(changed=True)
            except OSError:
                exception = get_exception()
                module.fail_json(msg="Failed to move %s to %s" %
                                     (new_archive, archive),
                                 details="Error occured while moving : %s"
                                         % exception)
    else:
        # Perform archive from local directory
        git_archive(git_path, module, dest, archive, archive_fmt, version)
        result.update(changed=True)
# ===========================================
def main():
    # Entry point: parse parameters, clone or update the repository,
    # deal with submodules, optionally produce an archive, and report
    # before/after sha1s plus a changed flag.
    module = AnsibleModule(
        argument_spec=dict(
            dest=dict(type='path'),
            repo=dict(required=True, aliases=['name']),
            version=dict(default='HEAD'),
            remote=dict(default='origin'),
            refspec=dict(default=None),
            reference=dict(default=None),
            force=dict(default='no', type='bool'),
            depth=dict(default=None, type='int'),
            clone=dict(default='yes', type='bool'),
            update=dict(default='yes', type='bool'),
            verify_commit=dict(default='no', type='bool'),
            accept_hostkey=dict(default='no', type='bool'),
            key_file=dict(default=None, type='path', required=False),
            ssh_opts=dict(default=None, required=False),
            executable=dict(default=None, type='path'),
            bare=dict(default='no', type='bool'),
            recursive=dict(default='yes', type='bool'),
            track_submodules=dict(default='no', type='bool'),
            umask=dict(default=None, type='raw'),
            archive=dict(type='path'),
        ),
        supports_check_mode=True
    )
    dest = module.params['dest']
    repo = module.params['repo']
    version = module.params['version']
    remote = module.params['remote']
    refspec = module.params['refspec']
    force = module.params['force']
    depth = module.params['depth']
    update = module.params['update']
    allow_clone = module.params['clone']
    bare = module.params['bare']
    verify_commit = module.params['verify_commit']
    reference = module.params['reference']
    git_path = module.params['executable'] or module.get_bin_path('git', True)
    key_file = module.params['key_file']
    ssh_opts = module.params['ssh_opts']
    umask = module.params['umask']
    archive = module.params['archive']
    result = dict(changed=False, warnings=list())
    # accept_hostkey is implemented by injecting the ssh option
    if module.params['accept_hostkey']:
        if ssh_opts is not None:
            if "-o StrictHostKeyChecking=no" not in ssh_opts:
                ssh_opts += " -o StrictHostKeyChecking=no"
        else:
            ssh_opts = "-o StrictHostKeyChecking=no"
    # evaluate and set the umask before doing anything else
    if umask is not None:
        if not isinstance(umask, string_types):
            module.fail_json(msg="umask must be defined as a quoted octal integer")
        try:
            umask = int(umask, 8)
        except:
            module.fail_json(msg="umask must be an octal integer",
                             details=str(sys.exc_info()[1]))
        os.umask(umask)
    # Certain features such as depth require a file:/// protocol for path based urls
    # so force a protocol here ...
    if repo.startswith('/'):
        repo = 'file://' + repo
    # We screenscrape a huge amount of git commands so use C locale anytime we
    # call run_command()
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
    gitconfig = None
    if not dest and allow_clone:
        module.fail_json(msg="the destination directory must be specified unless clone=no")
    elif dest:
        dest = os.path.abspath(dest)
        # the presence of this config file is used below to decide whether
        # a repository already exists at dest
        if bare:
            gitconfig = os.path.join(dest, 'config')
        else:
            gitconfig = os.path.join(dest, '.git', 'config')
    # create a wrapper script and export
    # GIT_SSH=<path> as an environment variable
    # for git to use the wrapper script
    ssh_wrapper = write_ssh_wrapper()
    set_git_ssh(ssh_wrapper, key_file, ssh_opts)
    module.add_cleanup_file(path=ssh_wrapper)
    git_version_used = git_version(git_path, module)
    if depth is not None and git_version_used < LooseVersion('1.9.1'):
        result['warnings'].append("Your git version is too old to fully support the depth argument. Falling back to full checkouts.")
        depth = None
    recursive = module.params['recursive']
    track_submodules = module.params['track_submodules']
    result.update(before=None)
    local_mods = False
    need_fetch = True
    if (dest and not os.path.exists(gitconfig)) or (not dest and not allow_clone):
        # if there is no git configuration, do a clone operation unless:
        # * the user requested no clone (they just want info)
        # * we're doing a check mode test
        # In those cases we do an ls-remote
        if module.check_mode or not allow_clone:
            remote_head = get_remote_head(git_path, module, dest, version, repo, bare)
            result.update(changed=True, after=remote_head)
            if module._diff:
                diff = get_diff(module, git_path, dest, repo, remote, depth, bare, result['before'], result['after'])
                if diff:
                    result['diff'] = diff
            module.exit_json(**result)
        # there's no git config, so clone
        clone(git_path, module, repo, dest, remote, depth, version, bare, reference, refspec, verify_commit)
        need_fetch = False
    elif not update:
        # Just return having found a repo already in the dest path
        # this does no checking that the repo is the actual repo
        # requested.
        result['before'] = get_version(module, git_path, dest)
        result.update(after=result['before'])
        module.exit_json(**result)
    else:
        # else do a pull
        local_mods = has_local_mods(module, git_path, dest, bare)
        result['before'] = get_version(module, git_path, dest)
        if local_mods:
            # failure should happen regardless of check mode
            if not force:
                module.fail_json(msg="Local modifications exist in repository (force=no).", **result)
            # if force and in non-check mode, do a reset
            if not module.check_mode:
                reset(git_path, module, dest)
                result.update(changed=True, msg='Local modifications exist.')
        # exit if already at desired sha version
        if module.check_mode:
            # check mode must not modify the repo, so only predict whether
            # set_remote_url would have changed anything
            remote_url = get_remote_url(git_path, module, dest, remote)
            remote_url_changed = remote_url and remote_url != repo and unfrackgitpath(remote_url) != unfrackgitpath(repo)
        else:
            remote_url_changed = set_remote_url(git_path, module, repo, dest, remote)
        result.update(remote_url_changed=remote_url_changed)
        if module.check_mode:
            remote_head = get_remote_head(git_path, module, dest, version, remote, bare)
            result.update(changed=(result['before'] != remote_head or remote_url_changed), after=remote_head)
            # FIXME: This diff should fail since the new remote_head is not fetched yet?!
            if module._diff:
                diff = get_diff(module, git_path, dest, repo, remote, depth, bare, result['before'], result['after'])
                if diff:
                    result['diff'] = diff
            module.exit_json(**result)
        else:
            fetch(git_path, module, repo, dest, version, remote, depth, bare, refspec, git_version_used)
        result['after'] = get_version(module, git_path, dest)
    # switch to version specified regardless of whether
    # we got new revisions from the repository
    if not bare:
        switch_version(git_path, module, dest, remote, version, verify_commit, depth)
    # Deal with submodules
    submodules_updated = False
    if recursive and not bare:
        submodules_updated = submodules_fetch(git_path, module, remote, track_submodules, dest)
        if submodules_updated:
            result.update(submodules_changed=submodules_updated)
            if module.check_mode:
                result.update(changed=True, after=remote_head)
                module.exit_json(**result)
            # Switch to version specified
            submodule_update(git_path, module, dest, track_submodules, force=force)
    # determine if we changed anything
    result['after'] = get_version(module, git_path, dest)
    if result['before'] != result['after'] or local_mods or submodules_updated or remote_url_changed:
        result.update(changed=True)
        if module._diff:
            diff = get_diff(module, git_path, dest, repo, remote, depth, bare, result['before'], result['after'])
            if diff:
                result['diff'] = diff
    if archive:
        # Git archive is not supported by all git servers, so
        # we will first clone and perform git archive from local directory
        if module.check_mode:
            result.update(changed=True)
            module.exit_json(**result)
        create_archive(git_path, module, dest, archive, version, repo, result)
    # cleanup the wrapper script
    if ssh_wrapper:
        try:
            os.remove(ssh_wrapper)
        except OSError:
            # No need to fail if the file already doesn't exist
            pass
    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
required: false
default: None
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.urls import path, include
from .routers import router
from . import views
# URL namespace used when reversing, e.g. reverse("milage:index").
app_name = "milage"
# Route table: DRF router endpoints under /api/, two class-based views,
# and the index page at the site root.
urlpatterns = [
    path("api/", include(router.urls), name="api_router"),
    path("class-based/", views.ClassBasedView.as_view(), name="class_based_drf"),
    path(
        "class-based-detail/<int:pk>",
        views.ClassBasedDetailView.as_view(),
        name="class_detail",
    ),
    path("", views.BaseView.as_view(), name="index"),
]
| |
<|file_name|>emission.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Python module for generating fake total emission in a magnitude band along a sightline.
This uses the Arepo/Illustris output GFM_Photometrics to get photometric band data,
which may or may not be accurate.
"""
from __future__ import print_function
import math
import os.path as path
import shutil
import h5py
import numpy as np
from . import spectra as ss
def maginJy(mag, band):
    """Convert a magnitude to flux in Jansky, according to wikipedia's table"""
    # zero-point fluxes (Jy) per photometric band
    zeropoints = {'U': 1810, 'B': 4260, 'V': 3640, 'K': 670,
                  'g': 3730, 'r': 4490, 'i': 4760, 'z': 4810}
    return zeropoints[band] * 10 ** (mag / (-2.5))
def apparentflux(DL):
    """Convert flux from absolute magnitudes (flux at 10 pc distance) to apparent flux in Jy.
    DL is luminosity distance in Mpc"""
    # inverse-square dimming from 10 pc out to DL (converted Mpc -> pc)
    ratio = 10 / (DL * 1e6)
    return ratio ** 2
def distance(arcsec, redshift, hubble, OmegaM):
    """Find the size of something in comoving kpc/h from the size on the sky in arcseconds.

    hubble is the dimensionless Hubble parameter h (H0 = 100*h km/s/Mpc);
    OmegaM is the matter density of a flat cosmology.
    """
    #First arcsec to radians
    #2 pi radians -> degrees -> arcminute -> arcsecond
    rad = 2*math.pi/360./60./60. * arcsec
    #Then to physical kpc
    atime = 1./(1+redshift)
    # angular-diameter distance (Mpc) from the cosmology calculator
    (_, DA, _) = calculator(hubble*100, OmegaM, redshift)
    # DA [Mpc] * angle [rad] * 1000 -> physical size in kpc
    size = DA * rad * 1000
    #Comoving kpc/h
    size = size /( atime/ hubble)
    return size
class EmissionSpectra(ss.Spectra):
"""Class to compute the emission from stars in B band around the DLA spectrum"""
stellar = {}
def _read_stellar_data(self,fn, band, hhmult=10.):
"""Read the particle data for a single interpolation"""
bands = {'U':0, 'B':1, 'V':2,'K':3,'g':4,'r':5,'i':6,'z':7}
nband = bands[band]
pos = self.snapshot_set.get_data(4,"Position", segment = fn).astype(np.float32)
#Set each stellar radius to the pixel size
hh = hhmult*np.ones(np.shape(pos)[0], dtype=np.float32)
#Find particles we care about<|fim▁hole|> raise ValueError("No stars")
pos = pos[ind,:]
hh = hh[ind]
#Find the magnitude of stars in this band
emflux = maginJy(self.snapshot_set.get_data(4,"GFM_StellarPhotometrics", segment = fn).astype(np.float32)[ind][:,nband],band)
fluxx = np.array([ np.sum(emflux[self.particles_near_lines(pos, hh,np.array([ax,]),np.array([cofm,]))]) for (ax, cofm) in zip(self.axis, self.cofm)])
#print np.sum(emflux)
return fluxx
#return (pos, emflux, hh)
def get_emflux(self, band, pixelsz=1):
"""
Get the density weighted flux in each pixel for a given species.
band: rest-frame optical band observed in
pixelsz: Angular size of the pixels in arcseconds
"""
#Mapping from bandname to number
dist = distance(pixelsz, 1./self.atime-1, self.hubble, self.OmegaM)
try:
self._really_load_array((band,dist), self.stellar, "stellar")
emflux = self.stellar[(band,dist)]
except KeyError:
emflux = np.zeros(self.NumLos,dtype=np.float32)
for fn in self.snapshot_set.get_n_segments():
try:
emflux += self._read_stellar_data(fn, band,dist)
except ValueError:
pass
self.stellar[(band,dist)] = emflux
(_,_,DL) = calculator(self.hubble*100, self.OmegaM, 1./self.atime-1)
emflux *= apparentflux(DL)
return emflux
def save_file(self):
"""
Saves spectra to a file, because they are slow to generate.
File is by default to be $snap_dir/snapdir_$snapnum/spectra.hdf5.
"""
#We should make sure we have loaded all lazy-loaded things first.
self._load_all_multihash(self.stellar, "stellar")
self._load_all_multihash(self.tau_obs, "tau_obs")
self._load_all_multihash(self.tau, "tau")
self._load_all_multihash(self.colden, "colden")
try:
self._load_all_multihash(self.colden, "velocity")
except IOError:
pass
try:
if path.exists(self.savefile):
shutil.move(self.savefile,self.savefile+".backup")
f=h5py.File(self.savefile,'w')
except IOError:
try:
f=h5py.File(self.savefile,'w')
except IOError:
raise IOError("Could not open ",self.savefile," for writing")
grp_grid = f.create_group("stellar")
self._save_multihash(self.stellar, grp_grid)
self._save_file(f)
def calculator(H0, Omega_M, zz):
    """Compute comoving, angular-diameter and luminosity distances assuming flatness.

    Freely adapted from James Schombert's python version of Ned Wright's cosmology calculator.

    Inputs:
        H0      - Hubble constant in km/s/Mpc
        Omega_M - Omega_matter
        zz      - redshift to compute distances to
    Returns:
        (comoving distance, angular diameter distance, luminosity distance),
        all in physical Mpc.
    """
    light = 299792.458  # speed of light in km/sec
    h = H0 / 100.
    # radiation density: includes 3 massless neutrino species, T0 = 2.72528
    WR = 4.165E-5 / (h * h)
    # flatness fixes the vacuum energy density
    WV = 1 - Omega_M - WR
    # scale factor corresponding to the target redshift
    az = 1.0 / (1. + zz)
    n = 1000  # number of points used in the integral
    # integrate da / (a^2 H(a)/H0) from az to 1 on a log-spaced grid
    a = np.logspace(np.log10(az), 0, n)
    a2H = a * a * np.sqrt(Omega_M / a ** 3 + WR / (a ** 4) + WV)
    DCMR = np.trapz(1. / a2H, a)
    hubble_dist = light / H0
    DC_Mpc = hubble_dist * DCMR        # comoving radial distance
    DA_Mpc = hubble_dist * az * DCMR   # angular diameter distance
    DL_Mpc = DA_Mpc / (az * az)        # luminosity distance
    return (DC_Mpc, DA_Mpc, DL_Mpc)
|
ind = self.particles_near_lines(pos, hh,self.axis,self.cofm)
#print np.size(ind)
#Do nothing if there aren't any, and return a suitably shaped zero array
if np.size(ind) == 0:
|
raise Exception("tests where moved to emzed to avoid circular dependencies")
<|file_name|>portfolioDirective.js<|end_file_name|><|fim▁begin|>app.directive("portfolio", function(socket, upload, ms, filterFilter) {
return {
restrict: 'E',
templateUrl: '/directives/templates/portfolioDirective.html',
scope: false,
link: function(scope, elements, attrs) {
scope.uploads = [];
scope.controllerPlaceholder = false;
scope.portfolioController = 'Current Projects';
scope.filteredProjects = filterFilter(scope.userData.projects, function(value, index){
if(value.status == 'running'){
//alert('found file' + value.name);
return true
};
});
if (scope.userData.projects == undefined) {
scope.userData.projects = [];
}
/* if (localStorage['userData']) {
var newData = JSON.parse(localStorage['userData']);
console.log(" portfolio localStorage exists");
console.log(newData);
scope.filteredProjects = filterFilter(scope.userData.projects, function(value, index){
if(value.status == 'running'){
//alert('found file' + value.name);
return true
};
});
if (scope.userData.projects == undefined) {
scope.userData.projects = [];
}
} else{
console.log(" portfolio localStorage does not exist");
socket.emit('restricted', 'request portfolio', localStorage['token']);
};*/
socket.on('restricted', 'recieve portfolio',function(data) {
var newData = JSON.parse(data);
console.log(newData);
scope.userData.projects = newData.projects;
scope.filteredProjects = filterFilter(scope.userData.projects, function(value, index){
if(value.status == 'running'){
//alert('found file' + value.name);
return true
};
});
if (scope.userData.projects == undefined) {
scope.userData.projects = [];
}
console.log(scope.userData.projects);
});
socket.on('restricted', 'recieve portfolioUpdate',function(project) {
scope.userData.projects.push(project);
console.log(scope.userData.projects);
var oldData = JSON.parse(localStorage['userData']);
oldData.projects = scope.userData.projects;
scope.filteredProjects = filterFilter(scope.userData.projects, function(value, index){
if(value.status == 'running'){
//alert('found file' + value.name);
return true
};
});
localStorage['userData']= JSON.stringify(oldData);
console.log('updated portfolio localStorage');
console.log(localStorage['userData']);
scope.$root.$broadcast('portfolioCropModalWindowClose');
});
/*Bubbles
var chart, render_vis, display_all, display_year, toggle_view, update;
chart = null;
render_vis = function(csv) {
console.log(csv);
chart = new BubbleChart(csv);
chart.start();
return chart.display_group_all();
};
update = function(csv) {
chart.update_vis(csv);
chart.start();
return chart.display_group_all();
};
//d3.csv("data/gates_money_current.csv", render_vis);
render_vis(scope.userData.projects);
*/
document.getElementById('portfolioDropZone').addEventListener("drop", function(event){
event.preventDefault();
console.log(event);
if ('dataTransfer' in event) {
var files = event.dataTransfer.files;
} else if('originalTarget' in event){
var files = event.originalTarget.files;
}else if('target' in event){
var files = event.target.files;
}else{
var files = event.files;
};
for(var i=0; i<files.length; i++){
scope.uploads.push([ms.sealer("portfolio" + i, files[i].name), files]);
};
}, false);
//3
document.getElementById('portfolioDrop').addEventListener("change", function(event){
event.preventDefault();
if ('dataTransfer' in event) {
var files = event.dataTransfer.files;
} else if('originalTarget' in event){
var files = event.originalTarget.files;
}else if('target' in event){
var files = event.target.files;
}else{
var files = event.files;
}; console.log(files);
for(var i=0; i<files.length; i++){
scope.uploads.push([ms.sealer("portfolio" + i, files[i].name), files]);
};
}, false);
upload.listenOnInput(document.getElementById('portfolioDrop'));
upload.listenOnDrop(document.getElementById('portfolioDropZone'));
scope.changeData = function(expressionArg) {
scope.filteredProjects = filterFilter(scope.userData.projects, function(value, index){
if(value.status == expressionArg){
return true
};
});
};
/*scope.changeData = function(value) {
if (value == "Current Projects") {
d3.csv("data/gates_money_current.csv", update);
scope.controllerPlaceholder = false;
} else if (value == "Portfolio"){
if (scope.controllerPlaceholder) {
chart.display_group_all();
}else{
d3.csv("data/gates_money.csv", update);
scope.controllerPlaceholder = true;
}
} else{
chart.display_by_year();
}
};*/
}
};<|fim▁hole|><|fim▁end|>
|
});
|
import py
from rpython.rlib.jit import JitDriver, hint, set_param
from rpython.rlib.jit import unroll_safe, dont_look_inside, promote
from rpython.rlib.objectmodel import we_are_translated
from rpython.rlib.debug import fatalerror
from rpython.jit.metainterp.test.support import LLJitMixin
from rpython.jit.codewriter.policy import StopAtXPolicy
from rpython.rtyper.annlowlevel import hlstr
from rpython.jit.metainterp.warmspot import get_stats
from rpython.jit.backend.llsupport import codemap
class RecursiveTests:
def test_simple_recursion(self):
myjitdriver = JitDriver(greens=[], reds=['n', 'm'])
def f(n):
m = n - 2
while True:
myjitdriver.jit_merge_point(n=n, m=m)
n -= 1
if m == n:
return main(n) * 2
myjitdriver.can_enter_jit(n=n, m=m)
def main(n):
if n > 0:
return f(n+1)
else:
return 1
res = self.meta_interp(main, [20], enable_opts='')
assert res == main(20)
self.check_history(call_i=0)
def test_simple_recursion_with_exc(self):
myjitdriver = JitDriver(greens=[], reds=['n', 'm'])
class Error(Exception):
pass
def f(n):
m = n - 2
while True:
myjitdriver.jit_merge_point(n=n, m=m)
n -= 1
if n == 10:
raise Error
if m == n:
try:
return main(n) * 2
except Error:
return 2
myjitdriver.can_enter_jit(n=n, m=m)
def main(n):
if n > 0:
return f(n+1)
else:
return 1
res = self.meta_interp(main, [20], enable_opts='')
assert res == main(20)
def test_recursion_three_times(self):
myjitdriver = JitDriver(greens=[], reds=['n', 'm', 'total'])
def f(n):
m = n - 3
total = 0
while True:
myjitdriver.jit_merge_point(n=n, m=m, total=total)
n -= 1
total += main(n)
if m == n:
return total + 5
myjitdriver.can_enter_jit(n=n, m=m, total=total)
def main(n):
if n > 0:
return f(n)
else:
return 1
print
for i in range(1, 11):
print '%3d %9d' % (i, f(i))
res = self.meta_interp(main, [10], enable_opts='')
assert res == main(10)
self.check_enter_count_at_most(11)
def test_bug_1(self):
myjitdriver = JitDriver(greens=[], reds=['n', 'i', 'stack'])
def opaque(n, i):
if n == 1 and i == 19:
for j in range(20):
res = f(0) # recurse repeatedly, 20 times
assert res == 0
def f(n):
stack = [n]
i = 0
while i < 20:
myjitdriver.can_enter_jit(n=n, i=i, stack=stack)
myjitdriver.jit_merge_point(n=n, i=i, stack=stack)
opaque(n, i)
i += 1
return stack.pop()
res = self.meta_interp(f, [1], enable_opts='', repeat=2,
policy=StopAtXPolicy(opaque))
assert res == 1
def get_interpreter(self, codes):
ADD = "0"
JUMP_BACK = "1"
CALL = "2"
EXIT = "3"
def getloc(i, code):
return 'code="%s", i=%d' % (code, i)
jitdriver = JitDriver(greens = ['i', 'code'], reds = ['n'],
get_printable_location = getloc)
def interpret(codenum, n, i):
code = codes[codenum]
while i < len(code):
jitdriver.jit_merge_point(n=n, i=i, code=code)
op = code[i]
if op == ADD:
n += 1
i += 1
elif op == CALL:
n = interpret(1, n, 1)
i += 1
elif op == JUMP_BACK:
if n > 20:
return 42
i -= 2
jitdriver.can_enter_jit(n=n, i=i, code=code)
elif op == EXIT:
return n
else:
raise NotImplementedError
return n
return interpret
def test_inline(self):
code = "021"
subcode = "00"
codes = [code, subcode]
f = self.get_interpreter(codes)
assert self.meta_interp(f, [0, 0, 0], enable_opts='') == 42
self.check_resops(call_may_force_i=1, int_add=1, call=0)
assert self.meta_interp(f, [0, 0, 0], enable_opts='',
inline=True) == 42
self.check_resops(call=0, int_add=2, call_may_force_i=0,
guard_no_exception=0)
def test_inline_jitdriver_check(self):
code = "021"
subcode = "100"
codes = [code, subcode]
f = self.get_interpreter(codes)
assert self.meta_interp(f, [0, 0, 0], enable_opts='',
inline=True) == 42
# the call is fully inlined, because we jump to subcode[1], thus
# skipping completely the JUMP_BACK in subcode[0]
self.check_resops(call=0, call_may_force=0, call_assembler=0)
def test_guard_failure_in_inlined_function(self):
def p(pc, code):
code = hlstr(code)
return "%s %d %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n'],
get_printable_location=p)
def f(code, n):
pc = 0
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc)
op = code[pc]
if op == "-":
n -= 1
elif op == "c":
n = f("---i---", n)
elif op == "i":
if n % 5 == 1:
return n
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=0)
pc = 0
continue
else:
assert 0
pc += 1
return n
def main(n):
return f("c-l", n)
print main(100)
res = self.meta_interp(main, [100], enable_opts='', inline=True)
assert res == 0
def test_guard_failure_and_then_exception_in_inlined_function(self):
def p(pc, code):
code = hlstr(code)
return "%s %d %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n', 'flag'],
get_printable_location=p)
def f(code, n):
pc = 0
flag = False
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc, flag=flag)
op = code[pc]
if op == "-":
n -= 1
elif op == "c":
try:
n = f("---ir---", n)
except Exception:
return n
elif op == "i":
if n < 200:
flag = True
elif op == "r":
if flag:
raise Exception
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=0, flag=flag)
pc = 0
continue
else:
assert 0
pc += 1
return n
def main(n):
return f("c-l", n)
print main(1000)
res = self.meta_interp(main, [1000], enable_opts='', inline=True)
assert res == main(1000)
def test_exception_in_inlined_function(self):
def p(pc, code):
code = hlstr(code)
return "%s %d %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n'],
get_printable_location=p)
class Exc(Exception):
pass
def f(code, n):
pc = 0
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc)
op = code[pc]
if op == "-":
n -= 1
elif op == "c":
try:
n = f("---i---", n)
except Exc:
pass
elif op == "i":
if n % 5 == 1:
raise Exc
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=0)
pc = 0
continue
else:
assert 0
pc += 1
return n
def main(n):
return f("c-l", n)
res = self.meta_interp(main, [100], enable_opts='', inline=True)
assert res == main(100)
def test_recurse_during_blackholing(self):
# this passes, if the blackholing shortcut for calls is turned off
# it fails, it is very delicate in terms of parameters,
# bridge/loop creation order
def p(pc, code):
code = hlstr(code)
return "%s %d %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n'],
get_printable_location=p)
def f(code, n):
pc = 0
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc)
op = code[pc]
if op == "-":
n -= 1
elif op == "c":
if n < 70 and n % 3 == 1:
n = f("--", n)
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=0)
pc = 0
continue
else:
assert 0
pc += 1
return n
def main(n):
set_param(None, 'threshold', 3)
set_param(None, 'trace_eagerness', 5)
return f("c-l", n)
expected = main(100)
res = self.meta_interp(main, [100], enable_opts='', inline=True)
assert res == expected
def check_max_trace_length(self, length):
for loop in get_stats().loops:
assert len(loop.operations) <= length + 5 # because we only check once per metainterp bytecode
for op in loop.operations:
if op.is_guard() and hasattr(op.getdescr(), '_debug_suboperations'):
assert len(op.getdescr()._debug_suboperations) <= length + 5
def test_inline_trace_limit(self):
myjitdriver = JitDriver(greens=[], reds=['n'])
def recursive(n):
if n > 0:
return recursive(n - 1) + 1
return 0
def loop(n):
set_param(myjitdriver, "threshold", 10)
pc = 0
while n:
myjitdriver.can_enter_jit(n=n)
myjitdriver.jit_merge_point(n=n)
n = recursive(n)
n -= 1
return n
TRACE_LIMIT = 66
res = self.meta_interp(loop, [100], enable_opts='', inline=True, trace_limit=TRACE_LIMIT)
assert res == 0
self.check_max_trace_length(TRACE_LIMIT)
self.check_enter_count_at_most(10) # maybe
self.check_aborted_count(6)
def test_trace_limit_bridge(self):
def recursive(n):
if n > 0:
return recursive(n - 1) + 1
return 0
myjitdriver = JitDriver(greens=[], reds=['n'])
def loop(n):
set_param(None, "threshold", 4)
set_param(None, "trace_eagerness", 2)
while n:
myjitdriver.can_enter_jit(n=n)
myjitdriver.jit_merge_point(n=n)
if n % 5 == 0:
n -= 1
if n < 50:
n = recursive(n)
n -= 1
return n
TRACE_LIMIT = 20
res = self.meta_interp(loop, [100], enable_opts='', inline=True, trace_limit=TRACE_LIMIT)
self.check_max_trace_length(TRACE_LIMIT)
self.check_aborted_count(8)
self.check_enter_count_at_most(30)
def test_trace_limit_with_exception_bug(self):
myjitdriver = JitDriver(greens=[], reds=['n'])
@unroll_safe
def do_stuff(n):
while n > 0:
n -= 1
raise ValueError
def loop(n):
pc = 0
while n > 80:
myjitdriver.can_enter_jit(n=n)
myjitdriver.jit_merge_point(n=n)
try:
do_stuff(n)
except ValueError:
# the trace limit is checked when we arrive here, and we
# have the exception still in last_exc_value_box at this
# point -- so when we abort because of a trace too long,
# the exception is passed to the blackhole interp and
# incorrectly re-raised from here
pass
n -= 1
return n
TRACE_LIMIT = 66
res = self.meta_interp(loop, [100], trace_limit=TRACE_LIMIT)
assert res == 80
def test_max_failure_args(self):
FAILARGS_LIMIT = 10
jitdriver = JitDriver(greens = [], reds = ['i', 'n', 'o'])
class A(object):
def __init__(self, i0, i1, i2, i3, i4, i5, i6, i7, i8, i9):
self.i0 = i0
self.i1 = i1
self.i2 = i2
self.i3 = i3
self.i4 = i4
self.i5 = i5
self.i6 = i6
self.i7 = i7
self.i8 = i8
self.i9 = i9
def loop(n):
i = 0
o = A(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
while i < n:
jitdriver.can_enter_jit(o=o, i=i, n=n)
jitdriver.jit_merge_point(o=o, i=i, n=n)
o = A(i, i + 1, i + 2, i + 3, i + 4, i + 5,
i + 6, i + 7, i + 8, i + 9)
i += 1
return o
res = self.meta_interp(loop, [20], failargs_limit=FAILARGS_LIMIT,
listops=True)
self.check_aborted_count(4)
def test_max_failure_args_exc(self):
FAILARGS_LIMIT = 10
jitdriver = JitDriver(greens = [], reds = ['i', 'n', 'o'])
class A(object):
def __init__(self, i0, i1, i2, i3, i4, i5, i6, i7, i8, i9):
self.i0 = i0
self.i1 = i1
self.i2 = i2
self.i3 = i3
self.i4 = i4
self.i5 = i5
self.i6 = i6
self.i7 = i7
self.i8 = i8
self.i9 = i9
def loop(n):
i = 0
o = A(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
while i < n:
jitdriver.can_enter_jit(o=o, i=i, n=n)
jitdriver.jit_merge_point(o=o, i=i, n=n)
o = A(i, i + 1, i + 2, i + 3, i + 4, i + 5,
i + 6, i + 7, i + 8, i + 9)
i += 1
raise ValueError
def main(n):
try:
loop(n)
return 1
except ValueError:
return 0
res = self.meta_interp(main, [20], failargs_limit=FAILARGS_LIMIT,
listops=True)
assert not res
self.check_aborted_count(4)
def test_set_param_inlining(self):
myjitdriver = JitDriver(greens=[], reds=['n', 'recurse'])
def loop(n, recurse=False):
while n:
myjitdriver.jit_merge_point(n=n, recurse=recurse)
n -= 1
if not recurse:
loop(10, True)
myjitdriver.can_enter_jit(n=n, recurse=recurse)
return n
TRACE_LIMIT = 66
def main(inline):
set_param(None, "threshold", 10)
set_param(None, 'function_threshold', 60)
if inline:
set_param(None, 'inlining', True)
else:
set_param(None, 'inlining', False)
return loop(100)
res = self.meta_interp(main, [0], enable_opts='', trace_limit=TRACE_LIMIT)
self.check_resops(call=0, call_may_force_i=1)
res = self.meta_interp(main, [1], enable_opts='', trace_limit=TRACE_LIMIT)
self.check_resops(call=0, call_may_force=0)
def test_trace_from_start(self):
def p(pc, code):
code = hlstr(code)
return "'%s' at %d: %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n'],
get_printable_location=p)
def f(code, n):
pc = 0
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc)
op = code[pc]
if op == "+":
n += 7
elif op == "-":
n -= 1
elif op == "c":
n = f('---', n)
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=1)
pc = 1
continue
else:
assert 0
pc += 1
return n
def g(m):
if m > 1000000:
f('', 0)
result = 0
for i in range(m):
result += f('+-cl--', i)
res = self.meta_interp(g, [50], backendopt=True)
assert res == g(50)
py.test.skip("tracing from start is by now only longer enabled "
"if a trace gets too big")
self.check_tree_loop_count(3)
self.check_history(int_add=1)
def test_dont_inline_huge_stuff(self):
def p(pc, code):
code = hlstr(code)
return "%s %d %s" % (code, pc, code[pc])
myjitdriver = JitDriver(greens=['pc', 'code'], reds=['n'],
get_printable_location=p,
is_recursive=True)
def f(code, n):
pc = 0
while pc < len(code):
myjitdriver.jit_merge_point(n=n, code=code, pc=pc)
op = code[pc]
if op == "-":
n -= 1
elif op == "c":
f('--------------------', n)
elif op == "l":
if n > 0:
myjitdriver.can_enter_jit(n=n, code=code, pc=0)
pc = 0
continue
else:
assert 0
pc += 1
return n
def g(m):
set_param(None, 'inlining', True)
# carefully chosen threshold to make sure that the inner function
# cannot be inlined, but the inner function on its own is small
# enough
set_param(None, 'trace_limit', 40)
if m > 1000000:
f('', 0)
result = 0
for i in range(m):
result += f('-c-----------l-', i+100)
self.meta_interp(g, [10], backendopt=True)
self.check_aborted_count(1)
self.check_resops(call=0, call_assembler_i=2)
self.check_jitcell_token_count(2)
def test_directly_call_assembler(self):
driver = JitDriver(greens = ['codeno'], reds = ['i'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno):
i = 0
while i < 10:
driver.can_enter_jit(codeno = codeno, i = i)
driver.jit_merge_point(codeno = codeno, i = i)
if codeno == 2:
portal(1)
i += 1
self.meta_interp(portal, [2], inline=True)
self.check_history(call_assembler_n=1)
def test_recursion_cant_call_assembler_directly(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'j'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno, j):
i = 1
while 1:
driver.jit_merge_point(codeno=codeno, i=i, j=j)
if (i >> 1) == 1:
if j == 0:
return
portal(2, j - 1)
elif i == 5:
return
i += 1
driver.can_enter_jit(codeno=codeno, i=i, j=j)
portal(2, 5)
from rpython.jit.metainterp import compile, pyjitpl
pyjitpl._warmrunnerdesc = None
trace = []
def my_ctc(*args):
looptoken = original_ctc(*args)
trace.append(looptoken)
return looptoken
original_ctc = compile.compile_tmp_callback
try:
compile.compile_tmp_callback = my_ctc
self.meta_interp(portal, [2, 5], inline=True)
self.check_resops(call_may_force=0, call_assembler_n=2)
finally:
compile.compile_tmp_callback = original_ctc
# check that we made a temporary callback
assert len(trace) == 1
# and that we later redirected it to something else
try:
redirected = pyjitpl._warmrunnerdesc.cpu._redirected_call_assembler
except AttributeError:
pass # not the llgraph backend
else:
print redirected
assert redirected.keys() == trace
def test_recursion_cant_call_assembler_directly_with_virtualizable(self):
# exactly the same logic as the previous test, but with 'frame.j'
# instead of just 'j'
class Frame(object):
_virtualizable_ = ['j']
def __init__(self, j):
self.j = j
driver = JitDriver(greens = ['codeno'], reds = ['i', 'frame'],
virtualizables = ['frame'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno, frame):
i = 1
while 1:
driver.jit_merge_point(codeno=codeno, i=i, frame=frame)
if (i >> 1) == 1:
if frame.j == 0:
return
portal(2, Frame(frame.j - 1))
elif i == 5:
return
i += 1
driver.can_enter_jit(codeno=codeno, i=i, frame=frame)
def main(codeno, j):
portal(codeno, Frame(j))
main(2, 5)
from rpython.jit.metainterp import compile, pyjitpl
pyjitpl._warmrunnerdesc = None
trace = []
def my_ctc(*args):
looptoken = original_ctc(*args)
trace.append(looptoken)
return looptoken
original_ctc = compile.compile_tmp_callback
try:
compile.compile_tmp_callback = my_ctc
self.meta_interp(main, [2, 5], inline=True)
self.check_resops(call_may_force=0, call_assembler_n=2)
finally:
compile.compile_tmp_callback = original_ctc
# check that we made a temporary callback
assert len(trace) == 1
# and that we later redirected it to something else
try:
redirected = pyjitpl._warmrunnerdesc.cpu._redirected_call_assembler
except AttributeError:
pass # not the llgraph backend
else:
print redirected
assert redirected.keys() == trace
def test_directly_call_assembler_return(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'k'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno):
i = 0
k = codeno
while i < 10:
driver.can_enter_jit(codeno = codeno, i = i, k = k)
driver.jit_merge_point(codeno = codeno, i = i, k = k)
if codeno == 2:
k = portal(1)
i += 1
return k
self.meta_interp(portal, [2], inline=True)
self.check_history(call_assembler_i=1)
def test_directly_call_assembler_raise(self):
class MyException(Exception):
def __init__(self, x):
self.x = x
driver = JitDriver(greens = ['codeno'], reds = ['i'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno):
i = 0
while i < 10:
driver.can_enter_jit(codeno = codeno, i = i)
driver.jit_merge_point(codeno = codeno, i = i)
if codeno == 2:
try:
portal(1)
except MyException as me:
i += me.x
i += 1
if codeno == 1:
raise MyException(1)
self.meta_interp(portal, [2], inline=True)
self.check_history(call_assembler_n=1)
def test_directly_call_assembler_fail_guard(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'k'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno, k):
i = 0
while i < 10:
driver.can_enter_jit(codeno=codeno, i=i, k=k)
driver.jit_merge_point(codeno=codeno, i=i, k=k)
if codeno == 2:
k += portal(1, k)
elif k > 40:
if i % 2:
k += 1
else:
k += 2
k += 1
i += 1
return k
res = self.meta_interp(portal, [2, 0], inline=True)
assert res == 13542
def test_directly_call_assembler_virtualizable(self):
class Thing(object):
def __init__(self, val):
self.val = val
class Frame(object):
_virtualizable_ = ['thing']
driver = JitDriver(greens = ['codeno'], reds = ['i', 's', 'frame'],
virtualizables = ['frame'],
get_printable_location = lambda codeno : str(codeno))
def main(codeno):
frame = Frame()
frame.thing = Thing(0)
result = portal(codeno, frame)
return result
def portal(codeno, frame):
i = 0
s = 0
while i < 10:
driver.can_enter_jit(frame=frame, codeno=codeno, i=i, s=s)
driver.jit_merge_point(frame=frame, codeno=codeno, i=i, s=s)
nextval = frame.thing.val
if codeno == 0:
subframe = Frame()
subframe.thing = Thing(nextval)
nextval = portal(1, subframe)
s += subframe.thing.val
frame.thing = Thing(nextval + 1)
i += 1
return frame.thing.val + s
res = self.meta_interp(main, [0], inline=True)
self.check_resops(call=0, cond_call=2)
assert res == main(0)
def test_directly_call_assembler_virtualizable_reset_token(self):
py.test.skip("not applicable any more, I think")
from rpython.rtyper.lltypesystem import lltype
from rpython.rlib.debug import llinterpcall
class Thing(object):
def __init__(self, val):
self.val = val
class Frame(object):
_virtualizable_ = ['thing']
driver = JitDriver(greens = ['codeno'], reds = ['i', 'frame'],
virtualizables = ['frame'],
get_printable_location = lambda codeno : str(codeno))
@dont_look_inside
def check_frame(subframe):
if we_are_translated():
llinterpcall(lltype.Void, check_ll_frame, subframe)
def check_ll_frame(ll_subframe):
# This is called with the low-level Struct that is the frame.
# Check that the vable_token was correctly reset to zero.
# Note that in order for that test to catch failures, it needs
# three levels of recursion: the vable_token of the subframe<|fim▁hole|> # at the level 2 is set to a non-zero value when doing the
# call to the level 3 only. This used to fail when the test
# is run via rpython.jit.backend.x86.test.test_recursive.
from rpython.jit.metainterp.virtualizable import TOKEN_NONE
assert ll_subframe.vable_token == TOKEN_NONE
def main(codeno):
frame = Frame()
frame.thing = Thing(0)
portal(codeno, frame)
return frame.thing.val
def portal(codeno, frame):
i = 0
while i < 5:
driver.can_enter_jit(frame=frame, codeno=codeno, i=i)
driver.jit_merge_point(frame=frame, codeno=codeno, i=i)
nextval = frame.thing.val
if codeno < 2:
subframe = Frame()
subframe.thing = Thing(nextval)
nextval = portal(codeno + 1, subframe)
check_frame(subframe)
frame.thing = Thing(nextval + 1)
i += 1
return frame.thing.val
res = self.meta_interp(main, [0], inline=True)
assert res == main(0)
def test_directly_call_assembler_virtualizable_force1(self):
class Thing(object):
def __init__(self, val):
self.val = val
class Frame(object):
_virtualizable_ = ['thing']
driver = JitDriver(greens = ['codeno'], reds = ['i', 'frame'],
virtualizables = ['frame'],
get_printable_location = lambda codeno : str(codeno))
class SomewhereElse(object):
pass
somewhere_else = SomewhereElse()
def change(newthing):
somewhere_else.frame.thing = newthing
def main(codeno):
frame = Frame()
somewhere_else.frame = frame
frame.thing = Thing(0)
portal(codeno, frame)
return frame.thing.val
def portal(codeno, frame):
print 'ENTER:', codeno, frame.thing.val
i = 0
while i < 10:
driver.can_enter_jit(frame=frame, codeno=codeno, i=i)
driver.jit_merge_point(frame=frame, codeno=codeno, i=i)
nextval = frame.thing.val
if codeno == 0:
subframe = Frame()
subframe.thing = Thing(nextval)
nextval = portal(1, subframe)
elif codeno == 1:
if frame.thing.val > 40:
change(Thing(13))
nextval = 13
else:
fatalerror("bad codeno = " + str(codeno))
frame.thing = Thing(nextval + 1)
i += 1
print 'LEAVE:', codeno, frame.thing.val
return frame.thing.val
res = self.meta_interp(main, [0], inline=True,
policy=StopAtXPolicy(change))
assert res == main(0)
def test_directly_call_assembler_virtualizable_with_array(self):
myjitdriver = JitDriver(greens = ['codeno'], reds = ['n', 'x', 'frame'],
virtualizables = ['frame'])
class Frame(object):
_virtualizable_ = ['l[*]', 's']
def __init__(self, l, s):
self = hint(self, access_directly=True,
fresh_virtualizable=True)
self.l = l
self.s = s
def main(codeno, n, a):
frame = Frame([a, a+1, a+2, a+3], 0)
return f(codeno, n, a, frame)
def f(codeno, n, a, frame):
x = 0
while n > 0:
myjitdriver.can_enter_jit(codeno=codeno, frame=frame, n=n, x=x)
myjitdriver.jit_merge_point(codeno=codeno, frame=frame, n=n,
x=x)
frame.s = promote(frame.s)
n -= 1
s = frame.s
assert s >= 0
x += frame.l[s]
frame.s += 1
if codeno == 0:
subframe = Frame([n, n+1, n+2, n+3], 0)
x += f(1, 10, 1, subframe)
s = frame.s
assert s >= 0
x += frame.l[s]
x += len(frame.l)
frame.s -= 1
return x
res = self.meta_interp(main, [0, 10, 1], listops=True, inline=True)
assert res == main(0, 10, 1)
def test_directly_call_assembler_virtualizable_force_blackhole(self):
class Thing(object):
def __init__(self, val):
self.val = val
class Frame(object):
_virtualizable_ = ['thing']
driver = JitDriver(greens = ['codeno'], reds = ['i', 'frame'],
virtualizables = ['frame'],
get_printable_location = lambda codeno : str(codeno))
class SomewhereElse(object):
pass
somewhere_else = SomewhereElse()
def change(newthing, arg):
print arg
if arg > 30:
somewhere_else.frame.thing = newthing
arg = 13
return arg
def main(codeno):
frame = Frame()
somewhere_else.frame = frame
frame.thing = Thing(0)
portal(codeno, frame)
return frame.thing.val
def portal(codeno, frame):
i = 0
while i < 10:
driver.can_enter_jit(frame=frame, codeno=codeno, i=i)
driver.jit_merge_point(frame=frame, codeno=codeno, i=i)
nextval = frame.thing.val
if codeno == 0:
subframe = Frame()
subframe.thing = Thing(nextval)
nextval = portal(1, subframe)
else:
nextval = change(Thing(13), frame.thing.val)
frame.thing = Thing(nextval + 1)
i += 1
return frame.thing.val
res = self.meta_interp(main, [0], inline=True,
policy=StopAtXPolicy(change))
assert res == main(0)
def test_assembler_call_red_args(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'k'],
get_printable_location = lambda codeno : str(codeno))
def residual(k):
if k > 150:
return 0
return 1
def portal(codeno, k):
i = 0
while i < 15:
driver.can_enter_jit(codeno=codeno, i=i, k=k)
driver.jit_merge_point(codeno=codeno, i=i, k=k)
if codeno == 2:
k += portal(residual(k), k)
if codeno == 0:
k += 2
elif codeno == 1:
k += 1
i += 1
return k
res = self.meta_interp(portal, [2, 0], inline=True,
policy=StopAtXPolicy(residual))
assert res == portal(2, 0)
self.check_resops(call_assembler_i=4)
def test_inline_without_hitting_the_loop(self):
driver = JitDriver(greens = ['codeno'], reds = ['i'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno):
i = 0
while True:
driver.jit_merge_point(codeno=codeno, i=i)
if codeno < 10:
i += portal(20)
codeno += 1
elif codeno == 10:
if i > 63:
return i
codeno = 0
driver.can_enter_jit(codeno=codeno, i=i)
else:
return 1
assert portal(0) == 70
res = self.meta_interp(portal, [0], inline=True)
assert res == 70
self.check_resops(call_assembler=0)
def test_inline_with_hitting_the_loop_sometimes(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'k'],
get_printable_location = lambda codeno : str(codeno))
def portal(codeno, k):
if k > 2:
return 1
i = 0
while True:
driver.jit_merge_point(codeno=codeno, i=i, k=k)
if codeno < 10:
i += portal(codeno + 5, k+1)
codeno += 1
elif codeno == 10:
if i > [-1, 2000, 63][k]:
return i
codeno = 0
driver.can_enter_jit(codeno=codeno, i=i, k=k)
else:
return 1
assert portal(0, 1) == 2095
res = self.meta_interp(portal, [0, 1], inline=True)
assert res == 2095
self.check_resops(call_assembler_i=12)
def test_inline_with_hitting_the_loop_sometimes_exc(self):
driver = JitDriver(greens = ['codeno'], reds = ['i', 'k'],
get_printable_location = lambda codeno : str(codeno))
class GotValue(Exception):
def __init__(self, result):
self.result = result
def portal(codeno, k):
if k > 2:
raise GotValue(1)
i = 0
while True:
driver.jit_merge_point(codeno=codeno, i=i, k=k)
if codeno < 10:
try:
portal(codeno + 5, k+1)
except GotValue as e:
i += e.result
codeno += 1
elif codeno == 10:
if i > [-1, 2000, 63][k]:
raise GotValue(i)
codeno = 0
driver.can_enter_jit(codeno=codeno, i=i, k=k)
else:
raise GotValue(1)
def main(codeno, k):
try:
portal(codeno, k)
except GotValue as e:
return e.result
assert main(0, 1) == 2095
res = self.meta_interp(main, [0, 1], inline=True)
assert res == 2095
self.check_resops(call_assembler_n=12)
def test_inline_recursion_limit(self):
driver = JitDriver(greens = ["threshold", "loop"], reds=["i"])
@dont_look_inside
def f():
set_param(driver, "max_unroll_recursion", 10)
def portal(threshold, loop, i):
f()
if i > threshold:
return i
while True:
driver.jit_merge_point(threshold=threshold, loop=loop, i=i)
if loop:
portal(threshold, False, 0)
else:
portal(threshold, False, i + 1)
return i
if i > 10:
return 1
i += 1
driver.can_enter_jit(threshold=threshold, loop=loop, i=i)
res1 = portal(10, True, 0)
res2 = self.meta_interp(portal, [10, True, 0], inline=True)
assert res1 == res2
self.check_resops(call_assembler_i=2)
res1 = portal(9, True, 0)
res2 = self.meta_interp(portal, [9, True, 0], inline=True)
assert res1 == res2
self.check_resops(call_assembler=0)
def test_handle_jitexception_in_portal(self):
# a test for _handle_jitexception_in_portal in blackhole.py
driver = JitDriver(greens = ['codeno'], reds = ['i', 'str'],
get_printable_location = lambda codeno: str(codeno))
def do_can_enter_jit(codeno, i, str):
i = (i+1)-1 # some operations
driver.can_enter_jit(codeno=codeno, i=i, str=str)
def intermediate(codeno, i, str):
if i == 9:
do_can_enter_jit(codeno, i, str)
def portal(codeno, str):
i = value.initial
while i < 10:
intermediate(codeno, i, str)
driver.jit_merge_point(codeno=codeno, i=i, str=str)
i += 1
if codeno == 64 and i == 10:
str = portal(96, str)
str += chr(codeno+i)
return str
class Value:
initial = -1
value = Value()
def main():
value.initial = 0
return (portal(64, '') +
portal(64, '') +
portal(64, '') +
portal(64, '') +
portal(64, ''))
assert main() == 'ABCDEFGHIabcdefghijJ' * 5
for tlimit in [95, 90, 102]:
print 'tlimit =', tlimit
res = self.meta_interp(main, [], inline=True, trace_limit=tlimit)
assert ''.join(res.chars) == 'ABCDEFGHIabcdefghijJ' * 5
def test_handle_jitexception_in_portal_returns_void(self):
# a test for _handle_jitexception_in_portal in blackhole.py
driver = JitDriver(greens = ['codeno'], reds = ['i', 'str'],
get_printable_location = lambda codeno: str(codeno))
def do_can_enter_jit(codeno, i, str):
i = (i+1)-1 # some operations
driver.can_enter_jit(codeno=codeno, i=i, str=str)
def intermediate(codeno, i, str):
if i == 9:
do_can_enter_jit(codeno, i, str)
def portal(codeno, str):
i = value.initial
while i < 10:
intermediate(codeno, i, str)
driver.jit_merge_point(codeno=codeno, i=i, str=str)
i += 1
if codeno == 64 and i == 10:
portal(96, str)
str += chr(codeno+i)
class Value:
initial = -1
value = Value()
def main():
value.initial = 0
portal(64, '')
portal(64, '')
portal(64, '')
portal(64, '')
portal(64, '')
main()
for tlimit in [95, 90, 102]:
print 'tlimit =', tlimit
self.meta_interp(main, [], inline=True, trace_limit=tlimit)
def test_no_duplicates_bug(self):
driver = JitDriver(greens = ['codeno'], reds = ['i'],
get_printable_location = lambda codeno: str(codeno))
def portal(codeno, i):
while i > 0:
driver.can_enter_jit(codeno=codeno, i=i)
driver.jit_merge_point(codeno=codeno, i=i)
if codeno > 0:
break
portal(i, i)
i -= 1
self.meta_interp(portal, [0, 10], inline=True)
def test_trace_from_start_always(self):
from rpython.rlib.nonconst import NonConstant
driver = JitDriver(greens = ['c'], reds = ['i', 'v'])
def portal(c, i, v):
while i > 0:
driver.jit_merge_point(c=c, i=i, v=v)
portal(c, i - 1, v)
if v:
driver.can_enter_jit(c=c, i=i, v=v)
break
def main(c, i, _set_param, v):
if _set_param:
set_param(driver, 'function_threshold', 0)
portal(c, i, v)
self.meta_interp(main, [10, 10, False, False], inline=True)
self.check_jitcell_token_count(1)
self.check_trace_count(1)
self.meta_interp(main, [3, 10, True, False], inline=True)
self.check_jitcell_token_count(0)
self.check_trace_count(0)
def test_trace_from_start_does_not_prevent_inlining(self):
driver = JitDriver(greens = ['c', 'bc'], reds = ['i'])
def portal(bc, c, i):
while True:
driver.jit_merge_point(c=c, bc=bc, i=i)
if bc == 0:
portal(1, 8, 0)
c += 1
else:
return
if c == 10: # bc == 0
c = 0
if i >= 100:
return
driver.can_enter_jit(c=c, bc=bc, i=i)
i += 1
self.meta_interp(portal, [0, 0, 0], inline=True)
self.check_resops(call_may_force=0, call=0)
def test_dont_repeatedly_trace_from_the_same_guard(self):
driver = JitDriver(greens = [], reds = ['level', 'i'])
def portal(level):
if level == 0:
i = -10
else:
i = 0
#
while True:
driver.jit_merge_point(level=level, i=i)
if level == 25:
return 42
i += 1
if i <= 0: # <- guard
continue # first make a loop
else:
# then we fail the guard above, doing a recursive call,
# which will itself fail the same guard above, and so on
return portal(level + 1)
self.meta_interp(portal, [0])
self.check_trace_count_at_most(2) # and not, e.g., 24
def test_get_unique_id(self):
lst = []
def reg_codemap(self, (start, size, l)):
lst.append((start, size))
old_reg_codemap(self, (start, size, l))
old_reg_codemap = codemap.CodemapStorage.register_codemap
try:
codemap.CodemapStorage.register_codemap = reg_codemap
def get_unique_id(pc, code):
return (code + 1) * 2
driver = JitDriver(greens=["pc", "code"], reds='auto',
get_unique_id=get_unique_id, is_recursive=True)
def f(pc, code):
i = 0
while i < 10:
driver.jit_merge_point(pc=pc, code=code)
pc += 1
if pc == 3:
if code == 1:
f(0, 0)
pc = 0
i += 1
self.meta_interp(f, [0, 1], inline=True)
self.check_get_unique_id(lst) # overloaded on assembler backends
finally:
codemap.CodemapStorage.register_codemap = old_reg_codemap
def check_get_unique_id(self, lst):
pass
class TestLLtype(RecursiveTests, LLJitMixin):
pass<|fim▁end|>
| |
<|file_name|>Main.java<|end_file_name|><|fim▁begin|>package com.covoex.qarvox;<|fim▁hole|>
import com.covoex.qarvox.Application.BasicFunction;
/**
* @author Myeongjun Kim
*/
public class Main {
public static void main(String[] args) {
BasicFunction.programStart();
}
}<|fim▁end|>
| |
<|file_name|>test_domain.py<|end_file_name|><|fim▁begin|>__author__ = 'frank'
# Setup our test environment
import os
os.environ['NETKI_ENV'] = 'test'
from unittest import TestCase
from netki.api.domain import *
from mock import patch, Mock
class TestWalletLookup(TestCase):
# This is the open wallet name lookup API
def setUp(self):
self.patcher1 = patch("netki.api.domain.InputValidation")
self.patcher2 = patch("netki.api.domain.create_json_response")
self.patcher3 = patch("netki.api.domain.WalletNameResolver")
self.patcher4 = patch("netki.api.domain.requests")
self.mockInputValidation = self.patcher1.start()
self.mockCreateJSONResponse = self.patcher2.start()
self.mockWalletNameResolver = self.patcher3.start()
self.mockRequests = self.patcher4.start()
config.namecoin.enabled = True
self.mockRequests.get.return_value.json.return_value = {'success': True, 'wallet_address': '1walletaddy'}
def tearDown(self):
self.patcher1.stop()
self.patcher2.stop()
self.patcher3.stop()
def get_json_call(self):
# Utility function to get JSON call_args_list cleaning up assertions in below tests
return self.mockCreateJSONResponse.call_args_list[0][1].get('data')
def test_invalid_wallet_name_field(self):
# Used to simulate failure in validation for each iteration [iteration 1, iteration 2, etc.]
self.mockInputValidation.is_valid_field.side_effect = [False]
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertFalse(self.mockWalletNameResolver.called)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Invalid Parameters')
def test_invalid_currency_field(self):
# Used to simulate failure in validation for each iteration [iteration 1, iteration 2, etc.]
self.mockInputValidation.is_valid_field.side_effect = [True, False]
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertFalse(self.mockWalletNameResolver.called)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Invalid Parameters')
def test_invalid_wallet_name_field_no_dot(self):
api_wallet_lookup('walletfrankcontrerasme', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertFalse(self.mockWalletNameResolver.called)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Invalid Parameters')
def test_wallet_address_returned_success(self):
<|fim▁hole|>
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockWalletNameResolver.return_value.set_namecoin_options.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertTrue(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), '')
# Returned Data Validation
call_dict = self.get_json_call()
self.assertEqual(call_dict.get('wallet_name'), 'wallet.frankcontreras.me')
self.assertEqual(call_dict.get('currency'), 'btc')
self.assertEqual(call_dict.get('wallet_address'), '1djskfaklasdjflkasdf')
def test_namecoin_config_disabled(self):
self.mockWalletNameResolver.return_value.resolve_wallet_name.return_value = '1djskfaklasdjflkasdf'
config.namecoin.enabled = False
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockWalletNameResolver.return_value.set_namecoin_options.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertTrue(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), '')
# Returned Data Validation
call_dict = self.get_json_call()
self.assertEqual(call_dict.get('wallet_name'), 'wallet.frankcontreras.me')
self.assertEqual(call_dict.get('currency'), 'btc')
self.assertEqual(call_dict.get('wallet_address'), '1djskfaklasdjflkasdf')
def test_namecoin_use_api_returned_success(self):
config.namecoin.use_api = True
api_wallet_lookup('wallet.frankcontreras.bit', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 1)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.set_namecoin_options.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertTrue(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), '')
# Returned Data Validation
call_dict = self.get_json_call()
self.assertEqual(call_dict.get('wallet_name'), 'wallet.frankcontreras.bit')
self.assertEqual(call_dict.get('currency'), 'btc')
self.assertEqual(call_dict.get('wallet_address'), '1walletaddy')
def test_namecoin_use_api_returned_failure(self):
config.namecoin.use_api = True
self.mockRequests.get.return_value.json.return_value['success'] = False
api_wallet_lookup('wallet.frankcontreras.bit', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 1)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.set_namecoin_options.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
# Returned Data Validation
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name does not exist')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_insecure_error(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_wallet_name.side_effect = WalletNameLookupInsecureError()
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name Lookup is Insecure')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_does_not_exist(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_wallet_name.side_effect = WalletNameLookupError()
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name does not exist')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_empty_currency_list(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_wallet_name.side_effect = WalletNameUnavailableError()
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name does not exist')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_currency_unavailable(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_wallet_name.side_effect = WalletNameCurrencyUnavailableError()
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name Does Not Contain Requested Currency')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_exception(self):
self.mockWalletNameResolver.return_value.resolve_wallet_name.side_effect = Exception('Raising Exception for testing')
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 2)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockWalletNameResolver.return_value.resolve_wallet_name.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'General Wallet Lookup Failure')
def test_uppercase_currency_and_wallet_name_to_lowercase(self):
api_wallet_lookup('Wallet.FrankContreras.Me', 'BTC')
# Validate call to resolve has values in lowercase
call_args = self.mockWalletNameResolver.return_value.resolve_wallet_name.call_args_list[0][0]
self.assertEqual('wallet.frankcontreras.me', call_args[0])
self.assertEqual('btc', call_args[1])
def test_dogecoin_transform(self):
api_wallet_lookup('wallet.frankContreras.me', 'doge')
# Validate call to resolve has values in lowercase
call_args = self.mockWalletNameResolver.return_value.resolve_wallet_name.call_args_list[0][0]
self.assertEqual('wallet.frankcontreras.me', call_args[0])
self.assertEqual('dgc', call_args[1])
class TestWalletnameCurrencyLookup(TestCase):
def setUp(self):
self.patcher1 = patch("netki.api.domain.InputValidation")
self.patcher2 = patch("netki.api.domain.create_json_response")
self.patcher3 = patch("netki.api.domain.WalletNameResolver")
self.patcher4 = patch("netki.api.domain.requests")
self.mockInputValidation = self.patcher1.start()
self.mockCreateJSONResponse = self.patcher2.start()
self.mockWalletNameResolver = self.patcher3.start()
self.mockRequests = self.patcher4.start()
self.mockWalletNameResolver.return_value.resolve_available_currencies.return_value = ['btc','ltc']
self.mockRequests.get.return_value.json.return_value = {'success': True, 'available_currencies': ['btc','ltc']}
def tearDown(self):
self.patcher1.stop()
self.patcher2.stop()
self.patcher4.stop()
def get_json_call(self):
# Utility function to get JSON call_args_list cleaning up assertions in below tests
return self.mockCreateJSONResponse.call_args_list[0][1].get('data')
def test_invalid_wallet_name_field(self):
# Used to simulate failure in validation for each iteration [iteration 1, iteration 2, etc.]
self.mockInputValidation.is_valid_field.side_effect = [False]
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Invalid Parameters')
self.assertFalse(self.mockWalletNameResolver.called)
def test_invalid_wallet_name_field_no_dot(self):
walletname_currency_lookup('walletfrankcontrerasme')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Invalid Parameters')
self.assertFalse(self.mockWalletNameResolver.called)
def test_wallet_address_returned_success(self):
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertTrue(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), '')
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
# Returned Data Validation
call_dict = self.get_json_call()
self.assertEqual(call_dict.get('wallet_name'), 'wallet.frankcontreras.me')
self.assertEqual(call_dict.get('available_currencies'), ['btc','ltc'])
def test_wallet_address_namecoin_use_api_returned_success(self):
config.namecoin.use_api = True
walletname_currency_lookup('wallet.frankcontreras.bit')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertTrue(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), '')
self.assertEqual(0, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
# Returned Data Validation
call_dict = self.get_json_call()
self.assertEqual(call_dict.get('wallet_name'), 'wallet.frankcontreras.bit')
self.assertEqual(call_dict.get('available_currencies'), ['btc','ltc'])
def test_wallet_address_namecoin_use_api_returned_failure(self):
config.namecoin.use_api = True
self.mockRequests.get.return_value.json.return_value['success'] = False
walletname_currency_lookup('wallet.frankcontreras.bit')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 1)
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'Wallet Name Does Not Exist')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
self.assertEqual(0, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
def test_wallet_lookup_returned_error(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_available_currencies.side_effect = WalletNameUnavailableError()
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'),'Wallet Name Does Not Exist')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_insecure(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_available_currencies.side_effect = WalletNameLookupInsecureError()
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'),'Wallet Name Lookup is Insecure')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_currency_unavailable(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_available_currencies.side_effect = WalletNameCurrencyUnavailableError()
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'),'Requested Currency Unavailable')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_returned_currency_namecoin_unavailable(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_available_currencies.side_effect = WalletNameNamecoinUnavailable()
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'),'Namecoin-based Wallet Name Lookup Unavailable')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data'), {})
def test_wallet_lookup_failed(self):
self.mockInputValidation.is_valid_field.return_value = True
self.mockWalletNameResolver.return_value.resolve_available_currencies.return_value = None
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'LOOKUP_FAILURE')
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('data').get('wallet_name'), 'wallet.frankcontreras.me')
def test_wallet_lookup_exception(self):
self.mockWalletNameResolver.return_value.resolve_available_currencies.side_effect = Exception()
walletname_currency_lookup('wallet.frankcontreras.me')
self.assertEqual(self.mockInputValidation.is_valid_field.call_count, 1)
self.assertEqual(self.mockRequests.get.call_count, 0)
self.assertEqual(1, self.mockWalletNameResolver.return_value.resolve_available_currencies.call_count)
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
self.assertEqual(self.mockCreateJSONResponse.call_count, 1)
self.assertFalse(self.mockCreateJSONResponse.call_args_list[0][1].get('success'))
self.assertEqual(self.mockCreateJSONResponse.call_args_list[0][1].get('message'), 'General Wallet Lookup Failure')
def test_uppercase_currency_and_wallet_name_to_lowercase(self):
walletname_currency_lookup('wallet.frankcontreras.me')
# Validate call to resolve has values in lowercase
self.assertEqual('wallet.frankcontreras.me', self.mockWalletNameResolver.return_value.resolve_available_currencies.call_args[0][0])
if __name__ == "__main__":
import unittest
unittest.main()<|fim▁end|>
|
self.mockWalletNameResolver.return_value.resolve_wallet_name.return_value = '1djskfaklasdjflkasdf'
api_wallet_lookup('wallet.frankcontreras.me', 'btc')
|
<|file_name|>HttpPatch.java<|end_file_name|><|fim▁begin|>// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.hooks.rtc.network;
import java.net.URI;
import org.apache.http.client.methods.HttpPost;
public class HttpPatch extends HttpPost {
<|fim▁hole|>
public HttpPatch(String uri) {
super(uri);
}
public HttpPatch(URI uri) {
super(uri);
}
@Override
public String getMethod() {
return "PATCH";
}
}<|fim▁end|>
|
public HttpPatch() {
super();
}
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>#[cfg(not(feature = "unstable"))]
mod inner {
extern crate syntex;
extern crate diesel_codegen;
extern crate dotenv_codegen;
use std::env;
use std::path::Path;
pub fn main() {
let out_dir = env::var_os("OUT_DIR").unwrap();
let mut registry = syntex::Registry::new();
diesel_codegen::register(&mut registry);
dotenv_codegen::register(&mut registry);
let src = Path::new("tests/lib.in.rs");
let dst = Path::new(&out_dir).join("lib.rs");
registry.expand("", &src, &dst).unwrap();
}
}
#[cfg(feature = "unstable")]
mod inner {
pub fn main() {}
}
extern crate diesel;
extern crate dotenv;<|fim▁hole|> dotenv().ok();
let database_url = ::std::env::var("DATABASE_URL")
.expect("DATABASE_URL must be set to run tests");
let connection = Connection::establish(&database_url).unwrap();
migrations::run_pending_migrations(&connection).unwrap();
inner::main();
}<|fim▁end|>
|
use diesel::*;
use dotenv::dotenv;
fn main() {
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Tools for wrapping devices.
mod buffered;
mod buffered_output;<|fim▁hole|>#[doc(inline)]
pub use self::buffered_output::BufferedOutput;
#[doc(inline)]
pub use self::messaged::Messaged;<|fim▁end|>
|
mod messaged;
#[doc(inline)]
pub use self::buffered::Buffered;
|
<|file_name|>ConnectivityService.java<|end_file_name|><|fim▁begin|>package main.habitivity.services;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
/**
* Created by Shally on 2017-12-01.
*/
public class ConnectivityService implements IConnectivityService {
private Context context;
private OnConnectivityChangedListener onConnectivityChangedListener;
/**
* Instantiates a new Android connectivity service.
*
* @param context the context
*/
public ConnectivityService(Context context) {
this.context = context;
registerConnectivityListener();
}
@Override
public boolean isInternetAvailable() {
ConnectivityManager connectivityManager = (ConnectivityManager) context<|fim▁hole|> return networkInfo != null && networkInfo.isConnected();
}
@Override
public void setOnConnectivityChangedListener(OnConnectivityChangedListener onConnectivityChangedListener) {
this.onConnectivityChangedListener = onConnectivityChangedListener;
dispatchConnectivityChange();
}
private void registerConnectivityListener() {
context.registerReceiver(new ConnectivityListener(),
new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
}
private void dispatchConnectivityChange() {
if (onConnectivityChangedListener != null) {
onConnectivityChangedListener.onConnectivityChanged(isInternetAvailable());
}
}
private class ConnectivityListener extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
dispatchConnectivityChange();
}
}
}<|fim▁end|>
|
.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo networkInfo = connectivityManager.getActiveNetworkInfo();
|
<|file_name|>list_assets.go<|end_file_name|><|fim▁begin|>package endpoint
import (<|fim▁hole|> "fmt"
"net/http"
"github.com/spolu/settle/lib/db"
"github.com/spolu/settle/lib/errors"
"github.com/spolu/settle/lib/format"
"github.com/spolu/settle/lib/ptr"
"github.com/spolu/settle/lib/svc"
"github.com/spolu/settle/mint"
"github.com/spolu/settle/mint/lib/authentication"
"github.com/spolu/settle/mint/model"
)
const (
// EndPtListAssets creates a new assset.
EndPtListAssets EndPtName = "ListAssets"
)
func init() {
registrar[EndPtListAssets] = NewListAssets
}
// ListAssets returns a list of assets.
type ListAssets struct {
ListEndpoint
Owner string
}
// NewListAssets constructs and initialiezes the endpoint.
func NewListAssets(
r *http.Request,
) (Endpoint, error) {
return &ListAssets{
ListEndpoint: ListEndpoint{},
}, nil
}
// Validate validates the input parameters.
func (e *ListAssets) Validate(
r *http.Request,
) error {
ctx := r.Context()
e.Owner = fmt.Sprintf("%s@%s",
authentication.Get(ctx).User.Username, mint.GetHost(ctx))
return e.ListEndpoint.Validate(r)
}
// Execute executes the endpoint.
func (e *ListAssets) Execute(
ctx context.Context,
) (*int, *svc.Resp, error) {
ctx = db.Begin(ctx, "mint")
defer db.LoggedRollback(ctx)
assets, err := model.LoadAssetListByOwner(ctx,
e.ListEndpoint.CreatedBefore,
e.ListEndpoint.Limit,
e.Owner,
)
if err != nil {
return nil, nil, errors.Trace(err) // 500
}
db.Commit(ctx)
l := []mint.AssetResource{}
for _, a := range assets {
a := a
l = append(l, model.NewAssetResource(ctx, &a))
}
return ptr.Int(http.StatusOK), &svc.Resp{
"assets": format.JSONPtr(l),
}, nil
}<|fim▁end|>
|
"context"
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.conf import settings
from django.contrib.auth.models import User, Group
from .endpoint import Endpoint
DEFAULT_STORE_SLUG = getattr(settings, 'DEFAULT_STORE_SLUG', 'public')
class Store(models.Model):
slug = models.SlugField(primary_key=True)
name = models.CharField(max_length=128)
query_endpoint = models.URLField()
update_endpoint = models.URLField(null=True, blank=True)
graph_store_endpoint = models.URLField(null=True, blank=True)<|fim▁hole|>
def __unicode__(self):
return self.name
def query(self, *args, **kwargs):
return Endpoint(self.query_endpoint).query(*args, **kwargs)
class Meta:
permissions = (('administer_store', 'can administer'),
('query_store', 'can query'),
('update_store', 'can update'))
class UserPrivileges(models.Model):
user = models.ForeignKey(User, null=True, blank=True)
group = models.ForeignKey(Group, null=True, blank=True)
allow_concurrent_queries = models.BooleanField()
disable_throttle = models.BooleanField()
throttle_threshold = models.FloatField(null=True, blank=True)
deny_threshold = models.FloatField(null=True, blank=True)
intensity_decay = models.FloatField(null=True, blank=True)
disable_timeout = models.BooleanField()
maximum_timeout = models.IntegerField(null=True)<|fim▁end|>
| |
<|file_name|>update-credential.7.x.py<|end_file_name|><|fim▁begin|># NOTE: This example uses the next generation Twilio helper library - for more
# information on how to download and install this version, visit
# https://www.twilio.com/docs/libraries/python<|fim▁hole|># Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure
account = os.environ['TWILIO_ACCOUNT_SID']
token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account, token)
credential = client.notify \
.credentials("CRXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.update(friendly_name="MyCredential",
sandbox=True)
print(credential.friendly_name)<|fim▁end|>
|
import os
from twilio.rest import Client
|
<|file_name|>receipt_print.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 Silvina Faner (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
import netsvc
class report_receipt_print(report_sxw.rml_parse):
_name = 'report.receipt.print'
def __init__(self, cr, uid, name, context):
super(report_receipt_print, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'convert': self.convert,
})
def convert(self, amount, currency): return self.pool.get('ir.translation').amount_to_text(amount, 'pe', currency or 'Pesos')
report_sxw.report_sxw(
'report.receipt.print',
'receipt.receipt',<|fim▁hole|><|fim▁end|>
|
'trunk/receipt_pay/report/receipt_pay_print.rml',
parser=report_receipt_print,header="external"
)
|
<|file_name|>outline.py<|end_file_name|><|fim▁begin|>import phidl.geometry as pg
import gdsfactory as gf
from gdsfactory.component import Component
@gf.cell
def outline(elements, **kwargs) -> Component:
"""
Returns Component containing the outlined polygon(s).
wraps phidl.geometry.outline
Creates an outline around all the polygons passed in the `elements`
argument. `elements` may be a Device, Polygon, or list of Devices.
Args:
elements: Device(/Reference), list of Device(/Reference), or Polygon
Polygons to outline or Device containing polygons to outline.
Keyword Args:
distance: int or float
Distance to offset polygons. Positive values expand, negative shrink.
precision: float
Desired precision for rounding vertex coordinates.
num_divisions: array-like[2] of int
The number of divisions with which the geometry is divided into
multiple rectangular regions. This allows for each region to be
processed sequentially, which is more computationally efficient.
join: {'miter', 'bevel', 'round'}
Type of join used to create the offset polygon.
tolerance: int or float
For miter joints, this number must be at least 2 and it represents the
maximal distance in multiples of offset between new vertices and their
original position before beveling to avoid spikes at acute joints. For
round joints, it indicates the curvature resolution in number of
points per full circle.
join_first: bool
Join all paths before offsetting to avoid unnecessary joins in
adjacent polygon sides.
max_points: int
The maximum number of vertices within the resulting polygon.
open_ports: bool or float
If not False, holes will be cut in the outline such that the Ports are
not covered. If True, the holes will have the same width as the Ports.
If a float, the holes will be be widened by that value (useful for fully
clearing the outline around the Ports for positive-tone processes
layer: int, array-like[2], or set
Specific layer(s) to put polygon geometry on.)
<|fim▁hole|> """
return gf.read.from_phidl(component=pg.outline(elements, **kwargs))
def test_outline():
e1 = gf.components.ellipse(radii=(6, 6))
e2 = gf.components.ellipse(radii=(10, 4))
c = outline([e1, e2])
assert int(c.area()) == 52
if __name__ == "__main__":
e1 = gf.components.ellipse(radii=(6, 6))
e2 = gf.components.ellipse(radii=(10, 4))
c = outline([e1, e2])
c.show()<|fim▁end|>
| |
<|file_name|>wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! Wrapper definitions on top of Gecko types in order to be used in the style
//! system.
//!
//! This really follows the Servo pattern in
//! `components/script/layout_wrapper.rs`.
//!
//! This theoretically should live in its own crate, but now it lives in the
//! style system it's kind of pointless in the Stylo case, and only Servo forces
//! the separation between the style system implementation and everything else.
use CaseSensitivityExt;
use app_units::Au;
use applicable_declarations::ApplicableDeclarationBlock;
use atomic_refcell::{AtomicRefCell, AtomicRefMut};
use context::{QuirksMode, SharedStyleContext, PostAnimationTasks, UpdateAnimationsTasks};
use data::{ElementData, RestyleData};
use dom::{LayoutIterator, NodeInfo, TElement, TNode, UnsafeNode};
use dom::{OpaqueNode, PresentationalHintsSynthesizer};
use element_state::{ElementState, DocumentState, NS_DOCUMENT_STATE_WINDOW_INACTIVE};
use error_reporting::ParseErrorReporter;
use font_metrics::{FontMetrics, FontMetricsProvider, FontMetricsQueryResult};
use gecko::data::PerDocumentStyleData;
use gecko::global_style_data::GLOBAL_STYLE_DATA;
use gecko::selector_parser::{SelectorImpl, NonTSPseudoClass, PseudoElement};
use gecko::snapshot_helpers;
use gecko_bindings::bindings;
use gecko_bindings::bindings::{Gecko_ConstructStyleChildrenIterator, Gecko_DestroyStyleChildrenIterator};
use gecko_bindings::bindings::{Gecko_DocumentState, Gecko_ElementState, Gecko_GetDocumentLWTheme};
use gecko_bindings::bindings::{Gecko_GetLastChild, Gecko_GetNextStyleChild};
use gecko_bindings::bindings::{Gecko_IsRootElement, Gecko_MatchesElement, Gecko_Namespace};
use gecko_bindings::bindings::{Gecko_SetNodeFlags, Gecko_UnsetNodeFlags};
use gecko_bindings::bindings::Gecko_ClassOrClassList;
use gecko_bindings::bindings::Gecko_ElementHasAnimations;
use gecko_bindings::bindings::Gecko_ElementHasCSSAnimations;
use gecko_bindings::bindings::Gecko_ElementHasCSSTransitions;
use gecko_bindings::bindings::Gecko_GetActiveLinkAttrDeclarationBlock;
use gecko_bindings::bindings::Gecko_GetAnimationRule;
use gecko_bindings::bindings::Gecko_GetExtraContentStyleDeclarations;
use gecko_bindings::bindings::Gecko_GetHTMLPresentationAttrDeclarationBlock;
use gecko_bindings::bindings::Gecko_GetSMILOverrideDeclarationBlock;
use gecko_bindings::bindings::Gecko_GetStyleAttrDeclarationBlock;
use gecko_bindings::bindings::Gecko_GetStyleContext;
use gecko_bindings::bindings::Gecko_GetUnvisitedLinkAttrDeclarationBlock;
use gecko_bindings::bindings::Gecko_GetVisitedLinkAttrDeclarationBlock;
use gecko_bindings::bindings::Gecko_IsSignificantChild;
use gecko_bindings::bindings::Gecko_MatchLang;
use gecko_bindings::bindings::Gecko_MatchStringArgPseudo;
use gecko_bindings::bindings::Gecko_UnsetDirtyStyleAttr;
use gecko_bindings::bindings::Gecko_UpdateAnimations;
use gecko_bindings::structs;
use gecko_bindings::structs::{RawGeckoElement, RawGeckoNode, RawGeckoXBLBinding};
use gecko_bindings::structs::{nsIAtom, nsIContent, nsINode_BooleanFlag, nsStyleContext};
use gecko_bindings::structs::ELEMENT_HANDLED_SNAPSHOT;
use gecko_bindings::structs::ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO;
use gecko_bindings::structs::ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO;
use gecko_bindings::structs::ELEMENT_HAS_SNAPSHOT;
use gecko_bindings::structs::EffectCompositor_CascadeLevel as CascadeLevel;
use gecko_bindings::structs::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE;
use gecko_bindings::structs::NODE_IS_NATIVE_ANONYMOUS;
use gecko_bindings::structs::nsChangeHint;
use gecko_bindings::structs::nsIDocument_DocumentTheme as DocumentTheme;
use gecko_bindings::structs::nsRestyleHint;
use gecko_bindings::sugar::ownership::{HasArcFFI, HasSimpleFFI};
use logical_geometry::WritingMode;
use media_queries::Device;
use properties::{ComputedValues, parse_style_attribute};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock};
use properties::animated_properties::{AnimatableLonghand, AnimationValue, AnimationValueMap};
use properties::animated_properties::TransitionProperty;
use properties::style_structs::Font;
use rule_tree::CascadeLevel as ServoCascadeLevel;
use selector_parser::{AttrValue, ElementExt, PseudoClassStringArg};
use selectors::Element;
use selectors::attr::{AttrSelectorOperation, AttrSelectorOperator, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, LocalMatchingContext, MatchingContext};
use selectors::matching::{RelevantLinkStatus, VisitedHandlingMode};
use selectors::sink::Push;
use servo_arc::{Arc, ArcBorrow, RawOffsetArc};
use shared_lock::Locked;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::mem;
use std::ops::DerefMut;
use std::ptr;
use string_cache::{Atom, Namespace, WeakAtom, WeakNamespace};
use stylesheets::UrlExtraData;
use stylist::Stylist;
/// A simple wrapper over a non-null Gecko node (`nsINode`) pointer.
///
/// Important: We don't currently refcount the DOM, because the wrapper lifetime
/// magic guarantees that our LayoutFoo references won't outlive the root, and
/// we don't mutate any of the references on the Gecko side during restyle.
///
/// We could implement refcounting if need be (at a potentially non-trivial
/// performance cost) by implementing Drop and making LayoutFoo non-Copy.
#[derive(Clone, Copy)]
pub struct GeckoNode<'ln>(pub &'ln RawGeckoNode);
impl<'ln> fmt::Debug for GeckoNode<'ln> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(el) = self.as_element() {
el.fmt(f)
} else {
if self.is_text_node() {
write!(f, "<text node> ({:#x})", self.opaque().0)
} else {
write!(f, "<non-text node> ({:#x})", self.opaque().0)
}
}
}
}
impl<'ln> GeckoNode<'ln> {
#[inline]
fn from_content(content: &'ln nsIContent) -> Self {
GeckoNode(&content._base)
}
#[inline]
fn flags(&self) -> u32 {
(self.0)._base._base_1.mFlags
}
#[inline]
fn node_info(&self) -> &structs::NodeInfo {
debug_assert!(!self.0.mNodeInfo.mRawPtr.is_null());
unsafe { &*self.0.mNodeInfo.mRawPtr }
}
// These live in different locations depending on processor architecture.
#[cfg(target_pointer_width = "64")]
#[inline]
fn bool_flags(&self) -> u32 {
(self.0)._base._base_1.mBoolFlags
}
#[cfg(target_pointer_width = "32")]
#[inline]
fn bool_flags(&self) -> u32 {
(self.0).mBoolFlags
}
#[inline]
fn get_bool_flag(&self, flag: nsINode_BooleanFlag) -> bool {
self.bool_flags() & (1u32 << flag as u32) != 0
}
fn owner_doc(&self) -> &structs::nsIDocument {
debug_assert!(!self.node_info().mDocument.is_null());
unsafe { &*self.node_info().mDocument }
}
#[inline]
fn first_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mFirstChild.as_ref().map(GeckoNode::from_content) }
}
#[inline]
fn last_child(&self) -> Option<GeckoNode<'ln>> {
unsafe { Gecko_GetLastChild(self.0).map(GeckoNode) }
}
#[inline]
fn prev_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mPreviousSibling.as_ref().map(GeckoNode::from_content) }
}
#[inline]
fn next_sibling(&self) -> Option<GeckoNode<'ln>> {
unsafe { self.0.mNextSibling.as_ref().map(GeckoNode::from_content) }
}
/// Simple iterator over all this node's children. Unlike `.children()`, this iterator does
/// not filter out nodes that don't need layout.
fn dom_children(self) -> GeckoChildrenIterator<'ln> {
GeckoChildrenIterator::Current(self.first_child())
}
/// WARNING: This logic is duplicated in Gecko's FlattenedTreeParentIsParent.
/// Make sure to mirror any modifications in both places.
fn flattened_tree_parent_is_parent(&self) -> bool {
use ::gecko_bindings::structs::*;
let flags = self.flags();
if flags & (NODE_MAY_BE_IN_BINDING_MNGR as u32 |
NODE_IS_IN_SHADOW_TREE as u32) != 0 {
return false;
}
let parent = unsafe { self.0.mParent.as_ref() }.map(GeckoNode);
let parent_el = parent.and_then(|p| p.as_element());
if flags & (NODE_IS_NATIVE_ANONYMOUS_ROOT as u32) != 0 &&
parent_el.map_or(false, |el| el.is_root())
{
return false;
}
if parent_el.map_or(false, |el| el.has_shadow_root()) {
return false;
}
true
}
fn flattened_tree_parent(&self) -> Option<Self> {
let fast_path = self.flattened_tree_parent_is_parent();
debug_assert!(fast_path == unsafe { bindings::Gecko_FlattenedTreeParentIsParent(self.0) });
if fast_path {
unsafe { self.0.mParent.as_ref().map(GeckoNode) }
} else {
unsafe { bindings::Gecko_GetFlattenedTreeParentNode(self.0).map(GeckoNode) }
}
}
/// This logic is duplicated in Gecko's nsIContent::IsRootOfNativeAnonymousSubtree.
fn is_root_of_native_anonymous_subtree(&self) -> bool {
use gecko_bindings::structs::NODE_IS_NATIVE_ANONYMOUS_ROOT;
return self.flags() & (NODE_IS_NATIVE_ANONYMOUS_ROOT as u32) != 0
}
fn contains_non_whitespace_content(&self) -> bool {
unsafe { Gecko_IsSignificantChild(self.0, true, false) }
}
#[inline]
fn may_have_anonymous_children(&self) -> bool {
self.get_bool_flag(nsINode_BooleanFlag::ElementMayHaveAnonymousChildren)
}
/// This logic is duplicated in Gecko's nsIContent::IsInAnonymousSubtree.
#[inline]
fn is_in_anonymous_subtree(&self) -> bool {
use gecko_bindings::structs::NODE_IS_IN_SHADOW_TREE;
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0 ||
((self.flags() & (NODE_IS_IN_SHADOW_TREE as u32) == 0) &&
self.as_element().map_or(false, |e| e.has_xbl_binding_parent()))
}
}
impl<'ln> NodeInfo for GeckoNode<'ln> {
#[inline]
fn is_element(&self) -> bool {
self.get_bool_flag(nsINode_BooleanFlag::NodeIsElement)
}
fn is_text_node(&self) -> bool {
// This is a DOM constant that isn't going to change.
const TEXT_NODE: u16 = 3;
self.node_info().mInner.mNodeType == TEXT_NODE
}
}
impl<'ln> TNode for GeckoNode<'ln> {
type ConcreteElement = GeckoElement<'ln>;
type ConcreteChildrenIterator = GeckoChildrenIterator<'ln>;
fn to_unsafe(&self) -> UnsafeNode {
(self.0 as *const _ as usize, 0)
}
unsafe fn from_unsafe(n: &UnsafeNode) -> Self {
GeckoNode(&*(n.0 as *mut RawGeckoNode))
}
fn parent_node(&self) -> Option<Self> {
unsafe { self.0.mParent.as_ref().map(GeckoNode) }
}
fn children(&self) -> LayoutIterator<GeckoChildrenIterator<'ln>> {
LayoutIterator(self.dom_children())
}
fn traversal_parent(&self) -> Option<GeckoElement<'ln>> {
self.flattened_tree_parent().and_then(|n| n.as_element())
}
fn traversal_children(&self) -> LayoutIterator<GeckoChildrenIterator<'ln>> {
if let Some(element) = self.as_element() {
// This condition is similar to the check that
// StyleChildrenIterator::IsNeeded does, except that it might return
// true if we used to (but no longer) have anonymous content from
// ::before/::after, XBL bindings, or nsIAnonymousContentCreators.
if self.is_in_anonymous_subtree() ||
element.has_xbl_binding_with_content() ||
self.may_have_anonymous_children() {
unsafe {
let mut iter: structs::StyleChildrenIterator = ::std::mem::zeroed();
Gecko_ConstructStyleChildrenIterator(element.0, &mut iter);
return LayoutIterator(GeckoChildrenIterator::GeckoIterator(iter));
}
}
}
LayoutIterator(self.dom_children())
}
fn opaque(&self) -> OpaqueNode {
let ptr: usize = self.0 as *const _ as usize;
OpaqueNode(ptr)
}
fn debug_id(self) -> usize {
unimplemented!()
}
fn as_element(&self) -> Option<GeckoElement<'ln>> {
if self.is_element() {
unsafe { Some(GeckoElement(&*(self.0 as *const _ as *const RawGeckoElement))) }
} else {
None
}
}
fn can_be_fragmented(&self) -> bool {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
false
}
unsafe fn set_can_be_fragmented(&self, _value: bool) {
// FIXME(SimonSapin): Servo uses this to implement CSS multicol / fragmentation
// Maybe this isn’t useful for Gecko?
}
fn is_in_doc(&self) -> bool {
unsafe { bindings::Gecko_IsInDocument(self.0) }
}
fn needs_dirty_on_viewport_size_changed(&self) -> bool {
// Gecko's node doesn't have the DIRTY_ON_VIEWPORT_SIZE_CHANGE flag,
// so we force them to be dirtied on viewport size change, regardless if
// they use viewport percentage size or not.
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
true
}
// TODO(shinglyu): implement this in Gecko: https://github.com/servo/servo/pull/11890
unsafe fn set_dirty_on_viewport_size_changed(&self) {}
}
/// A wrapper on top of two kind of iterators, depending on the parent being
/// iterated.
///
/// We generally iterate children by traversing the light-tree siblings of the
/// first child like Servo does.
///
/// However, for nodes with anonymous children, we use a custom (heavier-weight)
/// Gecko-implemented iterator.
///
/// FIXME(emilio): If we take into account shadow DOM, we're going to need the
/// flat tree pretty much always. We can try to optimize the case where there's
/// no shadow root sibling, probably.
pub enum GeckoChildrenIterator<'a> {
/// A simple iterator that tracks the current node being iterated and
/// replaces it with the next sibling when requested.
Current(Option<GeckoNode<'a>>),
/// A Gecko-implemented iterator we need to drop appropriately.
GeckoIterator(structs::StyleChildrenIterator),
}
impl<'a> Drop for GeckoChildrenIterator<'a> {
fn drop(&mut self) {
if let GeckoChildrenIterator::GeckoIterator(ref mut it) = *self {
unsafe {
Gecko_DestroyStyleChildrenIterator(it);
}
}
}
}
impl<'a> Iterator for GeckoChildrenIterator<'a> {
type Item = GeckoNode<'a>;
fn next(&mut self) -> Option<GeckoNode<'a>> {
match *self {
GeckoChildrenIterator::Current(curr) => {
let next = curr.and_then(|node| node.next_sibling());
*self = GeckoChildrenIterator::Current(next);
curr
},
GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe {
// We do this unsafe lengthening of the lifetime here because
// structs::StyleChildrenIterator is actually StyleChildrenIterator<'a>,
// however we can't express this easily with bindgen, and it would
// introduce functions with two input lifetimes into bindgen,
// which would be out of scope for elision.
Gecko_GetNextStyleChild(&mut * (it as *mut _)).map(GeckoNode)
}
}
}
}
/// A Simple wrapper over a non-null Gecko `nsXBLBinding` pointer.
#[derive(Clone, Copy)]
pub struct GeckoXBLBinding<'lb>(pub &'lb RawGeckoXBLBinding);
impl<'lb> GeckoXBLBinding<'lb> {
fn base_binding(&self) -> Option<Self> {
unsafe { self.0.mNextBinding.mRawPtr.as_ref().map(GeckoXBLBinding) }
}
fn anon_content(&self) -> *const nsIContent {
unsafe { self.0.mContent.raw::<nsIContent>() }
}
fn inherits_style(&self) -> bool {
unsafe { bindings::Gecko_XBLBinding_InheritsStyle(self.0) }
}
// This duplicates the logic in Gecko's
// nsBindingManager::GetBindingWithContent.
fn get_binding_with_content(&self) -> Option<Self> {
let mut binding = *self;
loop {
if !binding.anon_content().is_null() {
return Some(binding);
}
binding = match binding.base_binding() {
Some(b) => b,
None => return None,
};
}
}
fn each_xbl_stylist<F>(self, mut f: &mut F)
where
F: FnMut(&Stylist),
{
if let Some(base) = self.base_binding() {
base.each_xbl_stylist(f);
}
let raw_data = unsafe {
bindings::Gecko_XBLBinding_GetRawServoStyleSet(self.0)
};
if let Some(raw_data) = raw_data {
let data = PerDocumentStyleData::from_ffi(&*raw_data).borrow();
f(&data.stylist);
}
}
}
/// A simple wrapper over a non-null Gecko `Element` pointer.
#[derive(Clone, Copy)]
pub struct GeckoElement<'le>(pub &'le RawGeckoElement);
impl<'le> fmt::Debug for GeckoElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<{}", self.get_local_name())?;
if let Some(id) = self.get_id() {
write!(f, " id={}", id)?;
}
let mut first = true;
let mut any = false;
self.each_class(|c| {
if first {
first = false;
any = true;
let _ = f.write_str(" class=\"");
} else {
let _ = f.write_str(" ");
}
let _ = write!(f, "{}", c);
});
if any {
f.write_str("\"")?;
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> GeckoElement<'le> {
/// Parse the style attribute of an element.
pub fn parse_style_attribute(value: &str,
url_data: &UrlExtraData,
quirks_mode: QuirksMode,
reporter: &ParseErrorReporter) -> PropertyDeclarationBlock {
parse_style_attribute(value, url_data, reporter, quirks_mode)
}
fn flags(&self) -> u32 {
self.raw_node()._base._base_1.mFlags
}
fn raw_node(&self) -> &RawGeckoNode {
&(self.0)._base._base._base
}
// FIXME: We can implement this without OOL calls, but we can't easily given
// GeckoNode is a raw reference.
//
// We can use a Cell<T>, but that's a bit of a pain.
fn set_flags(&self, flags: u32) {
unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) }
}
fn unset_flags(&self, flags: u32) {
unsafe { Gecko_UnsetNodeFlags(self.as_node().0, flags) }
}
/// Returns true if this element has a shadow root.
fn has_shadow_root(&self) -> bool {
self.get_extended_slots().map_or(false, |slots| !slots.mShadowRoot.mRawPtr.is_null())
}
/// Returns a reference to the DOM slots for this Element, if they exist.
fn get_dom_slots(&self) -> Option<&structs::FragmentOrElement_nsDOMSlots> {
let slots = self.as_node().0.mSlots as *const structs::FragmentOrElement_nsDOMSlots;
unsafe { slots.as_ref() }
}
/// Returns a reference to the extended DOM slots for this Element.
fn get_extended_slots(&self) -> Option<&structs::FragmentOrElement_nsExtendedDOMSlots> {
self.get_dom_slots().and_then(|s| {
unsafe { s.mExtendedSlots.mPtr.as_ref() }
})
}
#[inline]
fn get_xbl_binding(&self) -> Option<GeckoXBLBinding> {
if self.flags() & (structs::NODE_MAY_BE_IN_BINDING_MNGR as u32) == 0 {
return None;
}
unsafe { bindings::Gecko_GetXBLBinding(self.0).map(GeckoXBLBinding) }
}
#[inline]
fn get_xbl_binding_with_content(&self) -> Option<GeckoXBLBinding> {
self.get_xbl_binding().and_then(|b| b.get_binding_with_content())
}
#[inline]
fn has_xbl_binding_with_content(&self) -> bool {
!self.get_xbl_binding_with_content().is_none()
}
/// This and has_xbl_binding_parent duplicate the logic in Gecko's virtual
/// nsINode::GetBindingParent function, which only has two implementations:
/// one for XUL elements, and one for other elements. We just hard code in
/// our knowledge of those two implementations here.
fn get_xbl_binding_parent(&self) -> Option<Self> {
if self.is_xul_element() {
// FIXME(heycam): Having trouble with bindgen on nsXULElement,
// where the binding parent is stored in a member variable
// rather than in slots. So just get it through FFI for now.
unsafe { bindings::Gecko_GetBindingParent(self.0).map(GeckoElement) }
} else {
let binding_parent =
unsafe { self.get_non_xul_xbl_binding_parent_raw_content().as_ref() }
.map(GeckoNode::from_content)
.and_then(|n| n.as_element());
debug_assert!(binding_parent ==
unsafe { bindings::Gecko_GetBindingParent(self.0).map(GeckoElement) });
binding_parent
}
}
fn get_non_xul_xbl_binding_parent_raw_content(&self) -> *mut nsIContent {
debug_assert!(!self.is_xul_element());
self.get_extended_slots().map_or(ptr::null_mut(), |slots| {
slots.mBindingParent
})
}
fn has_xbl_binding_parent(&self) -> bool {
if self.is_xul_element() {
// FIXME(heycam): Having trouble with bindgen on nsXULElement,
// where the binding parent is stored in a member variable
// rather than in slots. So just get it through FFI for now.
unsafe { bindings::Gecko_GetBindingParent(self.0).is_some() }
} else {
!self.get_non_xul_xbl_binding_parent_raw_content().is_null()
}
}
fn namespace_id(&self) -> i32 {
self.as_node().node_info().mInner.mNamespaceID
}
fn is_xul_element(&self) -> bool {
self.namespace_id() == (structs::root::kNameSpaceID_XUL as i32)
}
/// Sets the specified element data, return any existing data.
///
/// Like `ensure_data`, only safe to call with exclusive access to the
/// element.
pub unsafe fn set_data(&self, replace_data: Option<ElementData>) -> Option<ElementData> {
match (self.get_data(), replace_data) {
(Some(old), Some(replace_data)) => {
Some(mem::replace(old.borrow_mut().deref_mut(), replace_data))
}
(Some(old), None) => {
let old_data = mem::replace(old.borrow_mut().deref_mut(), ElementData::default());
self.0.mServoData.set(ptr::null_mut());
Some(old_data)
}
(None, Some(replace_data)) => {
let ptr = Box::into_raw(Box::new(AtomicRefCell::new(replace_data)));
self.0.mServoData.set(ptr);
None
}
(None, None) => None,
}
}
#[inline]
fn has_id(&self) -> bool {
self.as_node().get_bool_flag(nsINode_BooleanFlag::ElementHasID)
}
#[inline]
fn get_state_internal(&self) -> u64 {
if !self.as_node().get_bool_flag(nsINode_BooleanFlag::ElementHasLockedStyleStates) {
return self.0.mState.mStates;
}
unsafe { Gecko_ElementState(self.0) }
}
fn document_state(&self) -> DocumentState {
let node = self.as_node();
unsafe {
let states = Gecko_DocumentState(node.owner_doc());
DocumentState::from_bits_truncate(states)
}
}
#[inline]
fn may_have_class(&self) -> bool {
self.as_node().get_bool_flag(nsINode_BooleanFlag::ElementMayHaveClass)
}
#[inline]
fn has_properties(&self) -> bool {
use gecko_bindings::structs::NODE_HAS_PROPERTIES;
(self.flags() & NODE_HAS_PROPERTIES as u32) != 0
}
#[inline]
fn get_before_or_after_pseudo(&self, is_before: bool) -> Option<Self> {
if !self.has_properties() {
return None;
}
unsafe {
bindings::Gecko_GetBeforeOrAfterPseudo(self.0, is_before)
.map(GeckoElement)
}
}
#[inline]
fn may_have_style_attribute(&self) -> bool {
self.as_node().get_bool_flag(nsINode_BooleanFlag::ElementMayHaveStyle)
}
#[inline]
fn get_document_theme(&self) -> DocumentTheme {
let node = self.as_node();
unsafe { Gecko_GetDocumentLWTheme(node.owner_doc()) }
}
/// Owner document quirks mode getter.
pub fn owner_document_quirks_mode(&self) -> QuirksMode {
self.as_node().owner_doc().mCompatMode.into()
}
/// Only safe to call on the main thread, with exclusive access to the element and
/// its ancestors.
/// This function is also called after display property changed for SMIL animation.
///
/// Also this function schedules style flush.
unsafe fn maybe_restyle<'a>(&self,
data: &'a mut ElementData,
animation_only: bool) -> Option<&'a mut RestyleData> {
// Don't generate a useless RestyleData if the element hasn't been styled.
if !data.has_styles() {
return None;
}
// Propagate the bit up the chain.
if animation_only {
bindings::Gecko_NoteAnimationOnlyDirtyElement(self.0);
} else {
bindings::Gecko_NoteDirtyElement(self.0);
}
// Ensure and return the RestyleData.
Some(&mut data.restyle)
}
/// Set restyle and change hints to the element data.
pub fn note_explicit_hints(&self,
restyle_hint: nsRestyleHint,
change_hint: nsChangeHint) {
use gecko::restyle_damage::GeckoRestyleDamage;
use invalidation::element::restyle_hints::RestyleHint;
let damage = GeckoRestyleDamage::new(change_hint);
debug!("note_explicit_hints: {:?}, restyle_hint={:?}, change_hint={:?}",
self, restyle_hint, change_hint);
let restyle_hint: RestyleHint = restyle_hint.into();
debug_assert!(!(restyle_hint.has_animation_hint() &&
restyle_hint.has_non_animation_hint()),
"Animation restyle hints should not appear with non-animation restyle hints");
let mut maybe_data = self.mutate_data();
let maybe_restyle_data = maybe_data.as_mut().and_then(|d| unsafe {
self.maybe_restyle(d, restyle_hint.has_animation_hint())
});
if let Some(restyle_data) = maybe_restyle_data {
restyle_data.hint.insert(restyle_hint.into());
restyle_data.damage |= damage;
} else {
debug!("(Element not styled, discarding hints)");
}
}
}
/// Converts flags from the layout used by rust-selectors to the layout used
/// by Gecko. We could align these and then do this without conditionals, but
/// it's probably not worth the trouble.
fn selector_flags_to_node_flags(flags: ElementSelectorFlags) -> u32 {
use gecko_bindings::structs::*;
use selectors::matching::*;
let mut gecko_flags = 0u32;
if flags.contains(HAS_SLOW_SELECTOR) {
gecko_flags |= NODE_HAS_SLOW_SELECTOR as u32;
}
if flags.contains(HAS_SLOW_SELECTOR_LATER_SIBLINGS) {
gecko_flags |= NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS as u32;
}
if flags.contains(HAS_EDGE_CHILD_SELECTOR) {
gecko_flags |= NODE_HAS_EDGE_CHILD_SELECTOR as u32;
}
if flags.contains(HAS_EMPTY_SELECTOR) {
gecko_flags |= NODE_HAS_EMPTY_SELECTOR as u32;
}
gecko_flags
}
fn get_animation_rule(element: &GeckoElement,
cascade_level: CascadeLevel)
-> Option<Arc<Locked<PropertyDeclarationBlock>>> {
use gecko_bindings::sugar::ownership::HasSimpleFFI;
// Also, we should try to reuse the PDB, to avoid creating extra rule nodes.
let mut animation_values = AnimationValueMap::default();
if unsafe { Gecko_GetAnimationRule(element.0,
cascade_level,
AnimationValueMap::as_ffi_mut(&mut animation_values)) } {
let shared_lock = &GLOBAL_STYLE_DATA.shared_lock;
Some(Arc::new(shared_lock.wrap(
PropertyDeclarationBlock::from_animation_value_map(&animation_values))))
} else {
None
}
}
#[derive(Debug)]
/// Gecko font metrics provider
pub struct GeckoFontMetricsProvider {
/// Cache of base font sizes for each language
///
/// Usually will have 1 element.
///
// This may be slow on pages using more languages, might be worth optimizing
// by caching lang->group mapping separately and/or using a hashmap on larger
// loads.
pub font_size_cache: RefCell<Vec<(Atom, ::gecko_bindings::structs::FontSizePrefs)>>,
}
impl GeckoFontMetricsProvider {
/// Construct
pub fn new() -> Self {
GeckoFontMetricsProvider {
font_size_cache: RefCell::new(Vec::new()),
}
}
}
impl FontMetricsProvider for GeckoFontMetricsProvider {
    // Build a fresh provider per traversal; the shared style context carries
    // nothing this provider needs, hence the ignored argument.
    fn create_from(_: &SharedStyleContext) -> GeckoFontMetricsProvider {
        GeckoFontMetricsProvider::new()
    }
    // Return the default size for the given generic font family ID within the
    // language group named by `font_name`, memoizing the Gecko pref lookup.
    fn get_size(&self, font_name: &Atom, font_family: u8) -> Au {
        use gecko_bindings::bindings::Gecko_GetBaseSize;
        let mut cache = self.font_size_cache.borrow_mut();
        // Linear scan is fine here: the cache usually holds a single entry
        // (see the comment on `font_size_cache`).
        if let Some(sizes) = cache.iter().find(|el| el.0 == *font_name) {
            return sizes.1.size_for_generic(font_family);
        }
        // Cache miss: fetch the pref-derived sizes from Gecko and memoize.
        let sizes = unsafe {
            Gecko_GetBaseSize(font_name.as_ptr())
        };
        cache.push((font_name.clone(), sizes));
        sizes.size_for_generic(font_family)
    }
    // Query font metrics (x-height and zero-advance measure) for `font` at
    // `font_size`, taking vertical writing modes into account.
    fn query(&self, font: &Font, font_size: Au, wm: WritingMode,
             in_media_query: bool, device: &Device) -> FontMetricsQueryResult {
        use gecko_bindings::bindings::Gecko_GetFontMetrics;
        let gecko_metrics = unsafe {
            Gecko_GetFontMetrics(device.pres_context(),
                                 wm.is_vertical() && !wm.is_sideways(),
                                 font.gecko(),
                                 font_size.0,
                                 // we don't use the user font set in a media query
                                 !in_media_query)
        };
        let metrics = FontMetrics {
            x_height: Au(gecko_metrics.mXSize),
            zero_advance_measure: Au(gecko_metrics.mChSize),
        };
        // This provider always has metrics available (Gecko computes them on
        // demand), so we never return the "not available" variant here.
        FontMetricsQueryResult::Available(metrics)
    }
}
impl structs::FontSizePrefs {
    /// Map a generic font-family ID (one of Gecko's `kGenericFont_*` /
    /// `kPresContext_Default*Font_ID` constants) to the corresponding default
    /// size stored in this prefs struct.
    ///
    /// Panics on any ID outside the known set — callers are expected to pass
    /// only IDs Gecko hands out.
    fn size_for_generic(&self, font_family: u8) -> Au {
        Au(match font_family {
            structs::kPresContext_DefaultVariableFont_ID => self.mDefaultVariableSize,
            structs::kPresContext_DefaultFixedFont_ID => self.mDefaultFixedSize,
            structs::kGenericFont_serif => self.mDefaultSerifSize,
            structs::kGenericFont_sans_serif => self.mDefaultSansSerifSize,
            structs::kGenericFont_monospace => self.mDefaultMonospaceSize,
            structs::kGenericFont_cursive => self.mDefaultCursiveSize,
            structs::kGenericFont_fantasy => self.mDefaultFantasySize,
            x => unreachable!("Unknown generic ID {}", x),
        })
    }
}
impl<'le> TElement for GeckoElement<'le> {
    type ConcreteNode = GeckoNode<'le>;
    type FontMetricsProvider = GeckoFontMetricsProvider;
    // The element we inherit computed style from. For native anonymous
    // content (NAC) that's the closest non-NAC ancestor rather than the
    // flattened-tree parent.
    fn inheritance_parent(&self) -> Option<Self> {
        if self.is_native_anonymous() {
            self.closest_non_native_anonymous_ancestor()
        } else {
            self.as_node().flattened_tree_parent().and_then(|n| n.as_element())
        }
    }
    fn before_pseudo_element(&self) -> Option<Self> {
        self.get_before_or_after_pseudo(/* is_before = */ true)
    }
    fn after_pseudo_element(&self) -> Option<Self> {
        self.get_before_or_after_pseudo(/* is_before = */ false)
    }
    /// Execute `f` for each anonymous content child element (apart from
    /// ::before and ::after) whose originating element is `self`.
    fn each_anonymous_content_child<F>(&self, mut f: F)
    where
        F: FnMut(Self),
    {
        let array: *mut structs::nsTArray<*mut nsIContent> =
            unsafe { bindings::Gecko_GetAnonymousContentForElement(self.0) };
        if array.is_null() {
            return;
        }
        for content in unsafe { &**array } {
            let node = GeckoNode::from_content(unsafe { &**content });
            // Skip non-element anonymous content (e.g. text nodes).
            let element = match node.as_element() {
                Some(e) => e,
                None => continue,
            };
            f(element);
        }
        // The list was allocated on the Gecko side; hand it back for disposal.
        unsafe { bindings::Gecko_DestroyAnonymousContentList(array) };
    }
    // Walk ancestors until we leave the native-anonymous subtree; only valid
    // to call on NAC (asserted below).
    fn closest_non_native_anonymous_ancestor(&self) -> Option<Self> {
        debug_assert!(self.is_native_anonymous());
        let mut parent = match self.parent_element() {
            Some(e) => e,
            None => return None,
        };
        loop {
            if !parent.is_native_anonymous() {
                return Some(parent);
            }
            parent = match parent.parent_element() {
                Some(p) => p,
                None => return None,
            };
        }
    }
    fn as_node(&self) -> Self::ConcreteNode {
        // SAFETY-style note: element and node share a representation on the
        // Gecko side, so reinterpreting the pointer is how the binding views
        // the element as a node.
        unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
    }
    // Testing-only helper: does this element's document match the device's
    // pres context document?
    fn owner_doc_matches_for_testing(&self, device: &Device) -> bool {
        self.as_node().owner_doc() as *const structs::nsIDocument ==
            device.pres_context().mDocument.raw::<structs::nsIDocument>()
    }
    // The declaration block from the `style` attribute, if any.
    fn style_attribute(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        if !self.may_have_style_attribute() {
            return None;
        }
        let declarations = unsafe { Gecko_GetStyleAttrDeclarationBlock(self.0) };
        let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
            declarations.and_then(|s| s.as_arc_opt());
        declarations.map(|s| s.borrow_arc())
    }
    fn unset_dirty_style_attribute(&self) {
        if !self.may_have_style_attribute() {
            return;
        }
        unsafe { Gecko_UnsetDirtyStyleAttr(self.0) };
    }
    // The SMIL override declaration block (animated presentation attributes).
    fn get_smil_override(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        let declarations = unsafe { Gecko_GetSMILOverrideDeclarationBlock(self.0) };
        let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
            declarations.and_then(|s| s.as_arc_opt());
        declarations.map(|s| s.borrow_arc())
    }
    // Dispatch to the animation or transition rule depending on cascade
    // level; other levels are a caller bug.
    fn get_animation_rule_by_cascade(&self, cascade_level: ServoCascadeLevel)
                                     -> Option<Arc<Locked<PropertyDeclarationBlock>>> {
        match cascade_level {
            ServoCascadeLevel::Animations => self.get_animation_rule(),
            ServoCascadeLevel::Transitions => self.get_transition_rule(),
            _ => panic!("Unsupported cascade level for getting the animation rule")
        }
    }
    fn get_animation_rule(&self)
                          -> Option<Arc<Locked<PropertyDeclarationBlock>>> {
        get_animation_rule(self, CascadeLevel::Animations)
    }
    fn get_transition_rule(&self)
                           -> Option<Arc<Locked<PropertyDeclarationBlock>>> {
        get_animation_rule(self, CascadeLevel::Transitions)
    }
    fn get_state(&self) -> ElementState {
        // Truncate: Gecko may carry state bits Servo doesn't model.
        ElementState::from_bits_truncate(self.get_state_internal())
    }
    #[inline]
    fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
        unsafe {
            bindings::Gecko_HasAttr(self.0,
                                    namespace.0.as_ptr(),
                                    attr.as_ptr())
        }
    }
    fn get_id(&self) -> Option<Atom> {
        // Fast path: the "element has an id" bit avoids the attribute lookup.
        if !self.has_id() {
            return None
        }
        let ptr = unsafe {
            bindings::Gecko_AtomAttrValue(self.0,
                                          atom!("id").as_ptr())
        };
        if ptr.is_null() {
            None
        } else {
            Some(Atom::from(ptr))
        }
    }
    fn each_class<F>(&self, callback: F)
        where F: FnMut(&Atom)
    {
        snapshot_helpers::each_class(self.0,
                                     callback,
                                     Gecko_ClassOrClassList)
    }
    // The existing Gecko style context for this (pseudo-)element, used to
    // compute restyle damage against the new computed values.
    fn existing_style_for_restyle_damage<'a>(&'a self,
                                             _existing_values: &'a ComputedValues,
                                             pseudo: Option<&PseudoElement>)
                                             -> Option<&'a nsStyleContext> {
        // TODO(emilio): Migrate this to CSSPseudoElementType.
        let atom_ptr = pseudo.map_or(ptr::null_mut(), |p| p.atom().as_ptr());
        unsafe {
            let context_ptr = Gecko_GetStyleContext(self.0, atom_ptr);
            context_ptr.as_ref()
        }
    }
    // Snapshot bookkeeping: these flags live on the Gecko node.
    fn has_snapshot(&self) -> bool {
        self.flags() & (ELEMENT_HAS_SNAPSHOT as u32) != 0
    }
    fn handled_snapshot(&self) -> bool {
        self.flags() & (ELEMENT_HANDLED_SNAPSHOT as u32) != 0
    }
    unsafe fn set_handled_snapshot(&self) {
        debug_assert!(self.get_data().is_some());
        self.set_flags(ELEMENT_HANDLED_SNAPSHOT as u32)
    }
    // Dirty-descendant bits used to drive the restyle traversal.
    fn has_dirty_descendants(&self) -> bool {
        self.flags() & (ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
    }
    unsafe fn set_dirty_descendants(&self) {
        debug_assert!(self.get_data().is_some());
        debug!("Setting dirty descendants: {:?}", self);
        self.set_flags(ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }
    unsafe fn unset_dirty_descendants(&self) {
        self.unset_flags(ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }
    fn has_animation_only_dirty_descendants(&self) -> bool {
        self.flags() & (ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
    }
    unsafe fn set_animation_only_dirty_descendants(&self) {
        self.set_flags(ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }
    unsafe fn unset_animation_only_dirty_descendants(&self) {
        self.unset_flags(ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }
    fn is_visited_link(&self) -> bool {
        use element_state::IN_VISITED_STATE;
        self.get_state().intersects(IN_VISITED_STATE)
    }
    fn is_native_anonymous(&self) -> bool {
        self.flags() & (NODE_IS_NATIVE_ANONYMOUS as u32) != 0
    }
    // If this element *implements* a pseudo-element (e.g. is the anonymous
    // content backing ::placeholder), return which one.
    fn implemented_pseudo_element(&self) -> Option<PseudoElement> {
        if !self.is_native_anonymous() {
            return None;
        }
        if !self.has_properties() {
            return None;
        }
        let pseudo_type =
            unsafe { bindings::Gecko_GetImplementedPseudo(self.0) };
        PseudoElement::from_pseudo_type(pseudo_type)
    }
    fn store_children_to_process(&self, _: isize) {
        // This is only used for bottom-up traversal, and is thus a no-op for Gecko.
    }
    fn did_process_child(&self) -> isize {
        panic!("Atomic child count not implemented in Gecko");
    }
    fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
        unsafe { self.0.mServoData.get().as_ref() }
    }
    // Lazily allocate the Servo-side ElementData on first use and hand back a
    // mutable borrow of it.
    unsafe fn ensure_data(&self) -> AtomicRefMut<ElementData> {
        if self.get_data().is_none() {
            debug!("Creating ElementData for {:?}", self);
            let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::default())));
            self.0.mServoData.set(ptr);
        }
        self.mutate_data().unwrap()
    }
    // Drop the Servo-side data and clear snapshot flags. Must not race with
    // outstanding borrows (asserted in debug builds below).
    unsafe fn clear_data(&self) {
        let ptr = self.0.mServoData.get();
        unsafe {
            self.unset_flags(ELEMENT_HAS_SNAPSHOT as u32 |
                             ELEMENT_HANDLED_SNAPSHOT as u32);
        }
        if !ptr.is_null() {
            debug!("Dropping ElementData for {:?}", self);
            let data = unsafe { Box::from_raw(self.0.mServoData.get()) };
            self.0.mServoData.set(ptr::null_mut());
            // Perform a mutable borrow of the data in debug builds. This
            // serves as an assertion that there are no outstanding borrows
            // when we destroy the data.
            debug_assert!({ let _ = data.borrow_mut(); true });
        }
    }
    fn skip_root_and_item_based_display_fixup(&self) -> bool {
        // We don't want to fix up display values of native anonymous content.
        // Additionally, we want to skip root-based display fixup for document
        // level native anonymous content subtree roots, since they're not
        // really roots from the style fixup perspective. Checking that we
        // are NAC handles both cases.
        self.is_native_anonymous()
    }
    unsafe fn set_selector_flags(&self, flags: ElementSelectorFlags) {
        debug_assert!(!flags.is_empty());
        self.set_flags(selector_flags_to_node_flags(flags));
    }
    fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool {
        let node_flags = selector_flags_to_node_flags(flags);
        (self.flags() & node_flags) == node_flags
    }
    #[inline]
    fn may_have_animations(&self) -> bool {
        // For ::before/::after pseudos, the "has animations" bit lives on the
        // originating (parent) element, so delegate the check there.
        if let Some(pseudo) = self.implemented_pseudo_element() {
            if !pseudo.is_before_or_after() {
                return false;
            }
            return self.parent_element()
                       .map_or(false, |p| {
                           p.as_node()
                            .get_bool_flag(nsINode_BooleanFlag::ElementHasAnimations)
                       });
        }
        self.as_node().get_bool_flag(nsINode_BooleanFlag::ElementHasAnimations)
    }
    /// Process various tasks that are a result of animation-only restyle.
    fn process_post_animation(&self,
                              tasks: PostAnimationTasks) {
        use context::DISPLAY_CHANGED_FROM_NONE_FOR_SMIL;
        use gecko_bindings::structs::nsChangeHint_nsChangeHint_Empty;
        use gecko_bindings::structs::nsRestyleHint_eRestyle_Subtree;
        debug_assert!(!tasks.is_empty(), "Should be involved a task");
        // If display style was changed from none to other, we need to resolve
        // the descendants in the display:none subtree. Instead of resolving
        // those styles in animation-only restyle, we defer it to a subsequent
        // normal restyle.
        if tasks.intersects(DISPLAY_CHANGED_FROM_NONE_FOR_SMIL) {
            debug_assert!(self.implemented_pseudo_element()
                              .map_or(true, |p| !p.is_before_or_after()),
                          "display property animation shouldn't run on pseudo elements \
                           since it's only for SMIL");
            self.note_explicit_hints(nsRestyleHint_eRestyle_Subtree,
                                     nsChangeHint_nsChangeHint_Empty);
        }
    }
    /// Update various animation-related state on a given (pseudo-)element as
    /// results of normal restyle.
    fn update_animations(&self,
                         before_change_style: Option<Arc<ComputedValues>>,
                         tasks: UpdateAnimationsTasks) {
        // We have to update animations even if the element has no computed
        // style since it means the element is in a display:none subtree, we
        // should destroy all CSS animations in display:none subtree.
        let computed_data = self.borrow_data();
        let computed_values =
            computed_data.as_ref().map(|d| d.styles.primary());
        let before_change_values =
            before_change_style.as_ref().map(|x| &**x);
        let computed_values_opt = computed_values.as_ref().map(|x| &***x);
        unsafe {
            Gecko_UpdateAnimations(self.0,
                                   before_change_values,
                                   computed_values_opt,
                                   tasks.bits());
        }
    }
    // The FFI queries below are only meaningful when the "may have
    // animations" bit is set, so short-circuit on it first.
    fn has_animations(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasAnimations(self.0) }
    }
    fn has_css_animations(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasCSSAnimations(self.0) }
    }
    fn has_css_transitions(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasCSSTransitions(self.0) }
    }
    // Invoke `f` on the stylist of each XBL binding in scope; returns true if
    // the walk was cut off (i.e. some binding blocked style inheritance).
    fn each_xbl_stylist<F>(&self, mut f: F) -> bool
    where
        F: FnMut(&Stylist),
    {
        // Walk the binding scope chain, starting with the binding attached to
        // our content, up till we run out of scopes or we get cut off.
        //
        // If we are a NAC pseudo-element, we want to get rules from our
        // rule_hash_target, that is, our originating element.
        let mut current = Some(self.rule_hash_target());
        while let Some(element) = current {
            if let Some(binding) = element.get_xbl_binding() {
                binding.each_xbl_stylist(&mut f);
                // If we're not looking at our original element, allow the
                // binding to cut off style inheritance.
                if element != *self {
                    if !binding.inherits_style() {
                        // Go no further; we're not inheriting style from
                        // anything above here.
                        break;
                    }
                }
            }
            if element.as_node().is_root_of_native_anonymous_subtree() {
                // Deliberately cut off style inheritance here.
                break;
            }
            current = element.get_xbl_binding_parent();
        }
        // If current has something, this means we cut off inheritance at some
        // point in the loop.
        current.is_some()
    }
    fn xbl_binding_anonymous_content(&self) -> Option<GeckoNode<'le>> {
        self.get_xbl_binding_with_content()
            .map(|b| unsafe { b.anon_content().as_ref() }.unwrap())
            .map(GeckoNode::from_content)
    }
    // Collect the end values of all currently-running transitions on this
    // element, keyed by transition property.
    fn get_css_transitions_info(&self)
                                -> HashMap<TransitionProperty, Arc<AnimationValue>> {
        use gecko_bindings::bindings::Gecko_ElementTransitions_EndValueAt;
        use gecko_bindings::bindings::Gecko_ElementTransitions_Length;
        use gecko_bindings::bindings::Gecko_ElementTransitions_PropertyAt;
        let collection_length =
            unsafe { Gecko_ElementTransitions_Length(self.0) };
        let mut map = HashMap::with_capacity(collection_length);
        for i in 0..collection_length {
            let (property, raw_end_value) = unsafe {
                (Gecko_ElementTransitions_PropertyAt(self.0, i as usize).into(),
                 Gecko_ElementTransitions_EndValueAt(self.0, i as usize))
            };
            let end_value = AnimationValue::arc_from_borrowed(&raw_end_value);
            debug_assert!(end_value.is_some());
            map.insert(property, end_value.unwrap().clone_arc());
        }
        map
    }
    // Cheap pre-check for whether transitions might need updating; the
    // thorough check is needs_transitions_update below.
    fn might_need_transitions_update(&self,
                                     old_values: Option<&ComputedValues>,
                                     new_values: &ComputedValues) -> bool {
        use properties::longhands::display::computed_value as display;
        let old_values = match old_values {
            Some(v) => v,
            None => return false,
        };
        let new_box_style = new_values.get_box();
        // "Definitely no transition": none running, exactly one property
        // entry, and a non-positive combined duration.
        let transition_not_running = !self.has_css_transitions() &&
                                     new_box_style.transition_property_count() == 1 &&
                                     new_box_style.transition_combined_duration_at(0) <= 0.0f32;
        let new_display_style = new_box_style.clone_display();
        let old_display_style = old_values.get_box().clone_display();
        // Transitions never start or run across display:none boundaries.
        new_box_style.transition_property_count() > 0 &&
        !transition_not_running &&
        (new_display_style != display::T::none &&
         old_display_style != display::T::none)
    }
    // Detect if there are any changes that require us to update transitions.
    // This is used as a more thoroughgoing check than the, cheaper
    // might_need_transitions_update check.
    //
    // The following logic shadows the logic used on the Gecko side
    // (nsTransitionManager::DoUpdateTransitions) where we actually perform the
    // update.
    //
    // https://drafts.csswg.org/css-transitions/#starting
    fn needs_transitions_update(&self,
                                before_change_style: &ComputedValues,
                                after_change_style: &ComputedValues)
                                -> bool {
        use gecko_bindings::structs::nsCSSPropertyID;
        use std::collections::HashSet;
        debug_assert!(self.might_need_transitions_update(Some(before_change_style),
                                                         after_change_style),
                      "We should only call needs_transitions_update if \
                       might_need_transitions_update returns true");
        let after_change_box_style = after_change_style.get_box();
        let transitions_count = after_change_box_style.transition_property_count();
        let existing_transitions = self.get_css_transitions_info();
        // Only track "transitions to keep" when there are running transitions
        // and transition-property isn't `all` (with `all`, everything is kept).
        let mut transitions_to_keep = if !existing_transitions.is_empty() &&
                                         (after_change_box_style.transition_nscsspropertyid_at(0) !=
                                             nsCSSPropertyID::eCSSPropertyExtra_all_properties) {
            Some(HashSet::<TransitionProperty>::with_capacity(transitions_count))
        } else {
            None
        };
        // Check if this property is none, custom or unknown.
        let is_none_or_custom_property = |property: nsCSSPropertyID| -> bool {
            return property == nsCSSPropertyID::eCSSPropertyExtra_no_properties ||
                   property == nsCSSPropertyID::eCSSPropertyExtra_variable ||
                   property == nsCSSPropertyID::eCSSProperty_UNKNOWN;
        };
        for i in 0..transitions_count {
            let property = after_change_box_style.transition_nscsspropertyid_at(i);
            let combined_duration = after_change_box_style.transition_combined_duration_at(i);
            // We don't need to update transition for none/custom properties.
            if is_none_or_custom_property(property) {
                continue;
            }
            let transition_property: TransitionProperty = property.into();
            // Returns true (meaning "needs update") for a single longhand, and
            // records it as kept otherwise.
            let mut property_check_helper = |property: &TransitionProperty| -> bool {
                if self.needs_transitions_update_per_property(property,
                                                              combined_duration,
                                                              before_change_style,
                                                              after_change_style,
                                                              &existing_transitions) {
                    return true;
                }
                if let Some(set) = transitions_to_keep.as_mut() {
                    // The TransitionProperty here must be animatable, so cloning it is cheap
                    // because it is an integer-like enum.
                    set.insert(property.clone());
                }
                false
            };
            match transition_property {
                TransitionProperty::All => {
                    if TransitionProperty::any(property_check_helper) {
                        return true;
                    }
                },
                TransitionProperty::Unsupported(_) => { },
                ref shorthand if shorthand.is_shorthand() => {
                    if shorthand.longhands().iter().any(|p| property_check_helper(p)) {
                        return true;
                    }
                },
                ref longhand => {
                    if property_check_helper(longhand) {
                        return true;
                    }
                },
            };
        }
        // Check if we have to cancel the running transition because this is not a matching
        // transition-property value.
        transitions_to_keep.map_or(false, |set| {
            existing_transitions.keys().any(|property| !set.contains(property))
        })
    }
    // Per-longhand piece of needs_transitions_update: does `property` need a
    // new or updated transition?
    fn needs_transitions_update_per_property(&self,
                                             property: &TransitionProperty,
                                             combined_duration: f32,
                                             before_change_style: &ComputedValues,
                                             after_change_style: &ComputedValues,
                                             existing_transitions: &HashMap<TransitionProperty,
                                                                            Arc<AnimationValue>>)
                                             -> bool {
        // |property| should be an animatable longhand
        let animatable_longhand = AnimatableLonghand::from_transition_property(property).unwrap();
        if existing_transitions.contains_key(property) {
            // If there is an existing transition, update only if the end value differs.
            // If the end value has not changed, we should leave the currently running
            // transition as-is since we don't want to interrupt its timing function.
            let after_value =
                Arc::new(AnimationValue::from_computed_values(&animatable_longhand,
                                                              after_change_style));
            return existing_transitions.get(property).unwrap() != &after_value;
        }
        // No existing transition: one is needed iff the value actually
        // changes and the combined duration is positive.
        let from = AnimationValue::from_computed_values(&animatable_longhand,
                                                        before_change_style);
        let to = AnimationValue::from_computed_values(&animatable_longhand,
                                                      after_change_style);
        combined_duration > 0.0f32 && from != to
    }
    #[inline]
    fn lang_attr(&self) -> Option<AttrValue> {
        let ptr = unsafe { bindings::Gecko_LangValue(self.0) };
        if ptr.is_null() {
            None
        } else {
            // SAFETY: Gecko_LangValue hands us an already-addrefed atom.
            Some(unsafe { Atom::from_addrefed(ptr) })
        }
    }
    // Implements :lang() matching; `override_lang` (when present) takes
    // precedence over the element's own language.
    fn match_element_lang(&self,
                          override_lang: Option<Option<AttrValue>>,
                          value: &PseudoClassStringArg)
                          -> bool
    {
        // Gecko supports :lang() from CSS Selectors 3, which only accepts a
        // single language tag, and which performs simple dash-prefix matching
        // on it.
        debug_assert!(value.len() > 0 && value[value.len() - 1] == 0,
                      "expected value to be null terminated");
        let override_lang_ptr = match &override_lang {
            &Some(Some(ref atom)) => atom.as_ptr(),
            _ => ptr::null_mut(),
        };
        unsafe {
            Gecko_MatchLang(self.0, override_lang_ptr, override_lang.is_some(), value.as_ptr())
        }
    }
}
impl<'le> PartialEq for GeckoElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.0 as *const _ == other.0 as *const _
}
}
// Pointer identity is a total equivalence relation, so `Eq` is sound here.
impl<'le> Eq for GeckoElement<'le> {}
impl<'le> Hash for GeckoElement<'le> {
    /// Identity hash: feed the address of the wrapped Gecko element into the
    /// hasher, consistent with the pointer-based `PartialEq`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let identity = self.0 as *const _;
        identity.hash(state);
    }
}
impl<'le> PresentationalHintsSynthesizer for GeckoElement<'le> {
fn synthesize_presentational_hints_for_legacy_attributes<V>(&self,
visited_handling: VisitedHandlingMode,
hints: &mut V)
where V: Push<ApplicableDeclarationBlock>,
{
use properties::longhands::_x_lang::SpecifiedValue as SpecifiedLang;
use properties::longhands::_x_text_zoom::SpecifiedValue as SpecifiedZoom;
use properties::longhands::color::SpecifiedValue as SpecifiedColor;
use properties::longhands::text_align::SpecifiedValue as SpecifiedTextAlign;
use values::specified::color::Color;
lazy_static! {
static ref TH_RULE: ApplicableDeclarationBlock = {
let global_style_data = &*GLOBAL_STYLE_DATA;
let pdb = PropertyDeclarationBlock::with_one(
PropertyDeclaration::TextAlign(SpecifiedTextAlign::MozCenterOrInherit),
Importance::Normal
);
let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
};
static ref TABLE_COLOR_RULE: ApplicableDeclarationBlock = {
let global_style_data = &*GLOBAL_STYLE_DATA;
let pdb = PropertyDeclarationBlock::with_one(
PropertyDeclaration::Color(SpecifiedColor(Color::InheritFromBodyQuirk.into())),
Importance::Normal
);
let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
};
static ref MATHML_LANG_RULE: ApplicableDeclarationBlock = {
let global_style_data = &*GLOBAL_STYLE_DATA;
let pdb = PropertyDeclarationBlock::with_one(
PropertyDeclaration::XLang(SpecifiedLang(atom!("x-math"))),
Importance::Normal
);
let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
};
static ref SVG_TEXT_DISABLE_ZOOM_RULE: ApplicableDeclarationBlock = {
let global_style_data = &*GLOBAL_STYLE_DATA;
let pdb = PropertyDeclarationBlock::with_one(
PropertyDeclaration::XTextZoom(SpecifiedZoom(false)),
Importance::Normal
);
let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
};
};
let ns = self.namespace_id();<|fim▁hole|> if ns == structs::kNameSpaceID_XHTML as i32 {
if self.get_local_name().as_ptr() == atom!("th").as_ptr() {
hints.push(TH_RULE.clone());
} else if self.get_local_name().as_ptr() == atom!("table").as_ptr() &&
self.as_node().owner_doc().mCompatMode == structs::nsCompatibility::eCompatibility_NavQuirks {
hints.push(TABLE_COLOR_RULE.clone());
}
}
if ns == structs::kNameSpaceID_SVG as i32 {
if self.get_local_name().as_ptr() == atom!("text").as_ptr() {
hints.push(SVG_TEXT_DISABLE_ZOOM_RULE.clone());
}
}
let declarations = unsafe { Gecko_GetHTMLPresentationAttrDeclarationBlock(self.0) };
let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
declarations.and_then(|s| s.as_arc_opt());
if let Some(decl) = declarations {
hints.push(
ApplicableDeclarationBlock::from_declarations(decl.clone_arc(), ServoCascadeLevel::PresHints)
);
}
let declarations = unsafe { Gecko_GetExtraContentStyleDeclarations(self.0) };
let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
declarations.and_then(|s| s.as_arc_opt());
if let Some(decl) = declarations {
hints.push(
ApplicableDeclarationBlock::from_declarations(decl.clone_arc(), ServoCascadeLevel::PresHints)
);
}
// Support for link, vlink, and alink presentation hints on <body>
if self.is_link() {
// Unvisited vs. visited styles are computed up-front based on the
// visited mode (not the element's actual state).
let declarations = match visited_handling {
VisitedHandlingMode::AllLinksVisitedAndUnvisited => {
unreachable!("We should never try to selector match with \
AllLinksVisitedAndUnvisited");
},
VisitedHandlingMode::AllLinksUnvisited => unsafe {
Gecko_GetUnvisitedLinkAttrDeclarationBlock(self.0)
},
VisitedHandlingMode::RelevantLinkVisited => unsafe {
Gecko_GetVisitedLinkAttrDeclarationBlock(self.0)
},
};
let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
declarations.and_then(|s| s.as_arc_opt());
if let Some(decl) = declarations {
hints.push(
ApplicableDeclarationBlock::from_declarations(decl.clone_arc(), ServoCascadeLevel::PresHints)
);
}
let active = self.get_state().intersects(NonTSPseudoClass::Active.state_flag());
if active {
let declarations = unsafe { Gecko_GetActiveLinkAttrDeclarationBlock(self.0) };
let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
declarations.and_then(|s| s.as_arc_opt());
if let Some(decl) = declarations {
hints.push(
ApplicableDeclarationBlock::from_declarations(decl.clone_arc(), ServoCascadeLevel::PresHints)
);
}
}
}
// xml:lang has precedence over lang, which can be
// set by Gecko_GetHTMLPresentationAttrDeclarationBlock
//
// http://www.whatwg.org/specs/web-apps/current-work/multipage/elements.html#language
let ptr = unsafe {
bindings::Gecko_GetXMLLangValue(self.0)
};
if !ptr.is_null() {
let global_style_data = &*GLOBAL_STYLE_DATA;
let pdb = PropertyDeclarationBlock::with_one(
PropertyDeclaration::XLang(SpecifiedLang(unsafe { Atom::from_addrefed(ptr) })),
Importance::Normal
);
let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
hints.push(ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints))
}
// MathML's default lang has precedence over both `lang` and `xml:lang`
if ns == structs::kNameSpaceID_MathML as i32 {
if self.get_local_name().as_ptr() == atom!("math").as_ptr() {
hints.push(MATHML_LANG_RULE.clone());
}
}
}
}
impl<'le> ::selectors::Element for GeckoElement<'le> {
    type Impl = SelectorImpl;
    fn parent_element(&self) -> Option<Self> {
        // FIXME(emilio): This will need to jump across if the parent node is a
        // shadow root to get the shadow host.
        let parent_node = self.as_node().parent_node();
        parent_node.and_then(|n| n.as_element())
    }
    // For a pseudo-implementing element, its originating element is the
    // closest non-NAC ancestor.
    fn pseudo_element_originating_element(&self) -> Option<Self> {
        debug_assert!(self.implemented_pseudo_element().is_some());
        self.closest_non_native_anonymous_ancestor()
    }
    // Sibling/child walks below skip non-element nodes (text, comments, ...).
    fn first_child_element(&self) -> Option<Self> {
        let mut child = self.as_node().first_child();
        while let Some(child_node) = child {
            if let Some(el) = child_node.as_element() {
                return Some(el)
            }
            child = child_node.next_sibling();
        }
        None
    }
    fn last_child_element(&self) -> Option<Self> {
        let mut child = self.as_node().last_child();
        while let Some(child_node) = child {
            if let Some(el) = child_node.as_element() {
                return Some(el)
            }
            child = child_node.prev_sibling();
        }
        None
    }
    fn prev_sibling_element(&self) -> Option<Self> {
        let mut sibling = self.as_node().prev_sibling();
        while let Some(sibling_node) = sibling {
            if let Some(el) = sibling_node.as_element() {
                return Some(el)
            }
            sibling = sibling_node.prev_sibling();
        }
        None
    }
    fn next_sibling_element(&self) -> Option<Self> {
        let mut sibling = self.as_node().next_sibling();
        while let Some(sibling_node) = sibling {
            if let Some(el) = sibling_node.as_element() {
                return Some(el)
            }
            sibling = sibling_node.next_sibling();
        }
        None
    }
    // Attribute selector matching ([attr], [attr=v], [attr^=v], ...), each
    // operator dispatched to the corresponding Gecko binding.
    fn attr_matches(&self,
                    ns: &NamespaceConstraint<&Namespace>,
                    local_name: &Atom,
                    operation: &AttrSelectorOperation<&Atom>)
                    -> bool {
        unsafe {
            match *operation {
                AttrSelectorOperation::Exists => {
                    bindings::Gecko_HasAttr(self.0,
                                            ns.atom_or_null(),
                                            local_name.as_ptr())
                }
                AttrSelectorOperation::WithValue { operator, case_sensitivity, expected_value } => {
                    let ignore_case = match case_sensitivity {
                        CaseSensitivity::CaseSensitive => false,
                        CaseSensitivity::AsciiCaseInsensitive => true,
                    };
                    // FIXME: case sensitivity for operators other than Equal
                    match operator {
                        AttrSelectorOperator::Equal => bindings::Gecko_AttrEquals(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case
                        ),
                        AttrSelectorOperator::Includes => bindings::Gecko_AttrIncludes(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::DashMatch => bindings::Gecko_AttrDashEquals(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Prefix => bindings::Gecko_AttrHasPrefix(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Suffix => bindings::Gecko_AttrHasSuffix(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Substring => bindings::Gecko_AttrHasSubstring(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                    }
                }
            }
        }
    }
    fn is_root(&self) -> bool {
        unsafe {
            Gecko_IsRootElement(self.0)
        }
    }
    // :empty — true when no child is "significant" per Gecko's definition
    // (the bool args to Gecko_IsSignificantChild select which kinds count).
    fn is_empty(&self) -> bool {
        !self.as_node().dom_children().any(|child| unsafe {
            Gecko_IsSignificantChild(child.0, true, true)
        })
    }
    fn get_local_name(&self) -> &WeakAtom {
        unsafe {
            WeakAtom::new(self.as_node().node_info().mInner.mName)
        }
    }
    fn get_namespace(&self) -> &WeakNamespace {
        unsafe {
            WeakNamespace::new(Gecko_Namespace(self.0))
        }
    }
    // Non-tree-structural pseudo-class matching. Most pseudo-classes map
    // directly onto element state bits; the rest are handled per-variant.
    fn match_non_ts_pseudo_class<F>(&self,
                                    pseudo_class: &NonTSPseudoClass,
                                    context: &mut LocalMatchingContext<Self::Impl>,
                                    relevant_link: &RelevantLinkStatus,
                                    flags_setter: &mut F)
                                    -> bool
        where F: FnMut(&Self, ElementSelectorFlags),
    {
        use selectors::matching::*;
        match *pseudo_class {
            // All of these are pure element-state bits: match iff the
            // corresponding state flag is set.
            NonTSPseudoClass::Focus |
            NonTSPseudoClass::Enabled |
            NonTSPseudoClass::Disabled |
            NonTSPseudoClass::Checked |
            NonTSPseudoClass::Fullscreen |
            NonTSPseudoClass::MozFullScreen |
            NonTSPseudoClass::Indeterminate |
            NonTSPseudoClass::PlaceholderShown |
            NonTSPseudoClass::Target |
            NonTSPseudoClass::Valid |
            NonTSPseudoClass::Invalid |
            NonTSPseudoClass::MozUIValid |
            NonTSPseudoClass::MozBroken |
            NonTSPseudoClass::MozUserDisabled |
            NonTSPseudoClass::MozSuppressed |
            NonTSPseudoClass::MozLoading |
            NonTSPseudoClass::MozHandlerBlocked |
            NonTSPseudoClass::MozHandlerDisabled |
            NonTSPseudoClass::MozHandlerCrashed |
            NonTSPseudoClass::Required |
            NonTSPseudoClass::Optional |
            NonTSPseudoClass::MozReadOnly |
            NonTSPseudoClass::MozReadWrite |
            NonTSPseudoClass::Unresolved |
            NonTSPseudoClass::FocusWithin |
            NonTSPseudoClass::MozDragOver |
            NonTSPseudoClass::MozDevtoolsHighlighted |
            NonTSPseudoClass::MozStyleeditorTransitioning |
            NonTSPseudoClass::MozFocusRing |
            NonTSPseudoClass::MozHandlerClickToPlay |
            NonTSPseudoClass::MozHandlerVulnerableUpdatable |
            NonTSPseudoClass::MozHandlerVulnerableNoUpdate |
            NonTSPseudoClass::MozMathIncrementScriptLevel |
            NonTSPseudoClass::InRange |
            NonTSPseudoClass::OutOfRange |
            NonTSPseudoClass::Default |
            NonTSPseudoClass::MozSubmitInvalid |
            NonTSPseudoClass::MozUIInvalid |
            NonTSPseudoClass::MozMeterOptimum |
            NonTSPseudoClass::MozMeterSubOptimum |
            NonTSPseudoClass::MozMeterSubSubOptimum |
            NonTSPseudoClass::MozHasDirAttr |
            NonTSPseudoClass::MozDirAttrLTR |
            NonTSPseudoClass::MozDirAttrRTL |
            NonTSPseudoClass::MozDirAttrLikeAuto |
            NonTSPseudoClass::MozAutofill |
            NonTSPseudoClass::MozAutofillPreview => {
                self.get_state().intersects(pseudo_class.state_flag())
            },
            NonTSPseudoClass::AnyLink => self.is_link(),
            // :link / :visited defer to the relevant-link machinery, which
            // accounts for the visited-handling mode in use.
            NonTSPseudoClass::Link => relevant_link.is_unvisited(self, context.shared),
            NonTSPseudoClass::Visited => relevant_link.is_visited(self, context.shared),
            NonTSPseudoClass::Active |
            NonTSPseudoClass::Hover => {
                // Quirks mode: :active/:hover only match links.
                if context.active_hover_quirk_matches() && !self.is_link() {
                    false
                } else {
                    self.get_state().contains(pseudo_class.state_flag())
                }
            },
            // :-moz-first-node — no preceding sibling with non-whitespace
            // content. Sets the edge-child flag so edits invalidate properly.
            NonTSPseudoClass::MozFirstNode => {
                flags_setter(self, HAS_EDGE_CHILD_SELECTOR);
                let mut elem = self.as_node();
                while let Some(prev) = elem.prev_sibling() {
                    if prev.contains_non_whitespace_content() {
                        return false
                    }
                    elem = prev;
                }
                true
            }
            NonTSPseudoClass::MozLastNode => {
                flags_setter(self, HAS_EDGE_CHILD_SELECTOR);
                let mut elem = self.as_node();
                while let Some(next) = elem.next_sibling() {
                    if next.contains_non_whitespace_content() {
                        return false
                    }
                    elem = next;
                }
                true
            }
            NonTSPseudoClass::MozOnlyWhitespace => {
                flags_setter(self, HAS_EMPTY_SELECTOR);
                if self.as_node().dom_children().any(|c| c.contains_non_whitespace_content()) {
                    return false
                }
                true
            }
            // These need Gecko-side knowledge; forward to the binding.
            NonTSPseudoClass::MozTableBorderNonzero |
            NonTSPseudoClass::MozBrowserFrame |
            NonTSPseudoClass::MozNativeAnonymous |
            NonTSPseudoClass::MozUseShadowTreeRoot => unsafe {
                Gecko_MatchesElement(pseudo_class.to_gecko_pseudoclasstype().unwrap(), self.0)
            },
            NonTSPseudoClass::MozIsHTML => {
                self.is_html_element_in_html_document()
            }
            // Lightweight-theme pseudo-classes keyed off the document theme.
            NonTSPseudoClass::MozLWTheme => {
                self.get_document_theme() != DocumentTheme::Doc_Theme_None
            }
            NonTSPseudoClass::MozLWThemeBrightText => {
                self.get_document_theme() == DocumentTheme::Doc_Theme_Bright
            }
            NonTSPseudoClass::MozLWThemeDarkText => {
                self.get_document_theme() == DocumentTheme::Doc_Theme_Dark
            }
            NonTSPseudoClass::MozWindowInactive => {
                self.document_state().contains(NS_DOCUMENT_STATE_WINDOW_INACTIVE)
            }
            NonTSPseudoClass::MozPlaceholder => false,
            // :-moz-any(...) — match any of the inner complex selectors,
            // bumping the nesting level around the recursive match.
            NonTSPseudoClass::MozAny(ref sels) => {
                context.nesting_level += 1;
                let result = sels.iter().any(|s| {
                    matches_complex_selector(s.iter(), self, context, flags_setter)
                });
                context.nesting_level -= 1;
                result
            }
            NonTSPseudoClass::Lang(ref lang_arg) => {
                self.match_element_lang(None, lang_arg)
            }
            // String-argument pseudo-classes are matched on the Gecko side;
            // Gecko may also ask us to mark the selector as slow.
            NonTSPseudoClass::MozSystemMetric(ref s) |
            NonTSPseudoClass::MozLocaleDir(ref s) |
            NonTSPseudoClass::MozEmptyExceptChildrenWithLocalname(ref s) |
            NonTSPseudoClass::Dir(ref s) => {
                unsafe {
                    let mut set_slow_selector = false;
                    let matches = Gecko_MatchStringArgPseudo(self.0,
                                       pseudo_class.to_gecko_pseudoclasstype().unwrap(),
                                       s.as_ptr(), &mut set_slow_selector);
                    if set_slow_selector {
                        flags_setter(self, HAS_SLOW_SELECTOR);
                    }
                    matches
                }
            }
        }
    }
    fn match_pseudo_element(&self,
                            pseudo_element: &PseudoElement,
                            _context: &mut MatchingContext)
                            -> bool
    {
        // TODO(emilio): I believe we could assert we are a pseudo-element and
        // match the proper pseudo-element, given how we rulehash the stuff
        // based on the pseudo.
        match self.implemented_pseudo_element() {
            Some(ref pseudo) => *pseudo == pseudo_element.canonical(),
            None => false,
        }
    }
    #[inline]
    fn is_link(&self) -> bool {
        self.get_state().intersects(NonTSPseudoClass::AnyLink.state_flag())
    }
    fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
        // Fast path on the "has id" bit before fetching the attribute value.
        if !self.has_id() {
            return false
        }
        unsafe {
            let ptr = bindings::Gecko_AtomAttrValue(self.0, atom!("id").as_ptr());
            if ptr.is_null() {
                false
            } else {
                case_sensitivity.eq_atom(WeakAtom::new(ptr), id)
            }
        }
    }
    fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
        if !self.may_have_class() {
            return false;
        }
        snapshot_helpers::has_class(self.0,
                                    name,
                                    case_sensitivity,
                                    Gecko_ClassOrClassList)
    }
    fn is_html_element_in_html_document(&self) -> bool {
        let node = self.as_node();
        let node_info = node.node_info();
        node_info.mInner.mNamespaceID == (structs::root::kNameSpaceID_XHTML as i32) &&
        node.owner_doc().mType == structs::root::nsIDocument_Type::eHTML
    }
    // Whether selectors should refuse to cross from this element up to its
    // ancestors (used for `<svg:use>` shadow trees).
    fn blocks_ancestor_combinators(&self) -> bool {
        use gecko_bindings::structs::NODE_IS_ANONYMOUS_ROOT;
        if self.flags() & (NODE_IS_ANONYMOUS_ROOT as u32) == 0 {
            return false
        }
        match self.parent_element() {
            Some(e) => {
                // If this element is the shadow root of an use-element shadow
                // tree, according to the spec, we should not match rules
                // cross the shadow DOM boundary.
                e.get_local_name() == &*local_name!("use") &&
                e.get_namespace() == &*ns!("http://www.w3.org/2000/svg")
            },
            None => false,
        }
    }
}
/// A few helpers to help with attribute selectors and snapshotting.
pub trait NamespaceConstraintHelpers {
/// Returns the namespace of the selector, or null otherwise.
fn atom_or_null(&self) -> *mut nsIAtom;
}
impl<'a> NamespaceConstraintHelpers for NamespaceConstraint<&'a Namespace> {
fn atom_or_null(&self) -> *mut nsIAtom {
match *self {
NamespaceConstraint::Any => ptr::null_mut(),
NamespaceConstraint::Specific(ref ns) => ns.0.as_ptr(),
}
}
}
impl<'le> ElementExt for GeckoElement<'le> {
#[inline]
fn matches_user_and_author_rules(&self) -> bool {
self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) == 0
}
}<|fim▁end|>
|
// <th> elements get a default MozCenterOrInherit which may get overridden
|
<|file_name|>foreman.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
#
# Copyright (C) 2016 Guido Günther <[email protected]>,
# Daniel Lobato Garcia <[email protected]>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with it. If not, see <http://www.gnu.org/licenses/>.
#
# This is somewhat based on cobbler inventory
# Stdlib imports
# __future__ imports must occur at the beginning of file
from __future__ import print_function
try:
# Python 2 version
import ConfigParser
except ImportError:
# Python 3 version
import configparser as ConfigParser
import json
import argparse
import copy
import os
import re
import sys
from time import time
from collections import defaultdict
from distutils.version import LooseVersion, StrictVersion
# 3rd party imports
import requests
if LooseVersion(requests.__version__) < LooseVersion('1.1.0'):
print('This script requires python-requests 1.1 as a minimum version')
sys.exit(1)
from requests.auth import HTTPBasicAuth
from ansible.module_utils._text import to_text
def json_format_dict(data, pretty=False):
"""Converts a dict to a JSON object and dumps it as a formatted string"""
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
class ForemanInventory(object):
def __init__(self):
self.inventory = defaultdict(list) # A list of groups and the hosts in that group
self.cache = dict() # Details about hosts in the inventory
self.params = dict() # Params of each host
self.facts = dict() # Facts of each host
self.hostgroups = dict() # host groups
self.hostcollections = dict() # host collections
self.session = None # Requests session
self.config_paths = [
"/etc/ansible/foreman.ini",
os.path.dirname(os.path.realpath(__file__)) + '/foreman.ini',
]
env_value = os.environ.get('FOREMAN_INI_PATH')
if env_value is not None:
self.config_paths.append(os.path.expanduser(os.path.expandvars(env_value)))
def read_settings(self):
"""Reads the settings from the foreman.ini file"""
config = ConfigParser.SafeConfigParser()
config.read(self.config_paths)
# Foreman API related
try:
self.foreman_url = config.get('foreman', 'url')
self.foreman_user = config.get('foreman', 'user')
self.foreman_pw = config.get('foreman', 'password', raw=True)
self.foreman_ssl_verify = config.getboolean('foreman', 'ssl_verify')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
print("Error parsing configuration: %s" % e, file=sys.stderr)
return False
# Ansible related
try:
group_patterns = config.get('ansible', 'group_patterns')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
group_patterns = "[]"
self.group_patterns = json.loads(group_patterns)
try:
self.group_prefix = config.get('ansible', 'group_prefix')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.group_prefix = "foreman_"
try:
self.want_facts = config.getboolean('ansible', 'want_facts')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.want_facts = True
try:
self.want_hostcollections = config.getboolean('ansible', 'want_hostcollections')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.want_hostcollections = False
# Do we want parameters to be interpreted if possible as JSON? (no by default)
try:
self.rich_params = config.getboolean('ansible', 'rich_params')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.rich_params = False
try:
self.host_filters = config.get('foreman', 'host_filters')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.host_filters = None
# Cache related
try:
cache_path = os.path.expanduser(config.get('cache', 'path'))
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
cache_path = '.'
(script, ext) = os.path.splitext(os.path.basename(__file__))
self.cache_path_cache = cache_path + "/%s.cache" % script
self.cache_path_inventory = cache_path + "/%s.index" % script
self.cache_path_params = cache_path + "/%s.params" % script
self.cache_path_facts = cache_path + "/%s.facts" % script
self.cache_path_hostcollections = cache_path + "/%s.hostcollections" % script
try:
self.cache_max_age = config.getint('cache', 'max_age')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.cache_max_age = 60
try:
self.scan_new_hosts = config.getboolean('cache', 'scan_new_hosts')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
self.scan_new_hosts = False
return True
def parse_cli_args(self):
"""Command line argument processing"""
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on foreman')
parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to foreman (default: False - use cache files)')
self.args = parser.parse_args()
def _get_session(self):
if not self.session:
self.session = requests.session()
self.session.auth = HTTPBasicAuth(self.foreman_user, self.foreman_pw)
self.session.verify = self.foreman_ssl_verify
return self.session
def _get_json(self, url, ignore_errors=None, params=None):
if params is None:
params = {}
params['per_page'] = 250
page = 1
results = []
s = self._get_session()
while True:
params['page'] = page
ret = s.get(url, params=params)
if ignore_errors and ret.status_code in ignore_errors:
break
ret.raise_for_status()
json = ret.json()
# /hosts/:id has not results key
if 'results' not in json:
return json
# Facts are returned as dict in results not list
if isinstance(json['results'], dict):
return json['results']
# List of all hosts is returned paginaged
results = results + json['results']
if len(results) >= json['subtotal']:
break
page += 1
if len(json['results']) == 0:
print("Did not make any progress during loop. "
"expected %d got %d" % (json['total'], len(results)),
file=sys.stderr)
break
return results
def _get_hosts(self):
url = "%s/api/v2/hosts" % self.foreman_url
params = {}
if self.host_filters:
params['search'] = self.host_filters
return self._get_json(url, params=params)
def _get_host_data_by_id(self, hid):
url = "%s/api/v2/hosts/%s" % (self.foreman_url, hid)
return self._get_json(url)
def _get_facts_by_id(self, hid):
url = "%s/api/v2/hosts/%s/facts" % (self.foreman_url, hid)
return self._get_json(url)
def _resolve_params(self, host_params):
"""Convert host params to dict"""
params = {}
for param in host_params:
name = param['name']
if self.rich_params:
try:
params[name] = json.loads(param['value'])
except ValueError:
params[name] = param['value']
else:
params[name] = param['value']
return params
def _get_facts(self, host):
"""Fetch all host facts of the host"""
if not self.want_facts:
return {}
ret = self._get_facts_by_id(host['id'])
if len(ret.values()) == 0:
facts = {}
elif len(ret.values()) == 1:
facts = list(ret.values())[0]
else:
raise ValueError("More than one set of facts returned for '%s'" % host)
return facts
def write_to_cache(self, data, filename):
"""Write data in JSON format to a file"""
json_data = json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def _write_cache(self):
self.write_to_cache(self.cache, self.cache_path_cache)
self.write_to_cache(self.inventory, self.cache_path_inventory)
self.write_to_cache(self.params, self.cache_path_params)
self.write_to_cache(self.facts, self.cache_path_facts)
self.write_to_cache(self.hostcollections, self.cache_path_hostcollections)
def to_safe(self, word):
'''Converts 'bad' characters in a string to underscores
so they can be used as Ansible groups
>>> ForemanInventory.to_safe("foo-bar baz")
'foo_barbaz'
'''
regex = r"[^A-Za-z0-9\_]"
return re.sub(regex, "_", word.replace(" ", ""))
def update_cache(self, scan_only_new_hosts=False):
"""Make calls to foreman and save the output in a cache"""
self.groups = dict()
self.hosts = dict()
for host in self._get_hosts():
if host['name'] in self.cache.keys() and scan_only_new_hosts:
continue
dns_name = host['name']
host_data = self._get_host_data_by_id(host['id'])
host_params = host_data.get('all_parameters', {})
# Create ansible groups for hostgroup
group = 'hostgroup'
val = host.get('%s_title' % group) or host.get('%s_name' % group)
if val:
safe_key = self.to_safe('%s%s_%s' % (
to_text(self.group_prefix),
group,
to_text(val).lower()
))
self.inventory[safe_key].append(dns_name)
# Create ansible groups for environment, location and organization
for group in ['environment', 'location', 'organization']:
val = host.get('%s_name' % group)
if val:
safe_key = self.to_safe('%s%s_%s' % (
to_text(self.group_prefix),
group,
to_text(val).lower()
))
self.inventory[safe_key].append(dns_name)
for group in ['lifecycle_environment', 'content_view']:
val = host.get('content_facet_attributes', {}).get('%s_name' % group)
if val:
safe_key = self.to_safe('%s%s_%s' % (
to_text(self.group_prefix),
group,
to_text(val).lower()
))
self.inventory[safe_key].append(dns_name)
params = self._resolve_params(host_params)
# Ansible groups by parameters in host groups and Foreman host
# attributes.
groupby = dict()
for k, v in params.items():
groupby[k] = self.to_safe(to_text(v))
# The name of the ansible groups is given by group_patterns:
for pattern in self.group_patterns:
try:
key = pattern.format(**groupby)
self.inventory[key].append(dns_name)
except KeyError:
pass # Host not part of this group
if self.want_hostcollections:
hostcollections = host_data.get('host_collections')
if hostcollections:
# Create Ansible groups for host collections
for hostcollection in hostcollections:
safe_key = self.to_safe('%shostcollection_%s' % (self.group_prefix, hostcollection['name'].lower()))
self.inventory[safe_key].append(dns_name)
self.hostcollections[dns_name] = hostcollections
self.cache[dns_name] = host
self.params[dns_name] = params
self.facts[dns_name] = self._get_facts(host)
self.inventory['all'].append(dns_name)
self._write_cache()
def is_cache_valid(self):
"""Determines if the cache is still valid"""
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if (os.path.isfile(self.cache_path_inventory) and
os.path.isfile(self.cache_path_params) and
os.path.isfile(self.cache_path_facts)):
return True
return False
def load_inventory_from_cache(self):
"""Read the index from the cache file sets self.index"""
with open(self.cache_path_inventory, 'r') as fp:
self.inventory = json.load(fp)
def load_params_from_cache(self):
"""Read the index from the cache file sets self.index"""
with open(self.cache_path_params, 'r') as fp:
self.params = json.load(fp)
def load_facts_from_cache(self):
"""Read the index from the cache file sets self.facts"""
if not self.want_facts:
return
with open(self.cache_path_facts, 'r') as fp:
self.facts = json.load(fp)
def load_hostcollections_from_cache(self):
"""Read the index from the cache file sets self.hostcollections"""
if not self.want_hostcollections:
return
with open(self.cache_path_hostcollections, 'r') as fp:
self.hostcollections = json.load(fp)
def load_cache_from_cache(self):
"""Read the cache from the cache file sets self.cache"""
with open(self.cache_path_cache, 'r') as fp:
self.cache = json.load(fp)
def get_inventory(self):
if self.args.refresh_cache or not self.is_cache_valid():
self.update_cache()
else:
self.load_inventory_from_cache()
self.load_params_from_cache()
self.load_facts_from_cache()
self.load_hostcollections_from_cache()
self.load_cache_from_cache()
if self.scan_new_hosts:
self.update_cache(True)
def get_host_info(self):
"""Get variables about a specific host"""
if not self.cache or len(self.cache) == 0:
# Need to load index from cache
self.load_cache_from_cache()
if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
if self.args.host not in self.cache:
# host might not exist anymore
return json_format_dict({}, True)
return json_format_dict(self.cache[self.args.host], True)
def _print_data(self):
data_to_print = ""
if self.args.host:
data_to_print += self.get_host_info()
else:
self.inventory['_meta'] = {'hostvars': {}}
for hostname in self.cache:
self.inventory['_meta']['hostvars'][hostname] = {
'foreman': self.cache[hostname],
'foreman_params': self.params[hostname],
}
if self.want_facts:
self.inventory['_meta']['hostvars'][hostname]['foreman_facts'] = self.facts[hostname]
data_to_print += json_format_dict(self.inventory, True)
print(data_to_print)
def run(self):
# Read settings and parse CLI arguments
if not self.read_settings():
return False
self.parse_cli_args()
self.get_inventory()
self._print_data()
return True
if __name__ == '__main__':
sys.exit(not ForemanInventory().run())<|fim▁end|>
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.