prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>invalidation_map.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Code for invalidations due to state or attribute changes.
use {Atom, LocalName, Namespace};
use context::QuirksMode;
use element_state::ElementState;
use selector_map::{MaybeCaseInsensitiveHashMap, SelectorMap, SelectorMapEntry};
use selector_parser::SelectorImpl;
use selectors::attr::NamespaceConstraint;
use selectors::parser::{Combinator, Component};
use selectors::parser::{Selector, SelectorIter, SelectorMethods};
use selectors::visitor::SelectorVisitor;
use smallvec::SmallVec;
#[cfg(feature = "gecko")]
/// Gets the element state relevant to the given `:dir` pseudo-class selector.
pub fn dir_selector_to_state(s: &[u16]) -> ElementState {
use element_state::{IN_LTR_STATE, IN_RTL_STATE};
// Jump through some hoops to deal with our Box<[u16]> thing.
const LTR: [u16; 4] = [b'l' as u16, b't' as u16, b'r' as u16, 0];
const RTL: [u16; 4] = [b'r' as u16, b't' as u16, b'l' as u16, 0];
if LTR == *s {
IN_LTR_STATE
} else if RTL == *s {
IN_RTL_STATE
} else {
// :dir(something-random) is a valid selector, but shouldn't
// match anything.
ElementState::empty()
}
}
/// Mapping between (partial) CompoundSelectors (and the combinator to their
/// right) and the states and attributes they depend on.
///
/// In general, for all selectors in all applicable stylesheets of the form:
///
/// |a _ b _ c _ d _ e|
///
/// Where:
/// * |b| and |d| are simple selectors that depend on state (like :hover) or
/// attributes (like [attr...], .foo, or #foo).
/// * |a|, |c|, and |e| are arbitrary simple selectors that do not depend on
/// state or attributes.
///
/// We generate a Dependency for both |a _ b:X _| and |a _ b:X _ c _ d:Y _|,
/// even though those selectors may not appear on their own in any stylesheet.
/// This allows us to quickly scan through the dependency sites of all style
/// rules and determine the maximum effect that a given state or attribute
/// change may have on the style of elements in the document.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct Dependency {
/// The dependency selector.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
pub selector: Selector<SelectorImpl>,
/// The offset into the selector that we should match on.
pub selector_offset: usize,
}
impl Dependency {
/// Returns the combinator to the right of the partial selector this
/// dependency represents.
///
/// TODO(emilio): Consider storing inline if it helps cache locality?
pub fn combinator(&self) -> Option<Combinator> {
if self.selector_offset == 0 {
return None;
}
Some(self.selector.combinator_at(self.selector_offset))
}
/// Whether this dependency affects the style of the element.
///
/// NOTE(emilio): pseudo-elements need to be here to account for eager
/// pseudos, since they just grab the style from the originating element.
///
/// TODO(emilio): We could look at the selector itself to see if it's an
/// eager pseudo, and return false here if not.
pub fn affects_self(&self) -> bool {
matches!(self.combinator(), None | Some(Combinator::PseudoElement))
}
/// Whether this dependency may affect style of any of our descendants.
pub fn affects_descendants(&self) -> bool {
matches!(self.combinator(), Some(Combinator::PseudoElement) |
Some(Combinator::Child) |
Some(Combinator::Descendant))
}
/// Whether this dependency may affect style of any of our later siblings.
pub fn affects_later_siblings(&self) -> bool {
matches!(self.combinator(), Some(Combinator::NextSibling) |
Some(Combinator::LaterSibling))
}
}
impl SelectorMapEntry for Dependency {
fn selector(&self) -> SelectorIter<SelectorImpl> {
self.selector.iter_from(self.selector_offset)
}
}
/// The same, but for state selectors, which can track more exactly what state
/// do they track.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct StateDependency {
/// The other dependency fields.
pub dep: Dependency,
/// The state this dependency is affected by.
pub state: ElementState,
}
impl SelectorMapEntry for StateDependency {
fn selector(&self) -> SelectorIter<SelectorImpl> {
self.dep.selector()
}
}
/// A map where we store invalidations.
///
/// This is slightly different to a SelectorMap, in the sense of that the same
/// selector may appear multiple times.
///
/// In particular, we want to lookup as few things as possible to get the fewer
/// selectors the better, so this looks up by id, class, or looks at the list of
/// state/other attribute affecting selectors.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct InvalidationMap {
/// A map from a given class name to all the selectors with that class
/// selector.
pub class_to_selector: MaybeCaseInsensitiveHashMap<Atom, SelectorMap<Dependency>>,
/// A map from a given id to all the selectors with that ID in the
/// stylesheets currently applying to the document.
pub id_to_selector: MaybeCaseInsensitiveHashMap<Atom, SelectorMap<Dependency>>,
/// A map of all the state dependencies.
pub state_affecting_selectors: SelectorMap<StateDependency>,
/// A map of other attribute affecting selectors.
pub other_attribute_affecting_selectors: SelectorMap<Dependency>,
/// Whether there are attribute rules of the form `[class~="foo"]` that may
/// match. In that case, we need to look at
/// `other_attribute_affecting_selectors` too even if only the `class` has
/// changed.
pub has_class_attribute_selectors: bool,
/// Whether there are attribute rules of the form `[id|="foo"]` that may
/// match. In that case, we need to look at
/// `other_attribute_affecting_selectors` too even if only the `id` has
/// changed.
pub has_id_attribute_selectors: bool,
}
impl InvalidationMap {
/// Creates an empty `InvalidationMap`.
pub fn new() -> Self {
Self {
class_to_selector: MaybeCaseInsensitiveHashMap::new(),
id_to_selector: MaybeCaseInsensitiveHashMap::new(),
state_affecting_selectors: SelectorMap::new(),
other_attribute_affecting_selectors: SelectorMap::new(),
has_class_attribute_selectors: false,
has_id_attribute_selectors: false,
}
}
/// Returns the number of dependencies stored in the invalidation map.
pub fn len(&self) -> usize {
self.state_affecting_selectors.len() +
self.other_attribute_affecting_selectors.len() +
self.id_to_selector.iter().fold(0, |accum, (_, ref v)| {
accum + v.len()
}) +
self.class_to_selector.iter().fold(0, |accum, (_, ref v)| {
accum + v.len()
})
}
/// Adds a selector to this `InvalidationMap`.
pub fn note_selector(
&mut self,
selector: &Selector<SelectorImpl>,
quirks_mode: QuirksMode)
{
self.collect_invalidations_for(selector, quirks_mode)
}
/// Clears this map, leaving it empty.
pub fn clear(&mut self) {
self.class_to_selector.clear();
self.id_to_selector.clear();
self.state_affecting_selectors.clear();
self.other_attribute_affecting_selectors.clear();
self.has_id_attribute_selectors = false;
self.has_class_attribute_selectors = false;
}
fn collect_invalidations_for(
&mut self,
selector: &Selector<SelectorImpl>,
quirks_mode: QuirksMode)
{
debug!("InvalidationMap::collect_invalidations_for({:?})", selector);
let mut iter = selector.iter();
let mut combinator;
let mut index = 0;
loop {
let sequence_start = index;
let mut compound_visitor = CompoundSelectorDependencyCollector {
classes: SmallVec::new(),
ids: SmallVec::new(),
state: ElementState::empty(),
other_attributes: false,
has_id_attribute_selectors: false,
has_class_attribute_selectors: false,
};
// Visit all the simple selectors in this sequence.
//
// Note that this works because we can't have combinators nested
// inside simple selectors (i.e. in :not() or :-moz-any()).
//
// If we ever support that we'll need to visit nested complex
// selectors as well, in order to mark them as affecting descendants
// at least.
for ss in &mut iter {
ss.visit(&mut compound_visitor);
index += 1; // Account for the simple selector.
}
self.has_id_attribute_selectors |= compound_visitor.has_id_attribute_selectors;
self.has_class_attribute_selectors |= compound_visitor.has_class_attribute_selectors;
for class in compound_visitor.classes {
self.class_to_selector
.entry(class, quirks_mode)
.or_insert_with(SelectorMap::new)
.insert(Dependency {
selector: selector.clone(),
selector_offset: sequence_start,
}, quirks_mode);
}
for id in compound_visitor.ids {
self.id_to_selector
.entry(id, quirks_mode)
.or_insert_with(SelectorMap::new)
.insert(Dependency {
selector: selector.clone(),
selector_offset: sequence_start,<|fim▁hole|>
if !compound_visitor.state.is_empty() {
self.state_affecting_selectors
.insert(StateDependency {
dep: Dependency {
selector: selector.clone(),
selector_offset: sequence_start,
},
state: compound_visitor.state,
}, quirks_mode);
}
if compound_visitor.other_attributes {
self.other_attribute_affecting_selectors
.insert(Dependency {
selector: selector.clone(),
selector_offset: sequence_start,
}, quirks_mode);
}
combinator = iter.next_sequence();
if combinator.is_none() {
break;
}
index += 1; // Account for the combinator.
}
}
}
/// A struct that collects invalidations for a given compound selector.
struct CompoundSelectorDependencyCollector {
/// The state this compound selector is affected by.
state: ElementState,
/// The classes this compound selector is affected by.
///
/// NB: This will be often a single class, but could be multiple in
/// presence of :not, :-moz-any, .foo.bar.baz, etc.
classes: SmallVec<[Atom; 5]>,
/// The IDs this compound selector is affected by.
///
/// NB: This will be almost always a single id, but could be multiple in
/// presence of :not, :-moz-any, #foo#bar, etc.
ids: SmallVec<[Atom; 5]>,
/// Whether it affects other attribute-dependent selectors that aren't ID or
/// class selectors (NB: We still set this to true in presence of [class] or
/// [id] attribute selectors).
other_attributes: bool,
/// Whether there were attribute selectors with the id attribute.
has_id_attribute_selectors: bool,
/// Whether there were attribute selectors with the class attribute.
has_class_attribute_selectors: bool,
}
impl SelectorVisitor for CompoundSelectorDependencyCollector {
type Impl = SelectorImpl;
fn visit_simple_selector(&mut self, s: &Component<SelectorImpl>) -> bool {
#[cfg(feature = "gecko")]
use selector_parser::NonTSPseudoClass;
match *s {
Component::ID(ref id) => {
self.ids.push(id.clone());
}
Component::Class(ref class) => {
self.classes.push(class.clone());
}
Component::NonTSPseudoClass(ref pc) => {
self.other_attributes |= pc.is_attr_based();
self.state |= match *pc {
#[cfg(feature = "gecko")]
NonTSPseudoClass::Dir(ref s) => {
dir_selector_to_state(s)
}
_ => pc.state_flag(),
};
}
_ => {}
}
true
}
fn visit_attribute_selector(
&mut self,
constraint: &NamespaceConstraint<&Namespace>,
_local_name: &LocalName,
local_name_lower: &LocalName,
) -> bool {
self.other_attributes = true;
let may_match_in_no_namespace = match *constraint {
NamespaceConstraint::Any => true,
NamespaceConstraint::Specific(ref ns) => ns.is_empty(),
};
if may_match_in_no_namespace {
self.has_id_attribute_selectors |= *local_name_lower == local_name!("id");
self.has_class_attribute_selectors |= *local_name_lower == local_name!("class");
}
true
}
}<|fim▁end|>
|
}, quirks_mode);
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>mod linking_rust;<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/*===============================================================================================*/
// Copyright 2016 Kyle Finlay
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*===============================================================================================*/<|fim▁hole|>//! Contains all window related functionality.
/*===============================================================================================*/
mod window;
mod window_config;
pub use self::window::Window;
pub use self::window_config::WindowConfig;<|fim▁end|>
|
/*===============================================================================================*/
//! The window module.
//!
|
<|file_name|>identity.js<|end_file_name|><|fim▁begin|>// Copyright 2017 Joyent, Inc.
module.exports = Identity;
var assert = require('assert-plus');
var algs = require('./algs');
var crypto = require('crypto');
var Fingerprint = require('./fingerprint');
var Signature = require('./signature');
var errs = require('./errors');
var util = require('util');
var utils = require('./utils');
var asn1 = require('asn1');
/*JSSTYLED*/
var DNS_NAME_RE = /^([*]|[a-z0-9][a-z0-9\-]{0,62})(?:\.([*]|[a-z0-9][a-z0-9\-]{0,62}))*$/i;
var oids = {};
oids.cn = '2.5.4.3';
oids.o = '2.5.4.10';
oids.ou = '2.5.4.11';
oids.l = '2.5.4.7';
oids.s = '2.5.4.8';
oids.c = '2.5.4.6';
oids.sn = '2.5.4.4';
oids.dc = '0.9.2342.19200300.100.1.25';
oids.uid = '0.9.2342.19200300.100.1.1';
oids.mail = '0.9.2342.19200300.100.1.3';
var unoids = {};
Object.keys(oids).forEach(function (k) {
unoids[oids[k]] = k;
});
function Identity(opts) {
var self = this;
assert.object(opts, 'options');
assert.arrayOfObject(opts.components, 'options.components');
this.components = opts.components;
this.componentLookup = {};
this.components.forEach(function (c) {
if (c.name && !c.oid)
c.oid = oids[c.name];
if (c.oid && !c.name)
c.name = unoids[c.oid];
if (self.componentLookup[c.name] === undefined)
self.componentLookup[c.name] = [];
self.componentLookup[c.name].push(c);
});
if (this.componentLookup.cn && this.componentLookup.cn.length > 0) {
this.cn = this.componentLookup.cn[0].value;
}
assert.optionalString(opts.type, 'options.type');
if (opts.type === undefined) {
if (this.components.length === 1 &&
this.componentLookup.cn &&
this.componentLookup.cn.length === 1 &&
this.componentLookup.cn[0].value.match(DNS_NAME_RE)) {
this.type = 'host';
this.hostname = this.componentLookup.cn[0].value;
} else if (this.componentLookup.dc &&
this.components.length === this.componentLookup.dc.length) {
this.type = 'host';
this.hostname = this.componentLookup.dc.map(
function (c) {
return (c.value);
}).join('.');
} else if (this.componentLookup.uid &&
this.components.length ===
this.componentLookup.uid.length) {
this.type = 'user';
this.uid = this.componentLookup.uid[0].value;
} else if (this.componentLookup.cn &&
this.componentLookup.cn.length === 1 &&
this.componentLookup.cn[0].value.match(DNS_NAME_RE)) {
this.type = 'host';
this.hostname = this.componentLookup.cn[0].value;
} else if (this.componentLookup.uid &&
this.componentLookup.uid.length === 1) {
this.type = 'user';
this.uid = this.componentLookup.uid[0].value;
} else if (this.componentLookup.mail &&
this.componentLookup.mail.length === 1) {
this.type = 'email';
this.email = this.componentLookup.mail[0].value;
} else if (this.componentLookup.cn &&
this.componentLookup.cn.length === 1) {
this.type = 'user';
this.uid = this.componentLookup.cn[0].value;
} else {
this.type = 'unknown';
}
} else {
this.type = opts.type;
if (this.type === 'host')
this.hostname = opts.hostname;
else if (this.type === 'user')
this.uid = opts.uid;
else if (this.type === 'email')
this.email = opts.email;
else
throw (new Error('Unknown type ' + this.type));
}
}
Identity.prototype.toString = function () {
return (this.components.map(function (c) {
return (c.name.toUpperCase() + '=' + c.value);
}).join(', '));
};
/*
* These are from X.680 -- PrintableString allowed chars are in section 37.4
* table 8. Spec for IA5Strings is "1,6 + SPACE + DEL" where 1 refers to
* ISO IR #001 (standard ASCII control characters) and 6 refers to ISO IR #006
* (the basic ASCII character set).
*/
/* JSSTYLED */
var NOT_PRINTABLE = /[^a-zA-Z0-9 '(),+.\/:=?-]/;
/* JSSTYLED */
var NOT_IA5 = /[^\x00-\x7f]/;
Identity.prototype.toAsn1 = function (der, tag) {
der.startSequence(tag);
this.components.forEach(function (c) {
der.startSequence(asn1.Ber.Constructor | asn1.Ber.Set);
der.startSequence();
der.writeOID(c.oid);
/*
* If we fit in a PrintableString, use that. Otherwise use an
* IA5String or UTF8String.
*/
if (c.value.match(NOT_IA5)) {
var v = new Buffer(c.value, 'utf8');
der.writeBuffer(v, asn1.Ber.Utf8String);
} else if (c.value.match(NOT_PRINTABLE)) {
der.writeString(c.value, asn1.Ber.IA5String);
} else {
der.writeString(c.value, asn1.Ber.PrintableString);
}
der.endSequence();
der.endSequence();
});
der.endSequence();
};
function globMatch(a, b) {
if (a === '**' || b === '**')
return (true);
var aParts = a.split('.');
var bParts = b.split('.');
if (aParts.length !== bParts.length)
return (false);
for (var i = 0; i < aParts.length; ++i) {
if (aParts[i] === '*' || bParts[i] === '*')
continue;
if (aParts[i] !== bParts[i])
return (false);
}
return (true);
}
Identity.prototype.equals = function (other) {
if (!Identity.isIdentity(other, [1, 0]))<|fim▁hole|> return (false);
for (var i = 0; i < this.components.length; ++i) {
if (this.components[i].oid !== other.components[i].oid)
return (false);
if (!globMatch(this.components[i].value,
other.components[i].value)) {
return (false);
}
}
return (true);
};
Identity.forHost = function (hostname) {
assert.string(hostname, 'hostname');
return (new Identity({
type: 'host',
hostname: hostname,
components: [{name: 'cn', value: hostname}]
}));
};
Identity.forUser = function (uid) {
assert.string(uid, 'uid');
return (new Identity({
type: 'user',
uid: uid,
components: [{name: 'uid', value: uid}]
}));
};
Identity.forEmail = function (email) {
assert.string(email, 'email');
return (new Identity({
type: 'email',
email: email,
components: [{name: 'mail', value: email}]
}));
};
Identity.parseDN = function (dn) {
assert.string(dn, 'dn');
var parts = dn.split(',');
var cmps = parts.map(function (c) {
c = c.trim();
var eqPos = c.indexOf('=');
var name = c.slice(0, eqPos).toLowerCase();
var value = c.slice(eqPos + 1);
return ({name: name, value: value});
});
return (new Identity({components: cmps}));
};
Identity.parseAsn1 = function (der, top) {
var components = [];
der.readSequence(top);
var end = der.offset + der.length;
while (der.offset < end) {
der.readSequence(asn1.Ber.Constructor | asn1.Ber.Set);
var after = der.offset + der.length;
der.readSequence();
var oid = der.readOID();
var type = der.peek();
var value;
switch (type) {
case asn1.Ber.PrintableString:
case asn1.Ber.IA5String:
case asn1.Ber.OctetString:
case asn1.Ber.T61String:
value = der.readString(type);
break;
case asn1.Ber.Utf8String:
value = der.readString(type, true);
value = value.toString('utf8');
break;
case asn1.Ber.CharacterString:
case asn1.Ber.BMPString:
value = der.readString(type, true);
value = value.toString('utf16le');
break;
default:
throw (new Error('Unknown asn1 type ' + type));
}
components.push({oid: oid, value: value});
der._offset = after;
}
der._offset = end;
return (new Identity({
components: components
}));
};
Identity.isIdentity = function (obj, ver) {
return (utils.isCompatible(obj, Identity, ver));
};
/*
* API versions for Identity:
* [1,0] -- initial ver
*/
Identity.prototype._sshpkApiVersion = [1, 0];
Identity._oldVersionDetect = function (obj) {
return ([1, 0]);
};<|fim▁end|>
|
return (false);
if (other.components.length !== this.components.length)
|
<|file_name|>selectrow.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import os
if len(sys.argv) >= 4 :
filename = sys.argv[1]
row_i = int(sys.argv[2])-1
target_ls_filename = sys.argv[3]
output_filename = sys.argv[4]
else:
print("usage: python selectrow.py filename row_i target_ls_filename")
print("or ./selectrow.py filename row_i target_ls_filename")
sys.exit(1)
################################################################################
file = open(filename,'r')
dt = {}
for line in file:
ls=line.strip().split('\t')
if not dt.has_key(ls[row_i]):
dt[ ls[row_i] ] = []
dt[ ls[row_i] ].append( line.strip() )
file.close()
################################################################################
output = open(output_filename,'w')
target_ls_file = open(target_ls_filename, 'r')
for line in target_ls_file:
id = line.strip()<|fim▁hole|> if len(dt[id])>1:
print id + '\t' + str(len(dt[id]))
for item in dt[id]:
output.write( item + '\n')
output.close()
target_ls_file.close()<|fim▁end|>
|
if not dt.has_key(id):
print id
continue
|
<|file_name|>image_ops.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Image processing and decoding ops.
See the @{$python/image} guide.
@@decode_bmp
@@decode_gif
@@decode_jpeg
@@decode_and_crop_jpeg
@@encode_jpeg
@@extract_jpeg_shape
@@decode_png
@@encode_png
@@is_jpeg
@@decode_image
@@resize_images
@@resize_area
@@resize_bicubic
@@resize_bilinear
@@resize_nearest_neighbor
@@resize_image_with_crop_or_pad
@@central_crop
@@pad_to_bounding_box
@@crop_to_bounding_box
@@extract_glimpse
@@crop_and_resize
@@flip_up_down
@@random_flip_up_down
@@flip_left_right
@@random_flip_left_right
@@transpose_image
@@rot90
@@rgb_to_grayscale
@@grayscale_to_rgb
@@hsv_to_rgb
@@rgb_to_hsv
@@rgb_to_yiq
@@yiq_to_rgb
@@rgb_to_yuv
@@yuv_to_rgb
@@convert_image_dtype
@@adjust_brightness
@@random_brightness
@@adjust_contrast
@@random_contrast
@@adjust_hue
@@random_hue
@@adjust_gamma
@@adjust_saturation
@@random_saturation
@@per_image_standardization<|fim▁hole|>@@total_variation
@@psnr
@@ssim
@@ssim_multiscale
@@image_gradients
@@sobel_edges
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_image_ops import *
from tensorflow.python.ops.image_ops_impl import *
# pylint: enable=wildcard-import
# TODO(drpng): remove these once internal use has discontinued.
# pylint: disable=unused-import
from tensorflow.python.ops.image_ops_impl import _Check3DImage
from tensorflow.python.ops.image_ops_impl import _ImageDimensions
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
# ResizeMethod is not documented, but is documented in functions
# that use it.
'ResizeMethod',
]
remove_undocumented(__name__, _allowed_symbols)<|fim▁end|>
|
@@draw_bounding_boxes
@@non_max_suppression
@@sample_distorted_bounding_box
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.<|fim▁hole|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
from horizon import version
from openstack_dashboard.openstack.common import setup as os_common_setup
requires = os_common_setup.parse_requirements()
depend_links = os_common_setup.parse_dependency_links()
tests_require = os_common_setup.parse_requirements(['tools/test-requires'])
ROOT = os.path.dirname(__file__)
target_dirs = ['horizon', 'openstack_dashboard', 'bin']
def read(fname):
return open(os.path.join(ROOT, fname)).read()
def split(path, result=None):
"""
Split a path into components in a platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return split(head, [tail] + result)
# Tell distutils not to put the data_files in platform-specific installation
# locations. See here for an explanation:
# https://groups.google.com/forum/#!topic/comp.lang.python/Nex7L-026uw
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
for target_dir in target_dirs:
for dirpath, dirnames, filenames in os.walk(target_dir):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(split(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f)
for f in filenames]])
setup(name="horizon",
version=version.canonical_version_string(),
url='https://github.com/openstack/horizon/',
license='Apache 2.0',
description="The OpenStack Dashboard.",
long_description=read('README.rst'),
author='OpenStack',
author_email='[email protected]',
packages=packages,
data_files=data_files,
cmdclass=os_common_setup.get_cmdclass(),
include_package_data=True,
install_requires=requires,
tests_require=tests_require,
dependency_links=depend_links,
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Environment :: OpenStack']
)<|fim▁end|>
|
#
# Copyright 2012 Nebula, Inc.
#
|
<|file_name|>LeafTypeProviderImpl.java<|end_file_name|><|fim▁begin|>package com.github.fhtw.swp.tutorium.guice;
import com.github.fhtw.swp.tutorium.composite.Leaf;
import com.github.fhtw.swp.tutorium.composite.LeafTypeProvider;
import com.github.fhtw.swp.tutorium.reflection.AnnotatedTypeFinder;
import org.reflections.Configuration;
import javax.inject.Inject;<|fim▁hole|>
public class LeafTypeProviderImpl implements LeafTypeProvider {
private final Configuration configuration;
@Inject
public LeafTypeProviderImpl(Configuration configuration) {
this.configuration = configuration;
}
@Override
public Set<Class<?>> getLeafTypes() {
return new AnnotatedTypeFinder(configuration, Leaf.class).getAnnotatedTypes();
}
}<|fim▁end|>
|
import java.util.Set;
|
<|file_name|>bootstrap-table-en-US.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
(global = global || self, factory(global.jQuery));
}(this, (function ($) { 'use strict';
$ = $ && $.hasOwnProperty('default') ? $['default'] : $;
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var check = function (it) {
return it && it.Math == Math && it;
};
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global_1 =
// eslint-disable-next-line no-undef
check(typeof globalThis == 'object' && globalThis) ||
check(typeof window == 'object' && window) ||
check(typeof self == 'object' && self) ||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
// eslint-disable-next-line no-new-func
Function('return this')();
var fails = function (exec) {
try {
return !!exec();
} catch (error) {
return true;
}
};
// Thank's IE8 for his funny defineProperty
var descriptors = !fails(function () {
return Object.defineProperty({}, 'a', { get: function () { return 7; } }).a != 7;
});
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
var getOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
// Nashorn ~ JDK8 bug
var NASHORN_BUG = getOwnPropertyDescriptor && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
// `Object.prototype.propertyIsEnumerable` method implementation
// https://tc39.github.io/ecma262/#sec-object.prototype.propertyisenumerable
var f = NASHORN_BUG ? function propertyIsEnumerable(V) {
var descriptor = getOwnPropertyDescriptor(this, V);
return !!descriptor && descriptor.enumerable;
} : nativePropertyIsEnumerable;
var objectPropertyIsEnumerable = {
f: f
};
var createPropertyDescriptor = function (bitmap, value) {
return {
enumerable: !(bitmap & 1),
configurable: !(bitmap & 2),
writable: !(bitmap & 4),
value: value
};
};
var toString = {}.toString;
var classofRaw = function (it) {
return toString.call(it).slice(8, -1);
};
var split = ''.split;
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var indexedObject = fails(function () {
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
// eslint-disable-next-line no-prototype-builtins
return !Object('z').propertyIsEnumerable(0);
}) ? function (it) {
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
} : Object;
// `RequireObjectCoercible` abstract operation
// https://tc39.github.io/ecma262/#sec-requireobjectcoercible
var requireObjectCoercible = function (it) {
if (it == undefined) throw TypeError("Can't call method on " + it);
return it;
};
// toObject with fallback for non-array-like ES3 strings
var toIndexedObject = function (it) {
return indexedObject(requireObjectCoercible(it));
};
var isObject = function (it) {
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
// `ToPrimitive` abstract operation
// https://tc39.github.io/ecma262/#sec-toprimitive
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
var toPrimitive = function (input, PREFERRED_STRING) {
if (!isObject(input)) return input;
var fn, val;
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
throw TypeError("Can't convert object to primitive value");
};
// Uncurried Object.prototype.hasOwnProperty — safe even for objects created
// with Object.create(null).
var hasOwnProperty = {}.hasOwnProperty;
var has = function (it, key) {
  return hasOwnProperty.call(it, key);
};
var document = global_1.document;
// typeof document.createElement is 'object' in old IE
var EXISTS = isObject(document) && isObject(document.createElement);
// Creates a host DOM element, or a plain object when no DOM is available
// (e.g. Node.js), so the feature tests below never throw.
var documentCreateElement = function (it) {
  return EXISTS ? document.createElement(it) : {};
};
// Thank's IE8 for his funny defineProperty
// IE8 supports Object.defineProperty only on DOM elements, and accessors do
// not actually work there; this detects that half-broken state.
var ie8DomDefine = !descriptors && !fails(function () {
  return Object.defineProperty(documentCreateElement('div'), 'a', {
    get: function () { return 7; }
  }).a != 7;
});
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
// `Object.getOwnPropertyDescriptor` method
// https://tc39.github.io/ecma262/#sec-object.getownpropertydescriptor
// The fallback reconstructs a descriptor via propertyIsEnumerable on pre-ES5
// engines, where real descriptors do not exist.
var f$1 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
  O = toIndexedObject(O);
  P = toPrimitive(P, true);
  // IE8's native implementation works for DOM objects; try it first.
  if (ie8DomDefine) try {
    return nativeGetOwnPropertyDescriptor(O, P);
  } catch (error) { /* empty */ }
  if (has(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
};
var objectGetOwnPropertyDescriptor = {
  f: f$1
};
// Asserts that `it` is object-like; used to validate receiver arguments.
var anObject = function (it) {
  if (!isObject(it)) {
    throw TypeError(String(it) + ' is not an object');
  } return it;
};
var nativeDefineProperty = Object.defineProperty;
// `Object.defineProperty` method
// https://tc39.github.io/ecma262/#sec-object.defineproperty
// The fallback supports data descriptors only; accessors cannot be emulated
// in ES3, hence the explicit TypeError below.
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
  anObject(O);
  P = toPrimitive(P, true);
  anObject(Attributes);
  if (ie8DomDefine) try {
    return nativeDefineProperty(O, P, Attributes);
  } catch (error) { /* empty */ }
  if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
  if ('value' in Attributes) O[P] = Attributes.value;
  return O;
};
var objectDefineProperty = {
  f: f$2
};
// Defines a non-enumerable property where descriptors are supported, falling
// back to plain (enumerable) assignment otherwise.
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
  return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
} : function (object, key, value) {
  object[key] = value;
  return object;
};
// Installs a value on the global object, tolerating environments where the
// non-enumerable define throws (e.g. frozen globals).
var setGlobal = function (key, value) {
  try {
    createNonEnumerableProperty(global_1, key, value);
  } catch (error) {
    global_1[key] = value;
  } return value;
};
var SHARED = '__core-js_shared__';
// Cross-copy shared store: every core-js instance on the page reuses the same
// global object kept under this well-known key.
var store = global_1[SHARED] || setGlobal(SHARED, {});
var sharedStore = store;
var functionToString = Function.toString;
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
if (typeof sharedStore.inspectSource != 'function') {
  sharedStore.inspectSource = function (it) {
    return functionToString.call(it);
  };
}
var inspectSource = sharedStore.inspectSource;
var WeakMap = global_1.WeakMap;
// WeakMap is considered usable only when it is the native implementation
// (its source text contains "native code"), not a userland polyfill.
var nativeWeakMap = typeof WeakMap === 'function' && /native code/.test(inspectSource(WeakMap));
// Versioned accessor for the cross-copy shared store: shared(key, value)
// returns sharedStore[key], initializing it on first use; this copy also
// registers its version metadata under 'versions'.
var shared = createCommonjsModule(function (module) {
  (module.exports = function (key, value) {
    return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
  })('versions', []).push({
    version: '3.6.0',
    mode: 'global',
    copyright: '© 2019 Denis Pushkarev (zloirock.ru)'
  });
});
var id = 0;
var postfix = Math.random();
// Produces a reasonably unique string key, e.g. "Symbol(desc)_14g3j".
// A monotonically increasing counter plus a per-load random postfix keeps
// keys from colliding across multiple copies of the library on one page.
var uid = function (key) {
  var description = key === undefined ? '' : String(key);
  id += 1;
  return 'Symbol(' + description + ')_' + (id + postfix).toString(36);
};
var keys = shared('keys');
// Memoized mapping from a logical key to a per-page unique hidden key.
var sharedKey = function (key) {
  return keys[key] || (keys[key] = uid(key));
};
// Keys that must be skipped when enumerating own properties (see objectKeysInternal).
var hiddenKeys = {};
var WeakMap$1 = global_1.WeakMap;
var set, get, has$1;
// Returns the internal-state record for `it`, creating an empty one on demand.
var enforce = function (it) {
  return has$1(it) ? get(it) : set(it, {});
};
// Builds an accessor that also validates the record's `type` tag, so state
// created for one feature cannot be misused by another.
var getterFor = function (TYPE) {
  return function (it) {
    var state;
    if (!isObject(it) || (state = get(it)).type !== TYPE) {
      throw TypeError('Incompatible receiver, ' + TYPE + ' required');
    } return state;
  };
};
// Preferred backend: a native WeakMap stores state without touching the object.
if (nativeWeakMap) {
  var store$1 = new WeakMap$1();
  var wmget = store$1.get;
  var wmhas = store$1.has;
  var wmset = store$1.set;
  set = function (it, metadata) {
    wmset.call(store$1, it, metadata);
    return metadata;
  };
  get = function (it) {
    return wmget.call(store$1, it) || {};
  };
  has$1 = function (it) {
    return wmhas.call(store$1, it);
  };
} else {
  // Fallback backend: stash the state under a hidden non-enumerable key.
  var STATE = sharedKey('state');
  hiddenKeys[STATE] = true;
  set = function (it, metadata) {
    createNonEnumerableProperty(it, STATE, metadata);
    return metadata;
  };
  get = function (it) {
    return has(it, STATE) ? it[STATE] : {};
  };
  has$1 = function (it) {
    return has(it, STATE);
  };
}
var internalState = {
  set: set,
  get: get,
  has: has$1,
  enforce: enforce,
  getterFor: getterFor
};
// redefine(O, key, value, options): install or replace a method, giving it a
// sensible `name` and recording its source so the patched Function#toString
// below reports the original implementation.
// NOTE(review): the next line ends with a leftover "<|fim▁hole|>" marker; the
// matching `enforceInternalState`/`TEMPLATE` declarations appear displaced
// after the "<|fim▁end|>" marker near the end of this bundle — this chunk is
// scrambled fill-in-the-middle content and is left byte-identical here.
var redefine = createCommonjsModule(function (module) {
  var getInternalState = internalState.get;<|fim▁hole|>
  (module.exports = function (O, key, value, options) {
    var unsafe = options ? !!options.unsafe : false;
    var simple = options ? !!options.enumerable : false;
    var noTargetGet = options ? !!options.noTargetGet : false;
    if (typeof value == 'function') {
      // Give anonymous functions the expected `name`, and remember their
      // source text for inspectSource/toString.
      if (typeof key == 'string' && !has(value, 'name')) createNonEnumerableProperty(value, 'name', key);
      enforceInternalState(value).source = TEMPLATE.join(typeof key == 'string' ? key : '');
    }
    if (O === global_1) {
      if (simple) O[key] = value;
      else setGlobal(key, value);
      return;
    } else if (!unsafe) {
      delete O[key];
    } else if (!noTargetGet && O[key]) {
      simple = true;
    }
    if (simple) O[key] = value;
    else createNonEnumerableProperty(O, key, value);
    // add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
  })(Function.prototype, 'toString', function toString() {
    return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
  });
});
var path = global_1;
// Coerces to a function or undefined, for optional built-in lookup.
var aFunction = function (variable) {
  return typeof variable == 'function' ? variable : undefined;
};
// Looks up a (possibly namespaced) built-in, preferring the core-js path object.
var getBuiltIn = function (namespace, method) {
  return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
    : path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
};
var ceil = Math.ceil;
var floor = Math.floor;
// `ToInteger` abstract operation: coerce to number, truncate toward zero;
// NaN maps to 0.
// https://tc39.github.io/ecma262/#sec-tointeger
var toInteger = function (argument) {
  var number = +argument;
  if (isNaN(number)) return 0;
  return number > 0 ? floor(number) : ceil(number);
};
var min = Math.min;
// `ToLength` abstract operation
// https://tc39.github.io/ecma262/#sec-tolength
// Clamps to [0, 2^53 - 1], the maximum safe array length.
var toLength = function (argument) {
  return argument > 0 ? min(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
};
var max = Math.max;
var min$1 = Math.min;
// Helper for a popular repeating case of the spec:
// Let integer be ? ToInteger(index).
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
var toAbsoluteIndex = function (index, length) {
  var integer = toInteger(index);
  return integer < 0 ? max(integer + length, 0) : min$1(integer, length);
};
// `Array.prototype.{ indexOf, includes }` methods implementation
// Shared implementation: IS_INCLUDES selects includes-semantics (SameValueZero,
// so NaN matches NaN) versus indexOf-semantics (strict equality, holes skipped).
var createMethod = function (IS_INCLUDES) {
  return function ($this, el, fromIndex) {
    var O = toIndexedObject($this);
    var length = toLength(O.length);
    var index = toAbsoluteIndex(fromIndex, length);
    var value;
    // Array#includes uses SameValueZero equality algorithm
    // eslint-disable-next-line no-self-compare
    if (IS_INCLUDES && el != el) while (length > index) {
      value = O[index++];
      // eslint-disable-next-line no-self-compare
      if (value != value) return true;
      // Array#indexOf ignores holes, Array#includes - not
    } else for (;length > index; index++) {
      if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
    } return !IS_INCLUDES && -1;
  };
};
var arrayIncludes = {
  // `Array.prototype.includes` method
  // https://tc39.github.io/ecma262/#sec-array.prototype.includes
  includes: createMethod(true),
  // `Array.prototype.indexOf` method
  // https://tc39.github.io/ecma262/#sec-array.prototype.indexof
  indexOf: createMethod(false)
};
var indexOf = arrayIncludes.indexOf;
// Own enumerable keys of `object`, minus hiddenKeys, plus any key from
// `names` actually present on the object (works around IE8's don't-enum bug,
// where e.g. an own `toString` is not enumerated).
var objectKeysInternal = function (object, names) {
  var O = toIndexedObject(object);
  var i = 0;
  var result = [];
  var key;
  for (key in O) !has(hiddenKeys, key) && has(O, key) && result.push(key);
  // Don't enum bug & hidden keys
  while (names.length > i) if (has(O, key = names[i++])) {
    ~indexOf(result, key) || result.push(key);
  }
  return result;
};
// IE8- don't enum bug keys
var enumBugKeys = [
  'constructor',
  'hasOwnProperty',
  'isPrototypeOf',
  'propertyIsEnumerable',
  'toLocaleString',
  'toString',
  'valueOf'
];
var hiddenKeys$1 = enumBugKeys.concat('length', 'prototype');
// `Object.getOwnPropertyNames` method
// https://tc39.github.io/ecma262/#sec-object.getownpropertynames
var f$3 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
  return objectKeysInternal(O, hiddenKeys$1);
};
var objectGetOwnPropertyNames = {
  f: f$3
};
// May be undefined in pre-symbol engines; callers must check before calling.
var f$4 = Object.getOwnPropertySymbols;
var objectGetOwnPropertySymbols = {
  f: f$4
};
// all object keys, includes non-enumerable and symbols
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
  var keys = objectGetOwnPropertyNames.f(anObject(it));
  var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
  return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
};
// Copies every own key of `source` that `target` lacks, with full descriptors.
var copyConstructorProperties = function (target, source) {
  var keys = ownKeys(source);
  var defineProperty = objectDefineProperty.f;
  var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];
    if (!has(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
  }
};
var replacement = /#|\.prototype\./;
// Decides whether a polyfill must be installed even when a native version
// exists: explicit entries in `data` win, otherwise the feature-detection
// callback (run through `fails`) or a plain boolean decides.
var isForced = function (feature, detection) {
  var value = data[normalize(feature)];
  return value == POLYFILL ? true
    : value == NATIVE ? false
    : typeof detection == 'function' ? fails(detection)
    : !!detection;
};
// Canonical feature id: "Array#concat" / "Array.prototype.concat" -> "array.concat".
var normalize = isForced.normalize = function (string) {
  return String(string).replace(replacement, '.').toLowerCase();
};
var data = isForced.data = {};
var NATIVE = isForced.NATIVE = 'N';
var POLYFILL = isForced.POLYFILL = 'P';
var isForced_1 = isForced;
var getOwnPropertyDescriptor$1 = objectGetOwnPropertyDescriptor.f;
/*
  options.target - name of the target object
  options.global - target is the global object
  options.stat - export as static methods of target
  options.proto - export as prototype methods of target
  options.real - real prototype method for the `pure` version
  options.forced - export even if the native feature is available
  options.bind - bind methods to the target, required for the `pure` version
  options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
  options.unsafe - use the simple assignment of property instead of delete + defineProperty
  options.sham - add a flag to not completely full polyfills
  options.enumerable - export as enumerable property
  options.noTargetGet - prevent calling a getter on target
*/
// Main entry installer: copies each member of `source` onto the resolved
// target, unless a good-enough native member already exists.
var _export = function (options, source) {
  var TARGET = options.target;
  var GLOBAL = options.global;
  var STATIC = options.stat;
  var FORCED, target, key, targetProperty, sourceProperty, descriptor;
  // Resolve the object the members will be attached to.
  if (GLOBAL) {
    target = global_1;
  } else if (STATIC) {
    target = global_1[TARGET] || setGlobal(TARGET, {});
  } else {
    target = (global_1[TARGET] || {}).prototype;
  }
  if (target) for (key in source) {
    sourceProperty = source[key];
    if (options.noTargetGet) {
      descriptor = getOwnPropertyDescriptor$1(target, key);
      targetProperty = descriptor && descriptor.value;
    } else targetProperty = target[key];
    FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
    // contained in target
    if (!FORCED && targetProperty !== undefined) {
      if (typeof sourceProperty === typeof targetProperty) continue;
      // Keep static members of the native implementation on the replacement.
      copyConstructorProperties(sourceProperty, targetProperty);
    }
    // add a flag to not completely full polyfills
    if (options.sham || (targetProperty && targetProperty.sham)) {
      createNonEnumerableProperty(sourceProperty, 'sham', true);
    }
    // extend global
    redefine(target, key, sourceProperty, options);
  }
};
// `IsArray` abstract operation
// https://tc39.github.io/ecma262/#sec-isarray
var isArray = Array.isArray || function isArray(arg) {
  return classofRaw(arg) == 'Array';
};
// `ToObject` abstract operation
// https://tc39.github.io/ecma262/#sec-toobject
var toObject = function (argument) {
  return Object(requireObjectCoercible(argument));
};
// Assigns `value` under `key`; when the key already exists, goes through
// defineProperty so existing attribute semantics are respected.
var createProperty = function (object, key, value) {
  var propertyKey = toPrimitive(key);
  if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
  else object[propertyKey] = value;
};
// Detects whether native Symbol works well enough to rely on.
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
  // Chrome 38 Symbol has incorrect toString conversion
  // eslint-disable-next-line no-undef
  return !String(Symbol());
});
// Symbols can be used directly as uids only when they are real primitive
// symbols (not the object-based "sham" polyfill).
var useSymbolAsUid = nativeSymbol
  // eslint-disable-next-line no-undef
  && !Symbol.sham
  // eslint-disable-next-line no-undef
  && typeof Symbol() == 'symbol';
var WellKnownSymbolsStore = shared('wks');
var Symbol$1 = global_1.Symbol;
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : uid;
// Returns the well-known symbol for `name`, preferring the native one and
// caching the result in the cross-copy shared store.
var wellKnownSymbol = function (name) {
  if (!has(WellKnownSymbolsStore, name)) {
    if (nativeSymbol && has(Symbol$1, name)) WellKnownSymbolsStore[name] = Symbol$1[name];
    else WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
  } return WellKnownSymbolsStore[name];
};
var SPECIES = wellKnownSymbol('species');
// `ArraySpeciesCreate` abstract operation
// https://tc39.github.io/ecma262/#sec-arrayspeciescreate
// Creates a new array of the constructor the original array's @@species
// requests, falling back to Array for cross-realm or non-array inputs.
var arraySpeciesCreate = function (originalArray, length) {
  var C;
  if (isArray(originalArray)) {
    C = originalArray.constructor;
    // cross-realm fallback
    if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
    else if (isObject(C)) {
      C = C[SPECIES];
      if (C === null) C = undefined;
    }
  } return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
};
var userAgent = getBuiltIn('navigator', 'userAgent') || '';
var process = global_1.process;
var versions = process && process.versions;
var v8 = versions && versions.v8;
var match, version;
// Work out the V8 major+minor version (e.g. "6.6" -> 66), either from Node's
// process.versions or by sniffing the user agent; old Edge carries a Chrome
// token but is only trusted from version 74 on.
if (v8) {
  match = v8.split('.');
  version = match[0] + match[1];
} else if (userAgent) {
  match = userAgent.match(/Edge\/(\d+)/);
  if (!match || match[1] >= 74) {
    match = userAgent.match(/Chrome\/(\d+)/);
    if (match) version = match[1];
  }
}
var v8Version = version && +version;
var SPECIES$1 = wellKnownSymbol('species');
// Checks whether the named array method honors @@species on the result.
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
  // We can't use this feature detection in V8 since it causes
  // deoptimization and serious performance degradation
  // https://github.com/zloirock/core-js/issues/677
  return v8Version >= 51 || !fails(function () {
    var array = [];
    var constructor = array.constructor = {};
    constructor[SPECIES$1] = function () {
      return { foo: 1 };
    };
    return array[METHOD_NAME](Boolean).foo !== 1;
  });
};
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/679
var IS_CONCAT_SPREADABLE_SUPPORT = v8Version >= 51 || !fails(function () {
  var array = [];
  array[IS_CONCAT_SPREADABLE] = false;
  return array.concat()[0] !== array;
});
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
// An object is spread by concat when its @@isConcatSpreadable says so,
// defaulting to "is it an array".
var isConcatSpreadable = function (O) {
  if (!isObject(O)) return false;
  var spreadable = O[IS_CONCAT_SPREADABLE];
  return spreadable !== undefined ? !!spreadable : isArray(O);
};
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
// `Array.prototype.concat` method
// https://tc39.github.io/ecma262/#sec-array.prototype.concat
// with adding support of @@isConcatSpreadable and @@species
_export({ target: 'Array', proto: true, forced: FORCED }, {
  concat: function concat(arg) { // eslint-disable-line no-unused-vars
    var O = toObject(this);
    var A = arraySpeciesCreate(O, 0);
    var n = 0;
    var i, k, length, len, E;
    // Index -1 stands for the receiver itself, then each argument in turn.
    for (i = -1, length = arguments.length; i < length; i++) {
      E = i === -1 ? O : arguments[i];
      if (isConcatSpreadable(E)) {
        len = toLength(E.length);
        if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
        // `k in E` skips holes, matching the spec's CreateDataProperty loop.
        for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
      } else {
        if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
        createProperty(A, n++, E);
      }
    }
    A.length = n;
    return A;
  }
});
/**
 * Bootstrap Table English translation
 * Author: Zhixin Wen<[email protected]>
 */
// Locale table: each formatter returns the UI string for one table widget
// element; registered under 'en-US' and then merged into the defaults below.
$.fn.bootstrapTable.locales['en-US'] = {
  formatLoadingMessage: function formatLoadingMessage() {
    return 'Loading, please wait';
  },
  formatRecordsPerPage: function formatRecordsPerPage(pageNumber) {
    return "".concat(pageNumber, " rows per page");
  },
  formatShowingRows: function formatShowingRows(pageFrom, pageTo, totalRows, totalNotFiltered) {
    // Mention the unfiltered total only when a filter actually reduced the rows.
    if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
      return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows (filtered from ").concat(totalNotFiltered, " total rows)");
    }
    return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows");
  },
  // formatSR* strings are screen-reader pagination labels.
  formatSRPaginationPreText: function formatSRPaginationPreText() {
    return 'previous page';
  },
  formatSRPaginationPageText: function formatSRPaginationPageText(page) {
    return "to page ".concat(page);
  },
  formatSRPaginationNextText: function formatSRPaginationNextText() {
    return 'next page';
  },
  formatDetailPagination: function formatDetailPagination(totalRows) {
    return "Showing ".concat(totalRows, " rows");
  },
  formatClearSearch: function formatClearSearch() {
    return 'Clear Search';
  },
  formatSearch: function formatSearch() {
    return 'Search';
  },
  formatNoMatches: function formatNoMatches() {
    return 'No matching records found';
  },
  formatPaginationSwitch: function formatPaginationSwitch() {
    return 'Hide/Show pagination';
  },
  formatPaginationSwitchDown: function formatPaginationSwitchDown() {
    return 'Show pagination';
  },
  formatPaginationSwitchUp: function formatPaginationSwitchUp() {
    return 'Hide pagination';
  },
  formatRefresh: function formatRefresh() {
    return 'Refresh';
  },
  formatToggle: function formatToggle() {
    return 'Toggle';
  },
  formatToggleOn: function formatToggleOn() {
    return 'Show card view';
  },
  formatToggleOff: function formatToggleOff() {
    return 'Hide card view';
  },
  formatColumns: function formatColumns() {
    return 'Columns';
  },
  formatColumnsToggleAll: function formatColumnsToggleAll() {
    return 'Toggle all';
  },
  formatFullscreen: function formatFullscreen() {
    return 'Fullscreen';
  },
  formatAllRows: function formatAllRows() {
    return 'All';
  },
  formatAutoRefresh: function formatAutoRefresh() {
    return 'Auto Refresh';
  },
  formatExport: function formatExport() {
    return 'Export data';
  },
  formatJumpTo: function formatJumpTo() {
    return 'GO';
  },
  formatAdvancedSearch: function formatAdvancedSearch() {
    return 'Advanced search';
  },
  formatAdvancedCloseButton: function formatAdvancedCloseButton() {
    return 'Close';
  }
};
// Make the en-US strings the active defaults.
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['en-US']);
})));<|fim▁hole|>

// NOTE(review): the two declarations below are displaced fill-in-the-middle
// content; in the original bundle they belong inside the `redefine` module
// defined earlier in this file.
var enforceInternalState = internalState.enforce;
var TEMPLATE = String(String).split('String');

<|file_name|>pyunit_xgboost_gbm_monotone.py<|end_file_name|><|fim▁begin|>from h2o.estimators.xgboost import *
from h2o.estimators.gbm import *
from tests import pyunit_utils
def xgboost_vs_gbm_monotone_test():
    # Train XGBoost and H2O GBM on the prostate data with the same monotone
    # constraint on AGE, then check that AGE's variable importance is ordered
    # sensibly relative to VOL and RACE and agrees between the two algorithms.
    assert H2OXGBoostEstimator.available() is True
    # Force AGE to have a monotonically increasing relationship with the response.
    monotone_constraints = {
        "AGE": 1
    }
    xgboost_params = {
        "tree_method": "exact",
        "seed": 123,
        "backend": "cpu", # CPU Backend is forced for the results to be comparable
        "monotone_constraints": monotone_constraints
    }
    gbm_params = {
        "seed": 42,
        "monotone_constraints": monotone_constraints
    }
    prostate_hex = h2o.import_file(pyunit_utils.locate('smalldata/prostate/prostate.csv'))
    # Binary classification target.
    prostate_hex["CAPSULE"] = prostate_hex["CAPSULE"].asfactor()
    xgboost_model = H2OXGBoostEstimator(**xgboost_params)
    xgboost_model.train(y="CAPSULE", ignored_columns=["ID"], training_frame=prostate_hex)
    gbm_model = H2OGradientBoostingEstimator(**gbm_params)
    gbm_model.train(y="CAPSULE", ignored_columns=["ID"], training_frame=prostate_hex)
    # x[0] is the variable name; x[3] is presumably the percentage column of
    # the varimp table (matches the variable names) — confirm against h2o docs.
    xgb_varimp_percentage = dict(map(lambda x: (x[0], x[3]), xgboost_model.varimp(use_pandas=False)))
    gbm_varimp_percentage = dict(map(lambda x: (x[0], x[3]), gbm_model.varimp(use_pandas=False)))
    # We expect the variable importances of AGE to be similar
    assert xgb_varimp_percentage["VOL"] > xgb_varimp_percentage["AGE"]
    assert xgb_varimp_percentage["AGE"] > xgb_varimp_percentage["RACE"]
    print("XGBoost varimp of AGE = %s" % xgb_varimp_percentage["AGE"])
    print("GBM varimp of AGE = %s" % gbm_varimp_percentage["AGE"])<|fim▁hole|>
if __name__ == "__main__":
    pyunit_utils.standalone_test(xgboost_vs_gbm_monotone_test)
else:
    xgboost_vs_gbm_monotone_test()<|fim▁end|>

# NOTE(review): displaced fill-in-the-middle content; this assertion belongs
# inside xgboost_vs_gbm_monotone_test, after the two print statements above.
assert abs(xgb_varimp_percentage["AGE"] - gbm_varimp_percentage["AGE"]) < 0.02

<|file_name|>common.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// Contains all the wrappers from the common package.
package gexp
import (
"encoding/hex"
"errors"
"fmt"
"strings"
"github.com/expanse-org/go-expanse/common"
"github.com/expanse-org/go-expanse/common/hexutil"
)
// Hash represents the 32 byte Keccak256 hash of arbitrary data.
type Hash struct {
	hash common.Hash
}

// NewHashFromBytes converts a slice of bytes to a hash value.
func NewHashFromBytes(binary []byte) (hash *Hash, _ error) {
	h := new(Hash)
	// Copy defensively so later mutation of the caller's slice cannot
	// change the hash.
	if err := h.SetBytes(common.CopyBytes(binary)); err != nil {
		return nil, err
	}
	return h, nil
}

// NewHashFromHex converts a hex string to a hash value.
func NewHashFromHex(hex string) (hash *Hash, _ error) {
	h := new(Hash)
	if err := h.SetHex(hex); err != nil {
		return nil, err
	}
	return h, nil
}

// SetBytes sets the specified slice of bytes as the hash value.
// The slice must be exactly common.HashLength bytes long.
func (h *Hash) SetBytes(hash []byte) error {
	if length := len(hash); length != common.HashLength {
		return fmt.Errorf("invalid hash length: %v != %v", length, common.HashLength)
	}
	copy(h.hash[:], hash)
	return nil
}

// GetBytes retrieves the byte representation of the hash.
func (h *Hash) GetBytes() []byte {
	return h.hash[:]
}

// SetHex sets the specified hex string as the hash value.
// The input is case-insensitive and may carry an optional "0x" prefix.
func (h *Hash) SetHex(hash string) error {
	hash = strings.ToLower(hash)
	if len(hash) >= 2 && hash[:2] == "0x" {
		hash = hash[2:]
	}
	if length := len(hash); length != 2*common.HashLength {
		return fmt.Errorf("invalid hash hex length: %v != %v", length, 2*common.HashLength)
	}
	bin, err := hex.DecodeString(hash)
	if err != nil {
		return err
	}
	copy(h.hash[:], bin)
	return nil
}

// GetHex retrieves the hex string representation of the hash.
func (h *Hash) GetHex() string {
	return h.hash.Hex()
}

// Hashes represents a slice of hashes.
type Hashes struct{ hashes []common.Hash }

// NewHashes creates a slice of uninitialized Hashes.
func NewHashes(size int) *Hashes {
	return &Hashes{
		hashes: make([]common.Hash, size),
	}
}

// NewHashesEmpty creates an empty slice of Hashes values.
func NewHashesEmpty() *Hashes {
	return NewHashes(0)
}

// Size returns the number of hashes in the slice.
func (h *Hashes) Size() int {
	return len(h.hashes)
}

// Get returns the hash at the given index from the slice.
func (h *Hashes) Get(index int) (hash *Hash, _ error) {
	if index < 0 || index >= len(h.hashes) {
		return nil, errors.New("index out of bounds")
	}
	return &Hash{h.hashes[index]}, nil
}

// Set sets the Hash at the given index in the slice.
func (h *Hashes) Set(index int, hash *Hash) error {
	if index < 0 || index >= len(h.hashes) {
		return errors.New("index out of bounds")
	}
	h.hashes[index] = hash.hash
	return nil
}

// Append adds a new Hash element to the end of the slice.
func (h *Hashes) Append(hash *Hash) {
	h.hashes = append(h.hashes, hash.hash)
}

// Address represents the 20 byte address of an Ethereum account.
type Address struct {
	address common.Address
}

// NewAddressFromBytes converts a slice of bytes to an address value.
func NewAddressFromBytes(binary []byte) (address *Address, _ error) {
	a := new(Address)
	if err := a.SetBytes(common.CopyBytes(binary)); err != nil {
		return nil, err
	}
	return a, nil
}

// NewAddressFromHex converts a hex string to an address value.
func NewAddressFromHex(hex string) (address *Address, _ error) {
	a := new(Address)
	if err := a.SetHex(hex); err != nil {
		return nil, err
	}
	return a, nil
}

// SetBytes sets the specified slice of bytes as the address value.
// The slice must be exactly common.AddressLength bytes long.
func (a *Address) SetBytes(address []byte) error {
	if length := len(address); length != common.AddressLength {
		return fmt.Errorf("invalid address length: %v != %v", length, common.AddressLength)
	}
	copy(a.address[:], address)
	return nil
}

// GetBytes retrieves the byte representation of the address.
func (a *Address) GetBytes() []byte {
	return a.address[:]
}

// SetHex sets the specified hex string as the address value.
// The input is case-insensitive and may carry an optional "0x" prefix.
func (a *Address) SetHex(address string) error {
	address = strings.ToLower(address)
	if len(address) >= 2 && address[:2] == "0x" {
		address = address[2:]
	}
	if length := len(address); length != 2*common.AddressLength {
		return fmt.Errorf("invalid address hex length: %v != %v", length, 2*common.AddressLength)
	}
	bin, err := hex.DecodeString(address)
	if err != nil {
		return err
	}
	copy(a.address[:], bin)
	return nil
}

// GetHex retrieves the hex string representation of the address.
func (a *Address) GetHex() string {
	return a.address.Hex()
}

// Addresses represents a slice of addresses.
type Addresses struct{ addresses []common.Address }

// NewAddresses creates a slice of uninitialized addresses.
func NewAddresses(size int) *Addresses {
	return &Addresses{
		addresses: make([]common.Address, size),
	}
}

// NewAddressesEmpty creates an empty slice of Addresses values.
func NewAddressesEmpty() *Addresses {
	return NewAddresses(0)
}

// Size returns the number of addresses in the slice.
func (a *Addresses) Size() int {
	return len(a.addresses)
}

// Get returns the address at the given index from the slice.
func (a *Addresses) Get(index int) (address *Address, _ error) {
	if index < 0 || index >= len(a.addresses) {
		return nil, errors.New("index out of bounds")
	}
	return &Address{a.addresses[index]}, nil
}
// Set sets the address at the given index in the slice.
// NOTE(review): this chunk is scrambled fill-in-the-middle content — the body
// of Set is split around the "<|fim▁hole|>" marker and its bounds-check branch
// appears displaced after the "<|fim▁end|>" marker below. Left byte-identical.
func (a *Addresses) Set(index int, address *Address) error {
	if index < 0 || index >= len(a.addresses) {<|fim▁hole|>
}

// Append adds a new address element to the end of the slice.
func (a *Addresses) Append(address *Address) {
	a.addresses = append(a.addresses, address.address)
}

// EncodeToHex encodes b as a hex string with 0x prefix.
func EncodeToHex(b []byte) string {
	return hexutil.Encode(b)
}

// DecodeFromHex decodes a hex string with 0x prefix.
func DecodeFromHex(s string) ([]byte, error) {
	return hexutil.Decode(s)
}<|fim▁end|>

// NOTE(review): displaced fill-in-the-middle content; these lines complete the
// body of Addresses.Set above.
	return errors.New("index out of bounds")
}
a.addresses[index] = address.address
return nil

<|file_name|>bcompos1.cpp<|end_file_name|><|fim▁begin|>// STLport regression testsuite component.
// To compile as a separate example, please #define MAIN.
#include <algorithm>
#include <iostream>
#include "unary.h"
#ifdef MAIN
#define bcompos1_test main
#endif
#if !defined (STLPORT) || defined(__STL_USE_NAMESPACES)
using namespace std;
#endif
// Exercises binary_compose: builds a predicate "odd AND positive" and finds
// the first matching element of a small test array.
int bcompos1_test(int, char**)
{
  cout<<"Results of bcompos1_test:"<<endl;
  int array [6] = { -2, -1, 0, 1, 2, 3 };
  binary_compose<logical_and<bool>, odd, positive>
  <|fim▁hole|>
  // NOTE(review): the initializer of `b` sits after the "<|fim▁end|>" marker
  // below — this chunk is scrambled fill-in-the-middle content.
  int* p = find_if((int*)array, (int*)array + 6, b);
  if(p != array + 6)
    cout << *p << " is odd and positive" << endl;
  return 0;
}<|fim▁end|>

// NOTE(review): displaced fill-in-the-middle content; these lines complete the
// declaration of `b` inside bcompos1_test above.
b = binary_compose<logical_and<bool>, odd, positive>
(logical_and<bool>(), odd(), positive());

<|file_name|>query.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- encoding: utf-8 -*-<|fim▁hole|>
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' query.py '''
class QueryHandler(object):
    '''Abstract interface for querying topology metrics.

    Concrete subclasses implement the fetch_* methods against a specific
    metrics backend; the bodies here are intentionally no-op stubs.
    '''
    def fetch(self, cluster, metric, topology, component, instance, timerange, envirn=None):
        '''Fetch a metric time series for a topology component instance.

        :param cluster: cluster the topology runs in
        :param metric: name of the metric to query
        :param topology: topology name
        :param component: component name within the topology
        :param instance: instance identifier within the component
        :param timerange: time window to query (presumably a (start, end)
            pair — confirm against callers)
        :param envirn: optional environment name
        :return: backend-specific; not implemented in the base class
        '''
        pass
    def fetch_max(self, cluster, metric, topology, component, instance, timerange, envirn=None):
        '''Fetch the maximum value of a metric over the given time window.

        :param cluster: cluster the topology runs in
        :param metric: name of the metric to query
        :param topology: topology name
        :param component: component name within the topology
        :param instance: instance identifier within the component
        :param timerange: time window to query
        :param envirn: optional environment name
        :return: backend-specific; not implemented in the base class
        '''
        pass
    def fetch_backpressure(self, cluster, metric, topology, component, instance, \
                           timerange, is_max, environ=None):
        '''Fetch backpressure metrics for a topology component instance.

        :param cluster: cluster the topology runs in
        :param metric: name of the metric to query
        :param topology: topology name
        :param component: component name within the topology
        :param instance: instance identifier within the component
        :param timerange: time window to query
        :param is_max: when truthy, fetch the maximum instead of the series
        :param environ: optional environment name
        :return: backend-specific; not implemented in the base class
        '''
        pass
| |
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { BrowserModule } from '@angular/platform-browser';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { NgModule } from '@angular/core';
import { MatToolbarModule, MatButtonModule, MatListModule } from '@angular/material';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { ProfileComponent } from './profile/profile.component';
import { MsalModule, MsalInterceptor } from '@azure/msal-angular';
import { HTTP_INTERCEPTORS, HttpClientModule } from '@angular/common/http';
// Resources that require an access token, mapped to the scopes requested
// for each; consumed by MsalInterceptor below.
export const protectedResourceMap: [string, string[]][] = [
  ['https://graph.microsoft.com/v1.0/me', ['user.read']]
];
// IE 11 / legacy Edge detection: those browsers need redirect-based auth
// (no popup) and cookie-backed auth state.
const isIE = window.navigator.userAgent.indexOf("MSIE ") > -1 || window.navigator.userAgent.indexOf("Trident/") > -1;
// NOTE(review): the MsalModule.forRoot options below are scrambled
// fill-in-the-middle content — a "<|fim▁hole|>" marker splits the second
// options object and the remainder appears after the "<|fim▁end|>" marker.
// Code tokens are left byte-identical.
@NgModule({
  declarations: [
    AppComponent,
    ProfileComponent
  ],
  imports: [
    BrowserModule,
    BrowserAnimationsModule,
    HttpClientModule,
    MatToolbarModule,
    MatButtonModule,
    MatListModule,
    AppRoutingModule,
    // MSAL (Azure AD) sign-in configuration.
    MsalModule.forRoot({
      auth: {
        clientId: '6226576d-37e9-49eb-b201-ec1eeb0029b6',
        authority: "https://login.microsoftonline.com/common/",
        validateAuthority: true,
        redirectUri: "http://localhost:4200/",
        postLogoutRedirectUri: "http://localhost:4200/",
        navigateToLoginRequestUrl: true,
      },
      cache: {
        cacheLocation: "localStorage",
        storeAuthStateInCookie: isIE, // set to true for IE 11
      },
    },
    {
      popUp: !isIE,
      consentScopes: [
        "user.read",
        "openid",
        "profile",
        "api://a88bb933-319c-41b5-9f04-eff36d985612/access_as_user"<|fim▁hole|>
      extraQueryParameters: {}
    }
    )
  ],
  providers: [
    // Attaches bearer tokens to outgoing requests for protected resources.
    {
      provide: HTTP_INTERCEPTORS,
      useClass: MsalInterceptor,
      multi: true
    }
  ],
  bootstrap: [AppComponent]
})
export class AppModule { }<|fim▁end|>

// NOTE(review): displaced fill-in-the-middle content; these lines belong in
// the MSAL config object above (closing consentScopes, then the
// unprotectedResources and protectedResourceMap entries).
],
unprotectedResources: ["https://www.microsoft.com/en-us/"],
protectedResourceMap,

<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
import datetime
import os
import os.path
import subprocess
from collections import defaultdict
from django.conf import settings as dsettings
from django.core import exceptions
from django.core.cache import cache
from django.db import connection
from django.db import models
from django.db import transaction
from django.db.models.query import QuerySet
from django.db.models.signals import post_save
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext as _
from django_urls import UrlMixin
import tagging
from tagging.fields import TagField
import conference
import conference.gmap
from conference import settings
from conference import signals
from taggit.models import TagBase, GenericTaggedItemBase, ItemBase
from taggit.managers import TaggableManager
import inspect
import traceback
import logging
log = logging.getLogger('conference.tags')
# ConferenceTag and ConferenceTaggedItem create a dedicated "namespace" for
# conference-related tags, so we do not have to worry about other uses of
# taggit made by other apps.
class ConferenceTagManager(models.Manager):
    # Manager that forwards attribute access to a custom QuerySet so the
    # annotate/order helpers below are callable directly on `.objects`.
    def get_query_set(self):
        return self._QuerySet(self.model)
    def __getattr__(self, name):
        # Delegate unknown attributes (e.g. annotate_with_usage) to a queryset.
        return getattr(self.all(), name)
    class _QuerySet(QuerySet):
        def annotate_with_usage(self):
            # `usage` = number of ConferenceTaggedItem rows referencing the tag.
            return self\
                .annotate(usage=models.Count('conference_conferencetaggeditem_items'))
        def order_by_usage(self, asc=False):
            # Most-used tags first by default; ascending when asc is True.
            key = 'usage' if asc else '-usage'
            return self.annotate_with_usage().order_by(key)
class ConferenceTag(TagBase):
    # Conference-scoped tag; `category` allows grouping tags (free-form).
    objects = ConferenceTagManager()
    category = models.CharField(max_length=50, default='', blank=True)
    def save(self, **kw):
        # Deduplicate case-insensitively: a new tag whose name matches an
        # existing one except for letter case is silently mapped onto it
        # (no new row is written).
        if not self.pk:
            frame = inspect.currentframe()
            stack_trace = traceback.format_stack(frame)
            log.debug(u'saving new tag {}'.format(self.name))
            log.debug(u''.join(stack_trace[:-1]))
            # before saving this tag, make sure no other tag exists that
            # differs only in upper/lower case
            try:
                c = ConferenceTag.objects.get(name__iexact=self.name)
            except ConferenceTag.DoesNotExist:
                pass
            else:
                # reuse the existing row and skip the INSERT entirely
                self.pk = c.pk
                return
        return super(ConferenceTag, self).save(**kw)
class ConferenceTaggedItem(GenericTaggedItemBase, ItemBase):
    # Through-model binding ConferenceTag to arbitrary objects via taggit's
    # generic relation machinery.
    tag = models.ForeignKey(ConferenceTag, related_name="%(app_label)s_%(class)s_items")
    class Meta:
        verbose_name = _("Tagged Item")
        verbose_name_plural = _("Tagged Items")
class ConferenceManager(models.Manager):
    def current(self):
        """Return the Conference configured in settings.CONFERENCE, cached."""
        key = 'CONFERENCE_CURRENT'
        data = cache.get(key)
        if data is None:
            data = self.get(code=settings.CONFERENCE)
            # cache long enough (one week) that the query is no longer a
            # concern; clear_cache below invalidates on save
            cache.set(key, data, 60*60*24*7)
        return data
    @classmethod
    def clear_cache(cls, sender, **kwargs):
        # post_save receiver (connected below): drop the cached conference.
        cache.delete('CONFERENCE_CURRENT')
class Conference(models.Model):
    """A conference edition, keyed by a short alphanumeric code.

    The conference, call-for-papers and voting date windows are all
    optional (they may not be known when the row is created).
    """
    code = models.CharField(max_length=10, primary_key=True)
    name = models.CharField(max_length=100)
    cfp_start = models.DateField(null=True, blank=True)
    cfp_end = models.DateField(null=True, blank=True)
    conference_start = models.DateField(null=True, blank=True)
    conference_end = models.DateField(null=True, blank=True)
    voting_start = models.DateField(null=True, blank=True)
    voting_end = models.DateField(null=True, blank=True)
    objects = ConferenceManager()
    def __unicode__(self):
        return self.code
    def days(self):
        """Return the list of dates the conference spans (inclusive).

        Returns an empty list when either boundary date is unset.
        """
        output = []
        if self.conference_start and self.conference_end:
            d = self.conference_start
            step = datetime.timedelta(days=1)
            while d <= self.conference_end:
                output.append(d)
                d += step
        return output
    def clean(self):
        """Validate that each start date precedes its matching end date."""
        if self.conference_start and self.conference_end:
            if self.conference_start > self.conference_end:
                raise exceptions.ValidationError('Conference end must be > of conference start')
        if self.cfp_start and self.cfp_end:
            if self.cfp_start > self.cfp_end:
                raise exceptions.ValidationError('Cfp end must be > of cfp start')
        if self.voting_start and self.voting_end:
            if self.voting_start > self.voting_end:
                raise exceptions.ValidationError('Voting end must be > of voting start')
    def cfp(self):
        """True while the call for papers is open (False if dates unset)."""
        today = datetime.date.today()
        try:
            return self.cfp_start <= today <= self.cfp_end
        except TypeError:
            # dates not set
            return False
    def voting(self):
        """True while talk voting is open (False if dates unset)."""
        today = datetime.date.today()
        try:
            return self.voting_start <= today <= self.voting_end
        except TypeError:
            # dates not set
            return False
    def conference(self):
        """True while the conference is running (False if dates unset).

        Fix: a stray `raise` in the TypeError handler made the intended
        `return False` unreachable, inconsistently with cfp()/voting().
        """
        today = datetime.date.today()
        try:
            return self.conference_start <= today <= self.conference_end
        except TypeError:
            # dates not set
            return False
post_save.connect(ConferenceManager.clear_cache, sender=Conference)
class DeadlineManager(models.Manager):
    def valid_news(self):
        # Deadlines from today onwards are still worth showing as news.
        today = datetime.date.today()
        return self.all().filter(date__gte = today)
class Deadline(models.Model):
    """
    A dated deadline for the conference, shown as news until it expires.
    """
    date = models.DateField()
    objects = DeadlineManager()
    def __unicode__(self):
        return "deadline: %s" % (self.date, )
    class Meta:
        ordering = ['date']
    def isExpired(self):
        # A deadline expires the day after its date (strict >).
        today = datetime.date.today()
        return today > self.date
    def content(self, lang, fallback=True):
        """
        Return the DeadlineContent in the requested language.

        If no DeadlineContent exists for `lang` and `fallback` is False,
        DeadlineContent.DoesNotExist is raised; with fallback True the
        first available content is returned instead.  Contents with an
        empty body are ignored; if none remain, DoesNotExist is raised
        regardless of `fallback`.
        """
        contents = dict((c.language, c) for c in self.deadlinecontent_set.exclude(body=''))
        if not contents:
            raise DeadlineContent.DoesNotExist()
        try:
            return contents[lang]
        except KeyError:
            if not fallback:
                raise DeadlineContent.DoesNotExist()
            return contents.values()[0]
class DeadlineContent(models.Model):
    """
    Localized text (headline + body) of a Deadline, one row per language.
    """
    deadline = models.ForeignKey(Deadline)
    language = models.CharField(max_length=3)
    headline = models.CharField(max_length=200)
    body = models.TextField()
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class MultilingualContentManager(models.Manager):
    def setContent(self, object, content, language, body):
        """Store `body` as the `content` section of `object` for `language`.

        When `language` is None the project default (LANGUAGE_CODE minus
        the country suffix) is used; an existing row is updated in place.
        """
        if language is None:
            language = dsettings.LANGUAGE_CODE.split('-', 1)[0]
        object_type = ContentType.objects.get_for_model(object)
        try:
            mc = self.get(content_type=object_type, object_id=object.pk, content=content, language=language)
        except MultilingualContent.DoesNotExist:
            mc = MultilingualContent(content_object=object)
        mc.content = content
        mc.language = language
        mc.body = body
        mc.save()
    def getContent(self, object, content, language):
        """Return the `content` section of `object` in `language`.

        Rows with an empty body are ignored.  On a miss, falls back to
        the full LANGUAGE_CODE, then to any available translation;
        returns None only when nothing non-empty is stored at all.
        """
        if language is None:
            language = dsettings.LANGUAGE_CODE.split('-', 1)[0]
        object_type = ContentType.objects.get_for_model(object)
        records = dict(
            (x.language, x)
            for x in self.exclude(body='').filter(content_type=object_type, object_id=object.pk, content=content)
        )
        try:
            return records[language]
        except KeyError:
            if not records:
                return None
            else:
                return records.get(dsettings.LANGUAGE_CODE, records.values()[0])
class MultilingualContent(models.Model):
    # Generic, per-language text attached to any model instance.
    # `content` names the section (e.g. 'bios', 'abstracts'),
    # `language` the translation it holds.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField(db_index=True)
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    language = models.CharField(max_length = 3)
    content = models.CharField(max_length = 20)
    body = models.TextField()
    objects = MultilingualContentManager()
def _fs_upload_to(subdir, attr=None, package='conference'):
if attr is None:
attr = lambda i: i.slug
def wrapper(instance, filename):
fpath = os.path.join(package, subdir, '%s%s' % (attr(instance), os.path.splitext(filename)[1].lower()))
ipath = os.path.join(dsettings.MEDIA_ROOT, fpath)
if os.path.exists(ipath):
os.unlink(ipath)
return fpath
return wrapper
def postSaveResizeImageHandler(sender, **kwargs):
    """post_save handler: run the external resize_image.py utility over
    settings.STUFF_DIR so freshly uploaded images are resized.

    communicate() drains stdout (merged with stderr) so the child cannot
    block on a full pipe, and waits for it to finish.
    """
    tool = os.path.join(os.path.dirname(conference.__file__), 'utils', 'resize_image.py')
    # Fix: the original opened '/dev/null' without ever closing it (fd
    # leak); use os.devnull inside a `with` block instead.
    with open(os.devnull) as null:
        p = subprocess.Popen(
            [tool, settings.STUFF_DIR],
            close_fds=True, stdin=null, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        p.communicate()
class AttendeeProfileManager(models.Manager):
    def findSlugForUser(self, user):
        """Derive a unique profile slug from the user's full name.

        Existing slugs "<slug>", "<slug>-1", "<slug>-2", ... are scanned
        and the next free numeric suffix is used.
        """
        name = '%s %s' % (user.first_name, user.last_name)
        slug = slugify(name)
        rows = self.filter(models.Q(slug=slug) | models.Q(slug__startswith=slug + '-'))\
            .values_list('slug', flat=True)
        last = None
        for r in rows:
            try:
                counter = int(r.rsplit('-', 1)[1])
            except (ValueError, IndexError):
                if last is None:
                    # this check guards against slugs like "str-str-str"
                    last = 0
                continue
            if counter > last:
                last = counter
        if last is not None:
            slug = '%s-%d' % (slug, last+1)
        elif not slug:
            # slug can be an empty string only if the user has empty first
            # and last names and is the first one with this anomaly.
            # Setting the slug to "-1" also fixes things for later users,
            # who will find a predecessor and continue the sequence.
            slug = '-1'
        return slug
    def randomUUID(self, length=6):
        # Short random identifier used as the profile's public uuid.
        import string
        import random
        return ''.join(random.sample(string.letters + string.digits, length))
    # TODO: use savepoints.  Remember that, at least up to django 1.4, the
    # sqlite backend does not support savepoints even though sqlite has
    # for a long time, so cursor.execute() would be needed; if we ever
    # move to postgres remember to roll back the savepoint in the except
    # (or enable autocommit)
    def getOrCreateForUser(self, user):
        """
        Return or create the profile associated with the user.

        On creation, retries on IntegrityError, regenerating whichever of
        slug/uuid collided until the save succeeds.
        """
        try:
            p = AttendeeProfile.objects.get(user=user)
        except AttendeeProfile.DoesNotExist:
            p = AttendeeProfile(user=user)
        else:
            return p
        from django.db import IntegrityError
        slug = None
        uuid = None
        while True:
            if slug is None:
                slug = self.findSlugForUser(user)
            if uuid is None:
                uuid = self.randomUUID()
            p.slug = slug
            p.uuid = uuid
            try:
                p.save()
            except IntegrityError, e:
                # regenerate only the field that collided and retry
                msg = str(e)
                if 'uuid' in msg:
                    uuid = None
                elif 'slug' in msg:
                    slug = None
                else:
                    raise
            else:
                break
        return p
# Visibility levels for an attendee profile page (see AttendeeProfile.visibility).
ATTENDEEPROFILE_VISIBILITY = (
    ('x', 'Private (disabled)'),
    ('m', 'Participants only'),
    ('p', 'Public'),
)
class AttendeeProfile(models.Model):
    """
    The profile of a conference participant (speakers included); the
    link to the actual person is the one-to-one key to auth.User.
    """
    user = models.OneToOneField('auth.User', primary_key=True)
    slug = models.SlugField(unique=True)
    uuid = models.CharField(max_length=6, unique=True)
    image = models.ImageField(upload_to=_fs_upload_to('profile'), blank=True)
    birthday = models.DateField(_('Birthday'), null=True, blank=True)
    phone = models.CharField(
        _('Phone'),
        max_length=30, blank=True,
        help_text=_('Enter a phone number where we can contact you in case of administrative issues.<br />Use the international format, eg: +39-055-123456'),
    )
    personal_homepage = models.URLField(_('Personal homepage'), blank=True)
    company = models.CharField(_('Company'), max_length=50, blank=True)
    company_homepage = models.URLField(_('Company homepage'), blank=True)
    job_title = models.CharField(_('Job title'), max_length=50, blank=True)
    location = models.CharField(_('Location'), max_length=100, blank=True)
    bios = generic.GenericRelation(MultilingualContent)
    visibility = models.CharField(max_length=1, choices=ATTENDEEPROFILE_VISIBILITY, default='x')
    objects = AttendeeProfileManager()
    def __unicode__(self):
        return self.slug
    def clean(self):
        # A speaker with an accepted talk is not allowed to hide his profile.
        from django.core.exceptions import ValidationError
        if self.visibility != 'p':
            if TalkSpeaker.objects\
                .filter(speaker__user=self.user_id, talk__status='accepted')\
                .count()>0:
                raise ValidationError('This profile must be public')
    def setBio(self, body, language=None):
        # Store the biography under the 'bios' multilingual section.
        MultilingualContent.objects.setContent(self, 'bios', language, body)
    def getBio(self, language=None):
        return MultilingualContent.objects.getContent(self, 'bios', language)
post_save.connect(postSaveResizeImageHandler, sender=AttendeeProfile)
class Presence(models.Model):
    """
    A participant's attendance at a given conference edition.
    """
    profile = models.ForeignKey(AttendeeProfile, related_name='presences')
    conference = models.CharField(max_length=10)
    timestamp = models.DateTimeField(auto_now_add=True)
    class Meta:
        unique_together = (('profile', 'conference'),)
class AttendeeLinkManager(models.Manager):
    def findLinks(self, uid):
        # All links the user takes part in, regardless of which side.
        return AttendeeLink.objects.filter(
            models.Q(attendee1=uid) |
            models.Q(attendee2=uid))
    def getLink(self, uid1, uid2):
        # The link between the two users, whichever direction it was
        # stored in; raises AttendeeLink.DoesNotExist if absent.
        return AttendeeLink.objects.get(
            models.Q(attendee1=uid1, attendee2=uid2) |
            models.Q(attendee1=uid2, attendee2=uid1))
class AttendeeLink(models.Model):
    """
    A (undirected) link between two attendees, with an optional message.
    """
    attendee1 = models.ForeignKey(AttendeeProfile, related_name='link1')
    attendee2 = models.ForeignKey(AttendeeProfile, related_name='link2')
    message = models.TextField(blank=True)
    timestamp = models.DateTimeField(auto_now_add=True)
    objects = AttendeeLinkManager()
class SpeakerManager(models.Manager):
    def byConference(self, conf, only_accepted=True, talk_type=None):
        """
        Return all the speakers of the given conference, optionally
        restricted to accepted talks and/or a talk type (a single code
        or a list/tuple of codes).
        """
        qs = TalkSpeaker.objects\
            .filter(talk__conference=conf)\
            .values('speaker')
        if only_accepted:
            qs = qs.filter(talk__status='accepted')
        if talk_type:
            if isinstance(talk_type, (list, tuple)):
                qs = qs.filter(talk__type__in=talk_type)
            else:
                qs = qs.filter(talk__type=talk_type)
        return Speaker.objects.filter(user__in=qs)
class Speaker(models.Model, UrlMixin):
    # A speaker is just a marker row on top of auth.User; everything else
    # lives on the user's AttendeeProfile.
    user = models.OneToOneField('auth.User', primary_key=True)
    objects = SpeakerManager()
    def __unicode__(self):
        return '%s %s' % (self.user.first_name, self.user.last_name)
    def talks(self, conference=None, include_secondary=True, status=None):
        """
        Return the speaker's talks, filtered by conference (when not
        None); if include_secondary is True, talks where he is not the
        main speaker are included too.  If status is not None only the
        talks with the requested status are returned (ValueError for an
        unknown status).
        """
        qs = TalkSpeaker.objects.filter(speaker=self)
        if status in ('proposed', 'accepted', 'canceled'):
            qs = qs.filter(talk__status=status)
        elif status is not None:
            raise ValueError('status unknown')
        if not include_secondary:
            qs = qs.filter(helper=False)
        if conference is not None:
            qs = qs.filter(talk__conference=conference)
        return Talk.objects.filter(id__in=qs.values('talk'))
# Talk metadata choices; TALK_LANGUAGES mirrors the project-wide setting.
TALK_LANGUAGES = dsettings.LANGUAGES
TALK_STATUS = (
    ('proposed', _('Proposed')),
    ('accepted', _('Accepted')),
    ('canceled', _('Canceled')),
)
VIDEO_TYPE = (
    ('viddler_oembed', 'oEmbed (Youtube, Vimeo, ...)'),
    ('download', 'Download'),
)
TALK_LEVEL = (
    ('beginner', _('Beginner')),
    ('intermediate', _('Intermediate')),
    ('advanced', _('Advanced')),
)
class TalkManager(models.Manager):
    # Manager forwarding attribute access to a custom QuerySet offering
    # status shortcuts (proposed/accepted/canceled).
    def get_query_set(self):
        return self._QuerySet(self.model)
    def __getattr__(self, name):
        return getattr(self.all(), name)
    class _QuerySet(QuerySet):
        def proposed(self, conference=None):
            qs = self.filter(status='proposed')
            if conference:
                qs = qs.filter(conference=conference)
            return qs
        def accepted(self, conference=None):
            qs = self.filter(status='accepted')
            if conference:
                qs = qs.filter(conference=conference)
            return qs
        def canceled(self, conference=None):
            qs = self.filter(status='canceled')
            if conference:
                qs = qs.filter(conference=conference)
            return qs
    def createFromTitle(self, title, sub_title, conference, speaker, prerequisites, abstract_short, abstract_extra,
        status='proposed', language='en', level='beginner', training_available=False, type='t_30'):
        """Create a Talk (and its TalkSpeaker row) from CFP form data.

        A unique slug is derived from the title by appending "-<n>" until
        free; slug assignment, save and speaker association all happen
        inside one transaction.
        """
        slug = slugify(title)
        talk = Talk()
        talk.title = title
        talk.sub_title = sub_title
        talk.prerequisites = prerequisites
        talk.abstract_short = abstract_short
        talk.conference = conference
        talk.status = status
        talk.language = language
        talk.level = level
        talk.abstract_extra = abstract_extra
        talk.training_available = training_available
        talk.type = type
        with transaction.commit_on_success():
            count = 0
            check = slug
            while True:
                if self.filter(slug=check).count() == 0:
                    break
                count += 1
                check = '%s-%d' % (slug, count)
            talk.slug = check
            talk.save()
            # associate the speaker here so that if anything goes wrong,
            # e.g. the speaker is not valid, everything happens inside a
            # transaction and the db stays clean.
            TalkSpeaker(talk=talk, speaker=speaker).save()
        return talk
# Previous definition of TALK_TYPE, kept around, since some of the
# code in the system uses the codes to checks.
#
# TALK_TYPE = (
# ('t', 'Talk'),
# ('i', 'Interactive'),
# ('r', 'Training'),
# ('p', 'Poster session'),
# ('n', 'Panel'),
# ('h', 'Help desk'),
# )
# Talk types combined with duration. Note that the system uses the
# first character to identify the generic talk type, so these should
# not be changed from the ones listed above.
TALK_TYPE = (
('t_30', 'Talk (30 mins)'),
('t_45', 'Talk (45 mins)'),
('t_60', 'Talk (60 mins)'),
('i_60', 'Interactive (60 mins)'),
('r_60', 'Training (60 mins)'),
('r_120', 'Training (120 mins)'),
('p_180', 'Poster during coffe breaks (30x4 mins)'),
('h_180', 'Help desk'),
)
# Mapping of TALK_TYPE to duration in minutes
TALK_DURATION = {
't_30': 30,
't_45': 45,
't_60': 60,
'i_60': 60,
'r_60': 60,
'r_120': 120,
'p_180': 30,
'h_180': 180,
}
TALK_ADMIN_TYPE = (
('o', 'Opening session'),
('c', 'Closing session'),
('l', 'Lightning talk'),
('k', 'Keynote'),
('r', 'Recruiting session'),
('m', 'EPS session'),
('s', 'Open space'),
('e', 'Social event'),
('x', 'Reserved slot'),
)
class Talk(models.Model, UrlMixin):
    """A talk (or training, poster, ...) proposed for a conference.

    Abstracts are multilingual (generic relation to MultilingualContent);
    `conference` is the usual free alphanumeric code, not a foreign key.
    """
    title = models.CharField(_('Talk title'), max_length=80)
    sub_title = models.CharField(_('Sub title'), max_length=1000, default="", blank=True)
    slug = models.SlugField(max_length=100, unique=True)
    prerequisites = models.CharField(_('prerequisites'), help_text="What should attendees know already",default="", blank=True, max_length=150)
    conference = models.CharField(help_text='name of the conference', max_length=20)
    admin_type = models.CharField(max_length=1, choices=TALK_ADMIN_TYPE, blank=True)
    speakers = models.ManyToManyField(Speaker, through='TalkSpeaker')
    language = models.CharField(_('Language'), max_length=3, choices=TALK_LANGUAGES)
    abstracts = generic.GenericRelation(
        MultilingualContent,
        verbose_name=_('Talk abstract'),
        help_text=_('<p>Please enter a short description of the talk you are submitting. Be sure to includes the goals of your talk and any prerequisite required to fully understand it.</p><p>Suggested size: two or three paragraphs.</p>'))
    abstract_short = models.TextField(
        verbose_name=_('Talk abstract short'),
        help_text=_('<p>Please enter a short description of the talk you are submitting.</p>'), default="")
    abstract_extra = models.TextField(
        verbose_name=_('Talk abstract extra'),
        help_text=_('<p>Please enter instructions for attendees.</p>'), default="")
    slides = models.FileField(upload_to=_fs_upload_to('slides'), blank=True)
    video_type = models.CharField(max_length=30, choices=VIDEO_TYPE, blank=True)
    video_url = models.TextField(blank=True)
    video_file = models.FileField(upload_to=_fs_upload_to('videos'), blank=True)
    teaser_video = models.URLField(
        _('Teaser video'),
        blank=True,
        help_text=_('Insert the url for your teaser video'))
    status = models.CharField(max_length=8, choices=TALK_STATUS)
    level = models.CharField(
        _('Audience level'),
        default='beginner',
        max_length=12,
        choices=TALK_LEVEL)
    training_available = models.BooleanField(default=False)
    type = models.CharField(max_length=5, choices=TALK_TYPE, default='t_30')
    #def _talk_duration(self):
    #    "Returns talk duration"
    #    duration = self.type
    #    return int(duration.split("_")[1])
    #duration = property(_talk_duration)
    # Old duration code
    # total duration of the talk (including the Q&A session)
    duration = models.IntegerField(
        _('Duration'),
        default=0,
        help_text=_('This is the duration of the talk'))
    # duration of the Q&A session
    # These are the tags the speaker suggests for his own talk; they were
    # put here for time reasons (the BSW2011 cfp was looming) but the
    # right thing would be to create a new "Submission" model linked to
    # Talk and put the cfp data there
    suggested_tags = models.CharField(max_length=100, blank=True)
    created = models.DateTimeField(auto_now_add=True)
    tags = TaggableManager(through=ConferenceTaggedItem)
    objects = TalkManager()
    class Meta:
        ordering = ['title']
    def save(self, *args, **kwargs):
        # the duration is taken directly from talk's type
        # NOTE(review): hard-coded to 30 even though the TALK_DURATION map
        # above covers every type — looks like a temporary hack; confirm
        # before restoring TALK_DURATION[self.type].
        self.duration = 30 #TALK_DURATION[self.type]
        super(Talk, self).save(*args, **kwargs)
    def __unicode__(self):
        return '%s [%s][%s][%s]' % (self.title, self.conference, self.language, self.duration)
    @models.permalink
    def get_absolute_url(self):
        return ('conference-talk', (), { 'slug': self.slug })
    get_url_path = get_absolute_url
    def get_event(self):
        # First scheduled Event for this talk, or None if not scheduled.
        try:
            return self.event_set.all()[0]
        except IndexError:
            return None
    def get_all_speakers(self):
        return self.speakers.all().select_related('speaker')
    def setAbstract(self, body, language=None):
        MultilingualContent.objects.setContent(self, 'abstracts', language, body)
    def getAbstract(self, language=None):
        return MultilingualContent.objects.getContent(self, 'abstracts', language)
class TalkSpeaker(models.Model):
    # Through-model of Talk.speakers; `helper` marks secondary speakers.
    talk = models.ForeignKey(Talk)
    speaker = models.ForeignKey(Speaker)
    helper = models.BooleanField(default=False)
    class Meta:
        unique_together = (('talk', 'speaker'),)
class FareManager(models.Manager):
    # Manager forwarding attribute access to a custom QuerySet.
    def get_query_set(self):
        return self._QuerySet(self.model)
    def __getattr__(self, name):
        return getattr(self.all(), name)
    class _QuerySet(QuerySet):
        def available(self, conference=None):
            # A fare is available when it has no validity window at all
            # or today falls inside it.
            today = datetime.date.today()
            q1 = models.Q(start_validity=None, end_validity=None)
            q2 = models.Q(start_validity__lte=today, end_validity__gte=today)
            qs = self.filter(q1 | q2)
            if conference:
                qs = qs.filter(conference=conference)
            return qs
FARE_TICKET_TYPES = (
('conference', 'Conference ticket'),
('partner', 'Partner Program'),
('event', 'Event'),
('other', 'Other'),
)
FARE_PAYMENT_TYPE = (
('p', 'Payment'),
('v', 'Voucher'),
('d', 'Deposit'),
)
FARE_TYPES = (
('c', 'Company'),
('s', 'Student'),
('p', 'Personal'),
)
class Fare(models.Model):
    """A purchasable rate (ticket type) of a conference."""
    conference = models.CharField(help_text='Conference code', max_length=20)
    code = models.CharField(max_length=10)
    name = models.CharField(max_length=100)
    description = models.TextField()
    price = models.DecimalField(max_digits=6, decimal_places=2)
    start_validity = models.DateField(null=True)
    end_validity = models.DateField(null=True)
    recipient_type = models.CharField(max_length=1, choices=FARE_TYPES, default='p')
    ticket_type = models.CharField(max_length=10, choices=FARE_TICKET_TYPES, default='conference', db_index=True)
    payment_type = models.CharField(max_length=1, choices=FARE_PAYMENT_TYPE, default='p')
    blob = models.TextField(blank=True)
    objects = FareManager()
    def __unicode__(self):
        return '%s - %s' % (self.code, self.conference)
    class Meta:
        unique_together = (('conference', 'code'),)
    def valid(self):
        """True when today falls inside the fare's validity window.

        NOTE(review): start_validity/end_validity are nullable; when
        unset this comparison raises TypeError — confirm callers only use
        valid() on dated fares (FareManager.available treats None/None as
        always available).
        """
        #numb = len(list(Ticket.objects.all()))
        today = datetime.date.today()
        validity = self.start_validity <= today <= self.end_validity
        #validity = numb < settings.MAX_TICKETS
        return validity
    def fare_type(self):
        """ Return the fare type based on the .recipient_type
        """
        return dict(FARE_TYPES).get(self.recipient_type, 'Regular')
    def calculated_price(self, qty=1, **kw):
        # Base price times quantity, optionally adjusted by fare_price
        # signal listeners mutating calc['total'].
        from conference.listeners import fare_price
        params = dict(kw)
        params['qty'] = qty
        calc = {
            'total': self.price * qty,
            'params': params,
        }
        fare_price.send(sender=self, calc=calc)
        return calc['total']
    def create_tickets(self, user):
        """ Creates and returns the tickets associated with this rate.
            Normally each fare involves just one ticket, but this
            behavior can be modified by a listener attached to the
            signal fare_tickets.
            The instances returned by this method have an additional
            attribute `fare_description` (volatile) and contains a
            description of the fare specific for the single ticket.
        """
        from conference.listeners import fare_tickets
        params = {
            'user': user,
            'tickets': []
        }
        fare_tickets.send(sender=self, params=params)
        if not params['tickets']:
            t = Ticket(user=user, fare=self)
            t.fare_description = self.name
            t.save()
            params['tickets'].append(t)
        return params['tickets']
class TicketManager(models.Manager):
    # Manager forwarding attribute access to a custom QuerySet.
    def get_query_set(self):
        return self._QuerySet(self.model)
    def __getattr__(self, name):
        return getattr(self.all(), name)
    class _QuerySet(QuerySet):
        def conference(self, conference):
            # Tickets belonging to the given conference (via their fare).
            return self.filter(fare__conference=conference)
TICKET_TYPE = (
('standard', 'standard'),
('staff', 'staff'),
)
class Ticket(models.Model):
    # `user` is the buyer; `name` is the (possibly different) attendee.
    user = models.ForeignKey(
        'auth.User',
        help_text=_('Buyer of the ticket'))
    name = models.CharField(
        max_length=60,
        blank=True,
        help_text=_('Attendee name, i.e. the person who will attend the conference.'))
    fare = models.ForeignKey(Fare)
    frozen = models.BooleanField(default=False)
    ticket_type = models.CharField(max_length=8, choices=TICKET_TYPE, default='standard')
    objects = TicketManager()
    def __unicode__(self):
        return 'Ticket "%s" (%s)' % (self.fare.name, self.fare.code)
class Sponsor(models.Model):
    """
    Through its list of SponsorIncome rows a Sponsor instance is linked
    to the details of all the sponsorships made.  As elsewhere, the
    conference is identified in SponsorIncome by an alphanumeric key not
    tied to any table.
    """
    sponsor = models.CharField(max_length=100, help_text='nome dello sponsor')
    slug = models.SlugField()
    url = models.URLField(blank=True)
    logo = models.ImageField(
        upload_to=_fs_upload_to('sponsor'), blank=True,
        help_text='Inserire un immagine raster sufficientemente grande da poter essere scalata al bisogno'
    )
    alt_text = models.CharField(max_length=150, blank=True)
    title_text = models.CharField(max_length=150, blank=True)
    class Meta:
        ordering = ['sponsor']
    def __unicode__(self):
        return self.sponsor
post_save.connect(postSaveResizeImageHandler, sender=Sponsor)
class SponsorIncome(models.Model):
    # One sponsorship of a sponsor for a given conference edition.
    sponsor = models.ForeignKey(Sponsor)
    conference = models.CharField(max_length=20)
    income = models.PositiveIntegerField()
    tags = TagField()
    class Meta:
        ordering = ['conference']
class MediaPartner(models.Model):
    """
    Media partners are sponsors that do not pay but offer visibility of
    some kind.
    """
    partner = models.CharField(max_length=100, help_text='nome del media partner')
    slug = models.SlugField()
    url = models.URLField(blank=True)
    logo = models.ImageField(
        upload_to=_fs_upload_to('media-partner'), blank = True,
        help_text='Inserire un immagine raster sufficientemente grande da poter essere scalata al bisogno'
    )
    class Meta:
        ordering = ['partner']
    def __unicode__(self):
        return self.partner
post_save.connect(postSaveResizeImageHandler, sender=MediaPartner)
class MediaPartnerConference(models.Model):
    # Participation of a media partner in a given conference edition.
    partner = models.ForeignKey(MediaPartner)
    conference = models.CharField(max_length = 20)
    tags = TagField()
    class Meta:
        ordering = ['conference']
class ScheduleManager(models.Manager):
    def attendees(self, conference, forecast=False):
        """
        Return the number of attendees for each schedule of the conference.
        """
        return settings.SCHEDULE_ATTENDEES(conference, forecast)
    def events_score_by_attendance(self, conference):
        """
        Using the EventInterest rows, return an "attendance score" for
        each event; the score is proportional to the number of people
        who expressed interest in that event.
        """
        # Treat an expression of interest (interest > 0) as the intention
        # to attend an event and add the user among its attendees.  If a
        # user "voted" several overlapping events his presence is counted
        # proportionally (so events may get a fractional score).
        events = defaultdict(set)
        for x in EventInterest.objects\
            .filter(event__schedule__conference=conference, interest__gt=0)\
            .select_related('event__schedule'):
            events[x.event].add(x.user_id)
        # Besides EventInterest also take the EventBooking rows into
        # account; the confidence is even higher in those cases.
        for x in EventBooking.objects\
            .filter(event__schedule__conference=conference)\
            .select_related('event__schedule'):
            events[x.event].add(x.user_id)
        # Associate each event with the number of votes it received; the
        # operation is complicated by the fact that not all votes weigh
        # the same: if a user marked +1 two events running in parallel he
        # obviously cannot attend both, so his vote must be scaled down.
        scores = defaultdict(lambda: 0.0)
        for evt, users in events.items():
            group = list(Event.objects.group_events_by_times(events, event=evt))[0]
            while users:
                u = users.pop()
                # How much is the presence of `u` worth for event `evt`?
                # If `u` attends no other event of the same group then 1,
                # otherwise a value inversely proportional to the number
                # of events he is interested in.
                found = [ evt ]
                for other in group:
                    if other != evt:
                        try:
                            events[other].remove(u)
                        except KeyError:
                            pass
                        else:
                            found.append(other)
                score = 1.0 / len(found)
                for f in found:
                    scores[f.id] += score
        return scores
    def expected_attendance(self, conference, factor=0.85):
        """
        Return, for each event, the attendance forecast based on the
        EventInterest rows: a dict event_id -> {score, seats, expected,
        overbook}.
        """
        seats_available = defaultdict(lambda: 0)
        for row in EventTrack.objects\
            .filter(event__schedule__conference=conference)\
            .values('event', 'track__seats'):
            seats_available[row['event']] += row['track__seats']
        scores = self.events_score_by_attendance(conference)
        events = Event.objects\
            .filter(schedule__conference=conference)\
            .select_related('schedule')
        output = {}
        # Forecast the attendance of every event: divide the event score
        # by the total score of the voters for events *in the same time
        # slot*; the resulting factor k, multiplied by the expected
        # attendance of the day, indicates how many people are expected
        # for the event.
        forecasts = self.attendees(conference, forecast=True)
        # Computing the score of a time slot needs a double loop over the
        # events; to limit the inner iterations, group the events by day
        # first.
        event_by_day = defaultdict(set)
        for e in events:
            event_by_day[e.schedule_id].add(e)
        for event in events:
            score = scores[event.id]
            group = list(Event.objects\
                .group_events_by_times(event_by_day[event.schedule_id], event=event))[0]
            group_score = sum([ scores[e.id] for e in group ])
            if group_score:
                k = score / group_score
            else:
                k = 0
            expected = k * forecasts[event.schedule_id] * factor
            seats = seats_available.get(event.id, 0)
            output[event.id] = {
                'score': score,
                'seats': seats,
                'expected': expected,
                'overbook': seats and expected > seats,
            }
        return output
class Schedule(models.Model):
    """
    A single day of the conference programme.

    The schedule records the conference (a free alphanumeric code, as
    elsewhere) and the day it refers to; tracks and events reference it
    through foreign keys.  Events may be talks or "custom" entries (e.g.
    social events) and are linked to tracks in a "weak" fashion, via a
    tag field.
    """
    conference = models.CharField(help_text = 'nome della conferenza', max_length = 20)
    slug = models.SlugField()
    # Fix: the `date` field was lost to an embedded data-corruption marker
    # on the original line; Meta.ordering, __unicode__ and the callers
    # (e.g. Event.get_time_range uses schedule.date) all require it.
    date = models.DateField()
    objects = ScheduleManager()
    class Meta:
        ordering = ['date']
    def __unicode__(self):
        return '{0}: {1}'.format(self.conference, self.date)
    def speakers(self):
        """Return the speakers with a talk scheduled on this day."""
        qs = Event.objects\
            .filter(schedule=self, talk__id__isnull=False)\
            .values('talk__talkspeaker__speaker')
        return Speaker.objects.filter(user__in=qs)
class Track(models.Model):
    # A room/track of a schedule day; `seats` is the room capacity,
    # `order` its display position, `translate`/`outdoor` are flags used
    # by the schedule rendering.
    schedule = models.ForeignKey(Schedule)
    track = models.CharField('nome track', max_length=20)
    title = models.TextField('titolo della track', help_text='HTML supportato')
    seats = models.PositiveIntegerField(default=0)
    order = models.PositiveIntegerField('ordine', default=0)
    translate = models.BooleanField(default=False)
    outdoor = models.BooleanField(default=False)
    def __unicode__(self):
        return self.track
class EventManager(models.Manager):
    def group_events_by_times(self, events, event=None):
        """
        Group the events (belonging, of course, to different tracks)
        that overlap in time.
        Returns a generator that yields one group (list) of events per
        iteration; when `event` is given, only the group containing it
        is yielded.
        """
        def overlap(range1, range2):
            # True when the two (start, end) datetime ranges intersect on
            # the same day.
            # http://stackoverflow.com/questions/9044084/efficient-data-range-overlap-calculation-in-python
            latest_start = max(range1[0], range2[0])
            earliest_end = min(range1[1], range2[1])
            _overlap = (earliest_end - latest_start)
            return _overlap.days == 0 and _overlap.seconds > 0
        def extract_group(event, events):
            # Pop from `events` (in place) everything overlapping `event`
            # on the same day and return the popped items.
            group = []
            r0 = event.get_time_range()
            for ix in reversed(range(len(events))):
                r1 = events[ix].get_time_range()
                if r0[0].date() == r1[0].date() and overlap(r0, r1):
                    group.append(events.pop(ix))
            return group
        if event:
            group = extract_group(event, list(events))
            yield group
        else:
            # Zero-duration events are ignored; processing longest-first
            # makes each group form around its longest event.
            sorted_events = sorted(
                filter(lambda x: x.get_duration() > 0, events),
                key=lambda x: x.get_duration())
            while sorted_events:
                evt0 = sorted_events.pop()
                group = [evt0] + extract_group(evt0, sorted_events)
                yield group
class Event(models.Model):
    """
    A scheduled slot on a Schedule: either a Talk or a custom entry
    (break, keynote, ...), placed on one or more Tracks.
    """
    schedule = models.ForeignKey(Schedule)
    start_time = models.TimeField()
    # Exactly one of `talk` / `custom` is expected to be meaningful.
    talk = models.ForeignKey(Talk, blank=True, null=True)
    custom = models.TextField(
        blank=True,
        help_text="title for a custom event (an event without a talk)")
    abstract = models.TextField(
        blank=True,
        help_text="description for a custom event")
    duration = models.PositiveIntegerField(
        default=0,
        help_text='duration of the event (in minutes). Override the talk duration if present')
    tags = models.CharField(
        max_length=200, blank=True,
        help_text='comma separated list of tags. Something like: special, break, keynote')
    tracks = models.ManyToManyField(Track, through='EventTrack')
    sponsor = models.ForeignKey(Sponsor, blank=True, null=True)
    video = models.CharField(max_length=1000, blank=True)
    bookable = models.BooleanField(default=False)
    seats = models.PositiveIntegerField(
        default=0,
        help_text='seats available. Override the track default if set')

    objects = EventManager()

    class Meta:
        ordering = ['start_time']

    def __unicode__(self):
        if self.talk:
            return '%s - %smin' % (self.talk.title, self.talk.duration)
        else:
            return self.custom

    def get_duration(self):
        # Explicit duration wins; otherwise fall back to the talk's, else 0.
        if self.duration:
            return self.duration
        elif self.talk:
            return self.talk.duration
        else:
            return 0

    def get_time_range(self):
        # (start, end) datetimes, anchored on the schedule's date.
        n = datetime.datetime.combine(self.schedule.date, self.start_time)
        return (
            n, (n + datetime.timedelta(seconds=self.get_duration() * 60))
        )

    def get_description(self):
        if self.talk:
            return self.talk.title
        else:
            return self.custom

    def get_all_tracks_names(self):
        from tagging.utils import parse_tag_input
        # NOTE(review): Event defines no `track` attribute in this file, so
        # this line looks like it raises AttributeError — possibly it should
        # read `self.tags`; confirm against callers before changing.
        return parse_tag_input(self.track)

    def get_track(self):
        """
        Returns the first Track instance among those tagged on this event,
        or None if the event is a special (track-less) one.
        """
        # XXX: use the get_event_track template tag, which caches this query
        dbtracks = dict( (t.track, t) for t in self.schedule.track_set.all())
        for t in tagging.models.Tag.objects.get_for_object(self):
            if t.name in dbtracks:
                return dbtracks[t.name]

    def split(self, time):
        """
        Splits the event into multiple events lasting at most `time` minutes
        each. Returns the number of resulting events (0 if no split occurred).
        """
        if self.talk_id and self.duration == 0:
            original = self.talk.duration
        else:
            original = self.duration
        if time >= original:
            return 0

        myid = self.id
        tracks = self.tracks.all()

        # Shrink the current row to the first chunk.
        self.duration = time
        original -= time
        self.save()
        count = 1
        while original > 0:
            # Saving with id=None inserts a copy shifted forward by `time`.
            self.id = None
            dt = datetime.datetime.combine(datetime.date.today(), self.start_time)
            dt += datetime.timedelta(minutes=time)
            self.start_time = dt.time()
            self.save()
            for t in tracks:
                EventTrack.objects.create(track=t, event=self)
            # NOTE(review): every copy keeps duration == time, so the last
            # fragment may exceed the leftover minutes — confirm intended.
            original -= time
            count += 1
        self.id = myid
        return count
class EventTrack(models.Model):
    """M2M through table linking an Event to the Tracks it occupies."""
    track = models.ForeignKey(Track)
    event = models.ForeignKey(Event)

    class Meta:
        unique_together = (('track', 'event',),)
class EventInterest(models.Model):
    """A user's interest score for an event; one row per (user, event)."""
    event = models.ForeignKey(Event)
    user = models.ForeignKey('auth.User')
    interest = models.IntegerField()

    class Meta:
        unique_together = (('user', 'event'),)
class EventBookingManager(models.Manager):
    def booking_status(self, eid):
        """Returns {'seats', 'booked', 'available'} for event `eid`."""
        # Event.seats overrides the summed capacity of its tracks.
        seats = Event.objects.values('seats').get(id=eid)['seats']
        if not seats:
            seats = sum(EventTrack.objects\
                .filter(event=eid)\
                .values_list('track__seats', flat=True))
        booked = list(EventBooking.objects\
            .filter(event=eid)\
            .values_list('user', flat=True))
        return {
            'seats': seats,
            'booked': booked,
            'available': seats - len(booked),
        }

    def booking_available(self, eid, uid):
        # A user who already booked is always "available" (idempotent check).
        st = self.booking_status(eid)
        return (uid in st['booked']) or (st['available'] > 0)

    def book_event(self, eid, uid):
        """Books event `eid` for user `uid`; idempotent for existing bookings."""
        try:
            e = EventBooking.objects.get(event=eid, user=uid)
        except EventBooking.DoesNotExist:
            # NOTE(review): save + signal reconstructed as firing only for
            # newly-created bookings (source indentation was mangled) —
            # confirm against upstream history.
            e = EventBooking(event_id=eid, user_id=uid)
            e.save()
            signals.event_booked.send(sender=Event, booked=True, event_id=eid, user_id=uid)
        return e

    def cancel_reservation(self, eid, uid):
        """Silently does nothing if no booking exists."""
        try:
            e = EventBooking.objects.get(event=eid, user=uid)
        except EventBooking.DoesNotExist:
            return
        e.delete()
        signals.event_booked.send(sender=Event, booked=False, event_id=eid, user_id=uid)
class EventBooking(models.Model):
    """A user's seat reservation for a bookable event."""
    event = models.ForeignKey(Event)
    user = models.ForeignKey('auth.User')

    objects = EventBookingManager()

    class Meta:
        unique_together = (('user', 'event'),)
class Hotel(models.Model):
    """
    Hotels track the lodging options — partner ("convenzionato") or not —
    available during the conference.
    """
    name = models.CharField('nome dell\'hotel', max_length = 100)
    telephone = models.CharField('contatti telefonici', max_length = 50, blank = True)
    url = models.URLField(blank = True)
    email = models.EmailField('email', blank = True)
    availability = models.CharField('Disponibilità', max_length = 50, blank = True)
    price = models.CharField('Prezzo', max_length = 50, blank = True)
    note = models.TextField('note', blank = True)
    affiliated = models.BooleanField('convenzionato', default = False)  # partner hotel
    visible = models.BooleanField('visibile', default = True)
    address = models.CharField('indirizzo', max_length = 200, default = '', blank = True)
    # lng/lat default to 0.0; the post_save geocoding hook further down this
    # file fills them in from `address` when a Google Maps key is configured.
    lng = models.FloatField('longitudine', default = 0.0, blank = True)
    lat = models.FloatField('latitudine', default = 0.0, blank = True)
    modified = models.DateField(auto_now = True)

    class Meta:
        ordering = [ 'name' ]

    def __unicode__(self):
        return self.name
# (db value, human-readable label) choices for SpecialPlace.type.
SPECIAL_PLACE_TYPES = (
    ('conf-hq', 'Conference Site'),
    ('pyevents', 'PyEvents'),
)
class SpecialPlace(models.Model):
    """A notable conference location (venue HQ, social-event site, ...)."""
    name = models.CharField('nome', max_length = 100)
    address = models.CharField('indirizzo', max_length = 200, default = '', blank = True)
    type = models.CharField(max_length = 10, choices=SPECIAL_PLACE_TYPES)
    url = models.URLField(blank = True)
    email = models.EmailField('email', blank = True)
    telephone = models.CharField('contatti telefonici', max_length = 50, blank = True)
    note = models.TextField('note', blank = True)
    visible = models.BooleanField('visibile', default = True)
    # Filled by the post_save geocoding hook below when left at 0.0.
    lng = models.FloatField('longitudine', default = 0.0, blank = True)
    lat = models.FloatField('latitudine', default = 0.0, blank = True)

    class Meta:
        ordering = [ 'name' ]

    def __unicode__(self):
        return self.name
# Geocode Hotel/SpecialPlace rows that have an address but no coordinates,
# after every save — but only when a Google Maps API key is configured;
# otherwise no handler is installed.
try:
    assert settings.GOOGLE_MAPS['key']
except (KeyError, TypeError, AssertionError):
    pass
else:
    def postSaveHotelHandler(sender, **kwargs):
        # Only rows still at the (0.0, 0.0) default are geocoded.
        query = sender.objects.exclude(address = '').filter(lng = 0.0).filter(lat = 0.0)
        for obj in query:
            data = conference.gmap.geocode(
                obj.address,
                settings.GOOGLE_MAPS['key'],
                settings.GOOGLE_MAPS.get('country')
            )
            if data['Status']['code'] == 200:
                point = data['Placemark'][0]['Point']['coordinates']
                lng, lat = point[0:2]  # geocoder returns (lng, lat, ...)
                obj.lng = lng
                obj.lat = lat
                # NOTE(review): this save re-triggers post_save; the lng/lat
                # filter above stops recursion only when geocoding yields
                # non-zero coordinates.
                obj.save()
    post_save.connect(postSaveHotelHandler, sender=Hotel)
    post_save.connect(postSaveHotelHandler, sender=SpecialPlace)
class DidYouKnow(models.Model):
    """
    "Did you know?" factoids; the text lives in translated
    MultilingualContent rows attached via a generic relation.
    """
    visible = models.BooleanField('visible', default = True)
    messages = generic.GenericRelation(MultilingualContent)
class Quote(models.Model):
    """An attributed quote shown for a given conference."""
    who = models.CharField(max_length=100)
    conference = models.CharField(max_length=20)
    text = models.TextField()
    activity = models.CharField(max_length=50, blank=True)  # e.g. the quoted person's role
    # Upload path derives from a slug of `who`.
    image = models.ImageField(upload_to=_fs_upload_to('quote', attr=lambda i: slugify(i.who)), blank=True)

    class Meta:
        ordering = ['conference', 'who']
class VotoTalk(models.Model):
    """A user's vote ("voto") on a talk; one vote per (user, talk)."""
    user = models.ForeignKey('auth.User')
    talk = models.ForeignKey(Talk)
    vote = models.DecimalField(max_digits=5, decimal_places=2)

    class Meta:
        unique_together = (('user', 'talk'),)
#
#def _clear_track_cache(sender, **kwargs):
# if hasattr(sender, 'schedule_id'):
# Track.objects.clear_cache(sender.schedule_id)
#post_save.connect(_clear_track_cache, sender=Track)
#
#def _clear_talkspeaker_cache(sender, **kwargs):
# o = kwargs['instance']
# if isinstance(o, Talk):
# conference = o.conference
# else:
# conference = None
# TalkSpeaker.objects.clear_cache(conference)
#post_save.connect(_clear_talkspeaker_cache, sender=Talk)
#post_save.connect(_clear_talkspeaker_cache, sender=Speaker)
#
#def _clear_schedule_cache(sender, **kwargs):
# o = kwargs['instance']
# if isinstance(o, Event):
# conference = o.schedule.conference
# else:
# conference = o.event.schedule.conference
# Schedule.objects.clear_cache(conference)
#post_save.connect(_clear_schedule_cache, sender=Event)
#post_save.connect(_clear_schedule_cache, sender=EventInterest)
from conference import listeners<|fim▁end|>
|
date = models.DateField()
description = models.TextField(blank=True)
|
#!/usr/bin/env python
"""
Copyright (C) 2017, California Institute of Technology
This file is part of addm_toolbox.
addm_toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
addm_toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with addm_toolbox. If not, see <http://www.gnu.org/licenses/>.
---
Module: ddm_mla_test.py
Author: Gabriela Tavares, [email protected]
Performs a test to check the validity of the maximum likelihood algorithm (MLA)
for the drift-diffusion model (DDM). Artificial data is generated using
specific parameters for the model. These parameters are then recovered through
a maximum likelihood estimation procedure, using a grid search over the 2 free
parameters of the model.
"""
from __future__ import absolute_import
import numpy as np
import pkg_resources
from builtins import range, str
from multiprocessing import Pool
from .ddm_mla import DDM
from .util import load_trial_conditions_from_csv
def wrap_ddm_get_model_log_likelihood(args):
    """
    Wrapper for DDM.get_model_log_likelihood(), intended for parallel
    computation using a threadpool.

    Args:
      args: a tuple where the first item is a DDM object, and the remaining
          items are the same arguments required by
          DDM.get_model_log_likelihood().

    Returns:
      The output of DDM.get_model_log_likelihood().
    """
    model = args[0]
    return model.get_model_log_likelihood(*args[1:])
def main(d, sigma, rangeD, rangeSigma, trialsFileName=None, numTrials=10,
         numSimulations=10, binStep=100, maxRT=8000, numThreads=9,
         verbose=False):
    """
    Generates artificial DDM data with known (d, sigma) and recovers those
    parameters via a maximum-likelihood grid search.

    Args:
      d: float, DDM parameter for generating artificial data.
      sigma: float, DDM parameter for generating artificial data.
      rangeD: list of floats, search range for parameter d.
      rangeSigma: list of floats, search range for parameter sigma.
      trialsFileName: string, path of trial conditions file.
      numTrials: int, number of artificial data trials to be generated per
          trial condition.
      numSimulations: int, number of simulations to be generated per trial
          condition, to be used in the RT histograms.
      binStep: int, size of the bin step to be used in the RT histograms.
      maxRT: int, maximum RT to be used in the RT histograms.
      numThreads: int, size of the thread pool.
      verbose: boolean, whether or not to increase output verbosity.
    """
    pool = Pool(numThreads)
    histBins = list(range(0, maxRT + binStep, binStep))

    # Load trial conditions, defaulting to the packaged test file.
    if not trialsFileName:
        trialsFileName = pkg_resources.resource_filename(
            u"addm_toolbox", u"test_data/test_trial_conditions.csv")
    trialConditions = load_trial_conditions_from_csv(trialsFileName)

    # Generate artificial data, bucketing RTs by chosen side.
    dataRTLeft = dict()
    dataRTRight = dict()
    for trialCondition in trialConditions:
        dataRTLeft[trialCondition] = list()
        dataRTRight[trialCondition] = list()
    model = DDM(d, sigma)
    for trialCondition in trialConditions:
        t = 0
        while t < numTrials:
            try:
                trial = model.simulate_trial(
                    trialCondition[0], trialCondition[1])
            except:
                # Annotate the failing trial/condition, then propagate.
                print(u"An exception occurred while generating artificial "
                      "trial " + str(t) + u" for condition " +
                      str(trialCondition[0]) + u", " + str(trialCondition[1]) +
                      u".")
                raise
            if trial.choice == -1:
                dataRTLeft[trialCondition].append(trial.RT)
            elif trial.choice == 1:
                dataRTRight[trialCondition].append(trial.RT)
            t += 1

    # Generate RT histograms for the artificial data.
    dataHistLeft = dict()
    dataHistRight = dict()
    for trialCondition in trialConditions:
        dataHistLeft[trialCondition] = np.histogram(
            dataRTLeft[trialCondition], bins=histBins)[0]
        dataHistRight[trialCondition] = np.histogram(
            dataRTRight[trialCondition], bins=histBins)[0]

    # Grid search over the 2 free parameters of the model.
    if verbose:
        print(u"Performing grid search over the model parameters...")
    listParams = list()
    models = list()
    for d in rangeD:
        for sigma in rangeSigma:
            model = DDM(d, sigma)
            models.append(model)
            listParams.append((model, trialConditions, numSimulations,
                               histBins, dataHistLeft, dataHistRight))
    logLikelihoods = pool.map(wrap_ddm_get_model_log_likelihood, listParams)
    pool.close()

    if verbose:
        for i, model in enumerate(models):
            print(u"L" + str(model.params) + u" = " + str(logLikelihoods[i]))
    bestIndex = logLikelihoods.index(max(logLikelihoods))
    print(u"Best fit: " + str(models[bestIndex].params))
|
The output of DDM.get_model_log_likelihood().
|
# SPDX-License-Identifier: MIT
# Copyright (C) 2019-2020 Tobias Gruetzmacher
# Copyright (C) 2019-2020 Daniel Ring
from .common import _ParserScraper
class ProjectFuture(_ParserScraper):
    """Scraper for the Project Future comic and its spin-off stories."""
    imageSearch = '//td[@class="tamid"]/img'
    prevSearch = '//a[./img[@alt="Previous"]]'

    def __init__(self, name, comic, first, last=None):
        # The flagship strip keeps the bare module name; spin-offs are
        # namespaced under "ProjectFuture/".
        if name == 'ProjectFuture':
            super(ProjectFuture, self).__init__(name)
        else:
            super(ProjectFuture, self).__init__('ProjectFuture/' + name)

        self.url = 'http://www.projectfuturecomic.com/' + comic + '.php'
        self.stripUrl = self.url + '?strip=%s'
        self.firstStripUrl = self.stripUrl % first
        if last:
            # Finished stories start crawling from their last strip.
            self.url = self.stripUrl
            self.endOfLife = True

    @classmethod
    def getmodules(cls):
        return (
            cls('AWalkInTheWoods', 'simeon', '1', last='12'),
            cls('BenjaminBuranAndTheArkOfUr', 'ben', '00', last='23'),
            cls('BookOfTenets', 'tenets', '01', last='45'),
            cls('CriticalMass', 'criticalmass', 'cover', last='26'),
            cls('DarkLordRising', 'darklord', '01-00', last='10-10'),
            cls('Emily', 'emily', '01-00'),
            cls('FishingTrip', 'fishing', '01-00'),
            cls('HeadsYouLose', 'heads', '00-01', last='07-12'),
            cls('NiallsStory', 'niall', '00'),
            cls('ProjectFuture', 'strip', '0'),
            cls('RedValentine', 'redvalentine', '1', last='6'),
            cls('ShortStories', 'shorts', '01-00'),
            cls('StrangeBedfellows', 'bedfellows', '1', last='6'),
            cls('TheAxemanCometh', 'axeman', '01-01', last='02-18'),
            cls('ToCatchADemon', 'daxxon', '01-00', last='03-14'),
            cls('TheDarkAngel', 'darkangel', 'cover', last='54'),
            cls('TheEpsilonProject', 'epsilon', '00-01'),
            cls('TheHarvest', 'harvest', '01-00'),
            cls('TheSierraChronicles', 'sierra', '0', last='29'),
            cls('TheTuppenyMan', 'tuppenny', '00', last='16'),
            cls('TurningANewPage', 'azrael', '1', last='54'),
        )
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register contents.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw value committed to hardware by `write`/`modify`.
    bits: u32,
}
impl super::CMAR3 {
    #[doc = r" Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed both proxies with the current value,
        // let the closure edit the writer, then commit.
        let current = self.register.get();
        let reader = R { bits: current };
        let mut writer = W { bits: current };
        f(&reader, &mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R { bits: self.register.get() }
    }
    #[doc = r" Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, not the current hardware state.
        let mut writer = W::reset_value();
        f(&mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct MAR {
    bits: u32,
}
impl MAR {
    #[doc = r" Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
}
#[doc = r" Proxy"]
pub struct _MAW<'a> {
    // Borrows the writer so field updates land in `W::bits`.
    w: &'a mut W,
}
impl<'a> _MAW<'a> {
#[doc = r" Writes raw bits to the field"]<|fim▁hole|> #[inline(always)]
pub fn bits(self, value: u32) -> &'a mut W {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:31 - Memory address"]
    #[inline(always)]
    pub fn ma(&self) -> MAR {
        // The field spans the whole register: offset 0, 32-bit mask.
        const MASK: u32 = 0xffff_ffff;
        const OFFSET: u8 = 0;
        MAR { bits: (self.bits >> OFFSET) & MASK }
    }
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:31 - Memory address"]
#[inline(always)]
pub fn ma(&mut self) -> _MAW {
_MAW { w: self }
}
}<|fim▁end|>
| |
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use Justification;
use TextDirection;
use TextIter;
use WrapMode;
use ffi;
use gdk;
use gdk_ffi;
use glib;
use glib::GString;
use glib::StaticType;
use glib::Value;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect_raw;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use pango;
use signal::Inhibit;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
// Declares the `TextTag` wrapper type for the GObject class `GtkTextTag`,
// wiring its GType lookup to `gtk_text_tag_get_type`.
glib_wrapper! {
    pub struct TextTag(Object<ffi::GtkTextTag, ffi::GtkTextTagClass, TextTagClass>);

    match fn {
        get_type => || ffi::gtk_text_tag_get_type(),
    }
}
impl TextTag {
    /// Creates a new `TextTag` with the given (optional) name;
    /// passing `None` creates an anonymous tag.
    pub fn new<'a, P: Into<Option<&'a str>>>(name: P) -> TextTag {
        assert_initialized_main_thread!();
        let name = name.into();
        unsafe {
            // Full ownership of the returned GObject is transferred to us.
            from_glib_full(ffi::gtk_text_tag_new(name.to_glib_none().0))
        }
    }
}
/// Convenience `None` value usable where an `Option<&TextTag>` is expected.
pub const NONE_TEXT_TAG: Option<&TextTag> = None;
/// Trait containing all `TextTag` methods, property accessors and signal
/// connectors, implemented for `TextTag` and everything `IsA<TextTag>`.
///
/// NOTE(review): generated by gir — prefer regenerating over hand edits.
pub trait TextTagExt: 'static {
    // --- GtkTextTag methods ---
    #[cfg(any(feature = "v3_20", feature = "dox"))]
    fn changed(&self, size_changed: bool);

    fn event<P: IsA<glib::Object>>(&self, event_object: &P, event: &gdk::Event, iter: &TextIter) -> bool;

    fn get_priority(&self) -> i32;

    fn set_priority(&self, priority: i32);

    // --- GObject property getters/setters ---
    fn get_property_accumulative_margin(&self) -> bool;

    fn set_property_accumulative_margin(&self, accumulative_margin: bool);

    fn set_property_background<'a, P: Into<Option<&'a str>>>(&self, background: P);

    fn get_property_background_full_height(&self) -> bool;

    fn set_property_background_full_height(&self, background_full_height: bool);

    fn get_property_background_full_height_set(&self) -> bool;

    fn set_property_background_full_height_set(&self, background_full_height_set: bool);

    fn get_property_background_rgba(&self) -> Option<gdk::RGBA>;

    fn set_property_background_rgba(&self, background_rgba: Option<&gdk::RGBA>);

    fn get_property_background_set(&self) -> bool;

    fn set_property_background_set(&self, background_set: bool);

    fn get_property_direction(&self) -> TextDirection;

    fn set_property_direction(&self, direction: TextDirection);

    fn get_property_editable(&self) -> bool;

    fn set_property_editable(&self, editable: bool);

    fn get_property_editable_set(&self) -> bool;

    fn set_property_editable_set(&self, editable_set: bool);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_fallback(&self) -> bool;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_fallback(&self, fallback: bool);

    fn get_property_fallback_set(&self) -> bool;

    fn set_property_fallback_set(&self, fallback_set: bool);

    fn get_property_family(&self) -> Option<GString>;

    fn set_property_family<'a, P: Into<Option<&'a str>>>(&self, family: P);

    fn get_property_family_set(&self) -> bool;

    fn set_property_family_set(&self, family_set: bool);

    fn get_property_font(&self) -> Option<GString>;

    fn set_property_font<'a, P: Into<Option<&'a str>>>(&self, font: P);

    #[cfg(any(feature = "v3_18", feature = "dox"))]
    fn get_property_font_features(&self) -> Option<GString>;

    #[cfg(any(feature = "v3_18", feature = "dox"))]
    fn set_property_font_features<'a, P: Into<Option<&'a str>>>(&self, font_features: P);

    fn get_property_font_features_set(&self) -> bool;

    fn set_property_font_features_set(&self, font_features_set: bool);

    fn set_property_foreground<'a, P: Into<Option<&'a str>>>(&self, foreground: P);

    fn get_property_foreground_rgba(&self) -> Option<gdk::RGBA>;

    fn set_property_foreground_rgba(&self, foreground_rgba: Option<&gdk::RGBA>);

    fn get_property_foreground_set(&self) -> bool;

    fn set_property_foreground_set(&self, foreground_set: bool);

    fn get_property_indent(&self) -> i32;

    fn set_property_indent(&self, indent: i32);

    fn get_property_indent_set(&self) -> bool;

    fn set_property_indent_set(&self, indent_set: bool);

    fn get_property_invisible(&self) -> bool;

    fn set_property_invisible(&self, invisible: bool);

    fn get_property_invisible_set(&self) -> bool;

    fn set_property_invisible_set(&self, invisible_set: bool);

    fn get_property_justification(&self) -> Justification;

    fn set_property_justification(&self, justification: Justification);

    fn get_property_justification_set(&self) -> bool;

    fn set_property_justification_set(&self, justification_set: bool);

    fn get_property_language(&self) -> Option<GString>;

    fn set_property_language<'a, P: Into<Option<&'a str>>>(&self, language: P);

    fn get_property_language_set(&self) -> bool;

    fn set_property_language_set(&self, language_set: bool);

    fn get_property_left_margin(&self) -> i32;

    fn set_property_left_margin(&self, left_margin: i32);

    fn get_property_left_margin_set(&self) -> bool;

    fn set_property_left_margin_set(&self, left_margin_set: bool);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_letter_spacing(&self) -> i32;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_letter_spacing(&self, letter_spacing: i32);

    fn get_property_letter_spacing_set(&self) -> bool;

    fn set_property_letter_spacing_set(&self, letter_spacing_set: bool);

    fn get_property_name(&self) -> Option<GString>;

    fn set_property_paragraph_background<'a, P: Into<Option<&'a str>>>(&self, paragraph_background: P);

    fn get_property_paragraph_background_rgba(&self) -> Option<gdk::RGBA>;

    fn set_property_paragraph_background_rgba(&self, paragraph_background_rgba: Option<&gdk::RGBA>);

    fn get_property_paragraph_background_set(&self) -> bool;

    fn set_property_paragraph_background_set(&self, paragraph_background_set: bool);

    fn get_property_pixels_above_lines(&self) -> i32;

    fn set_property_pixels_above_lines(&self, pixels_above_lines: i32);

    fn get_property_pixels_above_lines_set(&self) -> bool;

    fn set_property_pixels_above_lines_set(&self, pixels_above_lines_set: bool);

    fn get_property_pixels_below_lines(&self) -> i32;

    fn set_property_pixels_below_lines(&self, pixels_below_lines: i32);

    fn get_property_pixels_below_lines_set(&self) -> bool;

    fn set_property_pixels_below_lines_set(&self, pixels_below_lines_set: bool);

    fn get_property_pixels_inside_wrap(&self) -> i32;

    fn set_property_pixels_inside_wrap(&self, pixels_inside_wrap: i32);

    fn get_property_pixels_inside_wrap_set(&self) -> bool;

    fn set_property_pixels_inside_wrap_set(&self, pixels_inside_wrap_set: bool);

    fn get_property_right_margin(&self) -> i32;

    fn set_property_right_margin(&self, right_margin: i32);

    fn get_property_right_margin_set(&self) -> bool;

    fn set_property_right_margin_set(&self, right_margin_set: bool);

    fn get_property_rise(&self) -> i32;

    fn set_property_rise(&self, rise: i32);

    fn get_property_rise_set(&self) -> bool;

    fn set_property_rise_set(&self, rise_set: bool);

    fn get_property_scale(&self) -> f64;

    fn set_property_scale(&self, scale: f64);

    fn get_property_scale_set(&self) -> bool;

    fn set_property_scale_set(&self, scale_set: bool);

    fn get_property_size(&self) -> i32;

    fn set_property_size(&self, size: i32);

    fn get_property_size_points(&self) -> f64;

    fn set_property_size_points(&self, size_points: f64);

    fn get_property_size_set(&self) -> bool;

    fn set_property_size_set(&self, size_set: bool);

    fn get_property_stretch(&self) -> pango::Stretch;

    fn set_property_stretch(&self, stretch: pango::Stretch);

    fn get_property_stretch_set(&self) -> bool;

    fn set_property_stretch_set(&self, stretch_set: bool);

    fn get_property_strikethrough(&self) -> bool;

    fn set_property_strikethrough(&self, strikethrough: bool);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_strikethrough_rgba(&self) -> Option<gdk::RGBA>;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_strikethrough_rgba(&self, strikethrough_rgba: Option<&gdk::RGBA>);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_strikethrough_rgba_set(&self) -> bool;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_strikethrough_rgba_set(&self, strikethrough_rgba_set: bool);

    fn get_property_strikethrough_set(&self) -> bool;

    fn set_property_strikethrough_set(&self, strikethrough_set: bool);

    fn get_property_style(&self) -> pango::Style;

    fn set_property_style(&self, style: pango::Style);

    fn get_property_style_set(&self) -> bool;

    fn set_property_style_set(&self, style_set: bool);

    fn get_property_tabs_set(&self) -> bool;

    fn set_property_tabs_set(&self, tabs_set: bool);

    fn get_property_underline(&self) -> pango::Underline;

    fn set_property_underline(&self, underline: pango::Underline);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_underline_rgba(&self) -> Option<gdk::RGBA>;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_underline_rgba(&self, underline_rgba: Option<&gdk::RGBA>);

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn get_property_underline_rgba_set(&self) -> bool;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn set_property_underline_rgba_set(&self, underline_rgba_set: bool);

    fn get_property_underline_set(&self) -> bool;

    fn set_property_underline_set(&self, underline_set: bool);

    fn get_property_variant(&self) -> pango::Variant;

    fn set_property_variant(&self, variant: pango::Variant);

    fn get_property_variant_set(&self) -> bool;

    fn set_property_variant_set(&self, variant_set: bool);

    fn get_property_weight(&self) -> i32;

    fn set_property_weight(&self, weight: i32);

    fn get_property_weight_set(&self) -> bool;

    fn set_property_weight_set(&self, weight_set: bool);

    fn get_property_wrap_mode(&self) -> WrapMode;

    fn set_property_wrap_mode(&self, wrap_mode: WrapMode);

    fn get_property_wrap_mode_set(&self) -> bool;

    fn set_property_wrap_mode_set(&self, wrap_mode_set: bool);

    // --- Signal connectors ("event" and per-property "notify::*") ---
    fn connect_event<F: Fn(&Self, &glib::Object, &gdk::Event, &TextIter) -> Inhibit + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_accumulative_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_background_full_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_background_full_height_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_direction_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_editable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_editable_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_fallback_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_fallback_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_family_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_family_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_font_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_18", feature = "dox"))]
    fn connect_property_font_features_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_font_features_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_foreground_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_foreground_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_foreground_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_indent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_indent_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_invisible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_invisible_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_justification_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_justification_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_language_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_language_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_left_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_left_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_letter_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_letter_spacing_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_paragraph_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_paragraph_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_paragraph_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_above_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_above_lines_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_below_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_below_lines_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_inside_wrap_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_pixels_inside_wrap_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_right_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_right_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_rise_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_rise_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_scale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_scale_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_size_points_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_size_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_stretch_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_stretch_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_strikethrough_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_strikethrough_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_strikethrough_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_strikethrough_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_style_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_style_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_tabs_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_underline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_underline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_underline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_underline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_variant_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_variant_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_weight_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_weight_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_wrap_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_wrap_mode_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<TextTag>> TextTagExt for O {
/// Forwards to `gtk_text_tag_changed` over FFI. `size_changed` is passed
/// through as a C boolean via `to_glib()`.
#[cfg(any(feature = "v3_20", feature = "dox"))]
fn changed(&self, size_changed: bool) {
    unsafe {
        ffi::gtk_text_tag_changed(self.as_ref().to_glib_none().0, size_changed.to_glib());
    }
}
/// Forwards to `gtk_text_tag_event` over FFI and converts the C boolean
/// result to `bool`. `mut_override` casts the borrowed `event` pointer to
/// the mutable pointer the C signature expects (the C side does not
/// actually need ownership here — generated-binding convention).
fn event<P: IsA<glib::Object>>(&self, event_object: &P, event: &gdk::Event, iter: &TextIter) -> bool {
    unsafe {
        from_glib(ffi::gtk_text_tag_event(self.as_ref().to_glib_none().0, event_object.as_ref().to_glib_none().0, mut_override(event.to_glib_none().0), iter.to_glib_none().0))
    }
}
/// Returns the tag's priority by calling `gtk_text_tag_get_priority`.
fn get_priority(&self) -> i32 {
    unsafe {
        ffi::gtk_text_tag_get_priority(self.as_ref().to_glib_none().0)
    }
}
/// Sets the tag's priority by calling `gtk_text_tag_set_priority`.
fn set_priority(&self, priority: i32) {
    unsafe {
        ffi::gtk_text_tag_set_priority(self.as_ref().to_glib_none().0, priority);
    }
}
// Generated property accessors. Each getter reads a GObject property via
// `g_object_get_property` into a typed `Value`; getters returning a plain
// type call `.get().unwrap()` and panic if the stored value cannot be
// converted, while getters returning `Option<_>` yield `None` instead.
// Each setter writes the property via `g_object_set_property`. Property
// names are NUL-terminated byte strings as the GObject C API requires.
/// Reads the "accumulative-margin" property.
fn get_property_accumulative_margin(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"accumulative-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "accumulative-margin" property.
fn set_property_accumulative_margin(&self, accumulative_margin: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"accumulative-margin\0".as_ptr() as *const _, Value::from(&accumulative_margin).to_glib_none().0);
    }
}
/// Writes the "background" property; `None` clears it.
fn set_property_background<'a, P: Into<Option<&'a str>>>(&self, background: P) {
    let background = background.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background\0".as_ptr() as *const _, Value::from(background).to_glib_none().0);
    }
}
/// Reads the "background-full-height" property.
fn get_property_background_full_height(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-full-height\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "background-full-height" property.
fn set_property_background_full_height(&self, background_full_height: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-full-height\0".as_ptr() as *const _, Value::from(&background_full_height).to_glib_none().0);
    }
}
/// Reads the "background-full-height-set" property.
fn get_property_background_full_height_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-full-height-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "background-full-height-set" property.
fn set_property_background_full_height_set(&self, background_full_height_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-full-height-set\0".as_ptr() as *const _, Value::from(&background_full_height_set).to_glib_none().0);
    }
}
/// Reads the "background-rgba" property; `None` when unset/unconvertible.
fn get_property_background_rgba(&self) -> Option<gdk::RGBA> {
    unsafe {
        let mut value = Value::from_type(<gdk::RGBA as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "background-rgba" property; `None` clears it.
fn set_property_background_rgba(&self, background_rgba: Option<&gdk::RGBA>) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-rgba\0".as_ptr() as *const _, Value::from(background_rgba).to_glib_none().0);
    }
}
/// Reads the "background-set" property.
fn get_property_background_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "background-set" property.
fn set_property_background_set(&self, background_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"background-set\0".as_ptr() as *const _, Value::from(&background_set).to_glib_none().0);
    }
}
/// Reads the "direction" property.
fn get_property_direction(&self) -> TextDirection {
    unsafe {
        let mut value = Value::from_type(<TextDirection as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"direction\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "direction" property.
fn set_property_direction(&self, direction: TextDirection) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"direction\0".as_ptr() as *const _, Value::from(&direction).to_glib_none().0);
    }
}
// Generated property accessors (see pattern note: plain-typed getters
// `.unwrap()` the converted `Value` and panic on conversion failure;
// `Option<_>` getters return `None`; setters write via
// `g_object_set_property` with a NUL-terminated property name).
/// Reads the "editable" property.
fn get_property_editable(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"editable\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "editable" property.
fn set_property_editable(&self, editable: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"editable\0".as_ptr() as *const _, Value::from(&editable).to_glib_none().0);
    }
}
/// Reads the "editable-set" property.
fn get_property_editable_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"editable-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "editable-set" property.
fn set_property_editable_set(&self, editable_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"editable-set\0".as_ptr() as *const _, Value::from(&editable_set).to_glib_none().0);
    }
}
/// Reads the "fallback" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_fallback(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"fallback\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "fallback" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_fallback(&self, fallback: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"fallback\0".as_ptr() as *const _, Value::from(&fallback).to_glib_none().0);
    }
}
/// Reads the "fallback-set" property.
fn get_property_fallback_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"fallback-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "fallback-set" property.
fn set_property_fallback_set(&self, fallback_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"fallback-set\0".as_ptr() as *const _, Value::from(&fallback_set).to_glib_none().0);
    }
}
/// Reads the "family" property; `None` when unset.
fn get_property_family(&self) -> Option<GString> {
    unsafe {
        let mut value = Value::from_type(<GString as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"family\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "family" property; `None` clears it.
fn set_property_family<'a, P: Into<Option<&'a str>>>(&self, family: P) {
    let family = family.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"family\0".as_ptr() as *const _, Value::from(family).to_glib_none().0);
    }
}
/// Reads the "family-set" property.
fn get_property_family_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"family-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "family-set" property.
fn set_property_family_set(&self, family_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"family-set\0".as_ptr() as *const _, Value::from(&family_set).to_glib_none().0);
    }
}
/// Reads the "font" property; `None` when unset.
fn get_property_font(&self) -> Option<GString> {
    unsafe {
        let mut value = Value::from_type(<GString as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "font" property; `None` clears it.
fn set_property_font<'a, P: Into<Option<&'a str>>>(&self, font: P) {
    let font = font.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font\0".as_ptr() as *const _, Value::from(font).to_glib_none().0);
    }
}
/// Reads the "font-features" property (GTK 3.18+); `None` when unset.
#[cfg(any(feature = "v3_18", feature = "dox"))]
fn get_property_font_features(&self) -> Option<GString> {
    unsafe {
        let mut value = Value::from_type(<GString as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font-features\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "font-features" property (GTK 3.18+); `None` clears it.
#[cfg(any(feature = "v3_18", feature = "dox"))]
fn set_property_font_features<'a, P: Into<Option<&'a str>>>(&self, font_features: P) {
    let font_features = font_features.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font-features\0".as_ptr() as *const _, Value::from(font_features).to_glib_none().0);
    }
}
/// Reads the "font-features-set" property.
fn get_property_font_features_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font-features-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "font-features-set" property.
fn set_property_font_features_set(&self, font_features_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"font-features-set\0".as_ptr() as *const _, Value::from(&font_features_set).to_glib_none().0);
    }
}
// Generated property accessors (see pattern note: plain-typed getters
// `.unwrap()` the converted `Value` and panic on conversion failure;
// `Option<_>` getters return `None`; setters write via
// `g_object_set_property` with a NUL-terminated property name).
/// Writes the "foreground" property; `None` clears it.
fn set_property_foreground<'a, P: Into<Option<&'a str>>>(&self, foreground: P) {
    let foreground = foreground.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"foreground\0".as_ptr() as *const _, Value::from(foreground).to_glib_none().0);
    }
}
/// Reads the "foreground-rgba" property; `None` when unset/unconvertible.
fn get_property_foreground_rgba(&self) -> Option<gdk::RGBA> {
    unsafe {
        let mut value = Value::from_type(<gdk::RGBA as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"foreground-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "foreground-rgba" property; `None` clears it.
fn set_property_foreground_rgba(&self, foreground_rgba: Option<&gdk::RGBA>) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"foreground-rgba\0".as_ptr() as *const _, Value::from(foreground_rgba).to_glib_none().0);
    }
}
/// Reads the "foreground-set" property.
fn get_property_foreground_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"foreground-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "foreground-set" property.
fn set_property_foreground_set(&self, foreground_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"foreground-set\0".as_ptr() as *const _, Value::from(&foreground_set).to_glib_none().0);
    }
}
/// Reads the "indent" property.
fn get_property_indent(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"indent\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "indent" property.
fn set_property_indent(&self, indent: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"indent\0".as_ptr() as *const _, Value::from(&indent).to_glib_none().0);
    }
}
/// Reads the "indent-set" property.
fn get_property_indent_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"indent-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "indent-set" property.
fn set_property_indent_set(&self, indent_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"indent-set\0".as_ptr() as *const _, Value::from(&indent_set).to_glib_none().0);
    }
}
/// Reads the "invisible" property.
fn get_property_invisible(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"invisible\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "invisible" property.
fn set_property_invisible(&self, invisible: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"invisible\0".as_ptr() as *const _, Value::from(&invisible).to_glib_none().0);
    }
}
/// Reads the "invisible-set" property.
fn get_property_invisible_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"invisible-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "invisible-set" property.
fn set_property_invisible_set(&self, invisible_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"invisible-set\0".as_ptr() as *const _, Value::from(&invisible_set).to_glib_none().0);
    }
}
/// Reads the "justification" property.
fn get_property_justification(&self) -> Justification {
    unsafe {
        let mut value = Value::from_type(<Justification as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"justification\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "justification" property.
fn set_property_justification(&self, justification: Justification) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"justification\0".as_ptr() as *const _, Value::from(&justification).to_glib_none().0);
    }
}
/// Reads the "justification-set" property.
fn get_property_justification_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"justification-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "justification-set" property.
fn set_property_justification_set(&self, justification_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"justification-set\0".as_ptr() as *const _, Value::from(&justification_set).to_glib_none().0);
    }
}
/// Reads the "language" property; `None` when unset.
fn get_property_language(&self) -> Option<GString> {
    unsafe {
        let mut value = Value::from_type(<GString as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"language\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "language" property; `None` clears it.
fn set_property_language<'a, P: Into<Option<&'a str>>>(&self, language: P) {
    let language = language.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"language\0".as_ptr() as *const _, Value::from(language).to_glib_none().0);
    }
}
/// Reads the "language-set" property.
fn get_property_language_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"language-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "language-set" property.
fn set_property_language_set(&self, language_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"language-set\0".as_ptr() as *const _, Value::from(&language_set).to_glib_none().0);
    }
}
// Generated property accessors (see pattern note: plain-typed getters
// `.unwrap()` the converted `Value` and panic on conversion failure;
// `Option<_>` getters return `None`; setters write via
// `g_object_set_property` with a NUL-terminated property name).
/// Reads the "left-margin" property.
fn get_property_left_margin(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"left-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "left-margin" property.
fn set_property_left_margin(&self, left_margin: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"left-margin\0".as_ptr() as *const _, Value::from(&left_margin).to_glib_none().0);
    }
}
/// Reads the "left-margin-set" property.
fn get_property_left_margin_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"left-margin-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "left-margin-set" property.
fn set_property_left_margin_set(&self, left_margin_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"left-margin-set\0".as_ptr() as *const _, Value::from(&left_margin_set).to_glib_none().0);
    }
}
/// Reads the "letter-spacing" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_letter_spacing(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"letter-spacing\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "letter-spacing" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_letter_spacing(&self, letter_spacing: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"letter-spacing\0".as_ptr() as *const _, Value::from(&letter_spacing).to_glib_none().0);
    }
}
/// Reads the "letter-spacing-set" property.
fn get_property_letter_spacing_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"letter-spacing-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "letter-spacing-set" property.
fn set_property_letter_spacing_set(&self, letter_spacing_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"letter-spacing-set\0".as_ptr() as *const _, Value::from(&letter_spacing_set).to_glib_none().0);
    }
}
/// Reads the "name" property; `None` when unset (read-only: no setter is
/// generated for this property).
fn get_property_name(&self) -> Option<GString> {
    unsafe {
        let mut value = Value::from_type(<GString as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"name\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "paragraph-background" property; `None` clears it
/// (write-only: no getter is generated for this property).
fn set_property_paragraph_background<'a, P: Into<Option<&'a str>>>(&self, paragraph_background: P) {
    let paragraph_background = paragraph_background.into();
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"paragraph-background\0".as_ptr() as *const _, Value::from(paragraph_background).to_glib_none().0);
    }
}
/// Reads the "paragraph-background-rgba" property; `None` when unset.
fn get_property_paragraph_background_rgba(&self) -> Option<gdk::RGBA> {
    unsafe {
        let mut value = Value::from_type(<gdk::RGBA as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"paragraph-background-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "paragraph-background-rgba" property; `None` clears it.
fn set_property_paragraph_background_rgba(&self, paragraph_background_rgba: Option<&gdk::RGBA>) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"paragraph-background-rgba\0".as_ptr() as *const _, Value::from(paragraph_background_rgba).to_glib_none().0);
    }
}
/// Reads the "paragraph-background-set" property.
fn get_property_paragraph_background_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"paragraph-background-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
fn set_property_paragraph_background_set(&self, paragraph_background_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"paragraph-background-set\0".as_ptr() as *const _, Value::from(¶graph_background_set).to_glib_none().0);
}
}
// Generated property accessors (see pattern note: plain-typed getters
// `.unwrap()` the converted `Value` and panic on conversion failure;
// setters write via `g_object_set_property` with a NUL-terminated
// property name).
/// Reads the "pixels-above-lines" property.
fn get_property_pixels_above_lines(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-above-lines\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-above-lines" property.
fn set_property_pixels_above_lines(&self, pixels_above_lines: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-above-lines\0".as_ptr() as *const _, Value::from(&pixels_above_lines).to_glib_none().0);
    }
}
/// Reads the "pixels-above-lines-set" property.
fn get_property_pixels_above_lines_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-above-lines-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-above-lines-set" property.
fn set_property_pixels_above_lines_set(&self, pixels_above_lines_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-above-lines-set\0".as_ptr() as *const _, Value::from(&pixels_above_lines_set).to_glib_none().0);
    }
}
/// Reads the "pixels-below-lines" property.
fn get_property_pixels_below_lines(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-below-lines\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-below-lines" property.
fn set_property_pixels_below_lines(&self, pixels_below_lines: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-below-lines\0".as_ptr() as *const _, Value::from(&pixels_below_lines).to_glib_none().0);
    }
}
/// Reads the "pixels-below-lines-set" property.
fn get_property_pixels_below_lines_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-below-lines-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-below-lines-set" property.
fn set_property_pixels_below_lines_set(&self, pixels_below_lines_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-below-lines-set\0".as_ptr() as *const _, Value::from(&pixels_below_lines_set).to_glib_none().0);
    }
}
/// Reads the "pixels-inside-wrap" property.
fn get_property_pixels_inside_wrap(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-inside-wrap\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-inside-wrap" property.
fn set_property_pixels_inside_wrap(&self, pixels_inside_wrap: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-inside-wrap\0".as_ptr() as *const _, Value::from(&pixels_inside_wrap).to_glib_none().0);
    }
}
/// Reads the "pixels-inside-wrap-set" property.
fn get_property_pixels_inside_wrap_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-inside-wrap-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "pixels-inside-wrap-set" property.
fn set_property_pixels_inside_wrap_set(&self, pixels_inside_wrap_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"pixels-inside-wrap-set\0".as_ptr() as *const _, Value::from(&pixels_inside_wrap_set).to_glib_none().0);
    }
}
/// Reads the "right-margin" property.
fn get_property_right_margin(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"right-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "right-margin" property.
fn set_property_right_margin(&self, right_margin: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"right-margin\0".as_ptr() as *const _, Value::from(&right_margin).to_glib_none().0);
    }
}
/// Reads the "right-margin-set" property.
fn get_property_right_margin_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"right-margin-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "right-margin-set" property.
fn set_property_right_margin_set(&self, right_margin_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"right-margin-set\0".as_ptr() as *const _, Value::from(&right_margin_set).to_glib_none().0);
    }
}
/// Reads the "rise" property.
fn get_property_rise(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"rise\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "rise" property.
fn set_property_rise(&self, rise: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"rise\0".as_ptr() as *const _, Value::from(&rise).to_glib_none().0);
    }
}
/// Reads the "rise-set" property.
fn get_property_rise_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"rise-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "rise-set" property.
fn set_property_rise_set(&self, rise_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"rise-set\0".as_ptr() as *const _, Value::from(&rise_set).to_glib_none().0);
    }
}
// Generated property accessors (see pattern note: plain-typed getters
// `.unwrap()` the converted `Value` and panic on conversion failure;
// `Option<_>` getters return `None`; setters write via
// `g_object_set_property` with a NUL-terminated property name).
/// Reads the "scale" property.
fn get_property_scale(&self) -> f64 {
    unsafe {
        let mut value = Value::from_type(<f64 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"scale\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "scale" property.
fn set_property_scale(&self, scale: f64) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"scale\0".as_ptr() as *const _, Value::from(&scale).to_glib_none().0);
    }
}
/// Reads the "scale-set" property.
fn get_property_scale_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"scale-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "scale-set" property.
fn set_property_scale_set(&self, scale_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"scale-set\0".as_ptr() as *const _, Value::from(&scale_set).to_glib_none().0);
    }
}
/// Reads the "size" property.
fn get_property_size(&self) -> i32 {
    unsafe {
        let mut value = Value::from_type(<i32 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "size" property.
fn set_property_size(&self, size: i32) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size\0".as_ptr() as *const _, Value::from(&size).to_glib_none().0);
    }
}
/// Reads the "size-points" property.
fn get_property_size_points(&self) -> f64 {
    unsafe {
        let mut value = Value::from_type(<f64 as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size-points\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "size-points" property.
fn set_property_size_points(&self, size_points: f64) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size-points\0".as_ptr() as *const _, Value::from(&size_points).to_glib_none().0);
    }
}
/// Reads the "size-set" property.
fn get_property_size_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "size-set" property.
fn set_property_size_set(&self, size_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"size-set\0".as_ptr() as *const _, Value::from(&size_set).to_glib_none().0);
    }
}
/// Reads the "stretch" property.
fn get_property_stretch(&self) -> pango::Stretch {
    unsafe {
        let mut value = Value::from_type(<pango::Stretch as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"stretch\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "stretch" property.
fn set_property_stretch(&self, stretch: pango::Stretch) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"stretch\0".as_ptr() as *const _, Value::from(&stretch).to_glib_none().0);
    }
}
/// Reads the "stretch-set" property.
fn get_property_stretch_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"stretch-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "stretch-set" property.
fn set_property_stretch_set(&self, stretch_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"stretch-set\0".as_ptr() as *const _, Value::from(&stretch_set).to_glib_none().0);
    }
}
/// Reads the "strikethrough" property.
fn get_property_strikethrough(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "strikethrough" property.
fn set_property_strikethrough(&self, strikethrough: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough\0".as_ptr() as *const _, Value::from(&strikethrough).to_glib_none().0);
    }
}
/// Reads the "strikethrough-rgba" property (GTK 3.16+); `None` when unset.
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_strikethrough_rgba(&self) -> Option<gdk::RGBA> {
    unsafe {
        let mut value = Value::from_type(<gdk::RGBA as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get()
    }
}
/// Writes the "strikethrough-rgba" property (GTK 3.16+); `None` clears it.
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_strikethrough_rgba(&self, strikethrough_rgba: Option<&gdk::RGBA>) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-rgba\0".as_ptr() as *const _, Value::from(strikethrough_rgba).to_glib_none().0);
    }
}
/// Reads the "strikethrough-rgba-set" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_strikethrough_rgba_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-rgba-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "strikethrough-rgba-set" property (GTK 3.16+).
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_strikethrough_rgba_set(&self, strikethrough_rgba_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-rgba-set\0".as_ptr() as *const _, Value::from(&strikethrough_rgba_set).to_glib_none().0);
    }
}
/// Reads the "strikethrough-set" property.
fn get_property_strikethrough_set(&self) -> bool {
    unsafe {
        let mut value = Value::from_type(<bool as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
/// Writes the "strikethrough-set" property.
fn set_property_strikethrough_set(&self, strikethrough_set: bool) {
    unsafe {
        gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"strikethrough-set\0".as_ptr() as *const _, Value::from(&strikethrough_set).to_glib_none().0);
    }
}
/// Reads the "style" property.
fn get_property_style(&self) -> pango::Style {
    unsafe {
        let mut value = Value::from_type(<pango::Style as StaticType>::static_type());
        gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"style\0".as_ptr() as *const _, value.to_glib_none_mut().0);
        value.get().unwrap()
    }
}
fn set_property_style(&self, style: pango::Style) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"style\0".as_ptr() as *const _, Value::from(&style).to_glib_none().0);
}
}
fn get_property_style_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"style-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_style_set(&self, style_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"style-set\0".as_ptr() as *const _, Value::from(&style_set).to_glib_none().0);
}
}
fn get_property_tabs_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"tabs-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_tabs_set(&self, tabs_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"tabs-set\0".as_ptr() as *const _, Value::from(&tabs_set).to_glib_none().0);
}
}
fn get_property_underline(&self) -> pango::Underline {
unsafe {
let mut value = Value::from_type(<pango::Underline as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_underline(&self, underline: pango::Underline) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline\0".as_ptr() as *const _, Value::from(&underline).to_glib_none().0);
}
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_underline_rgba(&self) -> Option<gdk::RGBA> {
unsafe {
let mut value = Value::from_type(<gdk::RGBA as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get()
}
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_underline_rgba(&self, underline_rgba: Option<&gdk::RGBA>) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-rgba\0".as_ptr() as *const _, Value::from(underline_rgba).to_glib_none().0);
}
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn get_property_underline_rgba_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-rgba-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
fn set_property_underline_rgba_set(&self, underline_rgba_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-rgba-set\0".as_ptr() as *const _, Value::from(&underline_rgba_set).to_glib_none().0);
}
}
fn get_property_underline_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_underline_set(&self, underline_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"underline-set\0".as_ptr() as *const _, Value::from(&underline_set).to_glib_none().0);
}
}
fn get_property_variant(&self) -> pango::Variant {
unsafe {
let mut value = Value::from_type(<pango::Variant as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"variant\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_variant(&self, variant: pango::Variant) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"variant\0".as_ptr() as *const _, Value::from(&variant).to_glib_none().0);
}
}
fn get_property_variant_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"variant-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_variant_set(&self, variant_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"variant-set\0".as_ptr() as *const _, Value::from(&variant_set).to_glib_none().0);
}
}
fn get_property_weight(&self) -> i32 {
unsafe {
let mut value = Value::from_type(<i32 as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"weight\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_weight(&self, weight: i32) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"weight\0".as_ptr() as *const _, Value::from(&weight).to_glib_none().0);
}
}
fn get_property_weight_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"weight-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_weight_set(&self, weight_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"weight-set\0".as_ptr() as *const _, Value::from(&weight_set).to_glib_none().0);
}
}
fn get_property_wrap_mode(&self) -> WrapMode {
unsafe {
let mut value = Value::from_type(<WrapMode as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"wrap-mode\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_wrap_mode(&self, wrap_mode: WrapMode) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"wrap-mode\0".as_ptr() as *const _, Value::from(&wrap_mode).to_glib_none().0);
}
}
fn get_property_wrap_mode_set(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"wrap-mode-set\0".as_ptr() as *const _, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_wrap_mode_set(&self, wrap_mode_set: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0 as *mut gobject_ffi::GObject, b"wrap-mode-set\0".as_ptr() as *const _, Value::from(&wrap_mode_set).to_glib_none().0);
}
}
    // NOTE(review): gir-generated signal/notify connectors. Each one boxes the
    // user closure, leaks it to the C side with `Box_::into_raw` (ownership is
    // transferred to GLib's closure machinery), and registers the matching
    // `extern "C"` trampoline via `connect_raw`. The trampoline fn pointer is
    // cast through `usize` and `transmute`d into the callback slot — the
    # pattern used by this generation of glib-rs codegen.
    fn connect_event<F: Fn(&Self, &glib::Object, &gdk::Event, &TextIter) -> Inhibit + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"event\0".as_ptr() as *const _,
                Some(transmute(event_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_accumulative_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::accumulative-margin\0".as_ptr() as *const _,
                Some(transmute(notify_accumulative_margin_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::background\0".as_ptr() as *const _,
                Some(transmute(notify_background_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_background_full_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::background-full-height\0".as_ptr() as *const _,
                Some(transmute(notify_background_full_height_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_background_full_height_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::background-full-height-set\0".as_ptr() as *const _,
                Some(transmute(notify_background_full_height_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::background-rgba\0".as_ptr() as *const _,
                Some(transmute(notify_background_rgba_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::background-set\0".as_ptr() as *const _,
                Some(transmute(notify_background_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_direction_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::direction\0".as_ptr() as *const _,
                Some(transmute(notify_direction_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_editable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::editable\0".as_ptr() as *const _,
                Some(transmute(notify_editable_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_editable_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::editable-set\0".as_ptr() as *const _,
                Some(transmute(notify_editable_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_fallback_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::fallback\0".as_ptr() as *const _,
                Some(transmute(notify_fallback_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_fallback_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::fallback-set\0".as_ptr() as *const _,
                Some(transmute(notify_fallback_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_family_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::family\0".as_ptr() as *const _,
                Some(transmute(notify_family_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_family_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::family-set\0".as_ptr() as *const _,
                Some(transmute(notify_family_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_font_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::font\0".as_ptr() as *const _,
                Some(transmute(notify_font_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_18", feature = "dox"))]
    fn connect_property_font_features_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::font-features\0".as_ptr() as *const _,
                Some(transmute(notify_font_features_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_font_features_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::font-features-set\0".as_ptr() as *const _,
                Some(transmute(notify_font_features_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_foreground_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::foreground\0".as_ptr() as *const _,
                Some(transmute(notify_foreground_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_foreground_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::foreground-rgba\0".as_ptr() as *const _,
                Some(transmute(notify_foreground_rgba_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_foreground_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::foreground-set\0".as_ptr() as *const _,
                Some(transmute(notify_foreground_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_indent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::indent\0".as_ptr() as *const _,
                Some(transmute(notify_indent_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_indent_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::indent-set\0".as_ptr() as *const _,
                Some(transmute(notify_indent_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_invisible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::invisible\0".as_ptr() as *const _,
                Some(transmute(notify_invisible_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_invisible_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::invisible-set\0".as_ptr() as *const _,
                Some(transmute(notify_invisible_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_justification_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::justification\0".as_ptr() as *const _,
                Some(transmute(notify_justification_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_justification_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::justification-set\0".as_ptr() as *const _,
                Some(transmute(notify_justification_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_language_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::language\0".as_ptr() as *const _,
                Some(transmute(notify_language_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_language_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::language-set\0".as_ptr() as *const _,
                Some(transmute(notify_language_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_left_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::left-margin\0".as_ptr() as *const _,
                Some(transmute(notify_left_margin_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_left_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::left-margin-set\0".as_ptr() as *const _,
                Some(transmute(notify_left_margin_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_letter_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::letter-spacing\0".as_ptr() as *const _,
                Some(transmute(notify_letter_spacing_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_letter_spacing_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::letter-spacing-set\0".as_ptr() as *const _,
                Some(transmute(notify_letter_spacing_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_paragraph_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::paragraph-background\0".as_ptr() as *const _,
                Some(transmute(notify_paragraph_background_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_paragraph_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::paragraph-background-rgba\0".as_ptr() as *const _,
                Some(transmute(notify_paragraph_background_rgba_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_paragraph_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::paragraph-background-set\0".as_ptr() as *const _,
                Some(transmute(notify_paragraph_background_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_above_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-above-lines\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_above_lines_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_above_lines_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-above-lines-set\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_above_lines_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_below_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-below-lines\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_below_lines_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_below_lines_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-below-lines-set\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_below_lines_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_inside_wrap_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-inside-wrap\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_inside_wrap_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_pixels_inside_wrap_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::pixels-inside-wrap-set\0".as_ptr() as *const _,
                Some(transmute(notify_pixels_inside_wrap_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_right_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::right-margin\0".as_ptr() as *const _,
                Some(transmute(notify_right_margin_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_right_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::right-margin-set\0".as_ptr() as *const _,
                Some(transmute(notify_right_margin_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_rise_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::rise\0".as_ptr() as *const _,
                Some(transmute(notify_rise_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_rise_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::rise-set\0".as_ptr() as *const _,
                Some(transmute(notify_rise_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_scale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::scale\0".as_ptr() as *const _,
                Some(transmute(notify_scale_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_scale_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::scale-set\0".as_ptr() as *const _,
                Some(transmute(notify_scale_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::size\0".as_ptr() as *const _,
                Some(transmute(notify_size_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_size_points_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::size-points\0".as_ptr() as *const _,
                Some(transmute(notify_size_points_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_size_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::size-set\0".as_ptr() as *const _,
                Some(transmute(notify_size_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_stretch_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::stretch\0".as_ptr() as *const _,
                Some(transmute(notify_stretch_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_stretch_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::stretch-set\0".as_ptr() as *const _,
                Some(transmute(notify_stretch_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_strikethrough_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::strikethrough\0".as_ptr() as *const _,
                Some(transmute(notify_strikethrough_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_strikethrough_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::strikethrough-rgba\0".as_ptr() as *const _,
                Some(transmute(notify_strikethrough_rgba_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_strikethrough_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::strikethrough-rgba-set\0".as_ptr() as *const _,
                Some(transmute(notify_strikethrough_rgba_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_strikethrough_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::strikethrough-set\0".as_ptr() as *const _,
                Some(transmute(notify_strikethrough_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_style_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::style\0".as_ptr() as *const _,
                Some(transmute(notify_style_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_style_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::style-set\0".as_ptr() as *const _,
                Some(transmute(notify_style_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_tabs_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::tabs-set\0".as_ptr() as *const _,
                Some(transmute(notify_tabs_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_underline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::underline\0".as_ptr() as *const _,
                Some(transmute(notify_underline_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_underline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::underline-rgba\0".as_ptr() as *const _,
                Some(transmute(notify_underline_rgba_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v3_16", feature = "dox"))]
    fn connect_property_underline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::underline-rgba-set\0".as_ptr() as *const _,
                Some(transmute(notify_underline_rgba_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_underline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::underline-set\0".as_ptr() as *const _,
                Some(transmute(notify_underline_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_variant_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::variant\0".as_ptr() as *const _,
                Some(transmute(notify_variant_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_variant_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::variant-set\0".as_ptr() as *const _,
                Some(transmute(notify_variant_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_weight_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::weight\0".as_ptr() as *const _,
                Some(transmute(notify_weight_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_weight_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::weight-set\0".as_ptr() as *const _,
                Some(transmute(notify_weight_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_wrap_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::wrap-mode\0".as_ptr() as *const _,
                Some(transmute(notify_wrap_mode_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
    fn connect_property_wrap_mode_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::wrap-mode-set\0".as_ptr() as *const _,
                Some(transmute(notify_wrap_mode_set_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
}
// C-to-Rust trampoline for the "event" signal: recovers the boxed Rust closure
// from the `gpointer` user data, borrows the GObject arguments without taking
// ownership (`from_glib_borrow`/`from_glib_none`), and converts the returned
// `Inhibit` back to a C `gboolean`.
// SAFETY relies on `f` being the `Box_<F>` leaked by `connect_event`.
unsafe extern "C" fn event_trampoline<P, F: Fn(&P, &glib::Object, &gdk::Event, &TextIter) -> Inhibit + 'static>(this: *mut ffi::GtkTextTag, object: *mut gobject_ffi::GObject, event: *mut gdk_ffi::GdkEvent, iter: *mut ffi::GtkTextIter, f: glib_ffi::gpointer) -> glib_ffi::gboolean
where P: IsA<TextTag> {
    let f: &F = transmute(f);
    f(&TextTag::from_glib_borrow(this).unsafe_cast(), &from_glib_borrow(object), &from_glib_none(event), &from_glib_borrow(iter)).to_glib()
}
// Property-notify trampolines: each recovers the boxed closure from the
// user-data pointer and invokes it with the emitting tag (borrowed, then cast
// to the caller's concrete type). The GParamSpec argument is unused.
unsafe extern "C" fn notify_accumulative_margin_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
    let f: &F = transmute(f)
<|fim▁hole|>where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_background_rgba_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_background_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_direction_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_editable_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_editable_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_fallback_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_fallback_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_family_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_family_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_font_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_18", feature = "dox"))]
unsafe extern "C" fn notify_font_features_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_font_features_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_foreground_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_foreground_rgba_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_foreground_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_indent_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_indent_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_invisible_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_invisible_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_justification_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_justification_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_language_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_language_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_left_margin_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_left_margin_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_letter_spacing_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_letter_spacing_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_paragraph_background_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_paragraph_background_rgba_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_paragraph_background_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_above_lines_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_above_lines_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_below_lines_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_below_lines_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_inside_wrap_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_pixels_inside_wrap_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_right_margin_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_right_margin_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_rise_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_rise_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_scale_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_scale_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_size_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_size_points_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_size_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_stretch_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_stretch_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_strikethrough_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_strikethrough_rgba_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_strikethrough_rgba_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_strikethrough_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_style_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_style_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_tabs_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_underline_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_underline_rgba_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
#[cfg(any(feature = "v3_16", feature = "dox"))]
unsafe extern "C" fn notify_underline_rgba_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_underline_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_variant_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_variant_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_weight_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_weight_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_wrap_mode_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
unsafe extern "C" fn notify_wrap_mode_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<TextTag> {
let f: &F = transmute(f);
f(&TextTag::from_glib_borrow(this).unsafe_cast())
}
impl fmt::Display for TextTag {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TextTag")
}
}<|fim▁end|>
|
unsafe extern "C" fn notify_background_full_height_set_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GtkTextTag, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
|
<|file_name|>timestamped.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.utils import timezone
class TimeStampedModel(models.Model):<|fim▁hole|> created_at = models.DateTimeField(default=timezone.now)
updated_at = models.DateTimeField(default=timezone.now)
class Meta:
abstract = True<|fim▁end|>
| |
<|file_name|>shadow.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for shadowed variables.
A shadowed variable is a variable declared in an inner scope
with the same name and type as a variable in an outer scope,
and where the outer variable is mentioned after the inner one
is declared.
(This definition can be refined; the module generates too many
false positives and is not yet enabled by default.)
For example:
func BadRead(f *os.File, buf []byte) error {
var err error
for {
n, err := f.Read(buf) // shadows the function variable 'err'
if err != nil {
break // causes return of wrong value
}
foo(buf)
}
return err
}
*/
package main
import (
"go/ast"
"go/token"
"code.google.com/p/go.tools/go/types"
)
// Span stores the minimum range of byte positions in the file in which a
// given variable (types.Object) is mentioned. It is lexically defined: it spans
// from the beginning of its first mention to the end of its last mention.
// A variable is considered shadowed (if *strictShadowing is off) only if the
// shadowing variable is declared within the span of the shadowed variable.
// In other words, if a variable is shadowed but not used after the shadowed
// variable is declared, it is inconsequential and not worth complaining about.
// This simple check dramatically reduces the nuisance rate for the shadowing
// check, at least until something cleverer comes along.
//
// One wrinkle: A "naked return" is a silent use of a variable that the Span
// will not capture, but the compilers catch naked returns of shadowed
// variables so we don't need to.
//
// Cases this gets wrong (TODO):
// - If a for loop's continuation statement mentions a variable redeclared in
// the block, we should complain about it but don't.
// - A variable declared inside a function literal can falsely be identified
// as shadowing a variable in the outer function.
//
type Span struct {
	min token.Pos // position of the variable's first mention (inclusive)
	max token.Pos // position just past its last mention (exclusive, see contains)
}

// contains reports whether the position is inside the span.
func (s Span) contains(pos token.Pos) bool {
	// Half-open interval: min <= pos < max.
	return s.min <= pos && pos < s.max
}
// growSpan widens the recorded span for obj so that it also covers this
// occurrence of the identifier. Spans are only maintained when the
// strict-shadowing mode is off, since strict mode compares declaration
// positions directly and never consults them.
func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) {
	if *strictShadowing {
		return // spans are unused in strict mode
	}
	pos, end := ident.Pos(), ident.End()
	span, ok := pkg.spans[obj]
	if !ok {
		// First sighting: the span is exactly this identifier.
		pkg.spans[obj] = Span{min: pos, max: end}
		return
	}
	if pos < span.min {
		span.min = pos
	}
	if end > span.max {
		span.max = end
	}
	pkg.spans[obj] = span
}
// checkShadowAssignment checks for shadowing in a short variable declaration.
func (f *File) checkShadowAssignment(a *ast.AssignStmt) {
if !vet("shadow") {
return
}
if a.Tok != token.DEFINE {
return
}
if f.idiomaticShortRedecl(a) {
return
}
for _, expr := range a.Lhs {
ident, ok := expr.(*ast.Ident)
if !ok {
f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
return
}
f.checkShadowing(ident)
}
}<|fim▁hole|>
// idiomaticShortRedecl reports whether this short declaration can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
	// Don't complain about deliberate redeclarations of the form
	//	i := i
	// Such constructs are idiomatic in range loops to create a new variable
	// for each iteration. Another example is
	//	switch n := n.(type)
	if len(a.Rhs) != len(a.Lhs) {
		return false
	}
	// We know it's an assignment, so the LHS must be all identifiers. (We check anyway.)
	for i, expr := range a.Lhs {
		lhs, ok := expr.(*ast.Ident)
		if !ok {
			f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return true // Don't do any more processing.
		}
		switch rhs := a.Rhs[i].(type) {
		case *ast.Ident:
			// `x := y` with differing names is a real (potentially
			// shadowing) redeclaration.
			if lhs.Name != rhs.Name {
				return false
			}
		case *ast.TypeAssertExpr:
			// Covers the `switch n := n.(type)` idiom: compare against the
			// asserted expression's identifier, when it is one.
			if id, ok := rhs.X.(*ast.Ident); ok {
				if lhs.Name != id.Name {
					return false
				}
			}
		}
	}
	// Every LHS name matched its RHS counterpart (or the RHS was a form we
	// deliberately ignore): treat the whole declaration as intentional.
	return true
}
// idiomaticRedecl reports whether this declaration spec can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
	// Deliberate self-redeclarations look like
	//	var i, j = i, j
	// and require the name and value lists to line up exactly.
	if len(d.Names) != len(d.Values) {
		return false
	}
	for i, name := range d.Names {
		// Only identifier RHS values are compared; anything else is ignored,
		// matching the original behaviour.
		if rhs, ok := d.Values[i].(*ast.Ident); ok && name.Name != rhs.Name {
			return false
		}
	}
	return true
}
// checkShadowDecl checks for shadowing in a general variable declaration.
func (f *File) checkShadowDecl(d *ast.GenDecl) {
	// Shadow checking is opt-in.
	if !vet("shadow") {
		return
	}
	// Only `var` declarations can shadow; const/type/import decls are skipped.
	if d.Tok != token.VAR {
		return
	}
	for _, spec := range d.Specs {
		valueSpec, ok := spec.(*ast.ValueSpec)
		if !ok {
			f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
			return
		}
		// Don't complain about deliberate redeclarations of the form
		// var i = i
		// NOTE(review): this `return` abandons the remaining specs of the
		// declaration, not just the current one — if per-spec handling is
		// intended, `continue` may be what's wanted; confirm against
		// upstream vet behaviour before changing.
		if f.idiomaticRedecl(valueSpec) {
			return
		}
		for _, ident := range valueSpec.Names {
			f.checkShadowing(ident)
		}
	}
}
// checkShadowing checks whether the identifier shadows an identifier in an outer scope.
func (f *File) checkShadowing(ident *ast.Ident) {
obj := f.pkg.idents[ident]
if obj == nil {
return
}
// obj.Parent.Parent is the surrounding scope. If we can find another declaration
// starting from there, we have a shadowed variable.
shadowed := obj.Parent().Parent().LookupParent(obj.Name())
if shadowed == nil {
return
}
// Don't complain if it's shadowing a universe-declared variable; that's fine.
if shadowed.Parent() == types.Universe {
return
}
if *strictShadowing {
// The shadowed variable must appear before this one to be an instance of shadowing.
if shadowed.Pos() > ident.Pos() {
return
}
} else {
// Don't complain if the span of validity of the shadowed variable doesn't include
// the shadowing variable.
span, ok := f.pkg.spans[shadowed]
if !ok {
f.Badf(ident.Pos(), "internal error: no range for %s", ident.Name)
return
}
if !span.contains(ident.Pos()) {
return
}
}
// Don't complain if the types differ: that implies the programmer really wants two variables.
if types.IsIdentical(obj.Type(), shadowed.Type()) {
f.Badf(ident.Pos(), "declaration of %s shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos()))
}
}<|fim▁end|>
| |
<|file_name|>docker_cli_update_test.go<|end_file_name|><|fim▁begin|>package main
import (
"strings"
"time"
<|fim▁hole|> "github.com/docker/docker/integration-cli/checker"
"github.com/docker/docker/integration-cli/cli"
"github.com/go-check/check"
"github.com/gotestyourself/gotestyourself/icmd"
)
// TestUpdateRestartPolicy verifies that `docker update --restart` changes the
// restart policy of a running container and that the new maximum retry count
// is honoured before the container finally exits.
func (s *DockerSuite) TestUpdateRestartPolicy(c *check.C) {
	// The container exits non-zero after ~1s, so on-failure:N restarts it N times.
	out := cli.DockerCmd(c, "run", "-d", "--restart=on-failure:3", "busybox", "sh", "-c", "sleep 1 && false").Combined()
	timeout := 60 * time.Second
	// Windows containers start/stop more slowly; allow extra time.
	if testEnv.DaemonPlatform() == "windows" {
		timeout = 180 * time.Second
	}
	id := strings.TrimSpace(string(out))
	// update restart policy to on-failure:5 while the container is live.
	cli.DockerCmd(c, "update", "--restart=on-failure:5", id)
	cli.WaitExited(c, id, timeout)
	// Both the observed restart count and the stored policy must reflect 5.
	count := inspectField(c, id, "RestartCount")
	c.Assert(count, checker.Equals, "5")
	maximumRetryCount := inspectField(c, id, "HostConfig.RestartPolicy.MaximumRetryCount")
	c.Assert(maximumRetryCount, checker.Equals, "5")
}
func (s *DockerSuite) TestUpdateRestartWithAutoRemoveFlag(c *check.C) {
out := runSleepingContainer(c, "--rm")
id := strings.TrimSpace(out)
// update restart policy for an AutoRemove container
cli.Docker(cli.Args("update", "--restart=always", id)).Assert(c, icmd.Expected{
ExitCode: 1,
Err: "Restart policy cannot be updated because AutoRemove is enabled for the container",
})
}<|fim▁end|>
| |
<|file_name|>pdfdify.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
var child_process = require('child_process');
var argv = require('yargs')
.boolean(['readability', 'open'])
.argv;
var pdfdify = require('../lib');
var srcUrl = argv._[0];<|fim▁hole|> title:argv.title|| srcUrl,
readability:argv.readability,
srcUrl:srcUrl
},function (err, pdfFile) {
if (err) {
throw err;
}
console.log("Created: '"+pdfFile+"'");
if(argv.open) {
child_process.exec('open "'+pdfFile+'"');
}
});<|fim▁end|>
|
console.log("Convertering: '"+srcUrl+"'");
pdfdify.convert({
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from reads.conf.development import * # NOQA
|
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
pub use dom::bindings::str::{ByteString, DOMString};
pub use dom::headers::normalize_value;
// For compile-fail tests only.<|fim▁hole|>
// Re-exports for unit tests: HTML `<area>` element shape-parsing types.
pub mod area {
    pub use dom::htmlareaelement::{Area, Shape};
}
pub mod size_of {
use dom::characterdata::CharacterData;
use dom::element::Element;
use dom::eventtarget::EventTarget;
use dom::htmldivelement::HTMLDivElement;
use dom::htmlelement::HTMLElement;
use dom::htmlspanelement::HTMLSpanElement;
use dom::node::Node;
use dom::text::Text;
use std::mem::size_of;
pub fn CharacterData() -> usize {
size_of::<CharacterData>()
}
pub fn Element() -> usize {
size_of::<Element>()
}
pub fn EventTarget() -> usize {
size_of::<EventTarget>()
}
pub fn HTMLDivElement() -> usize {
size_of::<HTMLDivElement>()
}
pub fn HTMLElement() -> usize {
size_of::<HTMLElement>()
}
pub fn HTMLSpanElement() -> usize {
size_of::<HTMLSpanElement>()
}
pub fn Node() -> usize {
size_of::<Node>()
}
pub fn Text() -> usize {
size_of::<Text>()
}
}<|fim▁end|>
|
pub use dom::bindings::cell::DomRefCell;
pub use dom::bindings::root::Dom;
pub use dom::node::Node;
pub use dom::bindings::refcounted::TrustedPromise;
|
<|file_name|>sync.py<|end_file_name|><|fim▁begin|># !/usr/bin/python
# -*- coding=utf-8 -*-
import json
import urllib2
<|fim▁hole|>from books.models import Book
domain = "http://smartebook.zmapp.com:9026"
# 同步图书详情
def sync_book(bid, cm):
# 完结了的图书不更新信息
if Book.objects.filter(id=bid, status=1).count() == 0:
page = urllib2.urlopen("http://wap.cmread.com/r/p/viewdata.jsp?bid=%s&cm=%s&vt=9" % (bid, cm))
data = page.read()
try:
result = json.loads(data, encoding="utf-8")
print result
update = Book.objects.filter(id=bid).count() != 0
book = Book()
book.pk = int(bid)
book.name = result['showName']
book.brief = result['brief']
book.desc = result['desc']
book.cover_url = result['bigCoverLogo']
book.cover_url_small = result['smallCoverLogo']
book.status = result['status']
book.first_cid = result['firstChpaterCid']
book.last_cid = result['lastChapterCid']
book.chapter_size = result['chapterSize']
book.score = result['score']
book.word_size = result['wordSize']
book.click_amount = result['clickValue']
book.kw = result['kw']
book.price = int(float(result['price']) * 100)
book.charge_mode = result['chargeMode']
if update:
book.save(force_update=update, update_fields=(
'name', 'brief', 'desc', 'cover_url', 'cover_url_small', 'status', 'first_cid', 'last_cid',
'chapter_size', 'score', 'word_size', 'click_amount', 'kw', 'price', 'charge_mode'))
else:
book.save(force_insert=True)
return True
except Exception, e:
print e.message
return False
# 同步书架
def sync_bookshelf():
url = "%s/smart_book/get_bookshelf" % domain
page = urllib2.urlopen(url)
result = json.loads(page.read())
print result
books = result['bookshelf']
update_count = 0
for index, b in enumerate(books):
if sync_book(b['book_id'], 'zm'):
update_count += 1
return len(books), update_count
print sync_bookshelf()<|fim▁end|>
| |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export { ImageUploadComponent } from './src/components/image-upload/image-upload.component';<|fim▁end|>
|
export { ImageUploadModule } from './src/image-upload.module';
|
<|file_name|>0017_userprofile_exp_data.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-01-24 07:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('appauth', '0016_userprofile_numq'),<|fim▁hole|> model_name='userprofile',
name='exp_data',
field=models.TextField(default='{}'),
),
]<|fim▁end|>
|
]
operations = [
migrations.AddField(
|
<|file_name|>test_images_tags_negative.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.image import base
from tempest.common.utils import data_utils
from tempest import exceptions
from tempest.test import attr
class ImagesTagsNegativeTest(base.BaseV2ImageTest):
@attr(type=['negative', 'gate'])<|fim▁hole|> def test_update_tags_for_non_existing_image(self):
# Update tag with non existing image.
tag = data_utils.rand_name('tag-')
non_exist_image = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound, self.client.add_image_tag,
non_exist_image, tag)
@attr(type=['negative', 'gate'])
def test_delete_non_existing_tag(self):
# Delete non existing tag.
resp, body = self.create_image(container_format='bare',
disk_format='raw',
is_public=True,
)
image_id = body['id']
tag = data_utils.rand_name('non-exist-tag-')
self.addCleanup(self.client.delete_image, image_id)
self.assertRaises(exceptions.NotFound, self.client.delete_image_tag,
image_id, tag)<|fim▁end|>
| |
<|file_name|>issue-70942-trait-vs-impl-mismatch.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> const VALUE: usize;
}
struct Zero;
impl Nat for Zero {
const VALUE: i32 = 0;
//~^ ERROR implemented const `VALUE` has an incompatible type for trait
}
fn main() {
let _: [i32; Zero::VALUE] = [];
}<|fim▁end|>
|
trait Nat {
|
<|file_name|>multiModuleClodule1.ts<|end_file_name|><|fim▁begin|>class C {
constructor(x: number) { }
foo() { }
bar() { }
static boo() { }
}
module C {
export var x = 1;
var y = 2;
}
module C {
export function foo() { }<|fim▁hole|>
var c = new C(C.x);
c.foo = C.foo;<|fim▁end|>
|
function baz() { return ''; }
}
|
<|file_name|>CorePolicy.hpp<|end_file_name|><|fim▁begin|>/*
* OpenSplice DDS
*
* This software and documentation are Copyright 2006 to 2012 PrismTech
* Limited and its licensees. All rights reserved. See file:
*
* $OSPL_HOME/LICENSE
*
* for full copyright notice and license terms.
*
*/
/**
* @file
*/
#ifndef ORG_OPENSPLICE_CORE_POLICY_CORE_POLICY_IMPL_HPP_
#define ORG_OPENSPLICE_CORE_POLICY_CORE_POLICY_IMPL_HPP_
#include <dds/core/types.hpp>
#include <dds/core/LengthUnlimited.hpp>
#include <dds/core/Duration.hpp>
//==============================================================================
// DDS Policy Classes
namespace org
{
namespace opensplice
{
namespace core
{
namespace policy
{
//==============================================================================
/**
* @internal The purpose of this QoS is to allow the application to attach additional
* information to the created Entity objects such that when a remote application
* discovers their existence it can access that information and use it for its
* own purposes. One possible use of this QoS is to attach security credentials
* or some other information that can be used by the remote application to
* authenticate the source. In combination with operations such as
* ignore_participant, ignore_publication, ignore_subscription,
* and ignore_topic these QoS can assist an application to define and enforce
* its own security policies. The use of this QoS is not limited to security,
* rather it offers a simple, yet flexible extensibility mechanism.
*/
class UserData
{
public:
    /** @internal Constructs user data holding an empty octet sequence. */
    UserData() : value_() { }

    /**
     * @internal Constructs user data from the given octet sequence.
     * @param seq the octets to store
     */
    explicit UserData(const dds::core::ByteSeq& seq) : value_(seq) { }

    /**
     * @internal Replaces the stored octet sequence.
     * @param seq the new user data
     */
    void value(const dds::core::ByteSeq& seq) { value_ = seq; }

    /** @internal Read-only access to the stored octet sequence. */
    const dds::core::ByteSeq& value() const { return value_; }

    /** @internal Mutable access to the stored octet sequence. */
    dds::core::ByteSeq& value() { return value_; }

    /** @internal Two UserData policies are equal when their octet sequences match. */
    bool operator ==(const UserData& other) const { return value_ == other.value(); }

private:
    dds::core::ByteSeq value_;
};
//==============================================================================
/**
* @internal The purpose of this QoS is to allow the application to attach additional
* information to the created Publisher or Subscriber.
* The value of the GROUP_DATA is available to the application on the
* DataReader and DataWriter entities and is propagated by means of the
* built-in topics. This QoS can be used by an application combination with
* the DataReaderListener and DataWriterListener to implement matching policies
* similar to those of the PARTITION QoS except the decision can be made based
* on an application-defined policy.
*/
class GroupData
{
public:
    /** @internal Constructs group data holding an empty octet sequence. */
    GroupData() : value_() { }

    /**
     * @internal Constructs group data from the given octet sequence.
     * @param seq the group data value
     */
    explicit GroupData(const dds::core::ByteSeq& seq) : value_(seq) { }

    /**
     * @internal Replaces the stored group data.
     * @param seq the new group data value
     */
    void value(const dds::core::ByteSeq& seq) { value_ = seq; }

    /** @internal Read-only access to the stored group data. */
    const dds::core::ByteSeq& value() const { return value_; }

    /** @internal Mutable access to the stored group data. */
    dds::core::ByteSeq& value() { return value_; }

    /** @internal Two GroupData policies are equal when their octet sequences match. */
    bool operator ==(const GroupData& other) const { return value_ == other.value(); }

private:
    dds::core::ByteSeq value_;
};
//==============================================================================
/**
* @internal The purpose of this QoS is to allow the application to attach additional
* information to the created Topic such that when a remote application
* discovers their existence it can examine the information and use it in
* an application-defined way. In combination with the listeners on the
* DataReader and DataWriter as well as by means of operations such as
* ignore_topic, these QoS can assist an application to extend the provided QoS.
*/
class TopicData
{
public:
    /** @internal Constructs topic data holding an empty octet sequence. */
    TopicData() : value_() { }

    /**
     * @internal Constructs topic data from the given octet sequence.
     * @param seq the topic data value
     */
    explicit TopicData(const dds::core::ByteSeq& seq) : value_(seq) { }

    /** @internal Replaces the stored topic data. */
    void value(const dds::core::ByteSeq& seq) { value_ = seq; }

    /** @internal Read-only access to the stored topic data. */
    const dds::core::ByteSeq& value() const { return value_; }

    /** @internal Mutable access to the stored topic data. */
    dds::core::ByteSeq& value() { return value_; }

    /** @internal Two TopicData policies are equal when their octet sequences match. */
    bool operator ==(const TopicData& other) const { return value_ == other.value(); }

private:
    dds::core::ByteSeq value_;
};
//==============================================================================
/**
* @internal This policy controls the behavior of the Entity as a factory for other
* entities. This policy concerns only DomainParticipant (as factory for
* Publisher, Subscriber, and Topic), Publisher (as factory for DataWriter),
* and Subscriber (as factory for DataReader). This policy is mutable.
* A change in the policy affects only the entities created after the change;
* not the previously created entities.
* The setting of autoenable_created_entities to TRUE indicates that the
* newly created object will be enabled by default.
* A setting of FALSE indicates that the Entity will not be automatically
* enabled. The application will need to enable it explicitly by means of the
 * enable operation (see Section 7.1.2.1.1.7, "enable"). The default setting
* of autoenable_created_entities = TRUE means that, by default, it is not
* necessary to explicitly call enable on newly created entities.
*/
class EntityFactory
{
public:
    /**
     * @internal Default-constructs with auto_enable = true, matching the
     * documented default (autoenable_created_entities = TRUE, see the policy
     * description above). The previous default constructor left the flag
     * uninitialized, so reading auto_enable() on a default-constructed
     * instance was undefined behavior.
     */
    EntityFactory() : auto_enable_(true) {}

    /**
     * @internal Constructs the policy with an explicit auto-enable setting.
     * @param auto_enable true if created entities should be enabled automatically
     */
    explicit EntityFactory(bool auto_enable)
        : auto_enable_(auto_enable) { }

    /** @internal Sets whether newly created entities are enabled automatically. */
    void auto_enable(bool on)
    {
        auto_enable_ = on;
    }

    /** @internal @return true if newly created entities are enabled automatically. */
    bool auto_enable() const
    {
        return auto_enable_;
    }

    /** @internal Mutable access to the auto-enable flag. */
    bool& auto_enable()
    {
        return auto_enable_;
    }

    /** @internal Two EntityFactory policies are equal when their flags match. */
    bool operator ==(const EntityFactory& other) const
    {
        return other.auto_enable() == auto_enable_;
    }

private:
    bool auto_enable_;
};
//==============================================================================
/**
* @internal The purpose of this QoS is to allow the application to take advantage of
* transports capable of sending messages with different priorities.
* This policy is considered a hint. The policy depends on the ability of the
* underlying transports to set a priority on the messages they send.
* Any value within the range of a 32-bit signed integer may be chosen;
* higher values indicate higher priority. However, any further interpretation
* of this policy is specific to a particular transport and a particular
* implementation of the Service. For example, a particular transport is
* permitted to treat a range of priority values as equivalent to one another.
* It is expected that during transport configuration the application would
* provide a mapping between the values of the TRANSPORT_PRIORITY set on
* DataWriter and the values meaningful to each transport. This mapping would
* then be used by the infrastructure when propagating the data written by
* the DataWriter.
*/
class TransportPriority
{
public:
    /**
     * @internal Default-constructs with priority 0, the DDS default for
     * TRANSPORT_PRIORITY. The previous default constructor left value_
     * uninitialized, so reading value() on a default-constructed instance
     * was undefined behavior.
     * NOTE(review): the class doc above says priorities are 32-bit *signed*,
     * but the member is uint32_t -- kept as-is for ABI/interface
     * compatibility; confirm against the spec.
     */
    TransportPriority() : value_(0) {}

    /** @internal Constructs with an explicit priority; higher means more urgent. */
    explicit TransportPriority(uint32_t prio) : value_(prio) { }

public:
    /** @internal Sets the transport priority. */
    void value(uint32_t prio)
    {
        value_ = prio;
    }

    /** @internal @return the transport priority. */
    uint32_t value() const
    {
        return value_;
    }

    /** @internal Mutable access to the transport priority. */
    uint32_t& value()
    {
        return value_;
    }

    /** @internal Two TransportPriority policies are equal when values match. */
    bool operator ==(const TransportPriority& other) const
    {
        return other.value() == value_;
    }

private:
    uint32_t value_;
};
//==============================================================================
/**
 * @internal The purpose of this QoS is to avoid delivering "stale" data to the
* application. Each data sample written by the DataWriter has an associated
* expiration time beyond which the data should not be delivered to any
* application. Once the sample expires, the data will be removed from the
* DataReader caches as well as from the transient and persistent
* information caches. The expiration time of each sample is computed by
* adding the duration specified by the LIFESPAN QoS to the source timestamp.
 * As described in Section 7.1.2.4.2.11, "write", and Section 7.1.2.4.2.12,
 * "write_w_timestamp", the source timestamp is either automatically computed by
* the Service each time the DataWriter write operation is called, or else
* supplied by the application by means of the write_w_timestamp operation.
*
* This QoS relies on the sender and receiving applications having their clocks
* sufficiently synchronized. If this is not the case and the Service can
* detect it, the DataReader is allowed to use the reception timestamp instead
* of the source timestamp in its computation of the expiration time.
*/
class Lifespan
{
public:
    /** @internal Default-constructs with a default-constructed duration. */
    Lifespan() {}

    /**
     * @internal Constructs with an explicit expiration duration.
     * @param d how long written samples remain valid
     */
    explicit Lifespan(const dds::core::Duration& d) : duration_(d) { }

public:
    /** @internal Sets the lifespan duration. */
    void duration(const dds::core::Duration& d) { duration_ = d; }

    /** @internal @return the lifespan duration (by value). */
    const dds::core::Duration duration() const { return duration_; }

    /** @internal Mutable access to the lifespan duration. */
    dds::core::Duration& duration() { return duration_; }

    /** @internal Two Lifespan policies are equal when their durations match. */
    bool operator ==(const Lifespan& other) const { return duration_ == other.duration(); }

private:
    dds::core::Duration duration_;
};
//==============================================================================
/**
* @internal This policy is useful for cases where a Topic is expected to have each
* instance updated periodically. On the publishing side this setting
* establishes a contract that the application must meet. On the subscribing
* side the setting establishes a minimum requirement for the remote publishers
 * that are expected to supply the data values. When the Service 'matches' a
* DataWriter and a DataReader it checks whether the settings are compatible
* (i.e., offered deadline period<= requested deadline period) if they are not,
* the two entities are informed (via the listener or condition mechanism)
* of the incompatibility of the QoS settings and communication will not occur.
* Assuming that the reader and writer ends have compatible settings, the
* fulfillment of this contract is monitored by the Service and the application
* is informed of any violations by means of the proper listener or condition.
 * The value offered is considered compatible with the value requested if and
 * only if the inequality "offered deadline period <= requested deadline period"
 * evaluates to TRUE. The setting of the DEADLINE policy must be set
 * consistently with that of the TIME_BASED_FILTER.
 * For these two policies to be consistent the settings must be such that
 * "deadline period >= minimum_separation".
*/
class Deadline
{
public:
    /** @internal Default-constructs with a default-constructed period. */
    Deadline() {}

    /**
     * @internal Constructs with an explicit deadline period.
     * @param d the maximum expected interval between instance updates
     */
    explicit Deadline(const dds::core::Duration& d) : period_(d) { }

public:
    /** @internal Sets the deadline period. */
    void period(const dds::core::Duration& d)
    {
        period_ = d;
    }

    /** @internal @return the deadline period (by value). */
    const dds::core::Duration period() const
    {
        return period_;
    }

    /**
     * @internal Mutable access to the deadline period. Added for consistency
     * with the sibling policies (Lifespan, LatencyBudget, TimeBasedFilter),
     * which all expose a non-const reference accessor; backward compatible.
     */
    dds::core::Duration& period()
    {
        return period_;
    }

    /** @internal Two Deadline policies are equal when their periods match. */
    bool operator ==(const Deadline& other) const
    {
        return other.period() == period_;
    }

private:
    dds::core::Duration period_;
};
//==============================================================================
class LatencyBudget
{
public:
    /** @internal Default-constructs with a default-constructed duration. */
    LatencyBudget() {}

    /**
     * @internal Constructs with an explicit budget duration.
     * @param d the acceptable additional delivery delay
     */
    explicit LatencyBudget(const dds::core::Duration& d) : duration_(d) { }

public:
    /** @internal Sets the latency-budget duration. */
    void duration(const dds::core::Duration& d) { duration_ = d; }

    /** @internal @return the latency-budget duration (by value). */
    const dds::core::Duration duration() const { return duration_; }

    /** @internal Mutable access to the latency-budget duration. */
    dds::core::Duration& duration() { return duration_; }

    /** @internal Two LatencyBudget policies are equal when their durations match. */
    bool operator ==(const LatencyBudget& other) const { return duration_ == other.duration(); }

private:
    dds::core::Duration duration_;
};
//==============================================================================
class TimeBasedFilter
{
public:
    /** @internal Default-constructs with a default-constructed separation. */
    TimeBasedFilter() {}

    /**
     * @internal Constructs with an explicit minimum separation.
     * @param min_separation smallest interval between delivered samples
     */
    explicit TimeBasedFilter(const dds::core::Duration& min_separation)
        : min_sep_(min_separation) { }

public:
    /** @internal Sets the minimum separation between delivered samples. */
    void min_separation(const dds::core::Duration& ms) { min_sep_ = ms; }

    /** @internal @return the minimum separation (by value). */
    const dds::core::Duration min_separation() const { return min_sep_; }

    /** @internal Mutable access to the minimum separation. */
    dds::core::Duration& min_separation() { return min_sep_; }

    /** @internal Two TimeBasedFilter policies are equal when separations match. */
    bool operator ==(const TimeBasedFilter& other) const
    {
        return min_sep_ == other.min_separation();
    }

private:
    dds::core::Duration min_sep_;
};
//==============================================================================
class Partition
{
public:
    /** @internal Default-constructs with an empty partition list. */
    Partition() {}

    /**
     * @internal Constructs with a single partition name.
     * @param partition the partition to join
     */
    explicit Partition(const std::string& partition) : name_()
    {
        name_.push_back(partition);
    }

    /**
     * @internal Constructs with a list of partition names.
     * @param partitions the partitions to join
     */
    explicit Partition(const dds::core::StringSeq& partitions)
        : name_(partitions) { }

public:
    /** @internal Replaces the partition list with a single name. */
    void name(const std::string& partition)
    {
        name_.clear();
        name_.push_back(partition);
    }

    /** @internal Replaces the partition list. */
    void name(const dds::core::StringSeq& partitions) { name_ = partitions; }

    /** @internal Read-only access to the partition list. */
    const dds::core::StringSeq& name() const { return name_; }

    /** @internal Mutable access to the partition list. */
    dds::core::StringSeq& name() { return name_; }

    /** @internal Two Partition policies are equal when their name lists match. */
    bool operator ==(const Partition& other) const { return name_ == other.name(); }

private:
    dds::core::StringSeq name_;
};
//==============================================================================
class Ownership
{
public:
public:
Ownership() {}
Ownership(dds::core::policy::OwnershipKind::Type kind) : kind_(kind) { }
public:
void kind(dds::core::policy::OwnershipKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::OwnershipKind::Type kind() const
{
return kind_;
}
dds::core::policy::OwnershipKind::Type& kind()
{
return kind_;
}
bool operator ==(const Ownership& other) const
{
return other.kind() == kind_;
}
private:
dds::core::policy::OwnershipKind::Type kind_;
};
//==============================================================================
#ifdef OMG_DDS_OWNERSHIP_SUPPORT
class OwnershipStrength
{
public:
    /**
     * @internal Default-constructs with strength 0, the DDS default for
     * OWNERSHIP_STRENGTH. The previous default constructor left s_
     * uninitialized, so reading strength() on a default-constructed
     * instance was undefined behavior.
     */
    OwnershipStrength() : s_(0) {}

    /** @internal Constructs with an explicit strength; higher wins. */
    explicit OwnershipStrength(int32_t s) : s_(s) { }

    /** @internal @return the ownership strength. */
    int32_t strength() const
    {
        return s_;
    }

    /** @internal Mutable access to the ownership strength. */
    int32_t& strength()
    {
        return s_;
    }

    /** @internal Sets the ownership strength. */
    void strength(int32_t s)
    {
        s_ = s;
    }

    /** @internal Two OwnershipStrength policies are equal when strengths match. */
    bool operator ==(const OwnershipStrength& other) const
    {
        return other.strength() == s_;
    }

private:
    int32_t s_;
};
#endif // OMG_DDS_OWNERSHIP_SUPPORT
//==============================================================================
class WriterDataLifecycle
{
public:
    /**
     * @internal Default-constructs with autodispose = true. The previous
     * default constructor left the flag uninitialized, so reading
     * autodispose() on a default-constructed instance was undefined
     * behavior.
     * NOTE(review): true matches the DDS default for
     * autodispose_unregistered_instances -- confirm against the spec.
     */
    WriterDataLifecycle() : autodispose_(true) {}

    /** @internal Constructs with an explicit autodispose setting. */
    WriterDataLifecycle(bool autodispose)
        : autodispose_(autodispose) { }

    /** @internal @return true if unregistered instances are disposed automatically. */
    bool autodispose() const
    {
        return autodispose_;
    }

    /** @internal Mutable access to the autodispose flag. */
    bool& autodispose()
    {
        return autodispose_;
    }

    /** @internal Sets whether unregistered instances are disposed automatically. */
    void autodispose(bool b)
    {
        autodispose_ = b;
    }

    /** @internal Two WriterDataLifecycle policies are equal when flags match. */
    bool operator ==(const WriterDataLifecycle& other) const
    {
        return other.autodispose() == autodispose_;
    }

private:
    bool autodispose_;
};
//==============================================================================
class ReaderDataLifecycle
{
public:
ReaderDataLifecycle() {}
ReaderDataLifecycle(const dds::core::Duration& nowriter_delay,
const dds::core::Duration& disposed_samples_delay)
: autopurge_nowriter_samples_delay_(nowriter_delay),
autopurge_disposed_samples_delay_(disposed_samples_delay) { }
public:
const dds::core::Duration autopurge_nowriter_samples_delay() const
{
return autopurge_nowriter_samples_delay_;
}
void autopurge_nowriter_samples_delay(const dds::core::Duration& d)
{
autopurge_nowriter_samples_delay_ = d;
}
const dds::core::Duration autopurge_disposed_samples_delay() const
{
return autopurge_disposed_samples_delay_;
}
void autopurge_disposed_samples_delay(const dds::core::Duration& d)
{
autopurge_disposed_samples_delay_ = d;
}
bool operator ==(const ReaderDataLifecycle& other) const
{
return other.autopurge_nowriter_samples_delay() == autopurge_nowriter_samples_delay_ &&
other.autopurge_disposed_samples_delay() == autopurge_disposed_samples_delay();
}
private:
dds::core::Duration autopurge_nowriter_samples_delay_;
dds::core::Duration autopurge_disposed_samples_delay_;
};
//==============================================================================
class Durability
{
public:
public:
Durability() {}
Durability(dds::core::policy::DurabilityKind::Type kind) : kind_(kind) { }
public:
void durability(dds::core::policy::DurabilityKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::DurabilityKind::Type durability() const
{
return kind_;
}
dds::core::policy::DurabilityKind::Type& durability()
{
return kind_;
}
void kind(dds::core::policy::DurabilityKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::DurabilityKind::Type& kind()
{
return kind_;
}
dds::core::policy::DurabilityKind::Type kind() const
{
return kind_;
}
bool operator ==(const Durability& other) const
{
return other.kind() == kind_;
}
public:
dds::core::policy::DurabilityKind::Type kind_;
};
//==============================================================================
class Presentation
{
public:
Presentation() {}
Presentation(dds::core::policy::PresentationAccessScopeKind::Type access_scope,
bool coherent_access,
bool ordered_access)
: access_scope_(access_scope),
coherent_access_(coherent_access),
ordered_access_(ordered_access)
{ }
void access_scope(dds::core::policy::PresentationAccessScopeKind::Type as)
{
access_scope_ = as;
}
dds::core::policy::PresentationAccessScopeKind::Type& access_scope()
{
return access_scope_;
}
dds::core::policy::PresentationAccessScopeKind::Type access_scope() const
{
return access_scope_;
}
void coherent_access(bool on)
{
coherent_access_ = on;
}
bool& coherent_access()
{
return coherent_access_;
}
bool coherent_access() const
{
return coherent_access_;
}
void ordered_access(bool on)
{
ordered_access_ = on;
}
bool& ordered_access()
{
return ordered_access_;
}
bool ordered_access() const
{
return ordered_access_;
}
bool operator ==(const Presentation& other) const
{
return other.access_scope() == access_scope_ &&
other.coherent_access() == coherent_access_ &&
other.ordered_access() == ordered_access_;
}
private:
dds::core::policy::PresentationAccessScopeKind::Type access_scope_;
bool coherent_access_;
bool ordered_access_;
};
//==============================================================================
class Reliability
{
public:
public:
Reliability() {}
Reliability(dds::core::policy::ReliabilityKind::Type kind,
const dds::core::Duration& max_blocking_time)
: kind_(kind),
max_blocking_time_(max_blocking_time) { }
public:
void kind(dds::core::policy::ReliabilityKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::ReliabilityKind::Type kind() const
{
return kind_;
}
void max_blocking_time(const dds::core::Duration& d)
{
max_blocking_time_ = d;
}
const dds::core::Duration max_blocking_time() const
{
return max_blocking_time_;
}
bool operator ==(const Reliability& other) const
{
return other.kind() == kind_ &&
other.max_blocking_time() == max_blocking_time_;
}
private:
dds::core::policy::ReliabilityKind::Type kind_;
dds::core::Duration max_blocking_time_;
};
//==============================================================================
class DestinationOrder
{
public:
DestinationOrder() {};
explicit DestinationOrder(dds::core::policy::DestinationOrderKind::Type kind)
: kind_(kind) { }
public:
void kind(dds::core::policy::DestinationOrderKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::DestinationOrderKind::Type& kind()
{
return kind_;
}
dds::core::policy::DestinationOrderKind::Type kind() const
{
return kind_;
}
bool operator ==(const DestinationOrder& other) const
{
return other.kind() == kind_;
}
private:
dds::core::policy::DestinationOrderKind::Type kind_;
};
//==============================================================================
class History
{
public:
History() {}
History(dds::core::policy::HistoryKind::Type kind, int32_t depth)
: kind_(kind),
depth_(depth)
{ }
dds::core::policy::HistoryKind::Type kind() const
{
return kind_;
}
dds::core::policy::HistoryKind::Type& kind()
{
return kind_;
}
void kind(dds::core::policy::HistoryKind::Type kind)
{
kind_ = kind;
}
int32_t depth() const
{
return depth_;
}
int32_t& depth()
{
return depth_;
}
void depth(int32_t depth)
{
depth_ = depth;
}
bool operator ==(const History& other) const
{
return other.kind() == kind_ &&
other.depth() == depth_;
}
private:
dds::core::policy::HistoryKind::Type kind_;
int32_t depth_;
};
//==============================================================================
class ResourceLimits
{
public:
    /**
     * @internal Default-constructs with all three limits set to -1, the
     * value conventionally used to encode LENGTH_UNLIMITED (the DDS
     * default). The previous default constructor left every member
     * uninitialized, so reading any accessor on a default-constructed
     * instance was undefined behavior.
     * (This block also restores the closing brace of max_instances()
     * const, which was missing in the source as received.)
     */
    ResourceLimits()
        : max_samples_(-1),
          max_instances_(-1),
          max_samples_per_instance_(-1)
    { }

    /** @internal Constructs with explicit limits; -1 means unlimited. */
    ResourceLimits(int32_t max_samples,
                   int32_t max_instances,
                   int32_t max_samples_per_instance)
        : max_samples_(max_samples),
          max_instances_(max_instances),
          max_samples_per_instance_(max_samples_per_instance)
    { }

public:
    /** @internal Sets the maximum number of samples. */
    void max_samples(int32_t samples)
    {
        max_samples_ = samples;
    }

    /** @internal Mutable access to the maximum number of samples. */
    int32_t& max_samples()
    {
        return max_samples_;
    }

    /** @internal @return the maximum number of samples. */
    int32_t max_samples() const
    {
        return max_samples_;
    }

    /** @internal Sets the maximum number of instances. */
    void max_instances(int32_t max_instances)
    {
        max_instances_ = max_instances;
    }

    /** @internal Mutable access to the maximum number of instances. */
    int32_t& max_instances()
    {
        return max_instances_;
    }

    /** @internal @return the maximum number of instances. */
    int32_t max_instances() const
    {
        return max_instances_;
    }

    /** @internal Sets the maximum number of samples per instance. */
    void max_samples_per_instance(int32_t max_samples_per_instance)
    {
        max_samples_per_instance_ = max_samples_per_instance;
    }

    /** @internal Mutable access to the maximum samples per instance. */
    int32_t& max_samples_per_instance()
    {
        return max_samples_per_instance_;
    }

    /** @internal @return the maximum number of samples per instance. */
    int32_t max_samples_per_instance() const
    {
        return max_samples_per_instance_;
    }

    /** @internal Two ResourceLimits policies are equal when all limits match. */
    bool operator ==(const ResourceLimits& other) const
    {
        return other.max_samples() == max_samples_ &&
               other.max_instances() == max_instances_ &&
               other.max_samples_per_instance() == max_samples_per_instance_;
    }

private:
    int32_t max_samples_;
    int32_t max_instances_;
    int32_t max_samples_per_instance_;
};
//==============================================================================
class Liveliness
{
public:
public:
Liveliness() {}
Liveliness(dds::core::policy::LivelinessKind::Type kind,
dds::core::Duration lease_duration)
: kind_(kind),
lease_duration_(lease_duration)
{ }
void kind(dds::core::policy::LivelinessKind::Type kind)
{
kind_ = kind;
}
dds::core::policy::LivelinessKind::Type& kind()
{
return kind_;
}
dds::core::policy::LivelinessKind::Type kind() const
{
return kind_;
}
void lease_duration(const dds::core::Duration& lease_duration)
{
lease_duration_ = lease_duration;
}
dds::core::Duration& lease_duration()
{
return lease_duration_;
}
const dds::core::Duration lease_duration() const
{
return lease_duration_;
}
bool operator ==(const Liveliness& other) const
{
return other.kind() == kind_ &&
other.lease_duration() == lease_duration_;
}
private:
dds::core::policy::LivelinessKind::Type kind_;
dds::core::Duration lease_duration_;
};
//==============================================================================
#ifdef OMG_DDS_PERSISTENCE_SUPPORT
class DurabilityService
{
public:
DurabilityService() {}
DurabilityService(const dds::core::Duration& service_cleanup_delay,
dds::core::policy::HistoryKind::Type history_kind,
int32_t history_depth,
int32_t max_samples,
int32_t max_instances,
int32_t max_samples_per_instance)
: cleanup_delay_(service_cleanup_delay),
history_kind_(history_kind),
history_depth_(history_depth),
max_samples_(max_samples),
max_instances_(max_instances),
max_samples_per_instance_(max_samples_per_instance)
{ }
public:
void service_cleanup_delay(const dds::core::Duration& d)
{
cleanup_delay_ = d;
}
const dds::core::Duration service_cleanup_delay() const
{
return cleanup_delay_;
}
void history_kind(dds::core::policy::HistoryKind::Type kind)
{
history_kind_ = kind;
}
dds::core::policy::HistoryKind::Type history_kind() const
{
return history_kind_;
}
void history_depth(int32_t depth)
{
history_depth_ = depth;
}
int32_t history_depth() const
{
return history_depth_;
}
void max_samples(int32_t max_samples)
{
max_samples_ = max_samples;
}
int32_t max_samples() const
{
return max_samples_;
}
void max_instances(int32_t max_instances)
{
max_instances_ = max_instances;
}
int32_t max_instances() const
{
return max_instances_;
}
void max_samples_per_instance(int32_t max_samples_per_instance)
{
max_samples_per_instance_ = max_samples_per_instance;
}
int32_t max_samples_per_instance() const
{
return max_samples_per_instance_;
}
bool operator ==(const DurabilityService& other) const
{
return other.service_cleanup_delay() == cleanup_delay_ &&
other.history_kind() == history_kind_ &&
other.history_depth() == history_depth_ &&
other.max_samples() == max_samples_ &&
other.max_instances() == max_instances_ &&
other.max_samples_per_instance() == max_samples_per_instance_;
}
private:
dds::core::Duration cleanup_delay_;
dds::core::policy::HistoryKind::Type history_kind_;
int32_t history_depth_;
int32_t max_samples_;
int32_t max_instances_;
int32_t max_samples_per_instance_;
};
#endif // OMG_DDS_PERSISTENCE_SUPPORT
#ifdef OMG_DDS_EXTENSIBLE_AND_DYNAMIC_TOPIC_TYPE_SUPPORT
class DataRepresentation { };
#endif // OMG_DDS_EXTENSIBLE_AND_DYNAMIC_TOPIC_TYPE_SUPPORT
#ifdef OMG_DDS_EXTENSIBLE_AND_DYNAMIC_TOPIC_TYPE_SUPPORT
class TypeConsistencyEnforcement { };
#endif // OMG_DDS_EXTENSIBLE_AND_DYNAMIC_TOPIC_TYPE_SUPPORT
}
}
}
} // namespace org::opensplice::core::policy
#endif /* ORG_OPENSPLICE_CORE_POLICY_CORE_POLICY_IMPL_HPP_ */<|fim▁end|>
|
}
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import 'tachyons';<|fim▁end|>
|
import 'marky';
|
<|file_name|>config.ts<|end_file_name|><|fim▁begin|>export class Config {
id: string = "";<|fim▁hole|><|fim▁end|>
|
name: string = "";
module: string = "";
value: string = "";
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility back to Python 2.5 and (currently) has significant performance
advantages, even without using the optional C extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
>>> print(json.dumps(u'\u1234'))
"\u1234"
>>> print(json.dumps('\\'))
"\\"
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
>>> from simplejson.compat import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> obj = [1,2,3,{'4': 5, '6': 7}]
>>> json.dumps(obj, separators=(',',':'), sort_keys=True)
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' '))
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from simplejson.compat import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError('Object of type %s is not JSON serializable' %
... obj.__class__.__name__)
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 3 (char 2)
Parsing multiple documents serialized as JSON lines (newline-delimited JSON)::
>>> import simplejson as json
>>> def loads_lines(docs):
... for doc in docs.splitlines():
... yield json.loads(doc)
...
>>> sum(doc["count"] for doc in loads_lines('{"count":1}\n{"count":2}\n{"count":3}\n'))
6
Serializing multiple objects to JSON lines (newline-delimited JSON)::
>>> import simplejson as json
>>> def dumps_lines(objs):
... for obj in objs:
... yield json.dumps(obj, separators=(',',':')) + '\n'
...
>>> ''.join(dumps_lines([{'count': 1}, {'count': 2}, {'count': 3}]))
'{"count":1}\n{"count":2}\n{"count":3}\n'
"""
from __future__ import absolute_import
__version__ = '3.16.1'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first', 'RawJSON'
]
__author__ = 'Bob Ippolito <[email protected]>'
from decimal import Decimal
from .errors import JSONDecodeError
from .raw_json import RawJSON
from .decoder import JSONDecoder
from .encoder import JSONEncoder, JSONEncoderForHTML
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
from . import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from ._speedups import make_encoder
return make_encoder
except ImportError:
return None
# Module-level JSONEncoder singleton used as the fast path when the public
# dump()/dumps() functions are called with all-default arguments; the keyword
# values here mirror those functions' documented defaults.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
    use_decimal=True,
    namedtuple_as_object=True,
    tuple_as_array=True,
    iterable_as_array=False,
    bigint_as_string=False,
    item_sort_key=None,
    for_json=False,
    ignore_nan=False,
    int_as_string_bitcount=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, use_decimal=True,
        namedtuple_as_object=True, tuple_as_array=True,
        bigint_as_string=False, sort_keys=False, item_sort_key=None,
        for_json=False, ignore_nan=False, int_as_string_bitcount=None,
        iterable_as_array=False, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    If *skipkeys* is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If *ensure_ascii* is false, then some chunks written to ``fp``
    may be ``unicode`` instances, subject to normal Python ``str`` to
    ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
    to cause an error.

    If *check_circular* is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If *allow_nan* is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
    in strict compliance of the original JSON specification, instead of using
    the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See
    *ignore_nan* for ECMA-262 compliant behavior.

    If *indent* is a string, then JSON array elements and object members
    will be pretty-printed with a newline followed by that string repeated
    for each level of nesting. ``None`` (the default) selects the most compact
    representation without any newlines. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.

    If specified, *separators* should be an
    ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
    if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
    compact JSON representation, you should specify ``(',', ':')`` to eliminate
    whitespace.

    *encoding* is the character encoding for str instances, default is UTF-8.

    *default(obj)* is a function that should return a serializable version
    of obj or raise ``TypeError``. The default simply raises ``TypeError``.

    If *use_decimal* is true (default: ``True``) then decimal.Decimal
    will be natively serialized to JSON with full precision.

    If *namedtuple_as_object* is true (default: ``True``),
    :class:`tuple` subclasses with ``_asdict()`` methods will be encoded
    as JSON objects.

    If *tuple_as_array* is true (default: ``True``),
    :class:`tuple` (and subclasses) will be encoded as JSON arrays.

    If *iterable_as_array* is true (default: ``False``),
    any object not in the above table that implements ``__iter__()``
    will be encoded as a JSON array.

    If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
    or lower than -2**53 will be encoded as strings. This is to avoid the
    rounding that happens in Javascript otherwise. Note that this is still a
    lossy operation that will not round-trip correctly and should be used
    sparingly.

    If *int_as_string_bitcount* is a positive number (n), then int of size
    greater than or equal to 2**n or lower than or equal to -2**n will be
    encoded as strings.

    If specified, *item_sort_key* is a callable used to sort the items in
    each dictionary. This is useful if you want to sort items other than
    in alphabetical order by key. This option takes precedence over
    *sort_keys*.

    If *sort_keys* is true (default: ``False``), the output of dictionaries
    will be sorted by item.

    If *for_json* is true (default: ``False``), objects with a ``for_json()``
    method will use the return value of that method for encoding as JSON
    instead of the object.

    If *ignore_nan* is true (default: ``False``), then out of range
    :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
    ``null`` in compliance with the ECMA-262 specification. If true, this will
    override *allow_nan*.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
    of subclassing whenever possible.

    """
    # cached encoder: reuse the shared module-level encoder when every
    # option matches its defaults, avoiding a JSONEncoder construction
    # per call.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and use_decimal
        and namedtuple_as_object and tuple_as_array and not iterable_as_array
        and not bigint_as_string and not sort_keys
        and not item_sort_key and not for_json
        and not ignore_nan and int_as_string_bitcount is None
        and not kw
    ):
        iterable = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            cls = JSONEncoder
        iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
            separators=separators, encoding=encoding,
            default=default, use_decimal=use_decimal,
            namedtuple_as_object=namedtuple_as_object,
            tuple_as_array=tuple_as_array,
            iterable_as_array=iterable_as_array,
            bigint_as_string=bigint_as_string,
            sort_keys=sort_keys,
            item_sort_key=item_sort_key,
            for_json=for_json,
            ignore_nan=ignore_nan,
            int_as_string_bitcount=int_as_string_bitcount,
            **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, use_decimal=True,
        namedtuple_as_object=True, tuple_as_array=True,
        bigint_as_string=False, sort_keys=False, item_sort_key=None,
        for_json=False, ignore_nan=False, int_as_string_bitcount=None,
        iterable_as_array=False, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is false then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a string, then JSON array elements and object members
    will be pretty-printed with a newline followed by that string repeated
    for each level of nesting. ``None`` (the default) selects the most compact
    representation without any newlines. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.

    If specified, ``separators`` should be an
    ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
    if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
    compact JSON representation, you should specify ``(',', ':')`` to eliminate
    whitespace.

    ``encoding`` is the character encoding for str instances, default is UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.

    If *use_decimal* is true (default: ``True``) then decimal.Decimal
    will be natively serialized to JSON with full precision.

    If *namedtuple_as_object* is true (default: ``True``),
    :class:`tuple` subclasses with ``_asdict()`` methods will be encoded
    as JSON objects.

    If *tuple_as_array* is true (default: ``True``),
    :class:`tuple` (and subclasses) will be encoded as JSON arrays.

    If *iterable_as_array* is true (default: ``False``),
    any object not in the above table that implements ``__iter__()``
    will be encoded as a JSON array.

    If *bigint_as_string* is true (not the default), ints 2**53 and higher
    or lower than -2**53 will be encoded as strings. This is to avoid the
    rounding that happens in Javascript otherwise.

    If *int_as_string_bitcount* is a positive number (n), then int of size
    greater than or equal to 2**n or lower than or equal to -2**n will be
    encoded as strings.

    If specified, *item_sort_key* is a callable used to sort the items in
    each dictionary. This is useful if you want to sort items other than
    in alphabetical order by key. This option takes precedence over
    *sort_keys*.

    If *sort_keys* is true (default: ``False``), the output of dictionaries
    will be sorted by item.

    If *for_json* is true (default: ``False``), objects with a ``for_json()``
    method will use the return value of that method for encoding as JSON
    instead of the object.

    If *ignore_nan* is true (default: ``False``), then out of range
    :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
    ``null`` in compliance with the ECMA-262 specification. If true, this will
    override *allow_nan*.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
    of subclassing whenever possible.

    """
    # cached encoder: reuse the shared module-level encoder when every
    # option matches its defaults, avoiding a JSONEncoder construction
    # per call.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and use_decimal
        and namedtuple_as_object and tuple_as_array and not iterable_as_array
        and not bigint_as_string and not sort_keys
        and not item_sort_key and not for_json
        and not ignore_nan and int_as_string_bitcount is None
        and not kw
    ):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        use_decimal=use_decimal,
        namedtuple_as_object=namedtuple_as_object,
        tuple_as_array=tuple_as_array,
        iterable_as_array=iterable_as_array,
        bigint_as_string=bigint_as_string,
        sort_keys=sort_keys,
        item_sort_key=item_sort_key,
        for_json=for_json,
        ignore_nan=ignore_nan,
        int_as_string_bitcount=int_as_string_bitcount,
        **kw).encode(obj)
# Module-level decoder instance shared by load()/loads() when they are
# called with all-default arguments (see the fast path in loads()).
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
                               object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None,
        use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
        **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    *encoding* determines the encoding used to interpret any
    :class:`str` objects decoded by this instance (``'utf-8'`` by default).
    It has no effect when decoding :class:`unicode` objects. Only encodings
    that are a superset of ASCII work; strings of other encodings should be
    passed in as :class:`unicode`.

    *object_hook* is called with the result of every JSON object decoded,
    and its return value is used in place of the :class:`dict` (useful for
    custom deserializations such as JSON-RPC class hinting).

    *object_pairs_hook*, if given, is called with an ordered list of pairs
    for every object literal decoded and its return value replaces the
    :class:`dict`; it takes priority over *object_hook* and can be used for
    order-sensitive decoders (e.g. :func:`collections.OrderedDict`).

    *parse_float* / *parse_int* are called with the string of every JSON
    float / int to be decoded, defaulting to ``float(num_str)`` /
    ``int(num_str)``; supply them to use other datatypes or parsers
    (e.g. :class:`decimal.Decimal`, :class:`float`).

    *parse_constant*, if given, is called with one of ``'-Infinity'``,
    ``'Infinity'``, ``'NaN'`` and can be used to raise an exception for
    invalid JSON numbers.

    If *use_decimal* is true (default: ``False``) then it implies
    parse_float=decimal.Decimal for parity with ``dump``.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
    of subclassing whenever possible.
    """
    # NOTE(review): *namedtuple_as_object* and *tuple_as_array* are accepted
    # here but not forwarded below — presumably kept for signature symmetry
    # with dump(); they have no effect on decoding.
    document = fp.read()
    return loads(
        document,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
        use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None,
        use_decimal=False, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    *encoding* determines the encoding used to interpret any
    :class:`str` objects decoded by this instance (``'utf-8'`` by default).
    It has no effect when decoding :class:`unicode` objects. Only encodings
    that are a superset of ASCII work; strings of other encodings should be
    passed in as :class:`unicode`.

    *object_hook* is called with the result of every JSON object decoded,
    and its return value is used in place of the :class:`dict` (useful for
    custom deserializations such as JSON-RPC class hinting).

    *object_pairs_hook*, if given, is called with an ordered list of pairs
    for every object literal decoded and its return value replaces the
    :class:`dict`; it takes priority over *object_hook* and can be used for
    order-sensitive decoders (e.g. :func:`collections.OrderedDict`).

    *parse_float* / *parse_int* are called with the string of every JSON
    float / int to be decoded, defaulting to ``float(num_str)`` /
    ``int(num_str)``; supply them to use other datatypes or parsers
    (e.g. :class:`decimal.Decimal`, :class:`float`).

    *parse_constant*, if given, is called with one of ``'-Infinity'``,
    ``'Infinity'``, ``'NaN'`` and can be used to raise an exception for
    invalid JSON numbers.

    If *use_decimal* is true (default: ``False``) then it implies
    parse_float=decimal.Decimal for parity with ``dump``.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
    of subclassing whenever possible.
    """
    # Fast path: every option at its default means the shared cached
    # decoder can be used directly.
    all_defaults = (
        cls is None and encoding is None and object_hook is None and
        parse_int is None and parse_float is None and
        parse_constant is None and object_pairs_hook is None and
        not use_decimal and not kw)
    if all_defaults:
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Fold any explicitly supplied hooks/parsers into the keyword set
    # handed to the decoder class.
    for name, value in (
            ('object_hook', object_hook),
            ('object_pairs_hook', object_pairs_hook),
            ('parse_float', parse_float),
            ('parse_int', parse_int),
            ('parse_constant', parse_constant)):
        if value is not None:
            kw[name] = value
    if use_decimal:
        if parse_float is not None:
            raise TypeError("use_decimal=True implies parse_float=Decimal")
        kw['parse_float'] = Decimal
    return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
    """Switch the C speedups extension on or off at runtime.

    When *enabled* is true, the decoder, encoder, and scanner modules are
    pointed at their C implementations (falling back to pure Python where
    a C version is unavailable); otherwise they are forced to the pure
    Python implementations. The module-level cached default decoder and
    encoder are rebuilt so they pick up the newly selected functions.
    """
    from . import decoder as dec
    from . import encoder as enc
    from . import scanner as scan
    c_make_encoder = _import_c_make_encoder()
    if enabled:
        dec.scanstring = dec.c_scanstring or dec.py_scanstring
        enc.c_make_encoder = c_make_encoder
        enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
            enc.py_encode_basestring_ascii)
        scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
    else:
        dec.scanstring = dec.py_scanstring
        enc.c_make_encoder = None
        enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
        scan.make_scanner = scan.py_make_scanner
    dec.make_scanner = scan.make_scanner
    # Rebuild the cached default decoder/encoder so their internal
    # function references reflect the toggled implementations.
    global _default_decoder
    _default_decoder = JSONDecoder(
        encoding=None,
        object_hook=None,
        object_pairs_hook=None,
    )
    global _default_encoder
    _default_encoder = JSONEncoder(
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding='utf-8',
        default=None,
    )


def simple_first(kv):
    """Helper function to pass to item_sort_key to sort simple
    elements to the top, then container elements.
    """
    return (isinstance(kv[1], (list, dict, tuple)), kv[0])
|
<|file_name|>bitcoin_es_CL.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="es_CL" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Bitcoin</source>
<translation>Sobre rainbowcoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Bitcoin</b> version</source>
<translation><b>rainbowcoin</b> - versión </translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Este es un software experimental.
Distribuido bajo la licencia MIT/X11, vea el archivo adjunto
COPYING o http://www.opensource.org/licenses/mit-license.php.
Este producto incluye software desarrollado por OpenSSL Project para su uso en
el OpenSSL Toolkit (http://www.openssl.org/), software criptográfico escrito por
Eric Young ([email protected]) y UPnP software escrito por Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Bitcoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Guia de direcciones</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Haz doble clic para editar una dirección o etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Crea una nueva dirección</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Nueva dirección</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Bitcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Estas son tus direcciones rainbowcoin para recibir pagos. Puedes utilizar una diferente por cada persona emisora para saber quien te está pagando.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Copia dirección</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostrar Código &QR </translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Bitcoin address</source>
<translation>Firmar un mensaje para probar que usted es dueño de esta dirección</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Firmar Mensaje</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Bitcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Bitcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>These are your rainbowcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Copia &etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Exporta datos de la guia de direcciones</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Exportar errores</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>No se pudo escribir al archivo %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Introduce contraseña actual </translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repite nueva contraseña:</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introduce la nueva contraseña para la billetera.<br/>Por favor utiliza un contraseña <b>de 10 o mas caracteres aleatorios</b>, u <b>ocho o mas palabras</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Codificar billetera</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operación necesita la contraseña para desbloquear la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desbloquea billetera</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operación necesita la contraseña para decodificar la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Decodificar cartera</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Cambia contraseña</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Introduce la contraseña anterior y la nueva de cartera</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirma la codificación de cartera</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!</source>
<translation>Atención: ¡Si codificas tu billetera y pierdes la contraseña perderás <b>TODOS TUS RAINBOWCOINS</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>¿Seguro que quieres seguir codificando la billetera?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Precaución: Mayúsculas Activadas</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Billetera codificada</translation>
</message>
<message>
<location line="-56"/>
<source>Bitcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.</source>
<translation>rainbowcoin se cerrará para finalizar el proceso de encriptación. Recuerde que encriptar su billetera no protegera completatamente sus rainbowcoins de ser robados por malware que infecte su computador</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Falló la codificación de la billetera</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>La codificación de la billetera falló debido a un error interno. Tu billetera no ha sido codificada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Las contraseñas no coinciden.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Ha fallado el desbloqueo de la billetera</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contraseña introducida para decodificar la billetera es incorrecta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Ha fallado la decodificación de la billetera</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>La contraseña de billetera ha sido cambiada con éxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Firmar &Mensaje...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Vista general</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Muestra una vista general de la billetera</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&amp;Transacciones</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Explora el historial de transacciones</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Edita la lista de direcciones y etiquetas almacenadas</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Muestra la lista de direcciónes utilizadas para recibir pagos</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>&Salir</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Salir del programa</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Bitcoin</source>
<translation>Muestra información acerca de rainbowcoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Acerca de</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar Información sobre QT</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opciones</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Codificar la billetera...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Respaldar billetera...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Cambiar la contraseña...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Bitcoin address</source>
<translation>Enviar monedas a una dirección rainbowcoin</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Bitcoin</source>
<translation>Modifica las opciones de configuración de rainbowcoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Respaldar billetera en otra ubicación</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambiar la contraseña utilizada para la codificación de la billetera</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Bitcoin</source>
<translation>rainbowcoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Bitcoin</source>
<translation>&Sobre rainbowcoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Mostrar/Ocultar</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Bitcoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Bitcoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Configuración</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Ayuda</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Barra de pestañas</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[red-de-pruebas]</translation>
</message>
<message>
<location line="+47"/>
<source>Bitcoin client</source>
<translation>Cliente rainbowcoin</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Bitcoin network</source>
<translation><numerusform>%n conexión activa hacia la red rainbowcoin</numerusform><numerusform>%n conexiones activas hacia la red rainbowcoin</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Actualizado</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Recuperando...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Transacción enviada</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Fecha: %1
Cantidad: %2
Tipo: %3
Dirección: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Bitcoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>La billetera está <b>codificada</b> y actualmente <b>desbloqueada</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>La billetera está <b>codificada</b> y actualmente <b>bloqueada</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Bitcoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>La etiqueta asociada con esta entrada de la libreta de direcciones</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Dirección</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>La dirección asociada con esta entrada en la libreta de direcciones. Solo puede ser modificada para direcciónes de envío.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Nueva dirección para recibir</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nueva dirección para enviar</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar dirección de recepción</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar dirección de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>La dirección introducida "%1" ya esta guardada en la libreta de direcciones.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Bitcoin address.</source>
<translation>La dirección introducida "%1" no es una dirección rainbowcoin valida.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>No se pudo desbloquear la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>La generación de nueva clave falló.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Bitcoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versión</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>UI opciones</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Arranca minimizado
</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Comisión de &transacciónes</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Bitcoin after logging in to the system.</source>
<translation>Inicia rainbowcoin automáticamente despues de encender el computador</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Bitcoin on system login</source>
<translation>&Inicia rainbowcoin al iniciar el sistema</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Bitcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abre automáticamente el puerto del cliente rainbowcoin en el router. Esto funciona solo cuando tu router es compatible con UPnP y está habilitado.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Direcciona el puerto usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Bitcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Conecta a la red rainbowcoin a través de un proxy SOCKS (ej. cuando te conectas por la red Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Conecta a traves de un proxy SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP Proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Dirección IP del servidor proxy (ej. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Puerto:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Puerto del servidor proxy (ej. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Muestra solo un ícono en la bandeja después de minimizar la ventana</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiza a la bandeja en vez de la barra de tareas</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimiza la ventana en lugar de salir del programa cuando la ventana se cierra. Cuando esta opción esta activa el programa solo se puede cerrar seleccionando Salir desde el menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimiza a la bandeja al cerrar</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrado</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Bitcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidad en la que mostrar cantitades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Elige la subdivisión por defecto para mostrar cantidades en la interfaz y cuando se envíen monedas</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Bitcoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Muestra direcciones en el listado de transacciones</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Atención</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Bitcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulario</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Bitcoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>No confirmados:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transacciones recientes</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Tu saldo actual</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Total de transacciones que no han sido confirmadas aun, y que no cuentan para el saldo actual.</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start bitcoin: click-to-pay handler</source>
<translation>Cannot start rainbowcoin: click-to-pay handler</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Solicitar Pago</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiqueta:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mensaje:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Guardar Como...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Imágenes PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Bitcoin-Qt help message to get a list with possible Bitcoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Bitcoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Bitcoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Bitcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Bitcoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar a múltiples destinatarios</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Agrega destinatario</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Remover todos los campos de la transacción</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 LGBT</source>
<translation>123.456 LGBT</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirma el envio</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Envía</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> a %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar el envio de monedas</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>¿Estás seguro de que quieres enviar %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> y </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>La dirección del destinatario no es válida, comprueba otra vez.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>La cantidad por pagar tiene que ser mayor que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>La cantidad sobrepasa tu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>El total sobrepasa tu saldo cuando se incluyen %1 como tasa de envio.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Tienes una dirección duplicada, solo puedes enviar a direcciones individuales de una sola vez.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Error: La transacción fue rechazada. Esto puede haber ocurrido si alguna de las monedas ya estaba gastada o si ha usado una copia de wallet.dat y las monedas se gastaron en la copia pero no se han marcado como gastadas aqui.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Envio</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>&Pagar a:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introduce una etiqueta a esta dirección para añadirla a tu guia</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Elije dirección de la guia</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Elimina destinatario</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Introduce una dirección rainbowcoin (ej. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Firmar Mensaje</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Introduce una dirección rainbowcoin (ej. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Elije dirección de la guia</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Escriba el mensaje que desea firmar</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Bitcoin address</source>
<translation>Firmar un mensaje para probar que usted es dueño de esta dirección</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Introduce una dirección rainbowcoin (ej. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Bitcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Introduce una dirección rainbowcoin (ej. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Click en "Firmar Mensaje" para conseguir firma</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Bitcoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Bitcoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[red-de-pruebas]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/fuera de línea</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/no confirmado</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmaciones</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Estado</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generado</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Credito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>no aceptada</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Comisión transacción</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Cantidad total</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensaje</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentario</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID de Transacción</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Las monedas generadas deben esperar 120 bloques antes de ser gastadas. Cuando has generado este bloque se emitió a la red para ser agregado en la cadena de bloques. Si falla al incluirse en la cadena, cambiará a "no aceptado" y las monedas no se podrán gastar. Esto puede ocurrir ocasionalmente si otro nodo genera un bloque casi al mismo tiempo que el tuyo.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transacción</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, no ha sido emitido satisfactoriamente todavía</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>desconocido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalles de transacción</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Esta ventana muestra información detallada sobre la transacción</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Fuera de línea (%1 confirmaciones)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>No confirmado (%1 de %2 confirmaciones)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmaciones)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloque no ha sido recibido por otros nodos y probablemente no sea aceptado !</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generado pero no aceptado</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recibido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagar a usted mismo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Estado de transacción. Pasa el ratón sobre este campo para ver el número de confirmaciones.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Fecha y hora en que se recibió la transacción.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transacción.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Dirección de destino para la transacción</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Cantidad restada o añadida al balance</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Todo</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoy</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Este mes</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mes pasado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este año</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Rango...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>A ti mismo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Otra</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Introduce una dirección o etiqueta para buscar</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Cantidad mínima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copia dirección</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Edita etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Exportar datos de transacción</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Error exportando</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>No se pudo escribir en el archivo %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Rango:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>para</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Bitcoin version</source>
<translation>Versión rainbowcoin</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or bitcoind</source>
<translation>Envia comando a bitcoin lanzado con -server u bitcoind
</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Muestra comandos
</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Recibir ayuda para un comando
</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opciones:
</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: bitcoin.conf)</source>
<translation>Especifica archivo de configuración (predeterminado: bitcoin.conf)
</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: bitcoind.pid)</source>
<translation>Especifica archivo pid (predeterminado: bitcoin.pid)
</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especifica directorio para los datos
</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 11035 or testnet: 5744)</source>
<translation>Escuchar conexiones en <puerto> (Por defecto: 11035 o red de prueba: 5744)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Mantener como máximo <n> conexiones con pares (por defecto: 125) </translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Umbral de desconexión de clientes con mal comportamiento (por defecto: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 21035 or testnet: 5745)</source>
<translation>Escucha conexiones JSON-RPC en el puerto <port> (predeterminado: 21035 or testnet: 5745)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceptar comandos consola y JSON-RPC
</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Correr como demonio y acepta comandos
</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Usa la red de pruebas
</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=bitcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Bitcoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Bitcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Precaución: -paytxfee es muy alta. Esta es la comisión que pagarás si envías una transacción.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Bitcoin will not work properly.</source>
<translation>Precaución: Por favor revise que la fecha y hora de tu ordenador son correctas. Si tu reloj está mal configurado rainbowcoin no funcionará correctamente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Conecta solo al nodo especificado
</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Dirección -tor invalida: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Mostrar información extra de depuración. Implica todas las demás opciones -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Anteponer marca de tiempo a la salida de depuración</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>Opciones SSL: (ver la rainbowcoin Wiki para instrucciones de configuración SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Enviar información de seguimiento a la consola en vez del archivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Enviar información de seguimiento al depurador</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifica tiempo de espera para conexion en milisegundos (predeterminado: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Intenta usar UPnP para mapear el puerto de escucha (default: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Intenta usar UPnP para mapear el puerto de escucha (default: 1 when listening)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Usuario para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Contraseña para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permite conexiones JSON-RPC desde la dirección IP especificada
</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Envia comando al nodo situado en <ip> (predeterminado: 127.0.0.1)
</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Actualizar billetera al formato actual</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Ajusta el numero de claves en reserva <n> (predeterminado: 100)
</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Rescanea la cadena de bloques para transacciones perdidas de la cartera
</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usa OpenSSL (https) para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Certificado del servidor (Predeterminado: server.cert)
</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Clave privada del servidor (Predeterminado: server.pem)
</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Cifrados aceptados (Predeterminado: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)
</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Este mensaje de ayuda
</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>No es posible escuchar en el %s en este ordenador (bind returned error %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Conecta mediante proxy socks</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permite búsqueda DNS para addnode y connect
</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Cargando direcciones...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Error cargando wallet.dat: Billetera corrupta</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Bitcoin</source>
<translation>Error cargando wallet.dat: La billetera necesita una versión más reciente de rainbowcoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Bitcoin to complete</source>
<translation>La billetera necesita ser reescrita: reinicie rainbowcoin para completar</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Error cargando wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Dirección -proxy invalida: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Cantidad inválida para -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Cantidad inválida</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Fondos insuficientes</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Cargando el index de bloques...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Agrega un nodo al que conectarse e intenta mantener la conexión abierta</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Bitcoin is probably already running.</source>
<translation>No es posible escuchar en el %s en este ordenador. Probablemente rainbowcoin ya se está ejecutando.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Comisión por kB para adicionarla a las transacciones enviadas</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Cargando cartera...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Rescaneando...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Carga completa</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
|
<|file_name|>slide_7.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
// On DOM ready, remove every element with class "slide8" from the page
// (NOTE(review): this file is slide_7.js, so it presumably hides the
// following slide's markup — TODO confirm the intent with the author).
jQuery(document).ready(function($){$(".slide8").remove();});
|
<|file_name|>challenge5.rs<|end_file_name|><|fim▁begin|>use super::challenge2::xor_bytes;
/// Repeating-key XOR: the key is cycled to the length of `data` and the two
/// byte sequences are XORed together via `xor_bytes`.
pub fn encode(data: &Vec<u8>, key: &Vec<u8>) -> Vec<u8> {
    // Materialize the key repeated out to data's length, then XOR pairwise.
    let repeated_key: Vec<u8> = key.iter().cycle().take(data.len()).cloned().collect();
    xor_bytes(data, &repeated_key)
}
#[cfg(test)]
mod test {<|fim▁hole|> #[test]
fn test_encode() {
let input = b"Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal";
let output = String::from("0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f");
assert_eq!(bytes_to_hex_string(&encode(&input.to_vec(), &b"ICE".to_vec())), output);
}
}<|fim▁end|>
|
use super::super::shared::bytes_to_hex_string;
use super::encode;
|
/**
 * Minimal renderer stub: dumps the given node to the console.
 * @param {*} node - value to render/inspect
 * @returns {undefined}
 */
const render = (node) => {
  console.log(node);
};
|
export default render;
|
<|file_name|>packageListContainer.js<|end_file_name|><|fim▁begin|>import React from "react";
import { composeWithTracker } from "@reactioncommerce/reaction-components";
import { Template } from "meteor/templating";
import { Roles } from "meteor/alanning:roles";
import { Reaction } from "/client/api";
/**
* Push package into action view navigation stack
* @param {SyntheticEvent} event Original event
* @param {Object} app Package data
* @return {undefined} No return value
* @private
*/
function handleShowPackage(event, app) {
  // Delegate straight to Reaction's action-view stack; `event` is part of
  // the handler signature but intentionally unused here.
  Reaction.pushActionView(app);
}
/**
 * Open the full dashboard action view, replacing any open detail pane.
 * @return {undefined} No return value
 * @private
 */
function handleShowDashboard() {
  Reaction.hideActionViewDetail();
  Reaction.showActionView({
    i18nKeyTitle: "dashboard.coreTitle",
    title: "Dashboard",
    template: "dashboardPackages"
  });
}
/**
* Push dashbaord & package into action view navigation stack
* @param {SyntheticEvent} event Original event
* @param {Object} app Package data
* @return {undefined} No return value
* @private
*/
function handleOpenShortcut(event, app) {
  // Collapse any open detail pane before showing the package's view.
  Reaction.hideActionViewDetail();
  Reaction.showActionView(app);
}
// Tracker composer: gathers the enabled "settings" and "dashboard" packages
// visible to the current user and hands them (plus the click handlers above)
// to the wrapped component via onData.
function composer(props, onData) {
  // Roles for the current user, scoped to the active shop.
  const audience = Roles.getRolesForUser(Reaction.getUserId(), Reaction.getShopId());

  const settings = Reaction.Apps({ provides: "settings", enabled: true, audience }) || [];
  // Dashboard cards are only kept if their Blaze template actually exists.
  const dashboard = Reaction.Apps({ provides: "dashboard", enabled: true, audience })
    .filter((d) => typeof Template[d.template] !== "undefined") || [];

  onData(null, {
    currentView: Reaction.getActionView(),
    groupedPackages: {
      actions: {
        title: "Actions",
        i18nKeyTitle: "admin.dashboard.packageGroupActionsLabel",
        packages: dashboard
      },
      settings: {
        title: "Settings",
        i18nKeyTitle: "admin.dashboard.packageGroupSettingsLabel",
        packages: settings
      }
    },

    // Callbacks
    handleShowPackage,
    handleShowDashboard,
    handleOpenShortcut
  });
}
// Higher-order component: wraps `Comp` so the tracker-composed data props
// from `composer` are spread onto it on every reactive recomputation.
export default function PackageListContainer(Comp) {
  function CompositeComponent(props) {
    return (
      <Comp {...props} />
    );
  }

  return composeWithTracker(composer)(CompositeComponent);
}
|
* @private
*/
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from .ppo import PPOAgent
|
<|file_name|>issue-11612.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// #11612
// We weren't updating the auto adjustments with all the resolved
// type information after type check.
// pretty-expanded FIXME #23616
// Minimal trait/struct pair reproducing the issue-11612 scenario; the exact
// shape of these items is the regression test, so the code is left as-is.
trait A { fn dummy(&self) { } }

struct B<'a, T:'a> {
    f: &'a T
}

impl<'a, T> A for B<'a, T> {}

fn foo(_: &A) {}

fn bar<G>(b: &B<G>) {
    foo(b); // Coercion should work
    foo(b as &A); // Explicit cast should work as well
}

fn main() {}
|
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
<|file_name|>owners_finder_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env vpython3
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for owners_finder.py."""
import os
import sys
import unittest
if sys.version_info.major == 2:
import mock
else:
from unittest import mock
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support import filesystem_mock
import owners_finder
import owners_client
ben = '[email protected]'
brett = '[email protected]'
darin = '[email protected]'
jochen = '[email protected]'
john = '[email protected]'
ken = '[email protected]'
peter = '[email protected]'
tom = '[email protected]'
nonowner = '[email protected]'
def owners_file(*email_addresses, **kwargs):
s = ''
if kwargs.get('comment'):
s += '# %s\n' % kwargs.get('comment')
if kwargs.get('noparent'):
s += 'set noparent\n'
return s + '\n'.join(email_addresses) + '\n'
<|fim▁hole|> self.owners_by_path = {
'DEPS': [ken, peter, tom],
'base/vlog.h': [ken, peter, tom],
'chrome/browser/defaults.h': [brett, ben, ken, peter, tom],
'chrome/gpu/gpu_channel.h': [ken, ben, brett, ken, peter, tom],
'chrome/renderer/gpu/gpu_channel_host.h': [peter, ben, brett, ken, tom],
'chrome/renderer/safe_browsing/scorer.h': [peter, ben, brett, ken, tom],
'content/content.gyp': [john, darin],
'content/bar/foo.cc': [john, darin],
'content/baz/froboz.h': [brett, john, darin],
'content/baz/ugly.cc': [brett, john, darin],
'content/baz/ugly.h': [brett, john, darin],
'content/common/common.cc': [jochen, john, darin],
'content/foo/foo.cc': [jochen, john, darin],
'content/views/pie.h': [ben, john, self.EVERYONE],
}
def ListOwners(self, path):
path = path.replace(os.sep, '/')
return self.owners_by_path[path]
class OutputInterceptedOwnersFinder(owners_finder.OwnersFinder):
  """OwnersFinder that captures writeln() output in memory for assertions.

  `output` collects written lines; indent()/unindent() nest lines into
  sub-lists so tests can assert on the exact indented structure.
  """

  def __init__(self, files, author, reviewers, client, disable_color=False):
    super(OutputInterceptedOwnersFinder, self).__init__(
        files, author, reviewers, client, disable_color=disable_color)
    self.output = []
    self.indentation_stack = []

  def resetText(self):
    """Discard all captured output and any pending indentation blocks."""
    self.output = []
    self.indentation_stack = []

  def indent(self):
    # Park the current output and start collecting an indented block.
    self.indentation_stack.append(self.output)
    self.output = []

  def unindent(self):
    # Close the innermost block and append it to the enclosing output.
    inner = self.output
    self.output = self.indentation_stack.pop()
    self.output.append(inner)

  def writeln(self, text=''):
    self.output.append(text)
class _BaseTestCase(unittest.TestCase):
  """Shared fixtures: a default file list and finder factory helpers."""

  # Paths used by defaultFinder(); each is a key of TestClient.owners_by_path.
  default_files = [
      'base/vlog.h',
      'chrome/browser/defaults.h',
      'chrome/gpu/gpu_channel.h',
      'chrome/renderer/gpu/gpu_channel_host.h',
      'chrome/renderer/safe_browsing/scorer.h',
      'content/content.gyp',
      'content/bar/foo.cc',
      'content/baz/ugly.cc',
      'content/baz/ugly.h',
      'content/views/pie.h'
  ]

  def ownersFinder(self, files, author=nonowner, reviewers=None):
    # Color is disabled so captured output is plain, assertable text.
    reviewers = reviewers or []
    return OutputInterceptedOwnersFinder(
        files, author, reviewers, TestClient(), disable_color=True)

  def defaultFinder(self):
    return self.ownersFinder(self.default_files)
class OwnersFinderTests(_BaseTestCase):
  """End-to-end tests of OwnersFinder selection/deselection bookkeeping."""

  def test_constructor(self):
    self.assertNotEqual(self.defaultFinder(), None)

  def test_skip_files_owned_by_reviewers(self):
    files = [
        'chrome/browser/defaults.h',  # owned by brett
        'content/bar/foo.cc',         # not owned by brett
    ]
    finder = self.ownersFinder(files, reviewers=[brett])
    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})

  def test_skip_files_owned_by_author(self):
    files = [
        'chrome/browser/defaults.h',  # owned by brett
        'content/bar/foo.cc',         # not owned by brett
    ]
    finder = self.ownersFinder(files, author=brett)
    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})

  def test_native_path_sep(self):
    # Create a path with backslashes on Windows to make sure these are handled.
    # This test is a harmless duplicate on other platforms.
    native_slashes_path = 'chrome/browser/defaults.h'.replace('/', os.sep)
    files = [
        native_slashes_path,   # owned by brett
        'content/bar/foo.cc',  # not owned by brett
    ]
    finder = self.ownersFinder(files, reviewers=[brett])
    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})

  @mock.patch('owners_client.OwnersClient.ScoreOwners')
  def test_reset(self, mockScoreOwners):
    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
    finder = self.defaultFinder()
    # Two passes: state must be identical before and after reset().
    for _ in range(2):
      expected = [brett, darin, john, peter, ken, ben, tom]
      self.assertEqual(finder.owners_queue, expected)
      self.assertEqual(finder.unreviewed_files, {
          'base/vlog.h',
          'chrome/browser/defaults.h',
          'chrome/gpu/gpu_channel.h',
          'chrome/renderer/gpu/gpu_channel_host.h',
          'chrome/renderer/safe_browsing/scorer.h',
          'content/content.gyp',
          'content/bar/foo.cc',
          'content/baz/ugly.cc',
          'content/baz/ugly.h'
      })
      self.assertEqual(finder.selected_owners, set())
      self.assertEqual(finder.deselected_owners, set())
      self.assertEqual(finder.reviewed_by, {})
      self.assertEqual(finder.output, [])

      finder.select_owner(john)
      finder.reset()
      finder.resetText()

  @mock.patch('owners_client.OwnersClient.ScoreOwners')
  def test_select(self, mockScoreOwners):
    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
    finder = self.defaultFinder()
    # Selecting john covers his files and deselects darin (same file set).
    finder.select_owner(john)
    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
    self.assertEqual(finder.selected_owners, {john})
    self.assertEqual(finder.deselected_owners, {darin})
    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': john,
                                          'content/baz/ugly.cc': john,
                                          'content/baz/ugly.h': john,
                                          'content/content.gyp': john})
    self.assertEqual(finder.output,
                     ['Selected: ' + john, 'Deselected: ' + darin])

    finder = self.defaultFinder()
    # Symmetric case: selecting darin deselects john.
    finder.select_owner(darin)
    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
    self.assertEqual(finder.selected_owners, {darin})
    self.assertEqual(finder.deselected_owners, {john})
    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': darin,
                                          'content/baz/ugly.cc': darin,
                                          'content/baz/ugly.h': darin,
                                          'content/content.gyp': darin})
    self.assertEqual(finder.output,
                     ['Selected: ' + darin, 'Deselected: ' + john])

    finder = self.defaultFinder()
    # Selecting brett covers chrome/ plus the baz files; ben drops out.
    finder.select_owner(brett)
    expected = [darin, john, peter, ken, tom]
    self.assertEqual(finder.owners_queue, expected)
    self.assertEqual(finder.selected_owners, {brett})
    self.assertEqual(finder.deselected_owners, {ben})
    self.assertEqual(finder.reviewed_by,
                     {'chrome/browser/defaults.h': brett,
                      'chrome/gpu/gpu_channel.h': brett,
                      'chrome/renderer/gpu/gpu_channel_host.h': brett,
                      'chrome/renderer/safe_browsing/scorer.h': brett,
                      'content/baz/ugly.cc': brett,
                      'content/baz/ugly.h': brett})
    self.assertEqual(finder.output,
                     ['Selected: ' + brett, 'Deselected: ' + ben])

  @mock.patch('owners_client.OwnersClient.ScoreOwners')
  def test_deselect(self, mockScoreOwners):
    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
    finder = self.defaultFinder()
    # Deselecting john implicitly selects darin for john's files.
    finder.deselect_owner(john)
    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
    self.assertEqual(finder.selected_owners, {darin})
    self.assertEqual(finder.deselected_owners, {john})
    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': darin,
                                          'content/baz/ugly.cc': darin,
                                          'content/baz/ugly.h': darin,
                                          'content/content.gyp': darin})
    self.assertEqual(finder.output,
                     ['Deselected: ' + john, 'Selected: ' + darin])

  def test_print_file_info(self):
    # Short form: path plus owner count in brackets.
    finder = self.defaultFinder()
    finder.print_file_info('chrome/browser/defaults.h')
    self.assertEqual(finder.output, ['chrome/browser/defaults.h [5]'])
    finder.resetText()

    finder.print_file_info('chrome/renderer/gpu/gpu_channel_host.h')
    self.assertEqual(finder.output,
                     ['chrome/renderer/gpu/gpu_channel_host.h [5]'])

  def test_print_file_info_detailed(self):
    # Detailed form: path line followed by an indented owner list.
    finder = self.defaultFinder()
    finder.print_file_info_detailed('chrome/browser/defaults.h')
    self.assertEqual(finder.output,
                     ['chrome/browser/defaults.h',
                      [ben, brett, ken, peter, tom]])
    finder.resetText()

    finder.print_file_info_detailed('chrome/renderer/gpu/gpu_channel_host.h')
    self.assertEqual(finder.output,
                     ['chrome/renderer/gpu/gpu_channel_host.h',
                      [ben, brett, ken, peter, tom]])


if __name__ == '__main__':
  unittest.main()
|
class TestClient(owners_client.OwnersClient):
  """Fake OwnersClient backed by an in-memory owners_by_path table."""

  def __init__(self):
    super(TestClient, self).__init__()
<|file_name|>test_hiframes.py<|end_file_name|><|fim▁begin|># *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
import itertools
import numba
import numpy as np
import os
import pandas as pd
import pyarrow.parquet as pq
import random
import string
import unittest
from numba import types
import sdc
from sdc import hiframes
from sdc.str_arr_ext import StringArray
from sdc.tests.test_base import TestCase
from sdc.tests.test_utils import (count_array_OneDs,
count_array_REPs,
count_parfor_OneDs,
count_parfor_REPs,
dist_IR_contains,
get_start_end,
skip_numba_jit,
skip_sdc_jit)
class TestHiFrames(TestCase):
@skip_numba_jit
def test_column_list_select2(self):
# make sure SDC copies the columns like Pandas does
def test_impl(df):
df2 = df[['A']]
df2['A'] += 10
return df2.A, df.A
hpat_func = self.jit(test_impl)
n = 11
df = pd.DataFrame(
{'A': np.arange(n), 'B': np.ones(n), 'C': np.random.ranf(n)})
np.testing.assert_array_equal(hpat_func(df.copy())[1], test_impl(df)[1])
@skip_numba_jit
def test_pd_DataFrame_from_series_par(self):
def test_impl(n):
S1 = pd.Series(np.ones(n))
S2 = pd.Series(np.random.ranf(n))
df = pd.DataFrame({'A': S1, 'B': S2})
return df.A.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 1)<|fim▁hole|> def test_impl(df):
return df['A'][df['B']].values
hpat_func = self.jit(test_impl)
df = pd.DataFrame({'A': [1, 2, 3], 'B': [True, False, True]})
np.testing.assert_array_equal(test_impl(df), hpat_func(df))
@skip_numba_jit
def test_fillna(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
B = df.A.fillna(5.0)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_fillna_inplace(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
df.A.fillna(5.0, inplace=True)
return df.A.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_column_mean(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
return df.A.mean()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_column_var(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.var()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl())
@skip_numba_jit
def test_column_std(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.std()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl())
@skip_numba_jit
def test_column_map(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df['B'] = df.A.map(lambda a: 2 * a)
return df.B.sum()
n = 121
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_column_map_arg(self):
def test_impl(df):
df['B'] = df.A.map(lambda a: 2 * a)
return
n = 121
df1 = pd.DataFrame({'A': np.arange(n)})
df2 = pd.DataFrame({'A': np.arange(n)})
hpat_func = self.jit(test_impl)
hpat_func(df1)
self.assertTrue(hasattr(df1, 'B'))
test_impl(df2)
np.testing.assert_equal(df1.B.values, df2.B.values)
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_cumsum(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.cumsum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_array_OneDs(), 2)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 2)
self.assertTrue(dist_IR_contains('dist_cumsum'))
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_column_distribution(self):
# make sure all column calls are distributed
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df.A.fillna(5.0, inplace=True)
DF = df.A.fillna(5.0)
s = DF.sum()
m = df.A.mean()
v = df.A.var()
t = df.A.std()
Ac = df.A.cumsum()
return Ac.sum() + s + m + v + t
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertTrue(dist_IR_contains('dist_cumsum'))
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_quantile_parallel(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_parallel_float_nan(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float32)})
df.A[0:100] = np.nan
df.A[200:331] = np.nan
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_parallel_int(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.int32)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_sequential(self):
def test_impl(A):
df = pd.DataFrame({'A': A})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
A = np.arange(0, n, 1, np.float64)
np.testing.assert_almost_equal(hpat_func(A), test_impl(A))
@skip_numba_jit
def test_nunique(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df.A[2] = 0
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_nunique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.four.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
def test_nunique_str(self):
def test_impl(n):
df = pd.DataFrame({'A': ['aa', 'bb', 'aa', 'cc', 'cc']})
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@unittest.skip('AssertionError - fix needed\n'
'5 != 3\n')
def test_nunique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.two.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
def test_unique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.four.unique() == 3.0).sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@unittest.skip('AssertionError - fix needed\n'
'2 != 1\n')
def test_unique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.two.unique() == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_describe(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.describe()
hpat_func = self.jit(test_impl)
n = 1001
hpat_func(n)
# XXX: test actual output
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_contains_regex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('AB*', regex=True)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 2)
@skip_numba_jit
def test_str_contains_noregex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('BB', regex=False)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 1)
@skip_numba_jit
def test_str_replace_regex(self):
def test_impl(df):
return df.A.str.replace('AB*', 'EE', regex=True)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_replace_noregex(self):
def test_impl(df):
return df.A.str.replace('AB', 'EE', regex=False)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_replace_regex_parallel(self):
def test_impl(df):
B = df.A.str.replace('AB*', 'EE', regex=True)
return B
n = 5
A = ['ABCC', 'CABBD', 'CCD', 'CCDAABB', 'ED']
start, end = get_start_end(n)
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_default(self):
def test_impl(df):
return df.A.str.split()
df = pd.DataFrame({'A': ['AB CC', 'C ABB D', 'G ', ' ', 'g\t f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_filter(self):
def test_impl(df):
B = df.A.str.split(',')
df2 = pd.DataFrame({'B': B})
return df2[df2.B.str.len() > 1]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_frame_equal(
hpat_func(df), test_impl(df).reset_index(drop=True))
@skip_numba_jit
def test_str_split_box_df(self):
def test_impl(df):
return pd.DataFrame({'B': df.A.str.split(',')})
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df).B, test_impl(df).B, check_names=False)
@skip_numba_jit
def test_str_split_unbox_df(self):
def test_impl(df):
return df.A.iloc[0]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
df2 = pd.DataFrame({'A': df.A.str.split(',')})
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(df2), test_impl(df2))
@unittest.skip('Getitem Series with list values not implement')
def test_str_split_bool_index(self):
def test_impl(df):
C = df.A.str.split(',')
return C[df.B == 'aa']
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D'], 'B': ['aa', 'bb']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_parallel(self):
def test_impl(df):
B = df.A.str.split(',')
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_get(self):
def test_impl(df):
B = df.A.str.split(',')
return B.str.get(1)
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_get_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = A.str.get(1)
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD,F', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_get_to_numeric(self):
def test_impl(df):
B = df.A.str.split(',')
C = pd.to_numeric(B.str.get(1), errors='coerce')
return C
df = pd.DataFrame({'A': ['AB,12', 'C,321,D']})
hpat_func = self.jit(locals={'C': types.int64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_flatten(self):
def test_impl(df):
A = df.A.str.split(',')
return pd.Series(list(itertools.chain(*A)))
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_flatten_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = pd.Series(list(itertools.chain(*A)))
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_to_numeric(self):
def test_impl(df):
B = pd.to_numeric(df.A, errors='coerce')
return B
df = pd.DataFrame({'A': ['123.1', '331.2']})
hpat_func = self.jit(locals={'B': types.float64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_1D_Var_len(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n) + 1.0})
df1 = df[df.A > 5]
return len(df1.B)
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling1(self):
# size 3 without unroll
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3).sum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
# size 7 with unroll
def test_impl_2(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.rolling(7).sum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df['moving average'] = df.A.rolling(window=5, center=True).mean()
return df['moving average'].sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling3(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3, center=True).apply(lambda a: a[0] + 2 * a[1] + a[2])
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_shift1(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.shift(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_shift2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.pct_change(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_df_input(self):
def test_impl(df):
return df.B.sum()
n = 121
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df))
@skip_numba_jit
def test_df_input2(self):
def test_impl(df):
C = df.B == 'two'
return C.sum()
n = 11
df = pd.DataFrame({'A': np.random.ranf(3 * n), 'B': ['one', 'two', 'three'] * n})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df))
@skip_numba_jit
def test_df_input_dist1(self):
def test_impl(df):
return df.B.sum()
n = 121
A = [3, 4, 5, 6, 1]
B = [5, 6, 2, 1, 3]
n = 5
start, end = get_start_end(n)
df = pd.DataFrame({'A': A, 'B': B})
df_h = pd.DataFrame({'A': A[start:end], 'B': B[start:end]})
hpat_func = self.jit(distributed={'df'})(test_impl)
np.testing.assert_almost_equal(hpat_func(df_h), test_impl(df))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_concat(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
df3 = pd.concat([df1, df2])
return df3.A.sum() + df3.key2.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_concat_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1, df2])
return (A3.two == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_concat_series(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
A3 = pd.concat([df1.A, df2.A])
return A3.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_concat_series_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1.two, df2.two])
return (A3 == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
@unittest.skipIf(int(os.getenv('SDC_NP_MPI', '0')) > 1, 'Test hangs on NP=2 and NP=3 on all platforms')
def test_intraday(self):
def test_impl(nsyms):
max_num_days = 100
all_res = 0.0
for i in sdc.prange(nsyms):
s_open = 20 * np.ones(max_num_days)
s_low = 28 * np.ones(max_num_days)
s_close = 19 * np.ones(max_num_days)
df = pd.DataFrame({'Open': s_open, 'Low': s_low, 'Close': s_close})
df['Stdev'] = df['Close'].rolling(window=90).std()
df['Moving Average'] = df['Close'].rolling(window=20).mean()
df['Criteria1'] = (df['Open'] - df['Low'].shift(1)) < -df['Stdev']
df['Criteria2'] = df['Open'] > df['Moving Average']
df['BUY'] = df['Criteria1'] & df['Criteria2']
df['Pct Change'] = (df['Close'] - df['Open']) / df['Open']
df['Rets'] = df['Pct Change'][df['BUY']]
all_res += df['Rets'].mean()
return all_res
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_OneDs(), 0)
self.assertEqual(count_parfor_OneDs(), 1)
@skip_numba_jit
def test_var_dist1(self):
def test_impl(A, B):
df = pd.DataFrame({'A': A, 'B': B})
df2 = df.groupby('A', as_index=False)['B'].sum()
# TODO: fix handling of df setitem to force match of array dists
# probably with a new node that is appended to the end of basic block
# df2['C'] = np.full(len(df2.B), 3, np.int8)
# TODO: full_like for Series
df2['C'] = np.full_like(df2.B.values, 3, np.int8)
return df2
A = np.array([1, 1, 2, 3])
B = np.array([3, 4, 5, 6])
hpat_func = self.jit(locals={'A:input': 'distributed',
'B:input': 'distributed', 'df2:return': 'distributed'})(test_impl)
start, end = get_start_end(len(A))
df2 = hpat_func(A[start:end], B[start:end])
# TODO:
# pd.testing.assert_frame_equal(
# hpat_func(A[start:end], B[start:end]), test_impl(A, B))
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
@skip_numba_jit
def test_getitem_bool_series(self):
|
<|file_name|>CourseCurriculum.py<|end_file_name|><|fim▁begin|>from django.core import serializers
from django.http import HttpResponse, JsonResponse
from Course.models import *
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST, require_GET
import json
@csrf_exempt
@require_POST
def addCourseCurriculum(request):
response_data = {}
try:
C = CourseCurriculum.objects.addCourseCurriculum(request.POST)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
data = serializers.serialize('json', [C, ])
response_data["coursecurriculum"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_POST
def editCourseCurriculum(request):
response_data = {}
try:
C = CourseCurriculum.objects.editCourseCurriculum(request.POST)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
data = serializers.serialize('json', [C, ])
response_data["coursecurriculum"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_POST<|fim▁hole|> try:
C = CourseCurriculum.objects.deleteCourseCurriculum(request.POST)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
data = serializers.serialize('json', [C, ])
response_data["coursecurriculum"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def getCourseCurriculum(request):
response_data = {}
try:
C = CourseCurriculum.objects.getCourseCurriculum(request.GET)
except Exception as e:
response_data["success"] = 0
response_data['exception'] = str(e)
else:
response_data["success"] = 1
data = serializers.serialize('json', [C.instructor, ])
response_data["coursecurriculum"] = json.loads(data)
return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveCourseCurriculum(request):
response_data = {}
try:
C = CourseCurriculum.objects.retrieveCourseCurriculum(request.GET)
except Exception as e:
response_data['success'] = '0'
response_data['exception'] = str(e)
else:
response_data['success'] = '1'
global data
try:
data = serializers.serialize('json', C)
except Exception as e:
data = serializers.serialize('json', [C, ])
response_data["coursecurriculum"] = json.loads(data)
return JsonResponse(response_data)<|fim▁end|>
|
def deleteCourseCurriculum(request):
response_data = {}
|
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package health
<|fim▁hole|>)
// Service struct keeps db object to avoid passing it around
type Service struct {
db *gorm.DB
}
// NewService returns a new Service instance
func NewService(db *gorm.DB) *Service {
return &Service{db: db}
}
// Close stops any running services
func (s *Service) Close() {}<|fim▁end|>
|
import (
"github.com/jinzhu/gorm"
|
<|file_name|>network_security_group_vnic.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2020, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// Core Services API
//
// API covering the Networking (https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/overview.htm),
// Compute (https://docs.cloud.oracle.com/iaas/Content/Compute/Concepts/computeoverview.htm), and
// Block Volume (https://docs.cloud.oracle.com/iaas/Content/Block/Concepts/overview.htm) services. Use this API
// to manage resources such as virtual cloud networks (VCNs), compute instances, and
// block storage volumes.
//
package core
import (
"github.com/oracle/oci-go-sdk/common"
)
// NetworkSecurityGroupVnic Information about a VNIC that belongs to a network security group.
type NetworkSecurityGroupVnic struct {
<|fim▁hole|> VnicId *string `mandatory:"true" json:"vnicId"`
// The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the parent resource that the VNIC
// is attached to (for example, a Compute instance).
ResourceId *string `mandatory:"false" json:"resourceId"`
// The date and time the VNIC was added to the network security group, in the format
// defined by RFC3339 (https://tools.ietf.org/html/rfc3339).
// Example: `2016-08-25T21:10:29.600Z`
TimeAssociated *common.SDKTime `mandatory:"false" json:"timeAssociated"`
}
func (m NetworkSecurityGroupVnic) String() string {
return common.PointerString(m)
}<|fim▁end|>
|
// The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the VNIC.
|
<|file_name|>sleeptimer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1364979192.582168
__CHEETAH_genTimestamp__ = 'Wed Apr 3 17:53:12 2013'
__CHEETAH_src__ = '/home/fermi/Work/Model/tmsingle/openpli3.0/build-tmsingle/tmp/work/mips32el-oe-linux/enigma2-plugin-extensions-openwebif-0.1+git1+279a2577c3bc6defebd4bf9e61a046dcf7f37c01-r0.72/git/plugin/controllers/views/web/sleeptimer.tmpl'
__CHEETAH_srcLastModified__ = 'Wed Apr 3 17:10:17 2013'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class sleeptimer(Template):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
super(sleeptimer, self).__init__(*args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def respond(self, trans=None):
## CHEETAH: main method generated for this template
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
_orig_filter_51193055 = _filter
filterName = u'WebSafe'
if self._CHEETAH__filters.has_key("WebSafe"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2sleeptimer>
\t<e2enabled>''')
_v = VFFSL(SL,"enabled",True) # u'$enabled' on line 4, col 13
if _v is not None: write(_filter(_v, rawExpr=u'$enabled')) # from line 4, col 13.
write(u'''</e2enabled>
\t<e2minutes>''')
_v = VFFSL(SL,"minutes",True) # u'$minutes' on line 5, col 13
if _v is not None: write(_filter(_v, rawExpr=u'$minutes')) # from line 5, col 13.
write(u'''</e2minutes>
\t<e2action>''')
_v = VFFSL(SL,"action",True) # u'$action' on line 6, col 12
if _v is not None: write(_filter(_v, rawExpr=u'$action')) # from line 6, col 12.
write(u'''</e2action>
\t<e2text>''')
_v = VFFSL(SL,"message",True) # u'$message' on line 7, col 10
if _v is not None: write(_filter(_v, rawExpr=u'$message')) # from line 7, col 10.
write(u'''</e2text>
</e2sleeptimer>
''')
_filter = self._CHEETAH__currentFilter = _orig_filter_51193055
########################################<|fim▁hole|>
##################################################
## CHEETAH GENERATED ATTRIBUTES
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
_mainCheetahMethod_for_sleeptimer= 'respond'
## END CLASS DEFINITION
if not hasattr(sleeptimer, '_initCheetahAttributes'):
templateAPIClass = getattr(sleeptimer, '_CHEETAH_templateClass', Template)
templateAPIClass._addCheetahPlumbingCodeToClass(sleeptimer)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
from Cheetah.TemplateCmdLineIface import CmdLineIface
CmdLineIface(templateObj=sleeptimer()).run()<|fim▁end|>
|
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
|
<|file_name|>debug_gui.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2002-2015 The DOSBox Team
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#include "dosbox.h"
#if C_DEBUG
#include "control.h"
#include <stdlib.h>
#include <stdarg.h>
#include <stdio.h>
#include <curses.h>
#include <string.h>
#include "support.h"
#include "regs.h"
#include "debug.h"
#include "debug_inc.h"
struct _LogGroup {
char const* front;
bool enabled;
};
#include <list>
#include <string>
using namespace std;
#define MAX_LOG_BUFFER 500
static list<string> logBuff;
static list<string>::iterator logBuffPos = logBuff.end();
static _LogGroup loggrp[LOG_MAX]={{"",true},{0,false}};
static FILE* debuglog;
extern int old_cursor_state;
void DEBUG_ShowMsg(char const* format,...) {
char buf[512];
va_list msg;
va_start(msg,format);
vsprintf(buf,format,msg);
va_end(msg);
/* Add newline if not present */
Bitu len=strlen(buf);
if(buf[len-1]!='\n') strcat(buf,"\n");
if(debuglog) fprintf(debuglog,"%s",buf);
if (logBuffPos!=logBuff.end()) {
logBuffPos=logBuff.end();
DEBUG_RefreshPage(0);
// mvwprintw(dbg.win_out,dbg.win_out->_maxy-1, 0, "");
}
logBuff.push_back(buf);
if (logBuff.size() > MAX_LOG_BUFFER)
logBuff.pop_front();
logBuffPos = logBuff.end();
wprintw(dbg.win_out,"%s",buf);
wrefresh(dbg.win_out);
}
void DEBUG_RefreshPage(char scroll) {
if (scroll==-1 && logBuffPos!=logBuff.begin()) logBuffPos--;
else if (scroll==1 && logBuffPos!=logBuff.end()) logBuffPos++;
list<string>::iterator i = logBuffPos;
int maxy, maxx; getmaxyx(dbg.win_out,maxy,maxx);
int rem_lines = maxy;
if(rem_lines == -1) return;
wclear(dbg.win_out);
while (rem_lines > 0 && i!=logBuff.begin()) {
--i;
for (string::size_type posf=0, posl; (posl=(*i).find('\n',posf)) != string::npos ;posf=posl+1)
rem_lines -= (int) ((posl-posf) / maxx) + 1; // len=(posl+1)-posf-1
/* Const cast is needed for pdcurses which has no const char in mvwprintw (bug maybe) */
mvwprintw(dbg.win_out,rem_lines-1, 0, const_cast<char*>((*i).c_str()));
}
mvwprintw(dbg.win_out,maxy-1, 0, "");
wrefresh(dbg.win_out);
}
void LOG::operator() (char const* format, ...){
char buf[512];
va_list msg;
va_start(msg,format);
vsprintf(buf,format,msg);
va_end(msg);
if (d_type>=LOG_MAX) return;
if ((d_severity!=LOG_ERROR) && (!loggrp[d_type].enabled)) return;
DEBUG_ShowMsg("%10u: %s:%s\n",static_cast<Bit32u>(cycle_count),loggrp[d_type].front,buf);
}
static void Draw_RegisterLayout(void) {
mvwaddstr(dbg.win_reg,0,0,"EAX=");
mvwaddstr(dbg.win_reg,1,0,"EBX=");
mvwaddstr(dbg.win_reg,2,0,"ECX=");
mvwaddstr(dbg.win_reg,3,0,"EDX=");
mvwaddstr(dbg.win_reg,0,14,"ESI=");
mvwaddstr(dbg.win_reg,1,14,"EDI=");
mvwaddstr(dbg.win_reg,2,14,"EBP=");
mvwaddstr(dbg.win_reg,3,14,"ESP=");
mvwaddstr(dbg.win_reg,0,28,"DS=");
mvwaddstr(dbg.win_reg,0,38,"ES=");
mvwaddstr(dbg.win_reg,0,48,"FS=");
mvwaddstr(dbg.win_reg,0,58,"GS=");
mvwaddstr(dbg.win_reg,0,68,"SS=");
mvwaddstr(dbg.win_reg,1,28,"CS=");
mvwaddstr(dbg.win_reg,1,38,"EIP=");
mvwaddstr(dbg.win_reg,2,75,"CPL");
mvwaddstr(dbg.win_reg,2,68,"IOPL");
mvwaddstr(dbg.win_reg,1,52,"C Z S O A P D I T ");
}
static void DrawBars(void) {
if (has_colors()) {
attrset(COLOR_PAIR(PAIR_BLACK_BLUE));
}
/* Show the Register bar */
mvaddstr(1-1,0, "---(Register Overview )---");
/* Show the Data Overview bar perhaps with more special stuff in the end */
mvaddstr(6-1,0,"---(Data Overview Scroll: page up/down)---");
/* Show the Code Overview perhaps with special stuff in bar too */
mvaddstr(17-1,0,"---(Code Overview Scroll: up/down )---");
/* Show the Variable Overview bar */
mvaddstr(29-1,0, "---(Variable Overview )---");
/* Show the Output OverView */
mvaddstr(34-1,0, "---(Output Scroll: home/end )---");
attrset(0);
//Match values with below. So we don't need to touch the internal window structures
}
static void MakeSubWindows(void) {
/* The Std output win should go at the bottom */
/* Make all the subwindows */
int win_main_maxy, win_main_maxx; getmaxyx(dbg.win_main,win_main_maxy,win_main_maxx);
int outy=1; //Match values with above
/* The Register window */
dbg.win_reg=subwin(dbg.win_main,4,win_main_maxx,outy,0);
outy+=5; // 6
/* The Data Window */
dbg.win_data=subwin(dbg.win_main,10,win_main_maxx,outy,0);
outy+=11; // 17
/* The Code Window */
dbg.win_code=subwin(dbg.win_main,11,win_main_maxx,outy,0);
outy+=12; // 29
/* The Variable Window */
dbg.win_var=subwin(dbg.win_main,4,win_main_maxx,outy,0);
outy+=5; // 34
/* The Output Window */
dbg.win_out=subwin(dbg.win_main,win_main_maxy-outy,win_main_maxx,outy,0);
if(!dbg.win_reg ||!dbg.win_data || !dbg.win_code || !dbg.win_var || !dbg.win_out) E_Exit("Setting up windows failed");
// dbg.input_y=win_main_maxy-1;
scrollok(dbg.win_out,TRUE);
DrawBars();
Draw_RegisterLayout();
refresh();
}
static void MakePairs(void) {
init_pair(PAIR_BLACK_BLUE, COLOR_BLACK, COLOR_CYAN);
init_pair(PAIR_BYELLOW_BLACK, COLOR_YELLOW /*| FOREGROUND_INTENSITY */, COLOR_BLACK);
init_pair(PAIR_GREEN_BLACK, COLOR_GREEN /*| FOREGROUND_INTENSITY */, COLOR_BLACK);
init_pair(PAIR_BLACK_GREY, COLOR_BLACK /*| FOREGROUND_INTENSITY */, COLOR_WHITE);
init_pair(PAIR_GREY_RED, COLOR_WHITE/*| FOREGROUND_INTENSITY */, COLOR_RED);
}
static void LOG_Destroy(Section*) {
if(debuglog) fclose(debuglog);
}
static void LOG_Init(Section * sec) {
Section_prop * sect=static_cast<Section_prop *>(sec);
const char * blah=sect->Get_string("logfile");
if(blah && blah[0] &&(debuglog = fopen(blah,"wt+"))){
}else{
debuglog=0;
}
sect->AddDestroyFunction(&LOG_Destroy);
char buf[1024];
for (Bitu i=1;i<LOG_MAX;i++) {
strcpy(buf,loggrp[i].front);
buf[strlen(buf)]=0;<|fim▁hole|> lowcase(buf);
loggrp[i].enabled=sect->Get_bool(buf);
}
}
void LOG_StartUp(void) {
/* Setup logging groups */
loggrp[LOG_ALL].front="ALL";
loggrp[LOG_VGA].front="VGA";
loggrp[LOG_VGAGFX].front="VGAGFX";
loggrp[LOG_VGAMISC].front="VGAMISC";
loggrp[LOG_INT10].front="INT10";
loggrp[LOG_SB].front="SBLASTER";
loggrp[LOG_DMACONTROL].front="DMA_CONTROL";
loggrp[LOG_FPU].front="FPU";
loggrp[LOG_CPU].front="CPU";
loggrp[LOG_PAGING].front="PAGING";
loggrp[LOG_FCB].front="FCB";
loggrp[LOG_FILES].front="FILES";
loggrp[LOG_IOCTL].front="IOCTL";
loggrp[LOG_EXEC].front="EXEC";
loggrp[LOG_DOSMISC].front="DOSMISC";
loggrp[LOG_PIT].front="PIT";
loggrp[LOG_KEYBOARD].front="KEYBOARD";
loggrp[LOG_PIC].front="PIC";
loggrp[LOG_MOUSE].front="MOUSE";
loggrp[LOG_BIOS].front="BIOS";
loggrp[LOG_GUI].front="GUI";
loggrp[LOG_MISC].front="MISC";
loggrp[LOG_IO].front="IO";
loggrp[LOG_PCI].front="PCI";
/* Register the log section */
Section_prop * sect=control->AddSection_prop("log",LOG_Init);
Prop_string* Pstring = sect->Add_string("logfile",Property::Changeable::Always,"");
Pstring->Set_help("file where the log messages will be saved to");
char buf[1024];
for (Bitu i=1;i<LOG_MAX;i++) {
strcpy(buf,loggrp[i].front);
lowcase(buf);
Prop_bool* Pbool = sect->Add_bool(buf,Property::Changeable::Always,true);
Pbool->Set_help("Enable/Disable logging of this type.");
}
// MSG_Add("LOG_CONFIGFILE_HELP","Logging related options for the debugger.\n");
}
void DBGUI_StartUp(void) {
/* Start the main window */
dbg.win_main=initscr();
cbreak(); /* take input chars one at a time, no wait for \n */
noecho(); /* don't echo input */
nodelay(dbg.win_main,true);
keypad(dbg.win_main,true);
#ifndef WIN32
printf("\e[8;50;80t");
fflush(NULL);
resizeterm(50,80);
touchwin(dbg.win_main);
#endif
old_cursor_state = curs_set(0);
start_color();
cycle_count=0;
MakePairs();
MakeSubWindows();
}
#endif<|fim▁end|>
| |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# pi-manage documentation build configuration file, created by
# sphinx-quickstart on Sun Oct 11 19:10:09 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pi-manage'
copyright = u'2015, privacyIDEA'
author = u'privacyIDEA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.8'
# The full version, including alpha/beta/rc tags.
release = '2.8'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pi-managedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pi-manage.tex', u'pi-manage Documentation',
u'privacyIDEA', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
<|fim▁hole|># (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pi-manage', u'pi-manage Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pi-manage', u'pi-manage Documentation',
author, 'pi-manage', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|>
|
# One entry per manual page. List of tuples
|
<|file_name|>qquote.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_imports)]
// ignore-cross-compile
#![feature(quote, rustc_private)]
extern crate syntax;
extern crate syntax_pos;
use syntax::source_map::FilePathMapping;
use syntax::print::pprust::*;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,
syntax::ext::expand::ExpansionConfig::default("qquote".to_string()),
&mut resolver);
let cx = &mut cx;
macro_rules! check {
($f: ident, $($e: expr),+; $expect: expr) => ({
$(assert_eq!($f(&$e), $expect);)+
});
}
let abc = quote_expr!(cx, 23);
check!(expr_to_string, abc, *quote_expr!(cx, $abc); "23");
let ty = quote_ty!(cx, isize);
check!(ty_to_string, ty, *quote_ty!(cx, $ty); "isize");
let item = quote_item!(cx, static x: $ty = 10;).unwrap();
check!(item_to_string, item, quote_item!(cx, $item).unwrap(); "static x: isize = 10;");
let twenty: u16 = 20;
let stmt = quote_stmt!(cx, let x = $twenty;).unwrap();
check!(stmt_to_string, stmt, quote_stmt!(cx, $stmt).unwrap(); "let x = 20u16;");
let pat = quote_pat!(cx, Some(_));
check!(pat_to_string, pat, *quote_pat!(cx, $pat); "Some(_)");
let expr = quote_expr!(cx, (x, y));
let arm = quote_arm!(cx, (ref x, ref y) => $expr,);
check!(arm_to_string, arm, quote_arm!(cx, $arm); " (ref x, ref y) => (x, y),");
let attr = quote_attr!(cx, #![cfg(foo = "bar")]);
check!(attribute_to_string, attr, quote_attr!(cx, $attr); r#"#![cfg(foo = "bar")]"#);
// quote_arg!
let arg = quote_arg!(cx, foo: i32);
check!(arg_to_string, arg, quote_arg!(cx, $arg); "foo: i32");
let function = quote_item!(cx, fn f($arg) { }).unwrap();
check!(item_to_string, function; "fn f(foo: i32) { }");
let args = vec![arg, quote_arg!(cx, bar: u32)];
let args = &args[..];
let function = quote_item!(cx, fn f($args) { }).unwrap();
check!(item_to_string, function; "fn f(foo: i32, bar: u32) { }");
<|fim▁hole|>
let block = quote_block!(cx, { $stmt let y = 40u32; });
check!(block_to_string, block, *quote_block!(cx, $block); "{ let x = 20u16; let y = 40u32; }");
let function = quote_item!(cx, fn f() $block).unwrap();
check!(item_to_string, function; "fn f() { let x = 20u16; let y = 40u32; }");
// quote_path!
let path = quote_path!(cx, ::syntax::ptr::P<MetaItem>);
check!(path_to_string, path, quote_path!(cx, $path); "::syntax::ptr::P<MetaItem>");
let ty = quote_ty!(cx, $path);
check!(ty_to_string, ty; "::syntax::ptr::P<MetaItem>");
// quote_meta_item!
let meta = quote_meta_item!(cx, cfg(foo = "bar"));
check!(meta_item_to_string, meta, quote_meta_item!(cx, $meta); r#"cfg(foo = "bar")"#);
let attr = quote_attr!(cx, #![$meta]);
check!(attribute_to_string, attr; r#"#![cfg(foo = "bar")]"#);
}<|fim▁end|>
|
// quote_block!
|
<|file_name|>filter_tasks_api.py<|end_file_name|><|fim▁begin|>import web
from inginious.frontend.plugins.utils.admin_api import AdminApi
from inginious.frontend.plugins.utils import get_mandatory_parameter
class FilterTasksApi(AdminApi):
def API_POST(self):
parameters = web.input()
task_query = get_mandatory_parameter(parameters, "task_query")
limit = int(get_mandatory_parameter(parameters, "limit"))
page = int(get_mandatory_parameter(parameters, "page"))
course_ids = set(bank["courseid"]
for bank in self.database.problem_banks.find())
for course_id, course in self.course_factory.get_all_courses().items():
if self.user_manager.has_admin_rights_on_course(course):
course_ids.add(course_id)
tasks = list(self.database.tasks_cache.aggregate([
{
"$match":
{
"$text": {
"$search": task_query,
"$diacriticSensitive": False,
"$caseSensitive": False
}
}
},
{
"$match":
{
"course_id": {"$in": list(course_ids)}
}
},
{
"$project": {
"course_id": 1,<|fim▁hole|> "task_id": 1,
"task_name": 1,
"task_author": 1,
"task_context": 1,
"tags": 1,
"course_name": 1,
"_id": 0,
"score": {"$meta": "textScore"}
}
},
{
"$sort": {"score": -1}
}
]))
left = limit * (page - 1)
right = left + limit
total_pages = len(tasks) // limit
if len(tasks) % limit != 0 or total_pages == 0:
total_pages += 1
if right >= len(tasks):
tasks = tasks[left:]
else:
tasks = tasks[left:right]
response = {'total_pages': total_pages, "tasks": tasks}
return 200, response<|fim▁end|>
| |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from rest_framework import serializers
from rest_framework.settings import api_settings
from api.models import VenueList, EventList
class VenueListSerializer(serializers.Serializer):
id = serializers.IntegerField(read_only=True)
venue_name = serializers.CharField(max_length=255, allow_blank=False)
venue_url = serializers.CharField(max_length=255, allow_blank=False)
venue_address = serializers.CharField(max_length=255, allow_blank=False)
venue_lat_long = serializers.CharField(max_length=255, allow_blank=False)
venue_contact = serializers.CharField(max_length=255, allow_blank=False)
venue_details = serializers.CharField(max_length=255, allow_blank=False)
venue_city = serializers.CharField(max_length=255, allow_blank=False)
def create(self, validated_data):
"""
Create and return a new `Snippet` instance, given the validated data.
"""
return VenueList.objects.create(**validated_data)
def update(self, instance, validated_data):
"""
Update and return an existing `Snippet` instance, given the validated data.
"""
instance.venue_name = validated_data.get('venue_name', instance.venue_name)
instance.venue_url = validated_data.get('venue_url', instance.venue_url)
instance.venue_address = validated_data.get('venue_address', instance.venue_address)
instance.venue_lat_long = validated_data.get('venue_lat_long', instance.venue_lat_long)
instance.venue_contact = validated_data.get('venue_contact', instance.venue_contact)
instance.venue_details = validated_data.get('venue_details', instance.venue_details)
instance.venue_city = validated_data.get('venue_city', instance.venue_city)
instance.save()
return instance
class EventListSerializer(serializers.Serializer):
id = serializers.IntegerField(read_only=True)
venue_id = serializers.IntegerField(allow_null=False)
event_name = serializers.CharField(max_length=255, allow_blank=False)
event_price = serializers.DecimalField(allow_null=False, max_digits=6, decimal_places=2)
event_detail = serializers.CharField(max_length=255, allow_blank=False)
#time_start_end = serializers.CharField(max_length=255, allow_blank=False)
event_time_start = serializers.TimeField(format="%H:%M", input_formats=None)
event_time_end = serializers.TimeField(format="%H:%M", input_formats=None)
event_url = serializers.CharField(max_length=255, allow_blank=False)
event_img_url = serializers.CharField(max_length=255, allow_blank=False)
event_date_time = serializers.DateField(allow_null=False)
<|fim▁hole|> """
Update and return an existing `Snippet` instance, given the validated data.
"""
instance.venue_id = validated_data.get('venue_id', instance.venue_id)
instance.event_name = validated_data.get('event_name', instance.event_name)
instance.event_price = validated_data.get('event_price', instance.event_price)
instance.event_detail = validated_data.get('event_detail', instance.event_detail)
#instance.time_start_end = validated_data.get('time_start_end', instance.time_start_end)
instance.event_time_start = validated_data.get('event_time_start', instance.event_time_start)
instance.event_time_end = validated_data.get('event_time_end', instance.event_time_end)
instance.event_url = validated_data.get('event_url', instance.event_url)
instance.event_img_url = validated_data.get('event_img_url', instance.event_img_url)
instance.event_date_time = validated_data.get('event_date_time', instance.event_date_time)
instance.save()
return instance<|fim▁end|>
|
def create(self, validated_data):
return EventList.objects.create(**validated_data)
def update(self, instance, validated_data):
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
This version of julian is currently in development and is not considered stable.
<|fim▁hole|><|fim▁end|>
|
"""
|
<|file_name|>test_file_for.go<|end_file_name|><|fim▁begin|>package cases
import (
"io/ioutil"
"path/filepath"
"regexp"<|fim▁hole|> "strings"
"testing"
)
func testFilesFor(t testing.TB, dirname string, includes, ignores []string) []string {
files, err := ioutil.ReadDir(dirname)
if err != nil {
t.Fatal(err)
}
testFiles := make([]string, 0)
for _, file := range files {
if file.IsDir() {
continue
}
if !strings.HasSuffix(file.Name(), ".json") {
continue
}
if !isIncludedTestCase(file.Name(), includes, ignores) {
continue
}
if !isTestCaseAllowed(file.Name()) {
continue
}
if !file.IsDir() {
testFiles = append(testFiles, filepath.Join(dirname, file.Name()))
}
}
return testFiles
}
func isIncludedTestCase(s string, includes, ignores []string) bool {
for _, ignore := range ignores {
rgx := rgxForMatcher(ignore)
if rgx.MatchString(s) {
return false
}
}
for _, include := range includes {
rgx := rgxForMatcher(include)
if !rgx.MatchString(s) {
return false
}
}
return true
}
func rgxForMatcher(s string) *regexp.Regexp {
return regexp.MustCompile("(?i)" + regexp.QuoteMeta(s))
}<|fim▁end|>
| |
<|file_name|>App.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import android.annotation.TargetApi;
import android.app.Application;
import android.os.Build;
/**
* Created by nubor on 13/02/2017.
*/
public class App extends Application {
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onCreate() {
super.onCreate();
this.registerActivityLifecycleCallbacks(new AppLifeCycle() );
}
}<|fim▁end|>
|
package testPpermission;
|
<|file_name|>json.py<|end_file_name|><|fim▁begin|>from ... import types as sqltypes
class JSON(sqltypes.JSON):
"""SQLite JSON type.
SQLite supports JSON as of version 3.9 through its JSON1_ extension. Note
that JSON1_ is a
`loadable extension <https://www.sqlite.org/loadext.html>`_ and as such
may not be available, or may require run-time loading.
The :class:`_sqlite.JSON` type supports persistence of JSON values
as well as the core index operations provided by :class:`_types.JSON`
datatype, by adapting the operations to render the ``JSON_EXTRACT``
function wrapped in the ``JSON_QUOTE`` function at the database level.
Extracted values are quoted in order to ensure that the results are
always JSON string values.
.. versionadded:: 1.3
.. seealso::
JSON1_
.. _JSON1: https://www.sqlite.org/json1.html
"""
# Note: these objects currently match exactly those of MySQL, however since
# these are not generalizable to all JSON implementations, remain separately
# implemented for each dialect.
class _FormatTypeMixin(object):
def _format_value(self, value):
raise NotImplementedError()
def bind_processor(self, dialect):
super_proc = self.string_bind_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
def literal_processor(self, dialect):
super_proc = self.string_literal_processor(dialect)
def process(value):
value = self._format_value(value)
if super_proc:
value = super_proc(value)
return value
return process
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
def _format_value(self, value):
if isinstance(value, int):
value = "$[%s]" % value
else:
value = '$."%s"' % value
return value
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
def _format_value(self, value):<|fim▁hole|> "".join(
[
"[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
for elem in value
]
)
)<|fim▁end|>
|
return "$%s" % (
|
<|file_name|>BinomialTree.cpp<|end_file_name|><|fim▁begin|>/**
* Programmer: Minhas Kamal (BSSE0509,IIT,DU)
* Date: 30-Mar-2014
**/
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
using namespace std;
#define X 3
struct node{
int value;
int depth;
<|fim▁hole|> node *next[X];
} nullNode;
int main(){
///input array
int arrayLength=1;
for(int i=0; i<X; i++){
arrayLength = arrayLength * 2;
}
int array[arrayLength];
for(int i=0; i<arrayLength; i++){
array[i]=rand()%100;
printf("%d \n", array[i]);///test
}
///initialize null node
nullNode.value=-1;
nullNode.depth=0;
nullNode.previous=NULL;
for(int i=0; i<X; i++){
nullNode.next[i]=NULL;
}
///constructing binomial tree
bool similarityFound=true;
node binomialTree[arrayLength];
for(int i=0; i<arrayLength; i++){
///initialization
binomialTree[i].value = array[i];
binomialTree[i].depth = 0;
binomialTree[i].previous = &nullNode;
for(int j=0; j<X; j++){
binomialTree[i].next[j] =NULL;
}
nullNode.next[nullNode.depth]=&binomialTree[i];
nullNode.depth++;
///finding & merging similar trees
int newNode=i;
while(1){
similarityFound=false;
int j;
for(j=0; j<nullNode.depth-1; j++){
if(nullNode.next[j]->depth==binomialTree[newNode].depth)
similarityFound=true;
}
if(similarityFound){
if(binomialTree[j].value < binomialTree[newNode].value){
binomialTree[j].next[binomialTree[j].depth]=&binomialTree[newNode];
binomialTree[newNode].previous=&binomialTree[j];
newNode=j;
binomialTree[j].depth++;
}
else{
binomialTree[newNode].next[binomialTree[newNode].depth]=&binomialTree[j];
binomialTree[j].previous=&binomialTree[newNode];
newNode=newNode;
binomialTree[newNode].depth++;
}
nullNode.depth--;
nullNode.next[nullNode.depth]=&binomialTree[newNode];
}else{
break;
}
}
}
///traversing
for(int i=0; i<arrayLength; i++){
cout << &binomialTree[i] << "\t" << binomialTree[i].value << "\t";
cout << binomialTree[i].depth << "\t" << binomialTree[i].previous << "\t";
for(int j=0; j<X; j++){
cout << binomialTree[i].next[j] << "\t";
}
cout << endl;
}
return 0;
}<|fim▁end|>
|
node *previous;
|
<|file_name|>item-inside-macro.rs<|end_file_name|><|fim▁begin|>// run-pass
// Issue #34183
macro_rules! foo {
() => {
fn foo() { }
}
}
macro_rules! bar {
() => {
fn bar();
}
}
trait Bleh {
foo!();
bar!();
}
struct Test;
impl Bleh for Test {
fn bar() {}
}
fn main() {<|fim▁hole|> Test::bar();
Test::foo();
}<|fim▁end|>
| |
<|file_name|>test_alembic.py<|end_file_name|><|fim▁begin|># (c) 2017 - Copyright Red Hat Inc
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|># Authors:
# Pierre-Yves Chibon <[email protected]>
"""This test module contains tests for the migration system."""
import os
import subprocess
import unittest
REPO_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
class TestAlembic(unittest.TestCase):
"""This test class contains tests pertaining to alembic."""
def test_alembic_history(self):
"""Enforce a linear alembic history.
This test runs the `alembic history | grep ' (head), '` command,
and ensure it returns only one line.
"""
proc1 = subprocess.Popen(
["alembic", "history"], cwd=REPO_PATH, stdout=subprocess.PIPE
)
proc2 = subprocess.Popen(
["grep", " (head), "], stdin=proc1.stdout, stdout=subprocess.PIPE
)
stdout = proc2.communicate()[0]
stdout = stdout.strip().split(b"\n")
self.assertEqual(len(stdout), 1)
proc1.communicate()<|fim▁end|>
|
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
|
<|file_name|>supplier-list.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit} from '@angular/core';
import {KendoGrid} from '../kendo/kendo-grid';
import {ViewDefinition} from '../../../services/view-definition';
import {KendoColumn} from '../../../services/view-definition';
@Component({
selector: 'supplier-list',
template:`<div id="kendo-grid-container">
<kendo-grid [viewDefinition]="viewDefinition"></kendo-grid>
</div>`,
directives: [KendoGrid]
})
export class SuppliersList {
public viewDefinition: ViewDefinition;
constructor(){
this.viewDefinition = new ViewDefinition();<|fim▁hole|> this.viewDefinition.columnDefs = COLUMNS;
}
}
var COLUMNS: any[] = [
new KendoColumn('Supplier Name', 'companyName', 120, true),
new KendoColumn('Code', 'supplierCode', 150, true),
new KendoColumn('ABN', 'abn', 80, true)
];<|fim▁end|>
|
this.viewDefinition.className = 'supplier';
this.viewDefinition.detailRoute = '../SupplierDetail';
|
<|file_name|>minimal-config.ts<|end_file_name|><|fim▁begin|>import { writeFile, writeMultipleFiles } from '../../utils/fs';
import { ng } from '../../utils/process';
export default function () {
// TODO(architect): Figure out what a minimal config is for architect apps.
return;
return Promise.resolve()
.then(() => writeFile('angular.json', JSON.stringify({
apps: [{
root: 'src',
main: 'main.ts',
scripts: [
'../node_modules/core-js/client/shim.min.js',
'../node_modules/zone.js/dist/zone.js'<|fim▁hole|> .then(() => ng('e2e', 'test-project-e2e'))
.then(() => writeMultipleFiles({
'./src/script.js': `
document.querySelector('app-root').innerHTML = '<h1>app works!</h1>';
`,
'./e2e/app.e2e-spec.ts': `
import { browser, element, by } from 'protractor';
describe('minimal project App', function() {
it('should display message saying app works', () => {
browser.ignoreSynchronization = true;
browser.get('/');
let el = element(by.css('app-root h1')).getText();
expect(el).toEqual('app works!');
});
});
`,
'angular.json': JSON.stringify({
apps: [{
root: 'src',
scripts: ['./script.js']
}],
e2e: { protractor: { config: './protractor.conf.js' } }
}),
}))
.then(() => ng('e2e', 'test-project-e2e'));
}<|fim▁end|>
|
]
}],
e2e: { protractor: { config: './protractor.conf.js' } }
})))
|
<|file_name|>test_user.py<|end_file_name|><|fim▁begin|># project/server/tests/test_user.py
import datetime
import unittest
from flask_login import current_user
from base import BaseTestCase
from project.server import bcrypt
from project.server.models import User
from project.server.user.forms import LoginForm
class TestUserBlueprint(BaseTestCase):
def test_correct_login(self):
# Ensure login behaves correctly with correct credentials.
with self.client:
response = self.client.post(
"/login",
data=dict(email="[email protected]", password="admin_user"),
follow_redirects=True,
)
self.assertIn(b"Welcome", response.data)
self.assertIn(b"Logout", response.data)
self.assertIn(b"Members", response.data)
self.assertTrue(current_user.email == "[email protected]")
self.assertTrue(current_user.is_active())
self.assertEqual(response.status_code, 200)
def test_logout_behaves_correctly(self):
# Ensure logout behaves correctly - regarding the session.
with self.client:
self.client.post(
"/login",
data=dict(email="[email protected]", password="admin_user"),
follow_redirects=True,
)
response = self.client.get("/logout", follow_redirects=True)
self.assertIn(b"You were logged out. Bye!", response.data)
self.assertFalse(current_user.is_active)
def test_logout_route_requires_login(self):
# Ensure logout route requres logged in user.
response = self.client.get("/logout", follow_redirects=True)
self.assertIn(b"Please log in to access this page", response.data)
def test_member_route_requires_login(self):
# Ensure member route requres logged in user.
response = self.client.get("/members", follow_redirects=True)
self.assertIn(b"Please log in to access this page", response.data)
def test_validate_success_login_form(self):
# Ensure correct data validates.
form = LoginForm(email="[email protected]", password="admin_user")
self.assertTrue(form.validate())
def test_validate_invalid_email_format(self):
# Ensure invalid email format throws error.
form = LoginForm(email="unknown", password="example")
self.assertFalse(form.validate())
def test_get_by_id(self):
# Ensure id is correct for the current/logged in user.
with self.client:
self.client.post(
"/login",
data=dict(email="[email protected]", password="admin_user"),
follow_redirects=True,
)
self.assertTrue(current_user.id == 1)
def test_registered_on_defaults_to_datetime(self):<|fim▁hole|> self.client.post(
"/login",
data=dict(email="[email protected]", password="admin_user"),
follow_redirects=True,
)
user = User.query.filter_by(email="[email protected]").first()
self.assertIsInstance(user.registered_on, datetime.datetime)
def test_check_password(self):
# Ensure given password is correct after unhashing.
user = User.query.filter_by(email="[email protected]").first()
self.assertTrue(
bcrypt.check_password_hash(user.password, "admin_user")
)
self.assertFalse(bcrypt.check_password_hash(user.password, "foobar"))
def test_validate_invalid_password(self):
# Ensure user can't login when the pasword is incorrect.
with self.client:
response = self.client.post(
"/login",
data=dict(email="[email protected]", password="foo_bar"),
follow_redirects=True,
)
self.assertIn(b"Invalid email and/or password.", response.data)
def test_register_route(self):
# Ensure about route behaves correctly.
response = self.client.get("/register", follow_redirects=True)
self.assertIn(b"<h1>Register</h1>\n", response.data)
def test_user_registration(self):
# Ensure registration behaves correctlys.
with self.client:
response = self.client.post(
"/register",
data=dict(
email="[email protected]",
password="testing",
confirm="testing",
),
follow_redirects=True,
)
self.assertIn(b"Welcome", response.data)
self.assertTrue(current_user.email == "[email protected]")
self.assertTrue(current_user.is_active())
self.assertEqual(response.status_code, 200)
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
# Ensure that registered_on is a datetime.
with self.client:
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub fn is_leap_year(year: u64) -> bool {
year % 4 == 0 && (year % 100 != 0 || year % 400 == 0)<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>mail.py<|end_file_name|><|fim▁begin|>#!/bin/env python
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Billy Olsen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# <|fim▁hole|># AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from email.mime.text import MIMEText
from jinja2 import Environment, FileSystemLoader
from datetime import datetime as dt
import os
import six
import smtplib
# Get the directory for this file.
SECRET_SANTA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'templates')
j2env = Environment(loader=FileSystemLoader(SECRET_SANTA_DIR),
trim_blocks=False)
class SantaMail(object):
    """
    The SantaMail object is used to send email. This class will load email
    templates that should be sent out (the master list email and the email
    for each Secret Santa).

    Templates will be loaded from the template directory and is configurable
    via the template_master and template_santa configuration variables.
    """

    # Configuration keys that must be supplied for delivery to work.
    REQUIRED_PARAMS = ['author', 'email', 'smtp', 'username', 'password']

    def __init__(self, author, email, smtp, username, password,
                 template_master="master.tmpl", template_santa="santa.tmpl"):
        """Store SMTP credentials and template names.

        :param author: display name of the game master sending the mail
        :param email: From address (also receives the master list copy)
        :param smtp: SMTP server host (optionally host:port)
        :param username: SMTP login user
        :param password: SMTP login password
        :param template_master: template for the game master's summary mail
        :param template_santa: template for each participant's mail
        """
        self.author = author
        self.email = email
        self.smtp = smtp
        self.username = username
        self.password = password
        self.template_master = template_master
        self.template_santa = template_santa

    def send(self, pairings):
        """
        Sends the emails out to the secret santa participants.

        The secret santa host (the user configured to send the email from)
        will receive a copy of the master list.

        Each Secret Santa will receive an email with the contents of the
        template_santa template.
        """
        for pair in pairings:
            self._send_to_secret_santa(pair)
        self._send_master_list(pairings)

    def _do_send(self, toaddr, body, subject):
        """Build a text/plain message and deliver it over STARTTLS SMTP.

        Errors are reported to stdout rather than raised so that one bad
        address does not abort the remaining deliveries.
        """
        try:
            msg = MIMEText(body)
            msg['Subject'] = subject
            msg['From'] = self.email
            msg['To'] = toaddr
            server = smtplib.SMTP(self.smtp)
            try:
                server.starttls()
                server.login(self.username, self.password)
                server.sendmail(self.email, [toaddr], msg.as_string())
            finally:
                # Always close the connection, even if login/send fails,
                # so a failed delivery does not leak the socket.
                server.quit()
        except Exception:
            # Catch Exception rather than a bare except: a bare except would
            # also swallow KeyboardInterrupt/SystemExit.
            print("Error sending email to %s!" % toaddr)

    def _send_to_secret_santa(self, pair):
        """
        Sends an email to the secret santa pairing.
        """
        (giver, receiver) = pair
        template = j2env.get_template(self.template_santa)
        body = template.render(giver=giver, receiver=receiver)
        year = dt.utcnow().year
        subject = ('Your %s Farmer Family Secret Santa Match' % year)
        self._do_send(giver.email, body, subject)

    def _send_master_list(self, pairings):
        """
        Sends an email to the game master.
        """
        pair_list = []
        for pair in pairings:
            (giver, recipient) = pair
            pair_list.append("%s -> %s" % (giver.name, recipient.name))
        template = j2env.get_template(self.template_master)
        body = template.render(pairs=pair_list)
        year = dt.utcnow().year
        subject = ('%s Farmer Family Secret Santa Master List' % year)
        self._do_send(self.email, body, subject)
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
<|file_name|>util.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
/// Consumes exactly `tag.len()` bytes from `input` and reports whether they
/// match `tag`. The bytes are consumed even when they do not match.
pub fn read_tag<R: io::BufRead>(input: &mut R, tag: &[u8]) -> io::Result<bool> {
    let mut actual = vec![0u8; tag.len()];
    input.read_exact(&mut actual)?;
    Ok(actual == tag)
}
/// Returns `true` when `input` has no more bytes to yield, without
/// consuming anything.
pub fn is_eof<R: io::BufRead>(input: &mut R) -> io::Result<bool> {
    Ok(input.fill_buf()?.is_empty())
}
/// Consumes trailing zero padding. Returns `Ok(true)` if everything up to
/// EOF was zero bytes (all consumed); returns `Ok(false)` as soon as a
/// non-zero byte is seen, leaving the current chunk unconsumed.
pub fn flush_zero_padding<R: io::BufRead>(input: &mut R) -> io::Result<bool> {
    loop {
        let chunk_len = {
            let chunk = input.fill_buf()?;
            if chunk.is_empty() {
                // Clean EOF: only zeros were seen.
                return Ok(true);
            }
            if chunk.iter().any(|&b| b != 0u8) {
                // Non-zero data found; do not consume this chunk.
                return Ok(false);
            }
            chunk.len()
        };
        input.consume(chunk_len);
    }
}
/// A `Read` adaptor that feeds every byte it reads into a hasher.
pub struct HasherRead<'a, R, H>
where
    R: 'a + io::Read,
    H: 'a + hash::Hasher,
{
    inner: &'a mut R,  // wrapped reader
    digest: &'a mut H, // hasher updated with each byte read
}

impl<'a, R, H> HasherRead<'a, R, H>
where
    R: io::Read,
    H: hash::Hasher,
{
    /// Wraps `read` so that `hasher` observes all bytes subsequently read.
    pub fn new(read: &'a mut R, hasher: &'a mut H) -> Self {
        HasherRead {
            inner: read,
            digest: hasher,
        }
    }
}

impl<'a, R, H> io::Read for HasherRead<'a, R, H>
where
    R: io::Read,
    H: hash::Hasher,
{
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let n = self.inner.read(buf)?;
        // Only the bytes actually produced by this call are hashed.
        self.digest.write(&buf[..n]);
        Ok(n)
    }
}
/// A `BufRead` adaptor that tracks how many bytes have been read or consumed.
pub struct CountBufRead<'a, R>
where
    R: 'a + io::BufRead,
{
    inner: &'a mut R, // wrapped reader
    total: usize,     // bytes read/consumed through this adaptor so far
}

impl<'a, R> CountBufRead<'a, R>
where
    R: io::BufRead,
{
    /// Wraps `read` with the byte counter starting at zero.
    pub fn new(read: &'a mut R) -> Self {
        CountBufRead {
            inner: read,
            total: 0,
        }
    }

    /// Number of bytes read or consumed so far.
    pub fn count(&self) -> usize {
        self.total
    }
}

impl<'a, R> io::Read for CountBufRead<'a, R>
where
    R: io::BufRead,
{
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let n = self.inner.read(buf)?;
        self.total += n;
        Ok(n)
    }
}

impl<'a, R> io::BufRead for CountBufRead<'a, R>
where
    R: io::BufRead,
{
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        // Peeking does not advance the count; only `consume` does.
        self.inner.fill_buf()
    }

    fn consume(&mut self, amt: usize) {
        self.inner.consume(amt);
        self.total += amt;
    }
}
|
use std::hash;
use std::io;
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/AntonioLangiu/odgbot/bot"
"github.com/AntonioLangiu/odgbot/common"
)
func main() {<|fim▁hole|> bot.LoadBot(configuration)
}<|fim▁end|>
|
configuration := common.LoadConfiguration()
db := bot.LoadDb(configuration)
defer db.Close()
|
<|file_name|>admin.js<|end_file_name|><|fim▁begin|>const router = require('express').Router();
const db1 = require('../db');
// GET - Get All Students Info (Admin)
// response:
// [] students:
// account_id: uuid
// user_id: uuid
// first_name: string
// last_name: string
// hometown: string
// college: string
// major: string
// gender: string
// birthdate: date
// email: string
// date_created: timestamp
// image_path: string
// bio: string
// Admin endpoint: list every student joined with account and image data.
// Responds with { students: [...] } on success, HTTP 400 on query failure.
router.get('/student/all', (req, res) => {
    db1.any(`
        SELECT account_id, user_id, first_name, last_name, hometown, college, major, gender, bio, birthdate, email, date_created, image_path
        FROM students natural join account natural join images`)
    .then(function(data) {
        // Send All Students Information
        console.log('Success: Admin Get All Students Information');
        res.json({students: data})
    })
    .catch(function(error) {
        // Query failed — log server-side, return a generic 400 to the client.
        console.log(error);
        return res.status(400).send('Error: Problem executing Query');
    });
});
// GET - Get All Associations Info (Admin)
// response:
// [] associations:
// account_id: uuid
// association_id: uuid
// association_name: string
// initials: string
// email: string
// page_link: string
// image_path: string
// bio: string
// date_created: timestamp
// room: string
// building: string
// city: string
// Admin endpoint: list every association joined with account, location and
// image data. Responds with { associations: [...] } or HTTP 400 on failure.
router.get('/association/all', (req, res) => {
    db1.any(`
        SELECT account_id, association_id, association_name, initials, page_link, image_path, email, bio, room, building, city, date_created
        FROM associations natural join account natural join location natural join images`)
    .then(function(data) {
        // Send All Associations Information
        console.log('Success: Admin Get All Associations Information');
        res.json({associations: data});
    })
    .catch(function(error) {
        // Query failed — log server-side, return a generic 400 to the client.
        console.log(error);
        return res.status(400).send('Error: Problem executing Query');
    });
});
// GET - Get All Events Info (Admin)
// response:
// [] events
// event_id: id
// event_name: string
// is_live: bool (yes/no)
// registration_link: string
// start_date: date
// end_date: date
// start_time: time
// end_time: time
// room: string
// building: string
// city: string
// image_path: string
// time_stamp: timestamp
router.get('/event/all', (req, res) => {
db1.any(`
SELECT event_id, event_name, is_live, registration_link, start_date, end_date, start_time, end_time, room, building, city, image_path, time_stamp
FROM events natural join images natural join location`)
.then(function(data) {
// Send All Events Information
console.log('Success: Admin Get All Events Information');
res.json({events: data});
})
.catch(function(error) {<|fim▁hole|> });
});
module.exports = router;<|fim▁end|>
|
console.log(error);
return res.status(400).send('Error: Problem executing Query');
|
<|file_name|>Endpoint.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0.
*
* THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
* ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
* MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
package com.interopbridges.scx.webservices;
/**
*
* <p>
* Concrete representation of an Endpoint to match what is described in the web
* service's WSDL.
* </p>
*
* <p>
*
* <pre>
* <service name="CalculatorService">
* <port name="CalculatorPort" binding="tns:CalculatorPortBinding">
* <soap:address location="http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService" />
* </port>
* </service>
* </pre>
*
* </p>
*
* <p>
* Typically this might look like:
* <ol>
* <li><b>http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService</b></li>
* <li><b>http://scxom-ws7-02:8080/axis2/services/DinnerFinderService</li>
* DinnerFinderServiceHttpSoap11Endpoint/</b>
* </ol>>
* </p>
*
* @author Christopher Crammond
*/
public class Endpoint implements EndpointMBean {
/**
* Key for describing for the (interopbridges) JMX type of MBean
*/
private String _jmxType = "endpoint";
/**
* String representing the full URL of the endpoint address. This should
* match the soap:address's location attribute from the WSDL. <br>
*
*/
private String _url;
/**
* Empty Constructor. It is considered to be a best practice to create this
* default constructor rather than relying on the compiler to auto-generate
* it.
*/
public Endpoint() {
this._url = "";
}
/**
* Preferred Constructor
*
<|fim▁hole|> * @param url
* String representing the full URL of the endpoint address.
*/
public Endpoint(String url) {
this._url = url;
}
/*
* (non-Javadoc)
*
* @see com.interopbridges.scx.webservices.EndpointMBean#getUrl()
*/
public String getUrl() {
return this._url;
}
/*
* (non-Javadoc)
*
* @see com.interopbridges.scx.webservices.IMBean#getJmxType()
*/
public String getJmxType() {
return this._jmxType;
}
}<|fim▁end|>
| |
<|file_name|>tasks.spec.ts<|end_file_name|><|fim▁begin|>describe('Tasks/Jobs', () => {
/**
* Before hook
* Setup the context
*/
before(() => {
cy.apps();
cy.importAppsTask();
});
/**
* After hook
* Clean the context
*/
after(() => {
// Clean up all executions
cy.cleanupTaskExecutions();
// Destroy all tasks
cy.destroyTasks();
// Destroy all applications
cy.unregisterApplications();
});
  // CRUD coverage for the task-definition list: create, inspect, launch,
  // destroy, cleanup and clone flows driven through the UI.
  describe('Tasks', () => {
    // Every spec starts from the task list page.
    beforeEach(() => {
      cy.tasks();
    });
    it('should create 3 tasks', () => {
      cy.createTask('task1');
      cy.createTask('task2');
      cy.createTask('task3');
      // Pagination total reflects the number of task definitions.
      cy.get('span.pagination-total').should(elem => {
        expect(Number(elem.text())).to.equal(3);
      });
    });
    it('should show the task details', () => {
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button').first().click();
      cy.get('app-view-card').should('be.exist');
      cy.get('app-view-card').should('have.id', 'info');
    });
    it('should launch a task', () => {
      cy.launchTask('task1');
      cy.get('clr-spinner').should('not.exist');
      cy.get('app-view-card').invoke('attr', 'keycontext').should('eq', 'execution');
    });
    it('should destroy a task', () => {
      // Row action #4 is "Destroy"; confirm in the modal, expect 2 left.
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button:nth-child(4)').first().click();
      cy.get('.modal-dialog button').should('be.exist');
      cy.get('.modal-dialog button').last().click();
      cy.shouldShowToast('Destroy task', 'Successfully removed task "task3".');
      cy.get('clr-spinner').should('not.exist');
      cy.get('span.pagination-total').should(elem => {
        expect(Number(elem.text())).to.equal(2);
      });
    });
    it('should cleanup a task', () => {
      cy.launchTask('task2');
      cy.tasks();
      cy.get('.datagrid-action-toggle').should('be.visible');
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button:nth-child(6)').first().click();
      cy.get('.modal-dialog').should('be.exist');
      // NOTE(review): selector below is missing the closing ']'
      // ('button[data-cy=cleanup') — verify it still matches the button.
      cy.get('button[data-cy=cleanup').click();
      cy.shouldShowToast();
      // TODO: check state
    });
    it('should clone a task', () => {
      // Row action #5 is "Clone"; cloning restores the count to 3.
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button:nth-child(5)').first().click();
      cy.get('.modal-dialog button').should('be.exist');
      cy.get('.modal-dialog button').last().click();
      cy.shouldShowToast('Task(s) clone', 'Task(s) have been cloned successfully');
      cy.get('clr-spinner').should('not.exist');
      cy.get('span.pagination-total').should(elem => {
        expect(Number(elem.text())).to.equal(3);
      });
    });
  });
  // Coverage for the task-execution list: viewing, relaunching and cleaning
  // up executions produced by launching tasks.
  describe('Task executions', () => {
    // Launch a task and navigate to the executions page before each spec.
    beforeEach(() => {
      cy.launchTask('task1');
      cy.get('a[routerlink = "tasks-jobs/task-executions"]').should('be.visible');
      cy.get('a[routerlink = "tasks-jobs/task-executions"]').click();
    });
    it('Should show a task execution', () => {
      cy.get('.datagrid-action-toggle').should('be.visible');
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow').should('be.exist');
      cy.get('.datagrid-action-overflow button:nth-child(1)').first().click();
      // TODO: check state
    });
    it('Should navigate to the task', () => {
      cy.get('.datagrid-action-toggle').should('be.visible');
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button').should('be.exist');
      cy.get('.datagrid-action-overflow button:nth-child(2)').first().click();
      // TODO: check state
    });
    it('Should relaunch the task', () => {
      cy.get('.datagrid-action-toggle').should('be.visible');
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button').should('be.exist');
      cy.get('.datagrid-action-overflow button:nth-child(3)').first().click();
      // TODO: check state
    });
    it('should clean up task executions', () => {
      cy.taskExecutions();
      cy.get('clr-spinner').should('not.exist');
      // Select all executions via the group action and clean them up; the
      // pagination total should drop to zero afterwards.
      cy.get('span.pagination-total').then(total => {
        expect(Number(total.text())).to.gt(0);
        cy.get('button[data-cy="groupActions"]').click();
        cy.get('input[type="checkbox"] + label').first().click();
        cy.get('button[data-cy="cleanupExecutions"]').click();
        cy.get('button[data-cy="cleanup"]').should('be.exist');
        cy.get('button[data-cy="cleanup"]').click();
        cy.shouldShowToast();
        cy.get('span.pagination-total').should('be.exist');
        cy.get('span.pagination-total').then(totalUpdated => {
          expect(Number(totalUpdated.text())).to.eq(0);
        });
      });
    });
    it('Should cleanup a task execution', () => {
      cy.get('.datagrid-action-toggle').should('be.visible');
      cy.get('.datagrid-action-toggle').last().click();
      cy.get('.datagrid-action-overflow button:nth-child(5)').first().click();
      cy.get('button.close').should('be.exist');
      cy.get('button.close').click();
      // TODO: check state
    });
  });
describe('Job executions', () => {
before(() => {
cy.tasks();
cy.cleanupTaskExecutions();
cy.tasks();
});
<|fim▁hole|> cy.launchTask('job-sample');
cy.wait(10 * 1000);
// TODO: check state
});
it('Should show the details of a job execution', () => {
cy.jobExecutions();
cy.get('.datagrid-action-toggle').should('be.visible');
cy.get('.datagrid-action-toggle').last().click();
cy.get('.datagrid-action-overflow').should('be.exist');
cy.get('.datagrid-action-overflow button').first().click();
// TODO: check state
});
});
});<|fim▁end|>
|
it('Should create and launch a task job', () => {
cy.createTask('job-sample', 'timestamp-batch');
|
<|file_name|>Kangaroo.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates<|fim▁hole|>
import java.util.Scanner;
/**
*
* @author samas
*/
public class Kangaroo {

    /**
     * HackerRank "Kangaroo": reads the two kangaroos' start positions and
     * jump distances (x1 v1 x2 v2) from stdin and prints YES if they will
     * ever land on the same spot after the same number of jumps, NO
     * otherwise.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        try (Scanner in = new Scanner(System.in)) {
            int x1 = in.nextInt();
            int v1 = in.nextInt();
            int x2 = in.nextInt();
            int v2 = in.nextInt();

            boolean meet;
            if (v1 == v2) {
                // Equal speeds: the gap never changes, so they only meet if
                // they already start together. This also guards the
                // divide-by-zero the original modulo check could hit.
                meet = (x1 == x2);
            } else {
                // They meet at jump t = (x1 - x2) / (v2 - v1); require t to
                // be a whole, non-negative number of jumps.
                int gap = x1 - x2;
                int closing = v2 - v1;
                meet = (gap % closing == 0) && (gap / closing >= 0);
            }
            System.out.println(meet ? "YES" : "NO");
        }
    }
}
|
* and open the template in the editor.
*/
package mx.neftaly.hackerrank.algorithms;
|
<|file_name|>Solution.py<|end_file_name|><|fim▁begin|>"""
Given an array nums, partition it into two (contiguous) subarrays left and right so that:
Every element in left is less than or equal to every element in right.
left and right are non-empty.
left has the smallest possible size.
Return the length of left after such a partitioning. It is guaranteed that such a partitioning exists.
Example 1:
Input: nums = [5,0,3,8,6]
Output: 3
Explanation: left = [5,0,3], right = [8,6]
Example 2:
Input: nums = [1,1,1,0,6,12]
Output: 4
Explanation: left = [1,1,1,0], right = [6,12]
Note:
2 <= nums.length <= 30000
0 <= nums[i] <= 106
It is guaranteed there is at least one way to partition nums as described.
"""
class Solution(object):
    def partitionDisjoint(self, A):
        """Return the length of the smallest prefix of A such that every
        element of the prefix is <= every element of the remaining suffix.

        :type A: List[int]
        :rtype: int
        """
        # prefix_max[i]: max of A[0..i]; suffix_min[i]: min of A[i..end].
        prefix_max = []
        for n in A:
            prefix_max.append(n if not prefix_max else max(prefix_max[-1], n))
        suffix_min = []
        for n in reversed(A):
            suffix_min.append(n if not suffix_min else min(suffix_min[-1], n))
        suffix_min.reverse()
        # The first split point where everything on the left is <= the
        # smallest value on the right gives the shortest valid left part.
        for i in range(len(A) - 1):
            if suffix_min[i + 1] >= prefix_max[i]:
                return i + 1
        return len(A)
|
<|file_name|>mininet_multicast_dynamic.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from groupflow_shared import *
from mininet.net import *
from mininet.node import OVSSwitch, UserSwitch
from mininet.link import TCLink
from mininet.log import setLogLevel
from mininet.cli import CLI
from mininet.node import Node, RemoteController
from scipy.stats import truncnorm
from numpy.random import randint, uniform
from subprocess import *
import sys
import signal
from time import sleep, time
from datetime import datetime
from multiprocessing import Process, Pipe
import numpy as np
import traceback
import os.path
# Hardcoded purely for testing / debug, these will be moved once functionality is stable
ARRIVAL_RATE = 5 * (1.0 / 60)
SERVICE_RATE = 1.0 / 60
TRIAL_DURATION_SECONDS = 60.0 * 3
RECEIVERS_AT_TRIAL_START = 5
STATS_RECORDING_INTERVAL = 5
MEDIA_DURATION_SECONDS = 72
def mcastTestDynamic(topo, hosts = [], log_file_name = 'test_log.log', replacement_mode='none', link_weight_type = 'linear', number_of_groups = 30, pipe = None):
test_groups = []
test_success = True
# First, check if the log file already exists, and stop the test if it does
# (This is primarily in place to allow continuing an interrupted trial set by running the same console command)
if os.path.isfile(log_file_name):
print 'Skipping trial, log file already exists: ' + str(log_file_name)
if pipe is not None:
pipe.send(test_success)
pipe.close()
return
# Launch the external controller
pox_link_weight_type = link_weight_type
static_link_weight = 0
util_link_weight = 1
if link_weight_type == 'linear': # Linear link weights
pox_link_weight_type = 'linear'
static_link_weight = 0
util_link_weight = 1
elif link_weight_type == 'sh': # Shortest-hop routing
pox_link_weight_type = 'linear'
static_link_weight = 1
util_link_weight = 0
elif link_weight_type == 'exponential': # Exponential link weights
pox_link_weight_type = 'exponential'
static_link_weight = 0
util_link_weight = 1
pox_arguments = []
static_link_weight = 0
if util_link_weight == 0:
static_link_weight = 1
if 'periodic' in replacement_mode:
pox_arguments = ['pox.py', 'log', '--file=pox.log,w', 'openflow.discovery', '--link_timeout=30', 'openflow.keepalive',
'openflow.flow_tracker', '--query_interval=1', '--link_max_bw=19', '--link_cong_threshold=13', '--avg_smooth_factor=0.5', '--log_peak_usage=True',
'misc.benchmark_terminator', 'openflow.igmp_manager', 'misc.groupflow_event_tracer',
'openflow.groupflow', '--static_link_weight=' + str(static_link_weight), '--util_link_weight=' + str(util_link_weight), '--link_weight_type=' + link_weight_type, '--flow_replacement_mode=' + replacement_mode,
'--flow_replacement_interval=10',
'log.level', '--WARNING', '--openflow.flow_tracker=INFO']
else:
pox_arguments = ['pox.py', 'log', '--file=pox.log,w', 'openflow.discovery', '--link_timeout=30', 'openflow.keepalive',
'openflow.flow_tracker', '--query_interval=1', '--link_max_bw=19', '--link_cong_threshold=13', '--avg_smooth_factor=0.5', '--log_peak_usage=True',
'misc.benchmark_terminator', 'openflow.igmp_manager', 'misc.groupflow_event_tracer',
'openflow.groupflow', '--static_link_weight=' + str(static_link_weight), '--util_link_weight=' + str(util_link_weight), '--link_weight_type=' + link_weight_type, '--flow_replacement_mode=' + replacement_mode,
'--flow_replacement_interval=10',
'log.level', '--WARNING', '--openflow.flow_tracker=INFO']
print 'Launching external controller: ' + str(pox_arguments[0])
print 'Launch arguments:'
print ' '.join(pox_arguments)
with open(os.devnull, "w") as fnull:
pox_process = Popen(pox_arguments, stdout=fnull, stderr=fnull, shell=False, close_fds=True)
# Allow time for the log file to be generated
sleep(1)
# Determine the flow tracker log file
pox_log_file = open('./pox.log', 'r')
flow_log_path = None
event_log_path = None
got_flow_log_path = False
got_event_log_path = False
while (not got_flow_log_path) or (not got_event_log_path):
pox_log = pox_log_file.readline()
if 'Writing flow tracker info to file:' in pox_log:
pox_log_split = pox_log.split()
flow_log_path = pox_log_split[-1]
got_flow_log_path = True
if 'Writing event trace info to file:' in pox_log:
pox_log_split = pox_log.split()
event_log_path = pox_log_split[-1]
got_event_log_path = True
print 'Got flow tracker log file: ' + str(flow_log_path)
print 'Got event trace log file: ' + str(event_log_path)
print 'Controller initialized'
pox_log_offset = pox_log_file.tell()
pox_log_file.close()
# External controller launched
# Launch Mininet
net = Mininet(topo, controller=RemoteController, switch=OVSSwitch, link=TCLink, build=False, autoSetMacs=True)
# pox = RemoteController('pox', '127.0.0.1', 6633)
net.addController('pox', RemoteController, ip = '127.0.0.1', port = 6633)
net.start()
for switch_name in topo.get_switch_list():
#print switch_name + ' route add -host 127.0.0.1 dev lo'
net.get(switch_name).controlIntf = net.get(switch_name).intf('lo')
net.get(switch_name).cmd('route add -host 127.0.0.1 dev lo')
#print 'pox' + ' route add -host ' + net.get(switch_name).IP() + ' dev lo'
net.get('pox').cmd('route add -host ' + net.get(switch_name).IP() + ' dev lo')
#print net.get(switch_name).cmd('ifconfig')
topo.mcastConfig(net)
# Wait for controller topology discovery
controller_init_sleep_time = 10
print 'Waiting ' + str(controller_init_sleep_time) + ' seconds to allow for controller topology discovery.'
sleep(controller_init_sleep_time)
# Mininet launched<|fim▁hole|> rand_seed = int(time())
print 'Using random seed: ' + str(rand_seed)
np.random.seed(rand_seed)
trial_start_time = time() + MEDIA_DURATION_SECONDS + 10 # Assume generation of test group events will take no more than 10 seconds
trial_end_time = trial_start_time + TRIAL_DURATION_SECONDS
mcast_group_last_octet = 1
mcast_port = 5010
for i in range(0, number_of_groups):
mcast_ip = '224.1.1.{last_octet}'.format(last_octet = str(mcast_group_last_octet))
test_group = DynamicMulticastGroupDefinition(net.hosts, mcast_ip, mcast_port, mcast_port + 1)
print 'Generating events for group: ' + mcast_ip
test_group.generate_receiver_events(trial_start_time, TRIAL_DURATION_SECONDS, RECEIVERS_AT_TRIAL_START, ARRIVAL_RATE, SERVICE_RATE)
test_groups.append(test_group)
mcast_group_last_octet += 1
mcast_port += 2
test_group_start_times = []
for i in range(0, number_of_groups):
test_group_start_times.append(uniform(0, MEDIA_DURATION_SECONDS))
test_group_start_times.sort()
# Test groups generated
# Launch initial receiver applications
group_launch_index = 0
launch_start_time = time()
while len(test_group_start_times) > 0:
cur_time = time() - launch_start_time
if cur_time >= test_group_start_times[0]:
test_group_start_times.pop(0)
print 'Launching test group ' + str(group_launch_index) + ' at launch time: ' + str(cur_time)
test_groups[group_launch_index].launch_sender_application()
test_groups[group_launch_index].update_receiver_applications(trial_start_time)
group_launch_index += 1
else:
sleep_time = test_group_start_times[0] - cur_time
sleep(sleep_time)
# Wait for trial run start time
sleep_time = trial_start_time - time()
if sleep_time < 0:
print 'WARNING: sleep_time is negative!'
else:
print 'Waiting ' + str(sleep_time) + ' seconds to allow for group initialization.'
sleep(sleep_time) # Allow time for the controller to detect the topology
# Trial has started at this point
try:
while True:
cur_time = time()
if cur_time > trial_end_time:
print 'Reached trial end at time: ' + str(cur_time)
break
next_event_time = trial_end_time
for group in test_groups:
group.update_receiver_applications(cur_time)
next_event = group.get_next_receiver_event()
if next_event is not None and next_event[0] < next_event_time:
next_event_time = next_event[0]
sleep_time = next_event_time - time()
if sleep_time < 0:
print 'WARNING: sleep_time (' + str(sleep_time) + ') is negative!'
else:
#print 'Waiting ' + str(sleep_time) + ' for next event.\n'
sleep(sleep_time)
print 'Terminating network applications'
for group in test_groups:
group.terminate_group()
print 'Network applications terminated'
print 'Terminating controller'
pox_process.send_signal(signal.SIGINT)
sleep(1)
print 'Waiting for controller termination...'
pox_process.send_signal(signal.SIGKILL)
pox_process.wait()
print 'Controller terminated'
pox_process = None
net.stop()
sleep(3)
# Print packet loss statistics
recv_packets = sum(group.get_total_recv_packets() for group in test_groups)
lost_packets = sum(group.get_total_lost_packets() for group in test_groups)
packet_loss = 0
if (recv_packets + lost_packets) != 0:
packet_loss = (float(lost_packets) / (float(recv_packets) + float(lost_packets))) * 100
print 'RecvPackets: ' + str(recv_packets) + ' LostPackets: ' + str(lost_packets) + ' PacketLoss: ' + str(packet_loss) + '%'
# Calculate mean service time (sanity check to see that exponential service time generation is working as intended)
num_apps = 0
total_service_time = 0
for group in test_groups:
for recv_app in group.receiver_applications:
num_apps += 1
total_service_time += recv_app.service_time
print 'Average Service Time: ' + str(total_service_time / num_apps)
# Delete log file if test encountered an error, or write the statistic log file if the run was succesfull
if not test_success:
call('rm -rf ' + str(flow_log_path), shell=True)
call('rm -rf ' + str(event_log_path), shell=True)
else:
write_dynamic_stats_log(log_file_name, flow_log_path, event_log_path, test_groups, topo, ARRIVAL_RATE, SERVICE_RATE,
RECEIVERS_AT_TRIAL_START, trial_start_time, trial_end_time, STATS_RECORDING_INTERVAL)
except BaseException as e:
traceback.print_exc()
test_success = False
if pipe is not None:
pipe.send(test_success)
pipe.close()
# Mininet custom-topology registry: exposes MulticastTestTopo under the
# name 'mcast_test' for use with `mn --custom ... --topo mcast_test`.
topos = { 'mcast_test': ( lambda: MulticastTestTopo() ) }
def print_usage_text():
    # Prints command-line help for the automated benchmarking entry point
    # (see the __main__ block below for how the arguments are consumed).
    print 'GroupFlow Multicast Testing with Mininet'
    print 'Usage - Automated Benchmarking:'
    print '> mininet_multicast_pox <topology_path> <iterations_to_run> <log_file_prefix> <index_of_first_log_file> <parameter_sets (number is variable and unlimited)>'
    print 'Parameter sets have the form: flow_replacement_mode,link_weight_type,number_of_groups'
    print 'The topology path "manhattan" is currently hardcoded to generate a 20 Mbps, 5x5 Manhattan grid topology'
if __name__ == '__main__':
    setLogLevel( 'info' )
    # Uncomment for easy debug testing
    #topo = ManhattanGridTopo(5, 4, 20, 1, False)
    #hosts = topo.get_host_list()
    #mcastTestDynamic(topo, hosts, 'test.log', 10, 'linear', 'none')
    #sys.exit()
    if len(sys.argv) >= 2:
        if '-h' in str(sys.argv[1]) or 'help' in str(sys.argv[1]):
            print_usage_text()
            sys.exit()
    # argv layout: [1]=topology path ('manhattan' = built-in grid),
    # [2]=iterations, [3]=log prefix, [4]=first log index,
    # [5:]=parameter sets 'flow_replacement_mode,link_weight_type,num_groups'
    if len(sys.argv) >= 6:
        # Automated simulations - Differing link usage weights in Groupflow Module
        log_prefix = sys.argv[3]
        num_iterations = int(sys.argv[2])
        first_index = int(sys.argv[4])
        util_params = []
        for param_index in range(5, len(sys.argv)):
            param_split = sys.argv[param_index].split(',')
            util_params.append((param_split[0], param_split[1], int(param_split[2])))
        topo = None
        if 'manhattan' in sys.argv[1]:
            print 'Generating Manhattan Grid Topology'
            topo = ManhattanGridTopo(5, 4, 20, 1, False)
        else:
            print 'Generating BRITE Specified Topology'
            topo = BriteTopo(sys.argv[1])
        hosts = topo.get_host_list()
        start_time = time()
        num_success = 0
        num_failure = 0
        print 'Simulations started at: ' + str(datetime.now())
        for i in range(0,num_iterations):
            for util_param in util_params:
                test_success = False
                # Each trial runs in a child process; retry until the trial
                # reports success through the pipe.
                while not test_success:
                    parent_pipe, child_pipe = Pipe()
                    p = Process(target=mcastTestDynamic, args=(topo, hosts, log_prefix + '_' + ','.join([util_param[0], util_param[1], str(util_param[2])]) + '_' + str(i + first_index) + '.log', util_param[0], util_param[1], util_param[2], child_pipe))
                    sim_start_time = time()
                    p.start()
                    p.join()
                    sim_end_time = time()
                    # Make extra sure the network terminated cleanly
                    call(['python', 'kill_running_test.py'])
                    test_success = parent_pipe.recv()
                    parent_pipe.close()
                    print 'Test Success: ' + str(test_success)
                    if test_success:
                        num_success += 1
                    else:
                        num_failure += 1
                    print 'Simulation ' + str(i+1) + '_' + ','.join([util_param[0], util_param[1], str(util_param[2])]) + ' completed at: ' + str(datetime.now()) + ' (runtime: ' + str(sim_end_time - sim_start_time) + ' seconds)'
        end_time = time()
        print ' '
        print 'Simulations completed at: ' + str(datetime.now())
        print 'Total runtime: ' + str(end_time - start_time) + ' seconds'
        print 'Average runtime per sim: ' + str((end_time - start_time) / (num_iterations * len(util_params))) + ' seconds'
        print 'Number of failed sims: ' + str(num_failure)
        print 'Number of successful sims: ' + str(num_success)
|
# Generate the test groups, and launch the sender applications
|
<|file_name|>migration.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy
from nova.db.sqlalchemy import api as db_session
from nova import exception
from nova.openstack.common.gettextutils import _
INIT_VERSION = 215
_REPOSITORY = None
get_engine = db_session.get_engine
def db_sync(version=None):
    """Migrate the database schema to ``version`` (latest when None).

    Upgrades when the target is ahead of the current schema version,
    downgrades when it is behind.

    :raises NovaException: if ``version`` is not convertible to an integer
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.NovaException(_("version should be an integer"))
    current_version = db_version()
    repository = _find_migrate_repo()
    if version is None or version > current_version:
        return versioning_api.upgrade(get_engine(), repository, version)
    else:
        return versioning_api.downgrade(get_engine(), repository,
                                        version)
def db_version():
    """Return the database's current schema version.

    If the DB is not yet under version control: an empty database is
    stamped with INIT_VERSION and that version is returned; a populated
    (pre-Essex) database is rejected.
    """
    repository = _find_migrate_repo()
    try:
        return versioning_api.db_version(get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError:
        # Inspect the schema to decide whether this is a fresh database
        # or an old, never-version-controlled one.
        meta = sqlalchemy.MetaData()
        engine = get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            db_version_control(INIT_VERSION)
            return versioning_api.db_version(get_engine(), repository)
        else:
            # Some pre-Essex DB's may not be version controlled.
            # Require them to upgrade using Essex first.
            raise exception.NovaException(
                _("Upgrade DB using Essex release first."))
def db_initial_version():
    """Return the baseline schema version that migrations start from."""
    return INIT_VERSION
def db_version_control(version=None):
    """Place the database under version control, stamped at ``version``."""
    repository = _find_migrate_repo()
    versioning_api.version_control(get_engine(), repository, version)
    return version
def _find_migrate_repo():
    """Get the path for the migrate repository.

    The Repository object is created on first use and cached in the
    module-level ``_REPOSITORY`` global.
    """
    global _REPOSITORY
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'migrate_repo')
    # The repo ships with the package; its absence indicates a broken install.
    assert os.path.exists(path)
    if _REPOSITORY is None:
        _REPOSITORY = Repository(path)
    return _REPOSITORY
|
<|file_name|>Renderer.py<|end_file_name|><|fim▁begin|>from ctypes import c_void_p
import math
import numpy as np
from OpenGL.GL import *
from OpenGL.GLU import *
from PyEngine3D.Common import logger, COMMAND
from PyEngine3D.Common.Constants import *
from PyEngine3D.Utilities import *
from PyEngine3D.OpenGLContext import InstanceBuffer, FrameBufferManager, RenderBuffer, UniformBlock, CreateTexture
from .PostProcess import AntiAliasing, PostProcess
from . import RenderTargets, RenderOption, RenderingType, RenderGroup, RenderMode
from . import SkeletonActor, StaticActor, ScreenQuad, Line
from . import Spline3D
class Renderer(Singleton):
    def __init__(self):
        """Declare all renderer state as empty slots.

        Real setup happens in ``initialize``; this only pre-declares
        attributes so the object is safe to inspect before initialization.
        """
        self.initialized = False
        self.view_mode = GL_FILL  # GL_FILL (shaded) or GL_LINE (wireframe)
        # managers
        self.core_manager = None
        self.viewport_manager = None
        self.resource_manager = None
        self.font_manager = None
        self.scene_manager = None
        self.debug_line_manager = None
        self.render_option_manager = None
        self.rendertarget_manager = None
        self.framebuffer_manager = None
        self.postprocess = None
        # components
        self.viewport = None
        self.debug_texture = None
        # current blend state, plus a one-level snapshot so
        # restore_blend_state_prev() can undo the last set_blend_state()
        self.blend_enable = False
        self.blend_equation = GL_FUNC_ADD
        self.blend_func_src = GL_SRC_ALPHA
        self.blend_func_dst = GL_ONE_MINUS_SRC_ALPHA
        self.blend_enable_prev = self.blend_enable
        self.blend_equation_prev = self.blend_equation
        self.blend_func_src_prev = self.blend_func_src
        self.blend_func_dst_prev = self.blend_func_dst
        # scene constants uniform buffer
        self.uniform_scene_buffer = None
        self.uniform_scene_data = None
        self.uniform_view_buffer = None
        self.uniform_view_data = None
        self.uniform_view_projection_buffer = None
        self.uniform_view_projection_data = None
        self.uniform_light_buffer = None
        self.uniform_light_data = None
        self.uniform_point_light_buffer = None
        self.uniform_point_light_data = None
        self.uniform_particle_common_buffer = None
        self.uniform_particle_common_data = None
        self.uniform_particle_infos_buffer = None
        self.uniform_particle_infos_data = None
        # material instances
        self.scene_constants_material = None
        self.debug_bone_material = None
        self.shadowmap_material = None
        self.shadowmap_skeletal_material = None
        self.static_object_id_material = None
        self.skeletal_object_id_material = None
        # NOTE(review): "selcted" is a long-standing typo in these attribute
        # names; kept because external code may reference them.
        self.selcted_static_object_material = None
        self.selcted_skeletal_object_material = None
        self.selcted_object_composite_material = None
        self.render_color_material = None
        self.render_heightmap_material = None
        # font
        self.font_instance_buffer = None
        self.font_shader = None
        self.actor_instance_buffer = None
        # one-shot callbacks drained by render_translucent() each frame
        self.render_custom_translucent_callbacks = []
    def initialize(self, core_manager):
        """Wire up managers, load material instances and build uniform blocks.

        :param core_manager: engine root object providing every sub-manager.

        NOTE(review): the numpy dtypes below appear to mirror the GLSL
        uniform-block layouts (including *_DUMMY padding fields for std140
        alignment) — confirm against the shader sources before reordering
        any field.
        """
        logger.info("Initialize Renderer")
        self.core_manager = core_manager
        self.viewport_manager = core_manager.viewport_manager
        self.viewport = self.viewport_manager.main_viewport
        self.resource_manager = core_manager.resource_manager
        self.render_option_manager = core_manager.render_option_manager
        self.font_manager = core_manager.font_manager
        self.scene_manager = core_manager.scene_manager
        self.debug_line_manager = core_manager.debug_line_manager
        self.rendertarget_manager = core_manager.rendertarget_manager
        self.postprocess = PostProcess()
        self.postprocess.initialize()
        self.framebuffer_manager = FrameBufferManager.instance()
        # material instances
        self.scene_constants_material = self.resource_manager.get_material_instance('scene_constants_main')
        self.debug_bone_material = self.resource_manager.get_material_instance("debug_bone")
        self.shadowmap_material = self.resource_manager.get_material_instance("shadowmap")
        self.shadowmap_skeletal_material = self.resource_manager.get_material_instance(name="shadowmap_skeletal",
                                                                                       shader_name="shadowmap",
                                                                                       macros={"SKELETAL": 1})
        self.static_object_id_material = self.resource_manager.get_material_instance(name="render_static_object_id",
                                                                                     shader_name="render_object_id")
        self.skeletal_object_id_material = self.resource_manager.get_material_instance(name="render_skeletal_object_id",
                                                                                       shader_name="render_object_id",
                                                                                       macros={"SKELETAL": 1})
        self.selcted_static_object_material = self.resource_manager.get_material_instance("selected_object")
        self.selcted_skeletal_object_material = self.resource_manager.get_material_instance(name="selected_object_skeletal",
                                                                                            shader_name="selected_object",
                                                                                            macros={"SKELETAL": 1})
        self.selcted_object_composite_material = self.resource_manager.get_material_instance("selected_object_composite")
        self.render_color_material = self.resource_manager.get_material_instance(name="render_object_color", shader_name="render_object_color")
        self.render_heightmap_material = self.resource_manager.get_material_instance(name="render_heightmap", shader_name="render_heightmap")
        # font
        self.font_shader = self.resource_manager.get_material_instance("font")
        self.font_instance_buffer = InstanceBuffer(name="font_offset", location_offset=1, element_datas=[FLOAT4_ZERO, ])
        # instance buffer
        self.actor_instance_buffer = InstanceBuffer(name="actor_instance_buffer", location_offset=7, element_datas=[MATRIX4_IDENTITY, ])
        # scene constants uniform buffer
        program = self.scene_constants_material.get_program()
        self.uniform_scene_data = np.zeros(1, dtype=[('TIME', np.float32),
                                                     ('JITTER_FRAME', np.float32),
                                                     ('RENDER_SSR', np.int32),
                                                     ('RENDER_SSAO', np.int32),
                                                     ('SCREEN_SIZE', np.float32, 2),
                                                     ('BACKBUFFER_SIZE', np.float32, 2),
                                                     ('MOUSE_POS', np.float32, 2),
                                                     ('DELTA_TIME', np.float32),
                                                     ('SCENE_DUMMY_0', np.int32)])
        self.uniform_scene_buffer = UniformBlock("scene_constants", program, 0, self.uniform_scene_data)
        self.uniform_view_data = np.zeros(1, dtype=[('VIEW', np.float32, (4, 4)),
                                                    ('INV_VIEW', np.float32, (4, 4)),
                                                    ('VIEW_ORIGIN', np.float32, (4, 4)),
                                                    ('INV_VIEW_ORIGIN', np.float32, (4, 4)),
                                                    ('PROJECTION', np.float32, (4, 4)),
                                                    ('INV_PROJECTION', np.float32, (4, 4)),
                                                    ('CAMERA_POSITION', np.float32, 3),
                                                    ('VIEW_DUMMY_0', np.float32),
                                                    ('NEAR_FAR', np.float32, 2),
                                                    ('JITTER_DELTA', np.float32, 2),
                                                    ('JITTER_OFFSET', np.float32, 2),
                                                    ('VIEWCONSTANTS_DUMMY0', np.float32, 2)])
        self.uniform_view_buffer = UniformBlock("view_constants", program, 1, self.uniform_view_data)
        self.uniform_view_projection_data = np.zeros(1, dtype=[('VIEW_PROJECTION', np.float32, (4, 4)),
                                                               ('PREV_VIEW_PROJECTION', np.float32, (4, 4))])
        self.uniform_view_projection_buffer = UniformBlock("view_projection", program, 2,
                                                           self.uniform_view_projection_data)
        self.uniform_light_data = np.zeros(1, dtype=[('SHADOW_MATRIX', np.float32, (4, 4)),
                                                     ('LIGHT_POSITION', np.float32, 3),
                                                     ('SHADOW_EXP', np.float32),
                                                     ('LIGHT_DIRECTION', np.float32, 3),
                                                     ('SHADOW_BIAS', np.float32),
                                                     ('LIGHT_COLOR', np.float32, 3),
                                                     ('SHADOW_SAMPLES', np.int32)])
        self.uniform_light_buffer = UniformBlock("light_constants", program, 3, self.uniform_light_data)
        self.uniform_point_light_data = np.zeros(MAX_POINT_LIGHTS, dtype=[('color', np.float32, 3),
                                                                          ('radius', np.float32),
                                                                          ('pos', np.float32, 3),
                                                                          ('render', np.float32)])
        self.uniform_point_light_buffer = UniformBlock("point_light_constants", program, 4, self.uniform_point_light_data)
        self.uniform_particle_common_data = np.zeros(1, dtype=[
            ('PARTICLE_COLOR', np.float32, 3),
            ('PARTICLE_ALIGN_MODE', np.int32),
            ('PARTICLE_CELL_COUNT', np.int32, 2),
            ('PARTICLE_BLEND_MODE', np.int32),
            ('PARTICLE_COMMON_DUMMY_0', np.int32)
        ])
        self.uniform_particle_common_buffer = UniformBlock("particle_common", program, 5, self.uniform_particle_common_data)
        self.uniform_particle_infos_data = np.zeros(1, dtype=[
            ('PARTICLE_PARENT_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_DELAY', np.float32, 2),
            ('PARTICLE_LIFE_TIME', np.float32, 2),
            ('PARTICLE_TRANSFORM_ROTATION_MIN', np.float32, 3),
            ('PARTICLE_FADE_IN', np.float32),
            ('PARTICLE_TRANSFORM_ROTATION_MAX', np.float32, 3),
            ('PARTICLE_FADE_OUT', np.float32),
            ('PARTICLE_TRANSFORM_SCALE_MIN', np.float32, 3),
            ('PARTICLE_OPACITY', np.float32),
            ('PARTICLE_TRANSFORM_SCALE_MAX', np.float32, 3),
            ('PARTICLE_ENABLE_VECTOR_FIELD', np.int32),
            ('PARTICLE_VELOCITY_POSITION_MIN', np.float32, 3),
            ('PARTICLE_VECTOR_FIELD_STRENGTH', np.float32),
            ('PARTICLE_VELOCITY_POSITION_MAX', np.float32, 3),
            ('PARTICLE_VECTOR_FIELD_TIGHTNESS', np.float32),
            ('PARTICLE_VELOCITY_ROTATION_MIN', np.float32, 3),
            ('PARTICLE_MAX_COUNT', np.uint32),
            ('PARTICLE_VELOCITY_ROTATION_MAX', np.float32, 3),
            ('PARTICLE_SPAWN_COUNT', np.uint32),
            ('PARTICLE_VELOCITY_SCALE_MIN', np.float32, 3),
            ('PARTICLE_VELOCITY_STRETCH', np.float32),
            ('PARTICLE_VELOCITY_SCALE_MAX', np.float32, 3),
            ('PARTICLE_VELOCITY_ACCELERATION', np.float32),
            ('PARTICLE_VECTOR_FIELD_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_VECTOR_FIELD_INV_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_SPAWN_VOLUME_INFO', np.float32, 3),
            ('PARTICLE_SPAWN_VOLUME_TYPE', np.uint32),
            ('PARTICLE_SPAWN_VOLUME_MATRIX', np.float32, (4, 4)),
            ('PARTICLE_VELOCITY_LIMIT', np.float32, 2),
            ('PARTICLE_FORCE_GRAVITY', np.float32),
            ('PARTICLE_PLAY_SPEED', np.float32),
            ('PARTICLE_VELOCITY_TYPE', np.uint32),
            ('PARTICLE_FORCE_ELASTICITY', np.float32),
            ('PARTICLE_FORCE_FRICTION', np.float32),
            ('PARTICLE_DUMMY_0', np.uint32),
        ])
        self.uniform_particle_infos_buffer = UniformBlock("particle_infos", program, 6, self.uniform_particle_infos_data)

        # Strip the enum class prefix, e.g. "RenderingType.DEFERRED" -> "DEFERRED".
        def get_rendering_type_name(rendering_type):
            rendering_type = str(rendering_type)
            return rendering_type.split('.')[-1] if '.' in rendering_type else rendering_type
        rendering_type_list = [get_rendering_type_name(RenderingType.convert_index_to_enum(x)) for x in range(RenderingType.COUNT.value)]
        self.initialized = True
        # Send to GUI
        self.core_manager.send_rendering_type_list(rendering_type_list)
    def close(self):
        """Shutdown hook; no renderer-owned resources to release currently."""
        pass
    def render_custom_translucent(self, render_custom_translucent_callback):
        # Queue a one-shot callback; render_translucent() invokes all queued
        # callbacks after the translucent passes and then clears the list.
        self.render_custom_translucent_callbacks.append(render_custom_translucent_callback)
def set_blend_state(self, blend_enable=True, equation=GL_FUNC_ADD, func_src=GL_SRC_ALPHA, func_dst=GL_ONE_MINUS_SRC_ALPHA):
self.blend_enable_prev = self.blend_enable
self.blend_equation_prev = self.blend_equation
self.blend_func_src_prev = self.blend_func_src
self.blend_func_dst_prev = self.blend_func_dst
self.blend_enable = blend_enable
if blend_enable:
self.blend_equation = equation
self.blend_func_src = func_src
self.blend_func_dst = func_dst
glEnable(GL_BLEND)
glBlendEquation(equation)
glBlendFunc(func_src, func_dst)
else:
glDisable(GL_BLEND)
    def restore_blend_state_prev(self):
        """Re-apply the blend state captured by the last set_blend_state()."""
        self.set_blend_state(self.blend_enable_prev,
                             self.blend_equation_prev,
                             self.blend_func_src_prev,
                             self.blend_func_dst_prev)
def set_view_mode(self, view_mode):
if view_mode == COMMAND.VIEWMODE_WIREFRAME:
self.view_mode = GL_LINE
elif view_mode == COMMAND.VIEWMODE_SHADING:
self.view_mode = GL_FILL
    def reset_renderer(self):
        """Rebuild size-dependent state after a resize/device reset.

        Order matters: framebuffers are cleared before render targets are
        recreated, and light probes are invalidated so they re-render.
        """
        self.scene_manager.update_camera_projection_matrix(aspect=self.core_manager.game_backend.aspect)
        self.framebuffer_manager.clear_framebuffer()
        self.rendertarget_manager.create_rendertargets()
        self.scene_manager.reset_light_probe()
        # reclaim textures/buffers released above
        self.core_manager.gc_collect()
    def ortho_view(self, look_at=True):
        """Set fixed-function matrices for 2D drawing over the viewport.

        :param look_at: also apply the camera transform via look_at().
        """
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        # pixel-space ortho projection: origin bottom-left
        glOrtho(0, self.viewport.width, 0, self.viewport.height, -1, 1)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        if look_at:
            self.look_at()
    def perspective_view(self, look_at=True):
        """Set fixed-function matrices from the main camera's projection.

        :param look_at: also apply the camera transform via look_at().
        """
        camera = self.scene_manager.main_camera
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        gluPerspective(camera.fov, camera.aspect, camera.near, camera.far)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        if look_at:
            self.look_at()
    def look_at(self):
        """Apply the inverse camera transform to the fixed-function modelview.

        Scale, orientation and translation are applied in that GL order
        (so they compose as translate -> rotate -> scale on vertices).
        """
        camera = self.scene_manager.main_camera
        # camera looks down its negative front axis
        camera_target = -camera.transform.front
        camera_up = camera.transform.up
        glScalef(*(1.0 / camera.transform.get_scale()))
        gluLookAt(0.0, 0.0, 0.0, *camera_target, *camera_up)
        glTranslatef(*(-camera.transform.get_pos()))
def set_debug_texture(self, texture):
if texture is not None and texture is not RenderTargets.BACKBUFFER and type(texture) != RenderBuffer:
self.debug_texture = texture
self.postprocess.is_render_material_instance = False
logger.info("Current texture : %s" % self.debug_texture.name)
else:
self.debug_texture = None
    def bind_uniform_blocks(self):
        """Fill and upload the per-frame uniform blocks (scene, view, lights).

        Skips silently when the scene has no main camera or main light.
        """
        camera = self.scene_manager.main_camera
        main_light = self.scene_manager.main_light
        if not camera or not main_light:
            return
        # jitter pattern index; wraps every 16 frames
        frame_count = self.core_manager.frame_count % 16
        uniform_data = self.uniform_scene_data
        uniform_data['TIME'] = self.core_manager.current_time
        uniform_data['JITTER_FRAME'] = frame_count
        uniform_data['RENDER_SSR'] = self.postprocess.is_render_ssr
        uniform_data['RENDER_SSAO'] = self.postprocess.is_render_ssao
        uniform_data['SCREEN_SIZE'] = (self.core_manager.game_backend.width, self.core_manager.game_backend.height)
        uniform_data['BACKBUFFER_SIZE'] = (RenderTargets.BACKBUFFER.width, RenderTargets.BACKBUFFER.height)
        uniform_data['MOUSE_POS'] = self.core_manager.get_mouse_pos()
        uniform_data['DELTA_TIME'] = self.core_manager.delta
        self.uniform_scene_buffer.bind_uniform_block(data=uniform_data)
        uniform_data = self.uniform_view_data
        uniform_data['VIEW'][...] = camera.view
        uniform_data['INV_VIEW'][...] = camera.inv_view
        uniform_data['VIEW_ORIGIN'][...] = camera.view_origin
        uniform_data['INV_VIEW_ORIGIN'][...] = camera.inv_view_origin
        # jittered projection matrices (used for temporal AA)
        uniform_data['PROJECTION'][...] = camera.projection_jitter
        uniform_data['INV_PROJECTION'][...] = camera.inv_projection_jitter
        uniform_data['CAMERA_POSITION'][...] = camera.transform.get_pos()
        uniform_data['NEAR_FAR'][...] = (camera.near, camera.far)
        uniform_data['JITTER_DELTA'][...] = self.postprocess.jitter_delta
        uniform_data['JITTER_OFFSET'][...] = self.postprocess.jitter
        self.uniform_view_buffer.bind_uniform_block(data=uniform_data)
        uniform_data = self.uniform_light_data
        uniform_data['SHADOW_MATRIX'][...] = main_light.shadow_view_projection
        uniform_data['SHADOW_EXP'] = main_light.shadow_exp
        uniform_data['SHADOW_BIAS'] = main_light.shadow_bias
        uniform_data['SHADOW_SAMPLES'] = main_light.shadow_samples
        uniform_data['LIGHT_POSITION'][...] = main_light.transform.get_pos()
        uniform_data['LIGHT_DIRECTION'][...] = main_light.transform.front
        uniform_data['LIGHT_COLOR'][...] = main_light.light_color[:3]
        self.uniform_light_buffer.bind_uniform_block(data=uniform_data)
        # point-light array is filled elsewhere; just upload it
        self.uniform_point_light_buffer.bind_uniform_block(data=self.uniform_point_light_data)
    def render_light_probe(self, light_probe):
        """Render the scene into a cubemap light probe and convolve it.

        Renders six cube faces twice (atmosphere-only, then full scene),
        then convolves each mip level for image-based lighting.  All global
        render options and camera state are saved up front and restored at
        the end.  No-op if the probe was already rendered.
        """
        if light_probe.isRendered:
            return
        logger.info("Rendering Light Probe")
        # Set Valid
        light_probe.isRendered = True
        camera = self.scene_manager.main_camera
        # save camera + render-option state (restored below)
        old_pos = camera.transform.get_pos().copy()
        old_rot = camera.transform.get_rotation().copy()
        old_fov = camera.fov
        old_aspect = camera.aspect
        old_render_font = RenderOption.RENDER_FONT
        old_render_skeleton = RenderOption.RENDER_SKELETON_ACTOR
        old_render_effect = RenderOption.RENDER_EFFECT
        old_render_collision = RenderOption.RENDER_COLLISION
        old_render_ssr = self.postprocess.is_render_ssr
        old_render_motion_blur = self.postprocess.is_render_motion_blur
        old_antialiasing = self.postprocess.anti_aliasing
        old_debug_absolute = self.postprocess.debug_absolute
        old_debug_mipmap = self.postprocess.debug_mipmap
        old_debug_intensity_min = self.postprocess.debug_intensity_min
        old_debug_intensity_max = self.postprocess.debug_intensity_max
        # set render light probe
        RenderOption.RENDER_LIGHT_PROBE = True
        RenderOption.RENDER_SKELETON_ACTOR = False
        RenderOption.RENDER_EFFECT = False
        RenderOption.RENDER_FONT = False
        self.postprocess.is_render_motion_blur = False
        self.postprocess.anti_aliasing = AntiAliasing.NONE_AA
        # 90-degree FOV with square aspect covers exactly one cube face
        camera.update_projection(fov=90.0, aspect=1.0)

        def render_cube_face(dst_texture, target_face, pos, rotation):
            # point the camera at one face, render, then blit HDR into it
            camera.transform.set_pos(pos)
            camera.transform.set_rotation(rotation)
            camera.update(force_update=True)
            # render
            self.render_scene()
            # copy
            src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
            self.framebuffer_manager.bind_framebuffer(dst_texture, target_face=target_face)
            glClear(GL_COLOR_BUFFER_BIT)
            self.framebuffer_manager.mirror_framebuffer(src_framebuffer)
            return dst_texture
        target_faces = [GL_TEXTURE_CUBE_MAP_POSITIVE_X,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
                        GL_TEXTURE_CUBE_MAP_POSITIVE_Y,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
                        GL_TEXTURE_CUBE_MAP_POSITIVE_Z,
                        GL_TEXTURE_CUBE_MAP_NEGATIVE_Z]
        pos = light_probe.transform.get_pos()
        # per-face camera euler rotations matching target_faces order
        camera_rotations = [[0.0, math.pi * 1.5, 0.0],
                            [0.0, math.pi * 0.5, 0.0],
                            [math.pi * -0.5, math.pi * 1.0, 0.0],
                            [math.pi * 0.5, math.pi * 1.0, 0.0],
                            [0.0, math.pi * 1.0, 0.0],
                            [0.0, 0.0, 0.0]]
        # render atmosphere scene to light_probe textures.
        RenderOption.RENDER_ONLY_ATMOSPHERE = True
        texture_cube = RenderTargets.LIGHT_PROBE_ATMOSPHERE
        for i in range(6):
            render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
        texture_cube.generate_mipmap()
        # render final scene to temp textures.
        RenderOption.RENDER_ONLY_ATMOSPHERE = False
        texture_cube = light_probe.texture_probe
        for i in range(6):
            render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
        texture_cube.generate_mipmap()
        # convolution: prefilter every mip level of every face for IBL
        texture_info = light_probe.texture_probe.get_texture_info()
        texture_info['name'] = 'temp_cube'
        temp_cube = CreateTexture(**texture_info)
        mipmap_count = temp_cube.get_mipmap_count()
        face_matrixies = [np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[0, 0, -1, 0], [0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], dtype=np.float32),
                          np.array([[-1, 0, 0, 0], [0, 1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]], dtype=np.float32)]
        convolve_environment = self.resource_manager.get_material_instance('convolve_environment')
        convolve_environment.use_program()
        for i in range(6):
            for lod in range(mipmap_count):
                self.framebuffer_manager.bind_framebuffer(temp_cube, target_face=target_faces[i], target_level=lod)
                glClear(GL_COLOR_BUFFER_BIT)
                convolve_environment.bind_uniform_data("texture_environment", texture_cube)
                convolve_environment.bind_uniform_data("face_matrix", face_matrixies[i])
                convolve_environment.bind_uniform_data("lod", float(lod))
                convolve_environment.bind_uniform_data("mipmap_count", float(mipmap_count))
                self.postprocess.draw_elements()
        light_probe.replace_texture_probe(temp_cube)
        # NOTE(review): return value unused — presumably hands the replaced
        # probe texture back to the pool for reuse; confirm intended.
        self.rendertarget_manager.get_temporary('temp_cube', light_probe.texture_probe)
        # restore global options and camera state saved above
        RenderOption.RENDER_LIGHT_PROBE = False
        RenderOption.RENDER_SKELETON_ACTOR = old_render_skeleton
        RenderOption.RENDER_EFFECT = old_render_effect
        RenderOption.RENDER_FONT = old_render_font
        RenderOption.RENDER_COLLISION = old_render_collision
        self.postprocess.is_render_ssr = old_render_ssr
        self.postprocess.is_render_motion_blur = old_render_motion_blur
        self.postprocess.anti_aliasing = old_antialiasing
        self.postprocess.debug_absolute = old_debug_absolute
        self.postprocess.debug_mipmap = old_debug_mipmap
        self.postprocess.debug_intensity_min = old_debug_intensity_min
        self.postprocess.debug_intensity_max = old_debug_intensity_max
        camera.update_projection(old_fov, old_aspect)
        camera.transform.set_pos(old_pos)
        camera.transform.set_rotation(old_rot)
        camera.update(force_update=True)
    def render_gbuffer(self):
        """Fill the G-buffer (diffuse/material/normal/depth) and velocity.

        Static geometry writes velocity in a separate full-screen pass from
        depth; skeletal actors write per-vertex velocity directly in their
        G-buffer pass (they need bone motion, not just camera motion).
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
                                                  RenderTargets.MATERIAL,
                                                  RenderTargets.WORLD_NORMAL,
                                                  depth_texture=RenderTargets.DEPTH)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        # render terrain
        if self.scene_manager.terrain.is_render_terrain:
            self.scene_manager.terrain.render_terrain(RenderMode.GBUFFER)
        # render static actor
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.GBUFFER,
                               self.scene_manager.static_solid_render_infos)
        # render velocity (camera motion reconstructed from depth)
        self.framebuffer_manager.bind_framebuffer(RenderTargets.VELOCITY)
        glClear(GL_COLOR_BUFFER_BIT)
        if RenderOption.RENDER_STATIC_ACTOR:
            self.postprocess.render_velocity(RenderTargets.DEPTH)
        # render skeletal actor gbuffer (velocity bound as extra target)
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
                                                      RenderTargets.MATERIAL,
                                                      RenderTargets.WORLD_NORMAL,
                                                      RenderTargets.VELOCITY,
                                                      depth_texture=RenderTargets.DEPTH)
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.GBUFFER,
                               self.scene_manager.skeleton_solid_render_infos)
    def render_shadow(self):
        """Render static and dynamic shadow maps and composite them.

        Static (terrain + static actors) and dynamic (skeletal actors)
        depth maps are rendered separately, then merged into
        COMPOSITE_SHADOWMAP by a full-screen pass.
        """
        light = self.scene_manager.main_light
        # both current and previous view-projection use the light's matrix
        self.uniform_view_projection_data['VIEW_PROJECTION'][...] = light.shadow_view_projection
        self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = light.shadow_view_projection
        self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
        # static shadow
        self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.STATIC_SHADOWMAP)
        glClear(GL_DEPTH_BUFFER_BIT)
        glFrontFace(GL_CCW)
        if self.scene_manager.terrain.is_render_terrain:
            self.scene_manager.terrain.render_terrain(RenderMode.SHADOW)
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR, RenderMode.SHADOW, self.scene_manager.static_shadow_render_infos, self.shadowmap_material)
        # dynamic shadow
        self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.DYNAMIC_SHADOWMAP)
        glClear(GL_DEPTH_BUFFER_BIT)
        glFrontFace(GL_CCW)
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.render_actors(RenderGroup.SKELETON_ACTOR, RenderMode.SHADOW, self.scene_manager.skeleton_shadow_render_infos, self.shadowmap_skeletal_material)
        # composite shadow maps
        self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT)
        glDisable(GL_CULL_FACE)
        self.postprocess.render_composite_shadowmap(RenderTargets.STATIC_SHADOWMAP, RenderTargets.DYNAMIC_SHADOWMAP)
    def render_preprocess(self):
        """Screen-space preprocessing: linear depth, SSR, SSAO.

        Must run after the G-buffer pass (reads DEPTH/normal/material) and
        before shading (writes the textures the shading passes sample).
        """
        # Linear depth
        self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT)
        self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
        # Screen Space Reflection
        if self.postprocess.is_render_ssr:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_screen_space_reflection(RenderTargets.HDR,
                                                            RenderTargets.WORLD_NORMAL,
                                                            RenderTargets.MATERIAL,
                                                            RenderTargets.VELOCITY,
                                                            RenderTargets.LINEAR_DEPTH)
            # swap ssr resolve textures (ping-pong for temporal reuse)
            RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV = \
                RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_screen_space_reflection_resolve(RenderTargets.SCREEN_SPACE_REFLECTION,
                                                                    RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV,
                                                                    RenderTargets.VELOCITY)
        # SSAO (rendered, then blurred through a temporary target)
        if self.postprocess.is_render_ssao:
            temp_ssao = self.rendertarget_manager.get_temporary('temp_ssao', RenderTargets.SSAO)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.SSAO)
            glClearColor(1.0, 1.0, 1.0, 1.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.postprocess.render_ssao(texture_size=(RenderTargets.SSAO.width, RenderTargets.SSAO.height),
                                         texture_lod=self.rendertarget_manager.texture_lod_in_ssao,
                                         texture_normal=RenderTargets.WORLD_NORMAL,
                                         texture_linear_depth=RenderTargets.LINEAR_DEPTH)
            self.postprocess.render_gaussian_blur(RenderTargets.SSAO, temp_ssao)
def render_solid(self):
if RenderingType.DEFERRED_RENDERING == self.render_option_manager.rendering_type:
self.postprocess.render_deferred_shading(self.scene_manager.get_light_probe_texture(),
self.scene_manager.atmosphere)
elif RenderingType.FORWARD_RENDERING == self.render_option_manager.rendering_type:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_solid_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_solid_render_infos)
def render_translucent(self):
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_translucent_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_translucent_render_infos)
for render_custom_translucent_callback in self.render_custom_translucent_callbacks:
render_custom_translucent_callback()
self.render_custom_translucent_callbacks.clear()
def render_effect(self):
self.scene_manager.effect_manager.render()
    def render_actors(self, render_group, render_mode, render_infos, scene_material_instance=None):
        """Draw a batch of render infos, minimizing redundant state changes.

        :param render_group: RenderGroup.STATIC_ACTOR or SKELETON_ACTOR.
        :param render_mode: GBUFFER / FORWARD_SHADING / SHADOW / OBJECT_ID / GIZMO.
        :param render_infos: iterable of (actor, geometry, material, material_instance).
        :param scene_material_instance: pass-wide material override (e.g. a
            shadowmap or object-id shader); when given, it is used for the
            per-actor uniforms instead of the actor's own material instance.

        The last_* caches skip program/material re-binding when consecutive
        infos share the same actor/material — render_infos is expected to be
        sorted accordingly.
        """
        if len(render_infos) < 1:
            return
        last_actor = None
        last_actor_material = None
        last_actor_material_instance = None
        if scene_material_instance is not None:
            scene_material_instance.use_program()
            scene_material_instance.bind_material_instance()
        # render
        for render_info in render_infos:
            actor = render_info.actor
            geometry = render_info.geometry
            actor_material = render_info.material
            actor_material_instance = render_info.material_instance
            is_instancing = actor.is_instancing()
            if RenderMode.GBUFFER == render_mode or RenderMode.FORWARD_SHADING == render_mode:
                if last_actor_material != actor_material and actor_material is not None:
                    actor_material.use_program()
                if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
                    actor_material_instance.bind_material_instance()
                    actor_material_instance.bind_uniform_data('is_render_gbuffer', RenderMode.GBUFFER == render_mode)
                    if RenderMode.FORWARD_SHADING == render_mode:
                        # forward shading samples probe/shadow/ssao/ssr directly
                        actor_material_instance.bind_uniform_data('texture_probe', self.scene_manager.get_light_probe_texture())
                        actor_material_instance.bind_uniform_data('texture_shadow', RenderTargets.COMPOSITE_SHADOWMAP)
                        actor_material_instance.bind_uniform_data('texture_ssao', RenderTargets.SSAO)
                        actor_material_instance.bind_uniform_data('texture_scene_reflect', RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
                        # Bind Atmosphere
                        self.scene_manager.atmosphere.bind_precomputed_atmosphere(actor_material_instance)
            elif RenderMode.SHADOW == render_mode:
                if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
                    # get diffuse texture from actor material instance
                    # (shadow shader needs it for alpha-tested geometry)
                    data_diffuse = actor_material_instance.get_uniform_data('texture_diffuse')
                    scene_material_instance.bind_uniform_data('texture_diffuse', data_diffuse)
            if last_actor != actor:
                material_instance = scene_material_instance or actor_material_instance
                if RenderMode.OBJECT_ID == render_mode:
                    material_instance.bind_uniform_data('object_id', actor.get_object_id())
                elif RenderMode.GIZMO == render_mode:
                    material_instance.bind_uniform_data('color', actor.get_object_color())
                material_instance.bind_uniform_data('is_instancing', is_instancing)
                material_instance.bind_uniform_data('model', actor.transform.matrix)
                if render_group == RenderGroup.SKELETON_ACTOR:
                    # upload current and previous-frame bone palettes
                    animation_buffer = actor.get_animation_buffer(geometry.skeleton.index)
                    prev_animation_buffer = actor.get_prev_animation_buffer(geometry.skeleton.index)
                    material_instance.bind_uniform_data('bone_matrices', animation_buffer, num=len(animation_buffer))
                    material_instance.bind_uniform_data('prev_bone_matrices', prev_animation_buffer, num=len(prev_animation_buffer))
            # draw
            if is_instancing:
                geometry.draw_elements_instanced(actor.get_instance_render_count(), self.actor_instance_buffer, [actor.instance_matrix, ])
            else:
                geometry.draw_elements()
            last_actor = actor
            last_actor_material = actor_material
            last_actor_material_instance = actor_material_instance
    def render_selected_object(self):
        """Draw a selection-highlight overlay for the selected object.

        Renders the selection mask into TEMP_RGBA8, then alpha-blends a
        composite pass onto the backbuffer.  Returns early (no composite)
        when the selected object's type is not handled.
        """
        selected_object = self.scene_manager.get_selected_object()
        if selected_object is not None:
            self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_RGBA8)
            glDisable(GL_DEPTH_TEST)
            glDepthMask(False)
            glClearColor(0.0, 0.0, 0.0, 0.0)
            glClear(GL_COLOR_BUFFER_BIT)
            self.set_blend_state(False)
            object_type = type(selected_object)
            if SkeletonActor == object_type and RenderOption.RENDER_SKELETON_ACTOR:
                self.render_actors(RenderGroup.SKELETON_ACTOR,
                                   RenderMode.SELECTED_OBJECT,
                                   self.scene_manager.selected_object_render_info,
                                   self.selcted_skeletal_object_material)
            elif StaticActor == object_type and RenderOption.RENDER_STATIC_ACTOR:
                self.render_actors(RenderGroup.STATIC_ACTOR,
                                   RenderMode.SELECTED_OBJECT,
                                   self.scene_manager.selected_object_render_info,
                                   self.selcted_static_object_material)
            elif Spline3D == object_type:
                self.debug_line_manager.bind_render_spline_program()
                self.debug_line_manager.render_spline(selected_object, Float4(1.0, 1.0, 1.0, 1.0))
            else:
                # unsupported selection type: skip the composite pass
                return
            # composite
            self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
            self.selcted_object_composite_material.use_program()
            self.selcted_object_composite_material.bind_uniform_data("texture_mask", RenderTargets.TEMP_RGBA8)
            self.postprocess.draw_elements()
    def render_axis_gizmo(self, render_mode):
        """Draw the translate/rotate axis gizmo for the selected object.

        :param render_mode: RenderMode.GIZMO (colored) or RenderMode.OBJECT_ID
            (per-axis pick ids).  Other modes would leave material_instance
            as None and fail below — callers only pass these two.
        """
        if self.scene_manager.get_selected_object() is not None:
            axis_gizmo_actor = self.scene_manager.get_axis_gizmo()
            material_instance = None
            if RenderMode.GIZMO == render_mode:
                material_instance = self.render_color_material
            elif RenderMode.OBJECT_ID == render_mode:
                material_instance = self.static_object_id_material
            material_instance.use_program()
            material_instance.bind_uniform_data('is_instancing', False)
            material_instance.bind_uniform_data('model', axis_gizmo_actor.transform.matrix)
            geometries = axis_gizmo_actor.get_geometries()
            # each geometry is one axis handle with its own color / pick id
            for i, geometry in enumerate(geometries):
                if RenderMode.GIZMO == render_mode:
                    material_instance.bind_uniform_data('color', axis_gizmo_actor.get_object_color(i))
                elif RenderMode.OBJECT_ID == render_mode:
                    material_instance.bind_uniform_data('object_id', axis_gizmo_actor.get_object_id(i))
                geometry.draw_elements()
    def render_object_id(self):
        """Render the object-id pick buffer used for mouse selection.

        Writes per-object ids into OBJECT_ID with its own depth buffer;
        gizmo handles are drawn last with depth cleared so they always win
        the pick.
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.OBJECT_ID, depth_texture=RenderTargets.OBJECT_ID_DEPTH)
        glDisable(GL_CULL_FACE)
        glEnable(GL_DEPTH_TEST)
        glDepthMask(True)
        glClearColor(0.0, 0.0, 0.0, 0.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        self.set_blend_state(False)
        # render static actor object id
        if RenderOption.RENDER_STATIC_ACTOR:
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.static_solid_render_infos,
                               self.static_object_id_material)
            self.render_actors(RenderGroup.STATIC_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.static_translucent_render_infos,
                               self.static_object_id_material)
        # render skeletal actor object id
        if RenderOption.RENDER_SKELETON_ACTOR:
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.skeleton_solid_render_infos,
                               self.skeletal_object_id_material)
            self.render_actors(RenderGroup.SKELETON_ACTOR,
                               RenderMode.OBJECT_ID,
                               self.scene_manager.skeleton_translucent_render_infos,
                               self.skeletal_object_id_material)
        # spline object id (id encoded into the line color, widened for picking)
        self.debug_line_manager.bind_render_spline_program()
        for spline in self.scene_manager.splines:
            object_id = spline.get_object_id()
            self.debug_line_manager.render_spline(spline, Float4(object_id, object_id, object_id, 1.0), add_width=10.0)
        # spline gizmo object id
        self.render_actors(RenderGroup.STATIC_ACTOR,
                           RenderMode.OBJECT_ID,
                           self.scene_manager.spline_gizmo_render_infos,
                           self.static_object_id_material)
        # gizmo object id (depth cleared so gizmo handles are always pickable)
        glClear(GL_DEPTH_BUFFER_BIT)
        self.render_axis_gizmo(RenderMode.OBJECT_ID)
    def render_heightmap(self, actor):
        """Rasterize an actor's geometry into the height map target.

        Uses GL_MAX blending so overlapping geometry keeps the highest value,
        then builds a max-z mip chain when the target has mipmaps.

        NOTE(review): glClearColor is set but glClear is never called — the
        GL_MAX blend accumulates over the target's existing contents; confirm
        whether a clear was intended here.
        """
        self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_HEIGHT_MAP)
        self.set_blend_state(blend_enable=True, equation=GL_MAX, func_src=GL_ONE, func_dst=GL_ONE)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        glDisable(GL_CULL_FACE)
        glDisable(GL_DEPTH_TEST)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        self.render_heightmap_material.use_program()
        self.render_heightmap_material.bind_material_instance()
        self.render_heightmap_material.bind_uniform_data('model', actor.transform.matrix)
        # bound box maps world height into the map's value range
        self.render_heightmap_material.bind_uniform_data('bound_box_min', actor.bound_box.bound_min)
        self.render_heightmap_material.bind_uniform_data('bound_box_max', actor.bound_box.bound_max)
        actor.get_geometry(0).draw_elements()
        if RenderTargets.TEMP_HEIGHT_MAP.enable_mipmap:
            self.postprocess.render_generate_max_z(RenderTargets.TEMP_HEIGHT_MAP)
    def render_bones(self):
        """Debug-draw skeleton bones of static actors as stretched cubes.

        Walks each skeleton hierarchy recursively; a bone is drawn as a cube
        stretched between its transform and each child's transform (leaf
        bones get a unit extension along their own Y axis).  Bone transforms
        come from the current animation frame when one is playing, otherwise
        from the inverse bind pose.
        """
        glDisable(GL_DEPTH_TEST)
        glDisable(GL_CULL_FACE)
        mesh = self.resource_manager.get_mesh("Cube")
        static_actors = self.scene_manager.static_actors[:]

        if mesh and self.debug_bone_material:
            material_instance = self.debug_bone_material
            material_instance.use_program()
            material_instance.bind()

            # NOTE: closes over `frame` from the loop below.
            def draw_bone(mesh, skeleton_mesh, parent_matrix, material_instance, bone, root_matrix, isAnimation):
                if isAnimation:
                    bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
                else:
                    # rest pose: invert the inverse-bind matrix
                    bone_transform = np.linalg.inv(bone.inv_bind_matrix)

                if bone.children:
                    for child_bone in bone.children:
                        if isAnimation:
                            bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
                            child_transform = skeleton_mesh.get_animation_transform(child_bone.name, frame)
                        else:
                            bone_transform = np.linalg.inv(bone.inv_bind_matrix)
                            child_transform = np.linalg.inv(child_bone.inv_bind_matrix)
                        # mat1/mat2 are the cube's two end transforms
                        material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
                        material_instance.bind_uniform_data("mat2", np.dot(child_transform, root_matrix))
                        mesh.draw_elements()
                        draw_bone(mesh, skeleton_mesh, bone_transform.copy(), material_instance, child_bone, root_matrix, isAnimation)
                else:
                    material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
                    # leaf bone: extend one unit along the bone's Y axis
                    child_transform = np.dot(bone_transform, root_matrix)
                    child_transform[3, :] += child_transform[1, :]
                    material_instance.bind_uniform_data("mat2", child_transform)
                    mesh.draw_elements()

            for static_actor in static_actors:
                if static_actor.model and static_actor.model.mesh and static_actor.model.mesh.skeletons:
                    skeletons = static_actor.model.mesh.skeletons
                    skeleton_mesh = static_actor.model.mesh
                    frame_count = skeleton_mesh.get_animation_frame_count()
                    # 30 fps playback clock derived from engine time
                    frame = math.fmod(self.core_manager.current_time * 30.0, frame_count) if frame_count > 0.0 else 0.0
                    isAnimation = frame_count > 0.0
                    for skeleton in skeletons:
                        matrix = static_actor.transform.matrix
                        for bone in skeleton.hierachy:
                            draw_bone(mesh, skeleton_mesh, Matrix4().copy(), material_instance, bone, matrix, isAnimation)
def render_postprocess(self):
# bind frame buffer
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
# copy HDR target
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Temporal AA
if AntiAliasing.TAA == self.postprocess.anti_aliasing:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_temporal_antialiasing(RenderTargets.HDR_TEMP,
RenderTargets.TAA_RESOLVE,
RenderTargets.VELOCITY)
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
self.framebuffer_manager.bind_framebuffer(RenderTargets.TAA_RESOLVE)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Bloom
if self.postprocess.is_render_bloom:
self.postprocess.render_bloom(RenderTargets.HDR)
# Light Shaft
if self.postprocess.is_render_light_shaft:
self.framebuffer_manager.bind_framebuffer(RenderTargets.LIGHT_SHAFT)
self.postprocess.render_light_shaft(RenderTargets.ATMOSPHERE, RenderTargets.DEPTH)
# Depth Of Field
if self.postprocess.is_render_depth_of_field:
self.postprocess.render_depth_of_field()
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
RenderTargets.HDR.generate_mipmap()
# Tone Map
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_tone_map(RenderTargets.HDR,
RenderTargets.BLOOM_0,
RenderTargets.BLOOM_1,
RenderTargets.BLOOM_2,
RenderTargets.BLOOM_3,
RenderTargets.BLOOM_4,
RenderTargets.LIGHT_SHAFT)
# MSAA Test
if AntiAliasing.MSAA == self.postprocess.anti_aliasing:
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
# resolve MSAA
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Motion Blur
if self.postprocess.is_render_motion_blur:
backbuffer_copy = self.rendertarget_manager.get_temporary('backbuffer_copy', RenderTargets.BACKBUFFER)
self.framebuffer_manager.bind_framebuffer(backbuffer_copy)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_motion_blur(RenderTargets.VELOCITY, RenderTargets.BACKBUFFER)
# copy to backbuffer
src_framebuffer = self.framebuffer_manager.get_framebuffer(backbuffer_copy)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
def render_log(self):
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
self.font_manager.render_log(self.viewport.width, self.viewport.height)
def render_text(self, text_render_data, offset_x, offset_y, canvas_width, canvas_height):
if 0 < text_render_data.render_count:
self.font_shader.use_program()
self.font_shader.bind_material_instance()
self.font_shader.bind_uniform_data("texture_font", text_render_data.font_data.texture)
self.font_shader.bind_uniform_data("font_size", text_render_data.font_size)
self.font_shader.bind_uniform_data("offset", (offset_x, offset_y))
self.font_shader.bind_uniform_data("inv_canvas_size", (1.0 / canvas_width, 1.0 / canvas_height))
self.font_shader.bind_uniform_data("count_of_side", text_render_data.font_data.count_of_side)
self.postprocess.draw_elements_instanced(text_render_data.render_count, self.font_instance_buffer, [text_render_data.render_queue, ])
def render_axis(self):
camera = self.scene_manager.main_camera
line_thickness = 2.0
line_length = 100.0
line_size = Float2(line_length / self.core_manager.game_backend.width, line_length / self.core_manager.game_backend.height)
line_offset = line_size - 1.0
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[2][0:2] * line_size, color=Float4(0.0, 0.0, 1.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[1][0:2] * line_size, color=Float4(0.0, 1.0, 0.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[0][0:2] * line_size, color=Float4(1.0, 0.0, 0.0, 1.0), width=line_thickness)
def render_scene(self):
main_camera = self.scene_manager.main_camera
# bind scene constants uniform blocks
self.bind_uniform_blocks()
self.set_blend_state(False)
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST)
glPolygonMode(GL_FRONT_AND_BACK, self.view_mode)
# glEnable(GL_FRAMEBUFFER_SRGB)
glEnable(GL_MULTISAMPLE)
glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS)
glDepthFunc(GL_LEQUAL)
glEnable(GL_CULL_FACE)
glFrontFace(GL_CCW)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
glClearColor(0.0, 0.0, 0.0, 1.0)
glClearDepth(1.0)
if self.postprocess.is_render_shader() and not RenderOption.RENDER_LIGHT_PROBE:
""" debug shader """
self.set_blend_state(False)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_material_instance()
elif RenderOption.RENDER_ONLY_ATMOSPHERE and RenderOption.RENDER_LIGHT_PROBE:
""" render light probe preprocess """
self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.WORLD_NORMAL, depth_texture=RenderTargets.DEPTH)
glClearColor(0.0, 1.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
glClearColor(0.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
# render atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
RenderTargets.COMPOSITE_SHADOWMAP,
RenderOption.RENDER_LIGHT_PROBE)
# done render light probe preprocess
return
else:
""" render normal scene """
self.scene_manager.ocean.simulateFFTWaves()
# render gbuffer & preprocess
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
self.render_gbuffer()
self.render_preprocess()
self.render_shadow()
# render solid
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
glFrontFace(GL_CCW)
glDepthMask(False) # cause depth prepass and gbuffer
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glClear(GL_COLOR_BUFFER_BIT)
self.render_solid()
# copy HDR Target
src_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
dst_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
glClear(GL_COLOR_BUFFER_BIT)
dst_framebuffer.copy_framebuffer(src_framebuffer)
src_framebuffer.bind_framebuffer()
# set common projection matrix
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
# render ocean
if self.scene_manager.ocean.is_render_ocean:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glDisable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
self.scene_manager.ocean.render_ocean(atmosphere=self.scene_manager.atmosphere,
texture_scene=RenderTargets.HDR_TEMP,
texture_linear_depth=RenderTargets.LINEAR_DEPTH,
texture_probe=RenderTargets.LIGHT_PROBE_ATMOSPHERE,
texture_shadow=RenderTargets.COMPOSITE_SHADOWMAP)
# re copy Linear depth
self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
# render atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.framebuffer_manager.bind_framebuffer(RenderTargets.ATMOSPHERE,
RenderTargets.ATMOSPHERE_INSCATTER)
self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
RenderTargets.COMPOSITE_SHADOWMAP,
RenderOption.RENDER_LIGHT_PROBE)
glEnable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glDepthMask(False)
# Composite Atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
self.set_blend_state(True, GL_FUNC_ADD, GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
composite_atmosphere = self.resource_manager.get_material_instance("precomputed_atmosphere.composite_atmosphere")
composite_atmosphere.use_program()
above_the_cloud = self.scene_manager.atmosphere.cloud_altitude < main_camera.transform.get_pos()[1]
composite_atmosphere.bind_uniform_data("above_the_cloud", above_the_cloud)
composite_atmosphere.bind_uniform_data("inscatter_power", self.scene_manager.atmosphere.inscatter_power)
composite_atmosphere.bind_uniform_data("texture_atmosphere", RenderTargets.ATMOSPHERE)
composite_atmosphere.bind_uniform_data("texture_inscatter", RenderTargets.ATMOSPHERE_INSCATTER)
composite_atmosphere.bind_uniform_data("texture_linear_depth", RenderTargets.LINEAR_DEPTH)
self.postprocess.draw_elements()
# prepare translucent
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glEnable(GL_DEPTH_TEST)
# Translucent
self.render_translucent()
# render particle
if RenderOption.RENDER_EFFECT:
glDisable(GL_CULL_FACE)
glEnable(GL_BLEND)
self.render_effect()
glDisable(GL_BLEND)
glEnable(GL_CULL_FACE)
# render probe done
if RenderOption.RENDER_LIGHT_PROBE:
return
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
self.set_blend_state(False)
self.render_postprocess()
if RenderOption.RENDER_OBJECT_ID:
self.render_object_id()
self.render_selected_object()
# debug render target
if self.debug_texture is not None:
self.set_blend_state(False)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_texture(self.debug_texture)
if RenderOption.RENDER_FONT:
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.render_log()
if RenderOption.RENDER_DEBUG_LINE and self.debug_texture is None:
# render world axis
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
self.render_axis()
self.debug_line_manager.bind_render_spline_program()
for spline in self.scene_manager.splines:
self.debug_line_manager.render_spline(spline)
self.debug_line_manager.render_debug_lines()
if RenderOption.RENDER_GIZMO and self.debug_texture is None:
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
# render spline gizmo
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.GIZMO,
self.scene_manager.spline_gizmo_render_infos,
self.render_color_material)
# render transform axis gizmo
glClear(GL_DEPTH_BUFFER_BIT)
self.render_axis_gizmo(RenderMode.GIZMO)<|fim▁end|>
| |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import click
from twentyfourhourvideo import video
@click.command(help='Plays a video.')
@click.argument('input', type=click.Path())
def main(input):
video.play(input)<|fim▁end|>
| |
<|file_name|>zip.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module provides functionality to work with zip files."""
# Perhaps all methods should work with a wildcard to limit searches in some
# way (examples: *.po, base.xlf, pootle-terminology.tbx)
#TODO: consider also providing directories as we currently provide files
#TODO: refactor with existing zip code (xpi.py, etc.)
from os import path
from zipfile import ZipFile
from translate.storage import factory
from translate.storage import directory
from translate.misc import wStringIO
class ZIPFile(directory.Directory):
"""This class represents a ZIP file like a directory."""
def __init__(self, filename=None):
self.filename = filename
self.filedata = []
def unit_iter(self):
"""Iterator over all the units in all the files in this zip file."""
for dirname, filename in self.file_iter():
strfile = wStringIO.StringIO(self.archive.read(path.join(dirname, filename)))
strfile.filename = filename
store = factory.getobject(strfile)
#TODO: don't regenerate all the storage objects<|fim▁hole|>
def scanfiles(self):
"""Populate the internal file data."""
self.filedata = []
self.archive = ZipFile(self.filename)
for completename in self.archive.namelist():
dir, name = path.split(completename)
self.filedata.append((dir, name))<|fim▁end|>
|
for unit in store.unit_iter():
yield unit
|
<|file_name|>gears.js.uncompressed.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
version https://git-lfs.github.com/spec/v1
oid sha256:ba35426ce7731b677aeb9fca46043dc31d9cd6a87abedb15382e4c3bc001b548
size 1760
|
<|file_name|>b58.rs<|end_file_name|><|fim▁begin|>// Rust Bitcoin Library
// Written in 2014 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! Base58 encoder and decoder
use std::{error, fmt, str};
use byteorder::{ByteOrder, LittleEndian};
use util::hash::DoubleSha256;
use address::Error;
static BASE58_CHARS: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";
static BASE58_DIGITS: [Option<u8>; 128] = [
None, None, None, None, None, None, None, None, // 0-7
None, None, None, None, None, None, None, None, // 8-15
None, None, None, None, None, None, None, None, // 16-23
None, None, None, None, None, None, None, None, // 24-31
None, None, None, None, None, None, None, None, // 32-39
None, None, None, None, None, None, None, None, // 40-47
None, Some(0), Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), // 48-55
Some(7), Some(8), None, None, None, None, None, None, // 56-63
None, Some(9), Some(10), Some(11), Some(12), Some(13), Some(14), Some(15), // 64-71
Some(16), None, Some(17), Some(18), Some(19), Some(20), Some(21), None, // 72-79
Some(22), Some(23), Some(24), Some(25), Some(26), Some(27), Some(28), Some(29), // 80-87
Some(30), Some(31), Some(32), None, None, None, None, None, // 88-95
None, Some(33), Some(34), Some(35), Some(36), Some(37), Some(38), Some(39), // 96-103
Some(40), Some(41), Some(42), Some(43), None, Some(44), Some(45), Some(46), // 104-111
Some(47), Some(48), Some(49), Some(50), Some(51), Some(52), Some(53), Some(54), // 112-119
Some(55), Some(56), Some(57), None, None, None, None, None, // 120-127
];
/// Decode base58-encoded string into a byte vector
pub fn from(data: &str) -> Result<Vec<u8>, Error> {
// 11/15 is just over log_256(58)
let mut scratch = vec![0u8; 1 + data.len() * 11 / 15];
// Build in base 256
for d58 in data.bytes() {
// Compute "X = X * 58 + next_digit" in base 256
if d58 as usize > BASE58_DIGITS.len() {
return Err(Error::BadByte(d58));
}
let mut carry = match BASE58_DIGITS[d58 as usize] {
Some(d58) => d58 as u32,
None => { return Err(Error::BadByte(d58)); }
};
for d256 in scratch.iter_mut().rev() {
carry += *d256 as u32 * 58;
*d256 = carry as u8;
carry /= 256;
}
assert_eq!(carry, 0);
}
// Copy leading zeroes directly
let mut ret: Vec<u8> = data.bytes().take_while(|&x| x == BASE58_CHARS[0])
.map(|_| 0)
.collect();
// Copy rest of string
ret.extend(scratch.into_iter().skip_while(|&x| x == 0));
Ok(ret)
}
/// Decode a base58check-encoded string
pub fn from_check(data: &str) -> Result<Vec<u8>, Error> {
let mut ret: Vec<u8> = from(data)?;
if ret.len() < 4 {
return Err(Error::TooShort(ret.len()));
}
let ck_start = ret.len() - 4;
let expected = DoubleSha256::from_data(&ret[..ck_start]).into_le().low_u32();
let actual = LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]);
if expected != actual {
return Err(Error::BadChecksum(expected, actual));
}
ret.truncate(ck_start);
Ok(ret)
}
fn encode_iter_utf8<I>(data: I) -> Vec<u8>
where
I: Iterator<Item = u8> + Clone,
{
let (len, _) = data.size_hint();
// 7/5 is just over log_58(256)
let mut ret = Vec::with_capacity(1 + len * 7 / 5);
let mut leading_zero_count = 0;
let mut leading_zeroes = true;
// Build string in little endian with 0-58 in place of characters...
for d256 in data {
let mut carry = d256 as usize;
if leading_zeroes && carry == 0 {
leading_zero_count += 1;
} else {
leading_zeroes = false;
}
for ch in ret.iter_mut() {
let new_ch = *ch as usize * 256 + carry;
*ch = (new_ch % 58) as u8;
carry = new_ch / 58;
}
while carry > 0 {
ret.push((carry % 58) as u8);
carry /= 58;
}
}
// ... then reverse it and convert to chars
for _ in 0..leading_zero_count {
ret.push(0);
}
ret.reverse();
for ch in ret.iter_mut() {
*ch = BASE58_CHARS[*ch as usize];
}
ret
}
fn encode_iter<I>(data: I) -> String
where
I: Iterator<Item = u8> + Clone,
{
let ret = encode_iter_utf8(data);
String::from_utf8(ret).unwrap()
}
/// Directly encode a slice as base58 into a `Formatter`.
fn encode_iter_to_fmt<I>(fmt: &mut fmt::Formatter, data: I) -> fmt::Result
where
I: Iterator<Item = u8> + Clone,
{
let ret = encode_iter_utf8(data);
fmt.write_str(str::from_utf8(&ret).unwrap())
}
/// Directly encode a slice as base58
pub fn encode_slice(data: &[u8]) -> String {
encode_iter(data.iter().cloned())
}
/// Obtain a string with the base58check encoding of a slice
/// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.)
pub fn check_encode_slice(data: &[u8]) -> String {
let checksum = DoubleSha256::from_data(&data);
encode_iter(
data.iter()
.cloned()
.chain(checksum[0..4].iter().cloned())
)
}
/// Obtain a string with the base58check encoding of a slice
/// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.)
pub fn check_encode_slice_to_fmt(fmt: &mut fmt::Formatter, data: &[u8]) -> fmt::Result {
let checksum = DoubleSha256::from_data(&data);
let iter = data.iter()
.cloned()
.chain(checksum[0..4].iter().cloned());
encode_iter_to_fmt(fmt, iter)
}
#[cfg(test)]
mod tests {
use super::*;
use util::hash::hex_bytes as hex_decode;
#[test]
fn test_base58_encode() {
// Basics
assert_eq!(&encode_slice(&[0][..]), "1");
assert_eq!(&encode_slice(&[1][..]), "2");
assert_eq!(&encode_slice(&[58][..]), "21");
assert_eq!(&encode_slice(&[13, 36][..]), "211");
// Leading zeroes
assert_eq!(&encode_slice(&[0, 13, 36][..]), "1211");
assert_eq!(&encode_slice(&[0, 0, 0, 0, 13, 36][..]), "1111211");
// Addresses
let addr = hex_decode("00f8917303bfa8ef24f292e8fa1419b20460ba064d").unwrap();
assert_eq!(&check_encode_slice(&addr[..]), "1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH");
}
#[test]
fn test_base58_decode() {
// Basics
assert_eq!(from("1").ok(), Some(vec![0u8]));
assert_eq!(from("2").ok(), Some(vec![1u8]));
assert_eq!(from("21").ok(), Some(vec![58u8]));
assert_eq!(from("211").ok(), Some(vec![13u8, 36]));
// Leading zeroes
assert_eq!(from("1211").ok(), Some(vec![0u8, 13, 36]));<|fim▁hole|> assert_eq!(from_check("1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH").ok(),
Some(hex_decode("00f8917303bfa8ef24f292e8fa1419b20460ba064d").unwrap()))
}
#[test]
fn test_base58_roundtrip() {
let s = "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs";
let v: Vec<u8> = from_check(s).unwrap();
assert_eq!(check_encode_slice(&v[..]), s);
assert_eq!(from_check(&check_encode_slice(&v[..])).ok(), Some(v));
}
}<|fim▁end|>
|
assert_eq!(from("111211").ok(), Some(vec![0u8, 0, 0, 13, 36]));
// Addresses
|
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RProtgenerics(RPackage):<|fim▁hole|> url = "https://git.bioconductor.org/packages/ProtGenerics"
list_url = homepage
version('1.8.0', git='https://git.bioconductor.org/packages/ProtGenerics', commit='b2b3bb0938e20f58fca905f6870de7dbc9dfd7a3')
depends_on('[email protected]:3.4.9', when='@1.8.0')<|fim▁end|>
|
"""S4 generic functions needed by Bioconductor proteomics packages."""
homepage = "https://bioconductor.org/packages/ProtGenerics/"
|
<|file_name|>qvm-stop.py<|end_file_name|><|fim▁begin|>import os
import re
import cmd
import sys
import time
import util
host = sys.argv[1]
cmd.run ("virsh shutdown %s"%(host))
while util.vm_is_running(host):<|fim▁hole|><|fim▁end|>
|
time.sleep(1)
|
<|file_name|>yDragBoxLayerTests.ts<|end_file_name|><|fim▁begin|>///<reference path="../testReference.ts" />
describe("Interactive Components", () => {
describe("YDragBoxLayer", () => {
let SVG_WIDTH = 400;
let SVG_HEIGHT = 400;
it("bounds()", () => {
let svg = TestMethods.generateSVG(SVG_WIDTH, SVG_HEIGHT);
let dbl = new Plottable.Components.YDragBoxLayer();
dbl.boxVisible(true);
dbl.renderTo(svg);
let topLeft = {
x: SVG_WIDTH / 4,
y: SVG_HEIGHT / 4
};
let bottomRight = {
x: SVG_WIDTH / 2,
y: SVG_HEIGHT / 2
};
dbl.bounds({
topLeft: topLeft,
bottomRight: bottomRight
});
let actualBounds = dbl.bounds();
assert.strictEqual(actualBounds.topLeft.x, 0, "box starts at left");
assert.strictEqual(actualBounds.topLeft.y, topLeft.y, "top edge set correctly");
assert.strictEqual(actualBounds.bottomRight.x, dbl.width(), "box ends at right");
assert.strictEqual(actualBounds.bottomRight.y, bottomRight.y, "bottom edge set correctly");
svg.remove();
});
it("resizes only in y", () => {
let svg = TestMethods.generateSVG(SVG_WIDTH, SVG_HEIGHT);
let dbl = new Plottable.Components.YDragBoxLayer();
dbl.boxVisible(true);
dbl.resizable(true);
dbl.renderTo(svg);
let topLeft = {
x: SVG_WIDTH / 4,
y: SVG_HEIGHT / 4
};
let bottomRight = {
x: SVG_WIDTH / 2,
y: SVG_HEIGHT / 2
};
dbl.bounds({
topLeft: topLeft,
bottomRight: bottomRight
});
let actualBounds = dbl.bounds();
let dragTo = {
x: SVG_WIDTH / 2,
y: SVG_HEIGHT * 3 / 4
};
TestMethods.triggerFakeDragSequence(dbl.background(), actualBounds.bottomRight, dragTo);
actualBounds = dbl.bounds();
assert.strictEqual(actualBounds.topLeft.x, 0, "box still starts at left");
assert.strictEqual(actualBounds.bottomRight.x, dbl.width(), "box still ends at right");
assert.strictEqual(actualBounds.bottomRight.y, dragTo.y, "resized in y");
svg.remove();
});
it("stays full width after resizing", () => {
let svg = TestMethods.generateSVG(SVG_WIDTH, SVG_HEIGHT);
let dbl = new Plottable.Components.YDragBoxLayer();
dbl.boxVisible(true);
dbl.resizable(true);
dbl.renderTo(svg);
let topLeft = {
x: SVG_WIDTH / 4,
y: SVG_HEIGHT / 4
};
let bottomRight = {
x: SVG_WIDTH / 2,
y: SVG_HEIGHT / 2
};
dbl.bounds({
topLeft: topLeft,
bottomRight: bottomRight
});
let widthBefore = dbl.width();
let boundsBefore = dbl.bounds();
svg.attr("width", 2 * SVG_WIDTH);
dbl.redraw();
assert.notStrictEqual(dbl.width(), widthBefore, "component changed size");
let boundsAfter = dbl.bounds();
assert.strictEqual(boundsAfter.topLeft.x, 0, "box still starts at left");
assert.strictEqual(boundsAfter.topLeft.y, boundsBefore.topLeft.y, "box keeps same top edge");
assert.strictEqual(boundsAfter.bottomRight.x, dbl.width(), "box still ends at right");
assert.strictEqual(boundsAfter.bottomRight.y, boundsBefore.bottomRight.y, "box keeps same bottom edge");
svg.remove();
});
it("throws error on getting x scale", () => {
let dbl = new Plottable.Components.YDragBoxLayer();
assert.throws(() => dbl.xScale(), "no xScale");
});
<|fim▁hole|> assert.throws(() => dbl.xScale(new Plottable.Scales.Linear()), "xScales cannot be set");
});
it("throws error on getting x extent", () => {
let dbl = new Plottable.Components.YDragBoxLayer();
assert.throws(() => dbl.xExtent(), "no xExtent");
});
it("moves only in y", () => {
let svg = TestMethods.generateSVG(SVG_WIDTH, SVG_HEIGHT);
let dbl = new Plottable.Components.YDragBoxLayer();
dbl.boxVisible(true);
dbl.movable(true);
dbl.renderTo(svg);
let topLeft = {
x: SVG_WIDTH / 4,
y: SVG_HEIGHT / 4
};
let bottomRight = {
x: SVG_WIDTH * 3 / 4,
y: SVG_HEIGHT * 3 / 4
};
dbl.bounds({
topLeft: topLeft,
bottomRight: bottomRight
});
let boundsBefore = dbl.bounds();
let dragDistance = 10;
TestMethods.triggerFakeDragSequence(dbl.background(),
{ x: SVG_WIDTH / 2, y: SVG_HEIGHT / 2 },
{ x: SVG_WIDTH / 2 + dragDistance, y: SVG_HEIGHT / 2 + dragDistance }
);
let boundsAfter = dbl.bounds();
assert.strictEqual(boundsAfter.topLeft.x, 0, "box still starts at left");
assert.strictEqual(boundsAfter.topLeft.y, boundsBefore.topLeft.y + dragDistance, "top edge moved");
assert.strictEqual(boundsAfter.bottomRight.x, dbl.width(), "box still ends at right");
assert.strictEqual(boundsAfter.bottomRight.y, boundsBefore.bottomRight.y + dragDistance, "bottom edge moved");
svg.remove();
});
it("destroy() does not error if scales are not inputted", () => {
let svg = TestMethods.generateSVG();
let sbl = new Plottable.Components.YDragBoxLayer();
sbl.renderTo(svg);
assert.doesNotThrow(() => sbl.destroy(), Error, "can destroy");
svg.remove();
});
});
});<|fim▁end|>
|
it("throws error on setting x scale", () => {
let dbl = new Plottable.Components.YDragBoxLayer();
|
<|file_name|>F2.py<|end_file_name|><|fim▁begin|>import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER FEC CAND ID', 'number': '2'},
{'name': 'CANDIDATE NAME', 'number': '3'},
{'name': 'STREET 1', 'number': '4'},
{'name': 'STREET 2', 'number': '5'},
{'name': 'CITY', 'number': '6'},
{'name': 'STATE', 'number': '7'},<|fim▁hole|> {'name': 'ZIP', 'number': '8'},
{'name': 'PTY/CODE', 'number': '9'},
{'name': 'CAN/OFFICE', 'number': '10'},
{'name': 'CAN/STATE', 'number': '11'},
{'name': 'CAN/DIST', 'number': '12'},
{'name': 'YEAR OF ELECTION 1900-2999', 'number': '13'},
{'name': 'FEC COMMITTEE ID NUMBER (PCC)', 'number': '14'},
{'name': 'COMMITTEE NAME (PCC)', 'number': '15'},
{'name': 'STREET 1', 'number': '16'},
{'name': 'STREET 2', 'number': '17'},
{'name': 'CITY', 'number': '18'},
{'name': 'STATE', 'number': '19'},
{'name': 'ZIP', 'number': '20'},
{'name': 'FEC COMMITTEE ID NUMBER (Auth)', 'number': '21'},
{'name': 'COMMITTEE NAME (Auth)', 'number': '22'},
{'name': 'STREET 1', 'number': '23'},
{'name': 'STREET 2', 'number': '24'},
{'name': 'CITY', 'number': '25'},
{'name': 'STATE', 'number': '26'},
{'name': 'ZIP', 'number': '27'},
{'name': 'NAME/CAN (as signed)', 'number': '28'},
{'name': 'Signed', 'number': '29-'},
{'name': 'PRI PERSONAL FUNDS DECLARED', 'number': '30'},
{'name': 'GEN PERSONAL FUNDS DECLARED', 'number': '31'},
]
self.fields_names = self.hash_names(self.fields)<|fim▁end|>
| |
<|file_name|>test_config_flow.py<|end_file_name|><|fim▁begin|>"""Tests for the Linky config flow."""
from pylinky.exceptions import (
PyLinkyAccessException,
PyLinkyEnedisException,
PyLinkyException,
PyLinkyWrongLoginException,
)
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.linky.const import DEFAULT_TIMEOUT, DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
USERNAME = "[email protected]"
USERNAME_2 = "[email protected]"
PASSWORD = "password"
TIMEOUT = 20
@pytest.fixture(name="login")
def mock_controller_login():
    """Yield a patched LinkyClient whose login always succeeds."""
    with patch(
        "homeassistant.components.linky.config_flow.LinkyClient"
    ) as client_mock:
        instance = client_mock.return_value
        instance.login = Mock(return_value=True)
        instance.close_session = Mock(return_value=None)
        yield client_mock
@pytest.fixture(name="fetch_data")
def mock_controller_fetch_data():
    """Yield a patched LinkyClient whose data fetch succeeds with empty data."""
    with patch(
        "homeassistant.components.linky.config_flow.LinkyClient"
    ) as client_mock:
        instance = client_mock.return_value
        instance.fetch_data = Mock(return_value={})
        instance.close_session = Mock(return_value=None)
        yield client_mock
async def test_user(hass: HomeAssistantType, login, fetch_data):
    """Test user config."""
    # Starting the flow with no data must show the "user" form first.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=None
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    # test with all provided
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    # Entry is created keyed by username; timeout falls back to the default.
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].unique_id == USERNAME
    assert result["title"] == USERNAME
    assert result["data"][CONF_USERNAME] == USERNAME
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_TIMEOUT] == DEFAULT_TIMEOUT
async def test_import(hass: HomeAssistantType, login, fetch_data):
    """Test import step."""
    # import with username and password
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    # Minimal YAML import: timeout falls back to the default.
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].unique_id == USERNAME
    assert result["title"] == USERNAME
    assert result["data"][CONF_USERNAME] == USERNAME
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_TIMEOUT] == DEFAULT_TIMEOUT

    # import with all
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={
            CONF_USERNAME: USERNAME_2,
            CONF_PASSWORD: PASSWORD,
            CONF_TIMEOUT: TIMEOUT,
        },
    )
    # Full YAML import: the explicit timeout is preserved.
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].unique_id == USERNAME_2
    assert result["title"] == USERNAME_2
    assert result["data"][CONF_USERNAME] == USERNAME_2
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_TIMEOUT] == TIMEOUT
async def test_abort_if_already_setup(hass: HomeAssistantType, login, fetch_data):
    """Test we abort if Linky is already setup."""
    # Pre-register an entry for USERNAME so both flow sources collide with it.
    MockConfigEntry(
        domain=DOMAIN,
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
        unique_id=USERNAME,
    ).add_to_hass(hass)

    # Should fail, same USERNAME (import)
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"

    # Should fail, same USERNAME (flow)
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
async def test_login_failed(hass: HomeAssistantType, login):
    """Test when we have errors during login."""
    # Access error -> form re-shown with the "access" base error.
    login.return_value.login.side_effect = PyLinkyAccessException()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "access"}
    hass.config_entries.flow.async_abort(result["flow_id"])

    # Bad credentials -> form re-shown with the "wrong_login" base error.
    login.return_value.login.side_effect = PyLinkyWrongLoginException()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "wrong_login"}
    hass.config_entries.flow.async_abort(result["flow_id"])
async def test_fetch_failed(hass: HomeAssistantType, login):
    """Test when we have errors during fetch."""
    # Access error while fetching -> "access" base error.
    login.return_value.fetch_data.side_effect = PyLinkyAccessException()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "access"}
    hass.config_entries.flow.async_abort(result["flow_id"])

    # Enedis backend error -> "enedis" base error.
    login.return_value.fetch_data.side_effect = PyLinkyEnedisException()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "enedis"}
    hass.config_entries.flow.async_abort(result["flow_id"])

    # Any other pylinky error -> "unknown" base error.
    login.return_value.fetch_data.side_effect = PyLinkyException()
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "unknown"}
    hass.config_entries.flow.async_abort(result["flow_id"])
| |
<|file_name|>LuaNodeEmitter.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010, Anima Games, Benjamin Karaban, Laurent Schneider,
* Jérémie Comarmond, Didier Colin.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <Universe/Lua/LuaNode.h>
#include <Universe/Lua/LuaConstants.h>
#include <Universe/Particles/PartEffectAttractor.h>
#include <Universe/Particles/PartEffectForce.h>
namespace Universe
{
//-----------------------------------------------------------------------------
/// Casts the wrapped scene node to a NodeEmitter; reports a Lua error via
/// badNodeType when the node is not an emitter.
Ptr<NodeEmitter> LuaNode::getNodeEmitter(lua_State* L) const
{
    if(_pNode->getNodeType() != NODE_EMITTER)
        badNodeType(L, NODE_EMITTER);
    else
        return LM_DEBUG_PTR_CAST<NodeEmitter>(_pNode);
    return null; // reached only if badNodeType returns to the caller
}
//-----------------------------------------------------------------------------
/// Lua binding: sets the [min, max] emission period (float args 1-2) on a
/// storm node or, otherwise, on an emitter node.
/// Note: reconstructed — the storm branch had been displaced by a dataset
/// artifact in the original text.
int LuaNode::setPeriod(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        pStorm->setPeriod(luat_tofloat(L, 1), luat_tofloat(L, 2));
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        pEmitter->setPeriod(luat_tofloat(L, 1), luat_tofloat(L, 2));
    }
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: sets whether emitted particles stick to the emitter (bool arg 1).
int LuaNode::setStickyParticles(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
    pEmitter->setStickyParticles(luat_toboolean(L, 1));
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: sets the [min, max] generation time (float args 1-2) on a
/// storm node or, otherwise, on an emitter node.
int LuaNode::setGenerationTime(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        pStorm->setGenerationTime(luat_tofloat(L, 1), luat_tofloat(L, 2));
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        pEmitter->setGenerationTime(luat_tofloat(L, 1), luat_tofloat(L, 2));
    }
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: sets the kill-at-end flag (bool arg 1) on a storm, sound
/// source or emitter node, depending on the wrapped node's type.
int LuaNode::killAtEnd(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        pStorm->killAtEnd(luat_toboolean(L, 1));
    }
    else if(_pNode->getNodeType() == NODE_SOUND)
    {
        Ptr<NodeSoundSource> pSound(getNodeSoundSource(L));
        pSound->killAtEnd(luat_toboolean(L, 1));
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        pEmitter->killAtEnd(luat_toboolean(L, 1));
    }
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: pushes the node's [min, max] emission period as two numbers.
int LuaNode::getPeriod(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    float p1, p2;
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        pStorm->getPeriod(p1, p2);
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        pEmitter->getPeriod(p1, p2);
    }
    lua_pushnumber(L, p1);
    lua_pushnumber(L, p2);
    return 2; // two return values on the Lua stack
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: pushes the node's [min, max] generation time as two numbers.
int LuaNode::getGenerationTime(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    double p1, p2;
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        pStorm->getGenerationTime(p1, p2);
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        pEmitter->getGenerationTime(p1, p2);
    }
    lua_pushnumber(L, p1);
    lua_pushnumber(L, p2);
    return 2; // two return values on the Lua stack
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: pushes the emitter's sticky-particles flag as a boolean.
int LuaNode::getStickyParticles(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
    luat_pushboolean(L, pEmitter->getStickyParticles());
    return 1;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: pushes the node's kill-at-end flag as a boolean, dispatching
/// on the wrapped node's type.
int LuaNode::getKillAtEnd(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    if(_pNode->getNodeType() == NODE_STORM)
    {
        Ptr<NodeStorm> pStorm(getNodeStorm(L));
        luat_pushboolean(L, pStorm->killAtEnd());
    }
    else if (_pNode->getNodeType() == NODE_SOUND)
    {
        Ptr<NodeSoundSource> pNode(getNodeSoundSource(L));
        // NOTE(review): uses raw lua_pushboolean while the other branches use
        // the luat_pushboolean wrapper — confirm this inconsistency is intended.
        lua_pushboolean(L, pNode->killAtEnd());
    }
    else
    {
        Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
        luat_pushboolean(L, pEmitter->killAtEnd());
    }
    return 1;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: attaches an attractor particle effect (position arg 1,
/// strength arg 2) to the emitter.
int LuaNode::addAttractor(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
    Ptr<Universe::PartEffectAttractor> pAttractor(new Universe::PartEffectAttractor(luat_tovec3f(L, 1), luat_tofloat(L, 2)));
    pEmitter->addEffect(pAttractor);
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: attaches a constant-force particle effect (direction arg 1,
/// magnitude arg 2) to the emitter.
int LuaNode::addForce(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
    Ptr<Universe::PartEffectForce> pForce(new Universe::PartEffectForce(luat_tovec3f(L, 1), luat_tofloat(L, 2)));
    pEmitter->addEffect(pForce);
    return 0;
    LM_LUA_FUNC_END(L);
}
//-----------------------------------------------------------------------------
/// Lua binding: removes all particle effects of the given kind (arg 1) from
/// the emitter.
int LuaNode::removeEffect(lua_State* L)
{
    LM_LUA_FUNC_START(L);
    Ptr<NodeEmitter> pEmitter(getNodeEmitter(L));
    pEmitter->removeEffects(luat_toeffect(L,1));
    return 0;
    LM_LUA_FUNC_END(L);
}
}<|fim▁end|>
|
{
Ptr<NodeStorm> pStorm(getNodeStorm(L));
pStorm->setPeriod(luat_tofloat(L, 1), luat_tofloat(L, 2));
}
|
<|file_name|>stackvec.rs<|end_file_name|><|fim▁begin|>use minimal_lexical::bigint;
#[cfg(feature = "alloc")]
pub use minimal_lexical::heapvec::HeapVec as VecType;
#[cfg(not(feature = "alloc"))]
pub use minimal_lexical::stackvec::StackVec as VecType;
pub fn vec_from_u32(x: &[u32]) -> VecType {
let mut vec = VecType::new();
#[cfg(not(all(target_pointer_width = "64", not(target_arch = "sparc"))))]
{
for &xi in x {
vec.try_push(xi as bigint::Limb).unwrap();
}
}
#[cfg(all(target_pointer_width = "64", not(target_arch = "sparc")))]
{
for xi in x.chunks(2) {
match xi.len() {
1 => vec.try_push(xi[0] as bigint::Limb).unwrap(),
2 => {
let xi0 = xi[0] as bigint::Limb;
let xi1 = xi[1] as bigint::Limb;<|fim▁hole|> },
_ => unreachable!(),
}
}
}
vec
}<|fim▁end|>
|
vec.try_push((xi1 << 32) | xi0).unwrap()
|
<|file_name|>apputil.py<|end_file_name|><|fim▁begin|>from project_cron.utils import processutil
def open(app_name):
    """Activate the named macOS application unless it is already running."""
    osa_source = '''
    if application "%s" is not running then
        tell application "%s" to activate
    end if
    ''' % (app_name, app_name)
    command = ['/usr/bin/osascript', '-e', osa_source]
    processutil.call(command)
def close(app_name):
    """Quit the named macOS application via AppleScript."""
    script = 'tell application "%s" to quit' % app_name
    processutil.call(['/usr/bin/osascript', '-e', script])
|
<|file_name|>solver.py<|end_file_name|><|fim▁begin|>'''Copyright (C) 2015 by Wesley Tansey
This file is part of the GFL library.
The GFL library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The GFL library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the GFL library. If not, see <http://www.gnu.org/licenses/>.
'''
import numpy as np
from numpy.ctypeslib import ndpointer
from ctypes import *
from pygfl.utils import *
'''Load the graph fused lasso library'''
# Prefer a system-installed libgraphfl; fall back to the copy shipped with
# the package when the bare soname cannot be resolved.
try:
    graphfl_lib = cdll.LoadLibrary("libgraphfl.so")
except OSError:
    _libgraphfl_file = get_libgraphfl()
    graphfl_lib = cdll.LoadLibrary(_libgraphfl_file)

# Scalar-lambda, unweighted solver (warm-started via beta/z/u buffers).
graphfl = graphfl_lib.graph_fused_lasso_warm
graphfl.restype = c_int
graphfl.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'),
                    c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'),
                    c_double, c_double, c_double, c_int, c_double,
                    ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')]

# Scalar-lambda solver with per-node observation weights.
weighted_graphfl = graphfl_lib.graph_fused_lasso_weight_warm
weighted_graphfl.restype = c_int
weighted_graphfl.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'),
                    c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'),
                    c_double, c_double, c_double, c_int, c_double,
                    ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')]

# Per-edge lambda (array) solver, unweighted observations.
graphfl_lams = graphfl_lib.graph_fused_lasso_lams_warm
graphfl_lams.restype = c_int
graphfl_lams.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'),
                    c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'),
                    ndpointer(c_double, flags='C_CONTIGUOUS'), c_double, c_double, c_int, c_double,
                    ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')]

# Per-edge lambda solver with per-node observation weights.
weighted_graphfl_lams = graphfl_lib.graph_fused_lasso_lams_weight_warm
weighted_graphfl_lams.restype = c_int
weighted_graphfl_lams.argtypes = [c_int, ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'),
                    c_int, ndpointer(c_int, flags='C_CONTIGUOUS'), ndpointer(c_int, flags='C_CONTIGUOUS'),
                    ndpointer(c_double, flags='C_CONTIGUOUS'), c_double, c_double, c_int, c_double,
                    ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS'), ndpointer(c_double, flags='C_CONTIGUOUS')]
class TrailSolver:
    '''Graph fused lasso solver operating on a trail decomposition of the
    graph. The numerical work is delegated to the compiled libgraphfl
    routines declared at module level; beta/z/u act as warm-start buffers
    across successive solves.'''

    def __init__(self, alpha=2., inflate=2., maxsteps=100000, converge=1e-6, penalty='gfl', max_dp_steps=5000, gamma=1.):
        '''Store the solver hyperparameters.

        alpha, inflate -- ADMM step size and inflation factor.
        maxsteps, converge -- ADMM iteration cap and convergence tolerance.
        penalty -- 'gfl', 'dp' (double Pareto) or 'gamlasso'.
        max_dp_steps -- outer-iteration cap for the double Pareto solver.
        gamma -- shape parameter of the gamma lasso penalty.
        '''
        self.alpha = alpha
        self.inflate = inflate
        self.maxsteps = maxsteps
        self.converge = converge
        self.penalty = penalty
        self.max_dp_steps = max_dp_steps
        self.gamma = gamma

    def set_data(self, y, edges, ntrails, trails, breakpoints, weights=None):
        '''Attach observations and the trail-decomposed graph, and reset the
        warm-start buffers (beta, z, u) and the step log.'''
        self.y = y
        self.edges = edges if type(edges) is defaultdict else edge_map_from_edge_list(edges)
        self.nnodes = len(y)
        self.ntrails = ntrails
        self.trails = trails
        self.breakpoints = breakpoints
        self.weights = weights
        self.beta = np.zeros(self.nnodes, dtype='double')
        self.z = np.zeros(self.breakpoints[-1], dtype='double')
        self.u = np.zeros(self.breakpoints[-1], dtype='double')
        self.steps = []

    def set_values_only(self, y, weights=None):
        '''Swap in new observations while keeping the warm-start state.'''
        self.y = y
        self.weights = weights

    def solve(self, lam):
        '''Solves the GFL for a fixed value of lambda.'''
        if self.penalty == 'dp':
            return self.solve_dp(lam)
        if self.penalty == 'gfl':
            return self.solve_gfl(lam)
        if self.penalty == 'gamlasso':
            # Bug fix: this branch previously dispatched to solve_gfl, which
            # left solve_gamlasso unreachable.
            return self.solve_gamlasso(lam)
        raise Exception('Unknown penalty type: {0}'.format(self.penalty))

    def solve_gfl(self, lam):
        '''Run the compiled graph fused lasso routine. `lam` may be a scalar
        or a per-edge array of penalty weights; weighted variants are used
        when per-node observation weights are set.'''
        if hasattr(lam, '__len__'):
            # Per-edge lambda array.
            if self.weights is None:
                s = graphfl_lams(self.nnodes, self.y,
                                 self.ntrails, self.trails, self.breakpoints,
                                 lam,
                                 self.alpha, self.inflate, self.maxsteps, self.converge,
                                 self.beta, self.z, self.u)
            else:
                s = weighted_graphfl_lams(self.nnodes, self.y, self.weights,
                                          self.ntrails, self.trails, self.breakpoints,
                                          lam,
                                          self.alpha, self.inflate, self.maxsteps, self.converge,
                                          self.beta, self.z, self.u)
        else:
            # Single scalar lambda.
            if self.weights is None:
                s = graphfl(self.nnodes, self.y,
                            self.ntrails, self.trails, self.breakpoints,
                            lam,
                            self.alpha, self.inflate, self.maxsteps, self.converge,
                            self.beta, self.z, self.u)
            else:
                s = weighted_graphfl(self.nnodes, self.y, self.weights,
                                     self.ntrails, self.trails, self.breakpoints,
                                     lam,
                                     self.alpha, self.inflate, self.maxsteps, self.converge,
                                     self.beta, self.z, self.u)
        self.steps.append(s)
        return self.beta

    def solve_dp(self, lam):
        '''Solves the Graph-fused double Pareto (non-convex, local optima only)'''
        cur_converge = self.converge+1
        step = 0
        # Get an initial estimate using the GFL
        self.solve_gfl(lam)
        beta2 = np.copy(self.beta)
        while cur_converge > self.converge and step < self.max_dp_steps:
            # Weight each edge differently
            u = lam / (1 + np.abs(self.beta[self.trails[::2]] - self.beta[self.trails[1::2]]))
            # Swap the beta buffers
            temp = self.beta
            self.beta = beta2
            beta2 = temp
            # Solve the edge-weighted GFL problem, which updates beta
            self.solve_gfl(u)
            # Check for convergence
            cur_converge = np.sqrt(((self.beta - beta2)**2).sum())
            step += 1
        self.steps.append(step)
        return self.beta

    def solve_gamlasso(self, lam):
        '''Solves the Graph-fused gamma lasso via POSE (Taddy, 2013)'''
        # Bug fix: the per-edge weights were computed but an undefined name
        # `u` was passed to solve_gfl, raising NameError at runtime.
        weights = lam / (1 + self.gamma * np.abs(self.beta[self.trails[::2]] - self.beta[self.trails[1::2]]))
        self.solve_gfl(weights)
        return self.beta

    def log_likelihood(self, beta):
        '''Gaussian log-likelihood of beta (up to an additive constant).'''
        return -0.5 * ((self.y - beta)**2).sum()

    def solution_path(self, min_lambda, max_lambda, lambda_bins, verbose=0):
        '''Follows the solution path to find the best lambda value.'''
        lambda_grid = np.exp(np.linspace(np.log(max_lambda), np.log(min_lambda), lambda_bins))
        aic_trace = np.zeros(lambda_grid.shape) # The AIC score for each lambda value
        aicc_trace = np.zeros(lambda_grid.shape) # The AICc score for each lambda value (correcting for finite sample size)
        bic_trace = np.zeros(lambda_grid.shape) # The BIC score for each lambda value
        dof_trace = np.zeros(lambda_grid.shape) # The degrees of freedom of each final solution
        log_likelihood_trace = np.zeros(lambda_grid.shape)
        beta_trace = []
        best_idx = None
        best_plateaus = None

        # Solve the series of lambda values with warm starts at each point
        for i, lam in enumerate(lambda_grid):
            if verbose:
                print('#{0} Lambda = {1}'.format(i, lam))

            # Fit to the final values
            beta = self.solve(lam)

            if verbose:
                print('Calculating degrees of freedom')

            # Count the number of free parameters in the grid (dof)
            plateaus = calc_plateaus(beta, self.edges)
            dof_trace[i] = len(plateaus)

            if verbose:
                print('Calculating AIC')

            # Get the negative log-likelihood
            log_likelihood_trace[i] = self.log_likelihood(beta)

            # Calculate AIC = 2k - 2ln(L)
            aic_trace[i] = 2. * dof_trace[i] - 2. * log_likelihood_trace[i]

            # Calculate AICc = AIC + 2k * (k+1) / (n - k - 1)
            aicc_trace[i] = aic_trace[i] + 2 * dof_trace[i] * (dof_trace[i]+1) / (len(beta) - dof_trace[i] - 1.)

            # Calculate BIC = -2ln(L) + k * (ln(n) - ln(2pi))
            bic_trace[i] = -2 * log_likelihood_trace[i] + dof_trace[i] * (np.log(len(beta)) - np.log(2 * np.pi))

            # Track the best model thus far
            if best_idx is None or bic_trace[i] < bic_trace[best_idx]:
                best_idx = i
                best_plateaus = plateaus

            # Save the trace of all the resulting parameters
            beta_trace.append(np.array(beta))

            if verbose:
                print('DoF: {0} AIC: {1} AICc: {2} BIC: {3}'.format(dof_trace[i], aic_trace[i], aicc_trace[i], bic_trace[i]))

        if verbose:
            print('Best setting (by BIC): lambda={0} [DoF: {1}, AIC: {2}, AICc: {3} BIC: {4}]'.format(lambda_grid[best_idx], dof_trace[best_idx], aic_trace[best_idx], aicc_trace[best_idx], bic_trace[best_idx]))

        return {'aic': aic_trace,
                'aicc': aicc_trace,
                'bic': bic_trace,
                'dof': dof_trace,
                'loglikelihood': log_likelihood_trace,
                'beta': np.array(beta_trace),
                'lambda': lambda_grid,
                'best_idx': best_idx,
                'best': beta_trace[best_idx],
                'plateaus': best_plateaus}
|
self.alpha, self.inflate, self.maxsteps, self.converge,
self.beta, self.z, self.u)
else:
|
<|file_name|>sphericalHarmonics.py<|end_file_name|><|fim▁begin|># ------------------- Information --------------------- #
# Author: Joey Dumont <[email protected]> #
# Date created: October 18th, 2013 #
# Date mod. October 18th, 2013 #
# Description: We plot the times it took to compute #
# sets of Wigner symbols of different #
# sizes. #
# ----------------------------------------------------- #
# --------------- Modules Importation ----------------- #
from pylab import *
from matplotlib.ticker import AutoMinorLocator

# ----------------- Data Importation ------------------ #
# Column 0 holds the degree l; columns 1-3 hold the error curves to compare.
prec = loadtxt("precisionSph.dat")

# ------------------ Plotting data -------------------- #
# Reconstructed: statement order had been scrambled by a dataset artifact.
fig1 = figure(figsize=(7,3))
ax1 = fig1.add_subplot(111)
ax1.plot(prec[:,0],prec[:,1], 'b-')
ax1.plot(prec[:,0],prec[:,2], 'r')
ax1.plot(prec[:,0],prec[:,3], 'k')

# Minor ticks on the degree axis, log scale on the error axis.
minorLocator = AutoMinorLocator()
ax1.xaxis.set_minor_locator(minorLocator)
ax1.set_xlabel(r"$\ell$")
ax1.set_ylabel("Error")
ax1.set_yscale('log')

fig1.savefig("SphPrecision.pdf", bbox_inches="tight")
<|file_name|>standard_trainer.py<|end_file_name|><|fim▁begin|>'''@file standard_trainer.py
contains the StandardTrainer'''
from nabu.neuralnetworks.trainers import trainer
class StandardTrainer(trainer.Trainer):
    '''a trainer with no added functionality: no auxiliary loss and no
    extra session hooks.'''

    def aditional_loss(self):
        '''
        add an aditional loss

        returns:
            the aditional loss or None
        '''
        # The standard trainer contributes no auxiliary loss term.
        return None

    def chief_only_hooks(self, outputs):
        '''add hooks only for the chief worker

        Args:
            outputs: the outputs generated by the create graph method

        Returns:
            a list of hooks
        '''
        return []

    def hooks(self, outputs):
        '''add hooks for the session

        Args:
            outputs: the outputs generated by the create graph method

        Returns:
            a list of hooks
        '''
        return []
| |
<|file_name|>create-lang-edit-link.js<|end_file_name|><|fim▁begin|>/**
* create edit language file link
*/
// Build the GitHub edit URL for the given electerm locale file.
const createLangEditLink = (lang) => {
  return `https://github.com/electerm/electerm-locales/edit/master/locales/${lang}.js`
}

export default createLangEditLink
| |
<|file_name|>qprotractor_ru.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="ru_RU">
<context>
<name>AboutDialog</name>
<message>
<location filename="about.ui" line="20"/>
<source>About QProtractor</source>
<translation>О QProtractor</translation>
</message>
<message utf8="true">
<location filename="about.ui" line="57"/>
<source>QProtractor
©2011 Alexey Guseynov (kibergus)</source>
<translation>QProtractor
©2011 Гусейнов Алексей (kibergus)</translation>
</message>
<message>
<location filename="about.ui" line="79"/>
<source>Controls:
Move protractor with left mouse button pressed.
Rotate protractor with right mouse button pressed.
Watch for current rotation angle near top edge of the tool.
Resize protractor with scroll.
Use double click to grab mouse control and measure
angle by pointing target with cursor.
Use right mouse button click for menu.
Hint:
Adjust protractor to your needs and use
menu->"Save state" to make this configuration default.</source>
<oldsource>Controls:
Move protractor with left mouse button pressed.
Rotate protractor with right mouse button presed.
Watch for current rotation angle near top edge of the tool.
Resize protractor with scroll.
Use double click to grab mouse control and measure
angle by pointing target with cursor.
Use right mouse button click for menu.
Hint:
Adjust protractor to your needs and use
menu->"Save state" to make this configuration default.</oldsource>
<translation>Управление:
Передвигайте транспортир с зажатой левой кнопной мыши.
Крутите транспортир с зажатой правой кнопкой мыши.
Текущий угол поворота отображается вверху.
Масштабируйте транспортир колесом мыши.
Двойной клик захватывает курсор мыши и позволяет
измерять угол указывая курсором на желаемую точку.
Клик правой кнопкой открывает меню.
Подсказка:
Настройте транспортир под свои нужды и сохраните эти
настройки выбрав пункт меню "Сохранить".</translation>
</message>
</context>
<context>
<name>Protractor</name>
<message>
<location filename="protractor.cpp" line="35"/>
<source>Protractor</source>
<translation>Транспортир</translation>
</message>
<message>
<location filename="protractor.cpp" line="91"/>
<source>Rotate &right</source>
<translation>Ноль справа (&R)</translation>
</message>
<message>
<location filename="protractor.cpp" line="94"/>
<source>Rotate &up</source>
<translation>Ноль вверху (&U)</translation>
</message>
<message>
<location filename="protractor.cpp" line="97"/>
<source>Rotate &left</source>
<translation>Ноль слева (&L)</translation>
</message>
<message>
<location filename="protractor.cpp" line="100"/>
<source>Rotate &down</source>
<translation>Ноль внизу (&D)</translation>
</message>
<message>
<location filename="protractor.cpp" line="105"/>
<source>Re&verse</source>
<oldsource>&Reverse</oldsource>
<translation>В другую сторону (&V)</translation>
</message>
<message>
<location filename="protractor.cpp" line="108"/>
<source>Change &units</source>
<translation>Градусы/радианы (&U)</translation>
</message>
<message>
<location filename="protractor.cpp" line="113"/>
<source>&Save state</source>
<translation>Сохранить (&S)</translation>
</message>
<message>
<location filename="protractor.cpp" line="116"/>
<source>&About/help</source>
<translation>О программе/справка (&A)</translation>
</message>
<message>
<location filename="protractor.cpp" line="119"/><|fim▁hole|> </message>
</context>
</TS><|fim▁end|>
|
<source>E&xit</source>
<translation>Выход (&X)</translation>
|
<|file_name|>append-test.js<|end_file_name|><|fim▁begin|>import { set } from 'ember-metal';
import { jQuery } from 'ember-views';
import { moduleFor, RenderingTest } from '../../utils/test-case';
import { Component, compile } from '../../utils/helpers';
import { strip } from '../../utils/abstract-test-case';
class AbstractAppendTest extends RenderingTest {
  constructor() {
    super();
    // Components created during a test and the element ids they rendered,
    // consumed by teardown() for cleanup and leak checking.
    this.components = [];
    this.ids = [];
  }
teardown() {
this.component = null;
this.components.forEach(component => {
this.runTask(() => component.destroy());
});
this.ids.forEach(id => {
let $element = jQuery(id).remove();
this.assert.strictEqual($element.length, 0, `Should not leak element: #${id}`);
});
super();
}
/* abstract append(component): Element; */
  // Record an appended component (and its element id) for teardown cleanup.
  didAppend(component) {
    this.components.push(component);
    this.ids.push(component.elementId);
  }
  ['@test lifecycle hooks during component append'](assert) {
    // Records [componentName, hookName] pairs in invocation order.
    let hooks = [];

    let oldRegisterComponent = this.registerComponent;
    let componentsByName = {};

    // TODO: refactor/combine with other life-cycle tests
    // Wrap registerComponent so every registered component logs all of its
    // lifecycle hooks into `hooks` and exposes itself via componentsByName.
    this.registerComponent = function(name, _options) {
      function pushHook(hookName) {
        hooks.push([name, hookName]);
      }

      let options = {
        ComponentClass: _options.ComponentClass.extend({
          init() {
            expectDeprecation(() => { this._super(...arguments); }, /didInitAttrs called/);
            if (name in componentsByName) {
              throw new TypeError('Component named: ` ' + name + ' ` already registered');
            }
            componentsByName[name] = this;
            pushHook('init');
            this.on('init', () => pushHook('on(init)'));
          },

          didInitAttrs(options) {
            pushHook('didInitAttrs', options);
          },

          didReceiveAttrs() {
            pushHook('didReceiveAttrs');
          },

          willInsertElement() {
            pushHook('willInsertElement');
          },

          willRender() {
            pushHook('willRender');
          },

          didInsertElement() {
            pushHook('didInsertElement');
          },

          didRender() {
            pushHook('didRender');
          },

          didUpdateAttrs() {
            pushHook('didUpdateAttrs');
          },

          willUpdate() {
            pushHook('willUpdate');
          },

          didUpdate() {
            pushHook('didUpdate');
          },

          willDestroyElement() {
            pushHook('willDestroyElement');
          },

          willClearRender() {
            pushHook('willClearRender');
          },

          didDestroyElement() {
            pushHook('didDestroyElement');
          },

          willDestroy() {
            pushHook('willDestroy');
            this._super(...arguments);
          }
        }),
        template: _options.template
      };

      oldRegisterComponent.call(this, name, options);
    };

    // Parent renders its own state and yields into a tagless child.
    this.registerComponent('x-parent', {
      ComponentClass: Component.extend({
        layoutName: 'components/x-parent'
      }),

      template: '[parent: {{foo}}]{{#x-child bar=foo}}[yielded: {{foo}}]{{/x-child}}'
    });

    this.registerComponent('x-child', {
      ComponentClass: Component.extend({
        tagName: ''
      }),

      template: '[child: {{bar}}]{{yield}}'
    });

    let XParent = this.owner._lookupFactory('component:x-parent');

    this.component = XParent.create({ foo: 'zomg' });

    // Creation only runs init/attr hooks; no render hooks yet.
    assert.deepEqual(hooks, [
      ['x-parent', 'init'],
      ['x-parent', 'didInitAttrs'],
      ['x-parent', 'didReceiveAttrs'],
      ['x-parent', 'on(init)']
    ], 'creation of x-parent');

    hooks.length = 0;

    this.element = this.append(this.component);

    // Appending interleaves parent/child insert+render hooks.
    assert.deepEqual(hooks, [
      ['x-parent', 'willInsertElement'],

      ['x-child', 'init'],
      ['x-child', 'didInitAttrs'],
      ['x-child', 'didReceiveAttrs'],
      ['x-child', 'on(init)'],
      ['x-child', 'willRender'],
      ['x-child', 'willInsertElement'],
      ['x-child', 'didInsertElement'],
      ['x-child', 'didRender'],

      ['x-parent', 'didInsertElement'],
      ['x-parent', 'didRender']
    ], 'appending of x-parent');

    hooks.length = 0;

    // An explicit parent rerender does not re-enter the child.
    this.runTask(() => componentsByName['x-parent'].rerender());

    assert.deepEqual(hooks, [
      ['x-parent', 'willUpdate'],
      ['x-parent', 'willRender'],
      ['x-parent', 'didUpdate'],
      ['x-parent', 'didRender']
    ], 'rerender x-parent');

    hooks.length = 0;

    // A child rerender still wraps the parent's update/render hooks.
    this.runTask(() => componentsByName['x-child'].rerender());

    assert.deepEqual(hooks, [
      ['x-parent', 'willUpdate'],
      ['x-parent', 'willRender'],

      ['x-child', 'willUpdate'],
      ['x-child', 'willRender'],

      ['x-child', 'didUpdate'],
      ['x-child', 'didRender'],

      ['x-parent', 'didUpdate'],
      ['x-parent', 'didRender']
    ], 'rerender x-child');

    hooks.length = 0;

    // Changing an attr the child consumes triggers didUpdateAttrs on it.
    this.runTask(() => set(this.component, 'foo', 'wow'));

    assert.deepEqual(hooks, [
      ['x-parent', 'willUpdate'],
      ['x-parent', 'willRender'],

      ['x-child', 'didUpdateAttrs'],
      ['x-child', 'didReceiveAttrs'],

      ['x-child', 'willUpdate'],
      ['x-child', 'willRender'],

      ['x-child', 'didUpdate'],
      ['x-child', 'didRender'],

      ['x-parent', 'didUpdate'],
      ['x-parent', 'didRender']
    ], 'set foo = wow');

    hooks.length = 0;

    this.runTask(() => set(this.component, 'foo', 'zomg'));

    assert.deepEqual(hooks, [
      ['x-parent', 'willUpdate'],
      ['x-parent', 'willRender'],

      ['x-child', 'didUpdateAttrs'],
      ['x-child', 'didReceiveAttrs'],

      ['x-child', 'willUpdate'],
      ['x-child', 'willRender'],

      ['x-child', 'didUpdate'],
      ['x-child', 'didRender'],

      ['x-parent', 'didUpdate'],
      ['x-parent', 'didRender']
    ], 'set foo = zomg');

    hooks.length = 0;

    // Destruction tears down elements parent-first, then runs willDestroy.
    this.runTask(() => this.component.destroy());

    assert.deepEqual(hooks, [
      ['x-parent', 'willDestroyElement'],
      ['x-parent', 'willClearRender'],

      ['x-child', 'willDestroyElement'],
      ['x-child', 'willClearRender'],

      ['x-child', 'didDestroyElement'],
      ['x-parent', 'didDestroyElement'],

      ['x-parent', 'willDestroy'],
      ['x-child', 'willDestroy']
    ], 'destroy');
  }
  ['@test appending, updating and destroying a single component'](assert) {
    let willDestroyCalled = 0;

    this.registerComponent('x-parent', {
      ComponentClass: Component.extend({
        layoutName: 'components/x-parent',

        willDestroyElement() {
          willDestroyCalled++;
        }
      }),

      template: '[parent: {{foo}}]{{#x-child bar=foo}}[yielded: {{foo}}]{{/x-child}}'
    });

    this.registerComponent('x-child', {
      ComponentClass: Component.extend({
        tagName: ''
      }),

      template: '[child: {{bar}}]{{yield}}'
    });

    let XParent = this.owner._lookupFactory('component:x-parent');

    this.component = XParent.create({ foo: 'zomg' });

    assert.ok(!this.component.element, 'precond - should not have an element');

    this.element = this.append(this.component);

    let componentElement = this.component.element;

    // Initial append renders and attaches to the target element.
    this.assertComponentElement(componentElement, { content: '[parent: zomg][child: zomg][yielded: zomg]' });

    assert.equal(componentElement.parentElement, this.element, 'It should be attached to the target');

    // A no-op rerender leaves content and attachment unchanged.
    this.runTask(() => this.rerender());

    this.assertComponentElement(componentElement, { content: '[parent: zomg][child: zomg][yielded: zomg]' });

    assert.equal(componentElement.parentElement, this.element, 'It should be attached to the target');

    // Property updates propagate to parent, child and yielded content.
    this.runTask(() => set(this.component, 'foo', 'wow'));

    this.assertComponentElement(componentElement, { content: '[parent: wow][child: wow][yielded: wow]' });

    assert.equal(componentElement.parentElement, this.element, 'It should be attached to the target');

    this.runTask(() => set(this.component, 'foo', 'zomg'));

    this.assertComponentElement(componentElement, { content: '[parent: zomg][child: zomg][yielded: zomg]' });

    assert.equal(componentElement.parentElement, this.element, 'It should be attached to the target');

    // Destroying detaches the element and fires willDestroyElement once.
    this.runTask(() => this.component.destroy());

    if (this.isHTMLBars) {
      // Bug in Glimmer – component should not have .element at this point
      assert.ok(!this.component.element, 'It should not have an element');
    }

    assert.ok(!componentElement.parentElement, 'The component element should be detached');

    this.assert.equal(willDestroyCalled, 1);
  }
// Lifecycle test with two independent root components appended separately:
// each one's bound property is updated individually, then both together,
// then both are destroyed; asserts content and attachment after each phase.
['@test appending, updating and destroying multiple components'](assert) {
  let willDestroyCalled = 0;
  this.registerComponent('x-first', {
    ComponentClass: Component.extend({
      layoutName: 'components/x-first',
      willDestroyElement() {
        // Shared counter — expected to reach 2 (one per component).
        willDestroyCalled++;
      }
    }),
    template: 'x-first {{foo}}!'
  });
  this.registerComponent('x-second', {
    ComponentClass: Component.extend({
      layoutName: 'components/x-second',
      willDestroyElement() {
        willDestroyCalled++;
      }
    }),
    template: 'x-second {{bar}}!'
  });
  let First = this.owner._lookupFactory('component:x-first');
  let Second = this.owner._lookupFactory('component:x-second');
  let first = First.create({ foo: 'foo' });
  let second = Second.create({ bar: 'bar' });
  this.assert.ok(!first.element, 'precond - should not have an element');
  this.assert.ok(!second.element, 'precond - should not have an element');
  // Each append() may return a distinct wrapper/target element.
  let wrapper1, wrapper2;
  this.runTask(() => wrapper1 = this.append(first));
  this.runTask(() => wrapper2 = this.append(second));
  let componentElement1 = first.element;
  let componentElement2 = second.element;
  this.assertComponentElement(componentElement1, { content: 'x-first foo!' });
  this.assertComponentElement(componentElement2, { content: 'x-second bar!' });
  assert.equal(componentElement1.parentElement, wrapper1, 'The first component should be attached to the target');
  assert.equal(componentElement2.parentElement, wrapper2, 'The second component should be attached to the target');
  // Updating one component must not disturb the other.
  this.runTask(() => set(first, 'foo', 'FOO'));
  this.assertComponentElement(componentElement1, { content: 'x-first FOO!' });
  this.assertComponentElement(componentElement2, { content: 'x-second bar!' });
  assert.equal(componentElement1.parentElement, wrapper1, 'The first component should be attached to the target');
  assert.equal(componentElement2.parentElement, wrapper2, 'The second component should be attached to the target');
  this.runTask(() => set(second, 'bar', 'BAR'));
  this.assertComponentElement(componentElement1, { content: 'x-first FOO!' });
  this.assertComponentElement(componentElement2, { content: 'x-second BAR!' });
  assert.equal(componentElement1.parentElement, wrapper1, 'The first component should be attached to the target');
  assert.equal(componentElement2.parentElement, wrapper2, 'The second component should be attached to the target');
  // Both updated within a single run loop flush.
  this.runTask(() => {
    set(first, 'foo', 'foo');
    set(second, 'bar', 'bar');
  });
  this.assertComponentElement(componentElement1, { content: 'x-first foo!' });
  this.assertComponentElement(componentElement2, { content: 'x-second bar!' });
  assert.equal(componentElement1.parentElement, wrapper1, 'The first component should be attached to the target');
  assert.equal(componentElement2.parentElement, wrapper2, 'The second component should be attached to the target');
  this.runTask(() => {
    first.destroy();
    second.destroy();
  });
  if (this.isHTMLBars) {
    // Bug in Glimmer – component should not have .element at this point
    assert.ok(!first.element, 'The first component should not have an element');
    assert.ok(!second.element, 'The second component should not have an element');
  }
  assert.ok(!componentElement1.parentElement, 'The first component element should be detached');
  assert.ok(!componentElement2.parentElement, 'The second component element should be detached');
  this.assert.equal(willDestroyCalled, 2);
}
// Re-entrancy test: a component's didInsertElement appends a *second* root
// component while the first render is still in progress; both must end up
// rendered with the expected content.
['@test can appendTo while rendering'](assert) {
  let owner = this.owner;
  // Captured so the inner didInsertElement (different `this`) can use the
  // suite's append strategy.
  let append = (component) => {
    return this.append(component);
  };
  let element1, element2;
  this.registerComponent('first-component', {
    ComponentClass: Component.extend({
      layout: compile('component-one'),
      didInsertElement() {
        element1 = this.element;
        // Append a second root component from inside the first's insertion hook.
        let SecondComponent = owner._lookupFactory('component:second-component');
        append(SecondComponent.create());
      }
    })
  });
  this.registerComponent('second-component', {
    ComponentClass: Component.extend({
      layout: compile(`component-two`),
      didInsertElement() {
        element2 = this.element;
      }
    })
  });
  let FirstComponent = this.owner._lookupFactory('component:first-component');
  this.runTask(() => append(FirstComponent.create()));
  this.assertComponentElement(element1, { content: 'component-one' });
  this.assertComponentElement(element2, { content: 'component-two' });
}
// Re-entrancy + teardown test: each of two alternating components appends an
// extra 'other-root' component during didInsertElement and destroys it in
// willDestroy. Toggling between them must create/destroy the extra roots in
// lockstep (instantiatedRoots / destroyedRoots counters).
['@test can appendTo and remove while rendering'](assert) {
  let owner = this.owner;
  let append = (component) => {
    return this.append(component);
  };
  let element1, element2, element3, element4, component1, component2;
  this.registerComponent('foo-bar', {
    ComponentClass: Component.extend({
      layout: compile('foo-bar'),
      init() {
        this._super(...arguments);
        // Expose the instance to the test body for explicit destroy() later.
        component1 = this;
      },
      didInsertElement() {
        element1 = this.element;
        let OtherRoot = owner._lookupFactory('component:other-root');
        this._instance = OtherRoot.create({
          didInsertElement() {
            element2 = this.element;
          }
        });
        append(this._instance);
      },
      willDestroy() {
        // The extra root is owned by this component; tear it down with it.
        this._instance.destroy();
      }
    })
  });
  this.registerComponent('baz-qux', {
    ComponentClass: Component.extend({
      layout: compile('baz-qux'),
      init() {
        this._super(...arguments);
        component2 = this;
      },
      didInsertElement() {
        element3 = this.element;
        let OtherRoot = owner._lookupFactory('component:other-root');
        this._instance = OtherRoot.create({
          didInsertElement() {
            element4 = this.element;
          }
        });
        append(this._instance);
      },
      willDestroy() {
        this._instance.destroy();
      }
    })
  });
  let instantiatedRoots = 0;
  let destroyedRoots = 0;
  this.registerComponent('other-root', {
    ComponentClass: Component.extend({
      layout: compile(`fake-thing: {{counter}}`),
      init() {
        this._super(...arguments);
        // Each instantiation gets a unique counter so the rendered output
        // proves which instance is on screen.
        this.counter = instantiatedRoots++;
      },
      willDestroy() {
        destroyedRoots++;
        this._super(...arguments);
      }
    })
  });
  this.render(strip`
    {{#if showFooBar}}
      {{foo-bar}}
    {{else}}
      {{baz-qux}}
    {{/if}}
  `, { showFooBar: true });
  this.assertComponentElement(element1, { });
  this.assertComponentElement(element2, { content: 'fake-thing: 0' });
  assert.equal(instantiatedRoots, 1);
  this.assertStableRerender();
  // Swap foo-bar out for baz-qux: one root destroyed, a new one created.
  this.runTask(() => set(this.context, 'showFooBar', false));
  assert.equal(instantiatedRoots, 2);
  assert.equal(destroyedRoots, 1);
  this.assertComponentElement(element3, { });
  this.assertComponentElement(element4, { content: 'fake-thing: 1' });
  this.runTask(() => {
    component1.destroy();
    component2.destroy();
  });
  assert.equal(instantiatedRoots, 2);
  assert.equal(destroyedRoots, 2);
}
}
// Concrete suite: exercises component.append(), which attaches directly to
// document.body (the returned value is the expected parent element).
moduleFor('append: no arguments (attaching to document.body)', class extends AbstractAppendTest {
  append(component) {
    this.runTask(() => component.append());
    this.didAppend(component);
    return document.body;
  }
});
// Concrete suite: exercises component.appendTo() with a CSS selector target,
// plus an extra test for the assertion raised when the selector matches nothing.
moduleFor('appendTo: a selector', class extends AbstractAppendTest {
  append(component) {
    this.runTask(() => component.appendTo('#qunit-fixture'));
    this.didAppend(component);
    return jQuery('#qunit-fixture')[0];
  }
  ['@test raises an assertion when the target does not exist in the DOM'](assert) {
    this.registerComponent('foo-bar', {
      ComponentClass: Component.extend({
        layoutName: 'components/foo-bar'
      }),
      template: 'FOO BAR!'
    });
    let FooBar = this.owner._lookupFactory('component:foo-bar');
    this.component = FooBar.create();
    assert.ok(!this.component.element, 'precond - should not have an element');
    this.runTask(() => {
      expectAssertion(() => {
        this.component.appendTo('#does-not-exist-in-dom');
      }, /You tried to append to \(#does-not-exist-in-dom\) but that isn't in the DOM/);
    });
    // The failed append must not leave a half-rendered element behind.
    assert.ok(!this.component.element, 'component should not have an element');
  }
});
// Concrete suite: exercises component.appendTo() with a direct DOM element
// (rather than a selector string) as the target.
moduleFor('appendTo: an element', class extends AbstractAppendTest {
  append(component) {
    let element = jQuery('#qunit-fixture')[0];
    this.runTask(() => component.appendTo(element));
    this.didAppend(component);
    return element;
  }
});
// Concrete suite: same appendTo-by-selector strategy, run under the shared
// multi-component scenarios of AbstractAppendTest.
moduleFor('appendTo: with multiple components', class extends AbstractAppendTest {
  append(component) {
    this.runTask(() => component.appendTo('#qunit-fixture'));
    this.didAppend(component);
    return jQuery('#qunit-fixture')[0];
  }
});
moduleFor('renderToElement: no arguments (defaults to a body context)', class extends AbstractAppendTest {
append(component) {
expectDeprecation(/Using the `renderToElement` is deprecated in favor of `appendTo`. Called in/);
let wrapper;
this.runTask(() => wrapper = component.renderToElement());
this.didAppend(component);
this.assert.equal(wrapper.tagName, 'BODY', 'wrapper is a body element');
this.assert.notEqual(wrapper, document.body, 'wrapper is not document.body');
this.assert.ok(!wrapper.parentNode, 'wrapper is detached');
return wrapper;
}<|fim▁hole|>
// Concrete suite: exercises the deprecated renderToElement('div') path; the
// returned wrapper must be a detached <div> containing the component.
moduleFor('renderToElement: a div', class extends AbstractAppendTest {
  append(component) {
    // renderToElement is deprecated; the deprecation must fire.
    expectDeprecation(/Using the `renderToElement` is deprecated in favor of `appendTo`. Called in/);
    let wrapper;
    this.runTask(() => wrapper = component.renderToElement('div'));
    this.didAppend(component);
    this.assert.equal(wrapper.tagName, 'DIV', 'wrapper is a body element');
    this.assert.ok(!wrapper.parentNode, 'wrapper is detached');
    return wrapper;
  }
});
|
});
|
/// ELF class identifier: 2 selects the 64-bit (ELFCLASS64) object format.
pub const ELF_CLASS: u8 = 2;
/// 64-bit ELF primitive field types (per the ELF-64 object file format).
pub type ElfAddr = u64;
/// File offset.
pub type ElfOff = u64;
/// Half word (16 bits).
pub type ElfHalf = u16;
/// Word (32 bits).
pub type ElfWord = u32;
/// Extended word (64 bits).
pub type ElfXword = u64;
/// An ELF header
#[repr(packed)]
#[derive(Debug)]
pub struct ElfHeader {
/// The "magic number" (4 bytes)
pub magic: [u8; 4],
/// 64 or 32 bit?
pub class: u8,
/// Little (1) or big endianness (2)?
pub endian: u8,
/// The ELF version (set to 1 for default)
pub ver: u8,
/// Operating system ABI (0x03 for Linux)
pub abi: [u8; 2],
/// Unused
pub pad: [u8; 7],
/// Specify whether the object is relocatable, executable, shared, or core (in order).
pub _type: ElfHalf,
/// Instruction set archcitecture
pub machine: ElfHalf,
/// Second version
pub ver_2: ElfWord,
/// The ELF entry
pub entry: ElfAddr,
/// The program header table offset
pub ph_off: ElfOff,
/// The section header table offset
pub sh_off: ElfOff,
/// The flags set
pub flags: ElfWord,
/// The header table length<|fim▁hole|> pub ph_len: ElfHalf,
/// The section header table entry length
pub sh_ent_len: ElfHalf,
/// The section header table length
pub sh_len: ElfHalf,
/// The section header table string index
pub sh_str_index: ElfHalf,
}
/// An ELF segment (a 64-bit program header table entry).
#[repr(packed)]
#[derive(Debug)]
pub struct ElfSegment {
    /// Segment type (e.g. loadable, dynamic linking info).
    pub _type: ElfWord,
    /// Segment attribute flags (read/write/execute permission bits).
    pub flags: ElfWord,
    /// Offset of the segment's data within the file.
    pub off: ElfOff,
    /// Virtual address at which the segment is mapped in memory.
    pub vaddr: ElfAddr,
    /// Physical address (meaningful only on systems without virtual memory).
    pub paddr: ElfAddr,
    /// Size of the segment image in the file, in bytes.
    pub file_len: ElfXword,
    /// Size of the segment in memory; any excess over file_len is zero-filled.
    pub mem_len: ElfXword,
    /// Required alignment of the segment, as a power of two.
    pub align: ElfXword,
}
/// An ELF section (a 64-bit section header table entry).
#[repr(packed)]
#[derive(Debug)]
pub struct ElfSection {
    /// Offset of the section name in the section-header string table.
    pub name: ElfWord,
    /// Section type (program data, symbol table, string table, ...).
    pub _type: ElfWord,
    /// Section attribute flags.
    pub flags: ElfXword,
    /// Virtual address of the section when loaded, or 0 if not loaded.
    pub addr: ElfAddr,
    /// Offset of the section's data within the file.
    pub off: ElfOff,
    /// Size of the section, in bytes.
    pub len: ElfXword,
    /// Index of an associated section; meaning depends on the section type.
    pub link: ElfWord,
    /// Extra type-dependent information.
    pub info: ElfWord,
    /// Required alignment of the section, as a power of two.
    pub addr_align: ElfXword,
    /// Entry size for sections holding fixed-size records (0 otherwise).
    pub ent_len: ElfXword,
}
/// An ELF symbol (a 64-bit symbol table entry).
#[repr(packed)]
#[derive(Debug)]
pub struct ElfSymbol {
    /// Offset of the symbol name in the string table.
    pub name: ElfWord,
    /// Symbol binding (high nibble) and type (low nibble).
    pub info: u8,
    /// Symbol visibility.
    pub other: u8,
    /// Index of the section this symbol is defined relative to.
    pub sh_index: ElfHalf,
    /// Symbol value (typically an address).
    pub value: ElfAddr,
    /// Size of the object the symbol refers to, in bytes.
    pub size: ElfXword,
}
|
pub h_len: ElfHalf,
/// The program header table entry length
pub ph_ent_len: ElfHalf,
/// The program head table length
|
<|file_name|>libraries.py<|end_file_name|><|fim▁begin|># Copyright 2006 Joe Wreschnig
# 2013 Nick Boultbee
# 2013,2014 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Base library classes.
These classes are the most basic library classes. As such they are the
least useful but most content-agnostic.
"""
import os
import shutil
import time
from gi.repository import GObject
from senf import fsn2text, fsnative
from quodlibet import _
from quodlibet.formats import MusicFile, AudioFileError, load_audio_files, \
dump_audio_files, SerializationError
from quodlibet.query import Query
from quodlibet.qltk.notif import Task
from quodlibet.util.atomic import atomic_save
from quodlibet.util.collection import Album
from quodlibet.util.collections import DictMixin
from quodlibet import util
from quodlibet import formats
from quodlibet.util.dprint import print_d, print_w
from quodlibet.util.path import unexpand, mkdir, normalize_path, ishidden, \
ismount
class Library(GObject.GObject, DictMixin):
    """A Library contains useful objects.

    The only required method these objects support is a .key
    attribute, but specific types of libraries may require more
    advanced interfaces.

    Every method which takes a sequence of items expects items to
    implement __iter__, __len__ and __contains__.

    Likewise the signals emit sequences which implement
    __iter__, __len__ and __contains__ e.g. set(), list() or tuple().

    WARNING: The library implements the dict interface with the exception
    that iterating over it yields values and not keys.
    """

    # GObject signals fired when the set of items (or their contents) changes;
    # each carries the affected item collection as its single argument.
    __gsignals__ = {
        'changed': (GObject.SignalFlags.RUN_LAST, None, (object,)),
        'removed': (GObject.SignalFlags.RUN_LAST, None, (object,)),
        'added': (GObject.SignalFlags.RUN_LAST, None, (object,)),
    }

    # Class-level default; a librarian (if any) coordinates multiple libraries.
    librarian = None
    # True when in-memory contents have diverged from the persisted state.
    dirty = False

    def __init__(self, name=None):
        super(Library, self).__init__()
        # Maps item.key -> item.
        self._contents = {}
        self._name = name
        if self.librarian is not None and name is not None:
            self.librarian.register(self, name)

    def destroy(self):
        if self.librarian is not None and self._name is not None:
            self.librarian._unregister(self, self._name)

    def changed(self, items):
        """Alert other users that these items have changed.

        This causes a 'changed' signal. If a librarian is available
        this function will call its changed method instead, and all
        libraries that librarian manages may fire a 'changed' signal.

        The item list may be filtered to those items actually in the
        library. If a librarian is available, it will handle the
        filtering instead. That means if this method is delegated to
        the librarian, this library's changed signal may not fire, but
        another's might.
        """
        if not items:
            return
        if self.librarian and self in self.librarian.libraries.values():
            print_d("Changing %d items via librarian." % len(items), self)
            self.librarian.changed(items)
        else:
            # No librarian: filter to items actually in this library ourselves.
            items = {item for item in items if item in self}
            if not items:
                return
            print_d("Changing %d items directly." % len(items), self)
            self._changed(items)

    def _changed(self, items):
        # Fire the 'changed' signal for items assumed to be in this library.
        # Called by the changed method and Librarians.
        assert isinstance(items, set)
        if not items:
            return
        print_d("Changing %d items." % len(items), self)
        self.dirty = True
        self.emit('changed', items)

    def __iter__(self):
        """Iterate over the items in the library."""
        # NOTE: yields values, not keys (see class docstring WARNING).
        return iter(self._contents.values())

    def iteritems(self):
        return iter(self._contents.items())

    def iterkeys(self):
        return iter(self._contents.keys())

    def itervalues(self):
        return iter(self._contents.values())

    def __len__(self):
        """The number of items in the library."""
        return len(self._contents)

    def __getitem__(self, key):
        """Find a item given its key."""
        return self._contents[key]

    def __contains__(self, item):
        """Check if a key or item is in the library."""
        try:
            # Accept either a raw key or an item exposing .key.
            return item in self._contents or item.key in self._contents
        except AttributeError:
            return False

    def get_content(self):
        """All items including hidden ones for saving the library
        (see FileLibrary with masked items)
        """
        return list(self.values())

    def keys(self):
        return self._contents.keys()

    def values(self):
        return self._contents.values()

    def _load_item(self, item):
        """Load (add) an item into this library"""
        # Subclasses should override this if they want to check
        # item validity; see `FileLibrary`.
        print_d("Loading %r." % item.key, self)
        self.dirty = True
        self._contents[item.key] = item

    def _load_init(self, items):
        """Load many items into the library (on start)"""
        # Subclasses should override this if they want to check
        # item validity; see `FileLibrary`.
        content = self._contents
        for item in items:
            content[item.key] = item

    def add(self, items):
        """Add items. This causes an 'added' signal.

        Return the sequence of items actually added, filtering out items
        already in the library.
        """
        items = {item for item in items if item not in self}
        if not items:
            return items
        print_d("Adding %d items." % len(items), self)
        for item in items:
            self._contents[item.key] = item
        self.dirty = True
        self.emit('added', items)
        return items

    def remove(self, items):
        """Remove items. This causes a 'removed' signal.

        Return the sequence of items actually removed.
        """
        items = {item for item in items if item in self}
        if not items:
            return items
        print_d("Removing %d items." % len(items), self)
        for item in items:
            del(self._contents[item.key])
        self.dirty = True
        self.emit('removed', items)
        return items
def _load_items(filename):
    """Load serialized audio items from disk.

    Returns the list of deserialized items; in case of any error
    (unreadable file or broken serialization) returns an empty list.
    """
    try:
        with open(filename, "rb") as fp:
            data = fp.read()
    except EnvironmentError:
        print_w("Couldn't load library file from: %r" % filename)
        return []

    try:
        items = load_audio_files(data)
    except SerializationError:
        # there are too many ways this could fail
        util.print_exc()
        # move the broken file out of the way so the next save starts clean
        # and the corrupt data is preserved for inspection
        try:
            shutil.copy(filename, filename + ".not-valid")
        except EnvironmentError:
            util.print_exc()
        return []

    return items
class PicklingMixin(object):
    """A mixin to provide persistence of a library by pickling to disk"""

    # Path the library was loaded from; reused as the default save target.
    filename = None

    def load(self, filename):
        """Load a library from a file, containing a picked list.

        Loading does not cause added, changed, or removed signals.
        """
        self.filename = filename
        print_d("Loading contents of %r." % filename, self)

        items = _load_items(filename)

        # this loads all items without checking their validity, but makes
        # sure that non-mounted items are masked
        self._load_init(items)

        print_d("Done loading contents of %r." % filename, self)

    def save(self, filename=None):
        """Save the library to the given filename, or the default if `None`"""

        if filename is None:
            filename = self.filename

        print_d("Saving contents to %r." % filename, self)

        try:
            dirname = os.path.dirname(filename)
            mkdir(dirname)
            # atomic_save writes to a temp file and renames, so a crash
            # mid-save cannot corrupt the existing library file
            with atomic_save(filename, "wb") as fileobj:
                fileobj.write(dump_audio_files(self.get_content()))
        except SerializationError:
            # Can happen when we try to pickle while the library is being
            # modified, like in the periodic 15min save.
            # Ignore, as it should try again later or on program exit.
            util.print_exc()
        except EnvironmentError:
            print_w("Couldn't save library to path: %r" % filename)
        else:
            self.dirty = False
class PicklingLibrary(Library, PicklingMixin):
    """A library that pickles its contents to disk"""

    def __init__(self, name=None):
        print_d("Using pickling persistence for library \"%s\"" % name)
        # Both bases are initialized explicitly rather than via super()
        # to keep the initialization order unambiguous.
        PicklingMixin.__init__(self)
        Library.__init__(self, name)
class AlbumLibrary(Library):
    """An AlbumLibrary listens to a SongLibrary and sorts its songs into
    albums.

    The library behaves like a dictionary: the keys are album_keys of
    AudioFiles, the values are Album objects.
    """

    def __init__(self, library):
        self.librarian = None
        print_d("Initializing Album Library to watch %r" % library._name)

        super(AlbumLibrary, self).__init__(
            "AlbumLibrary for %s" % library._name)

        self._library = library
        # Mirror the watched library's add/remove/change events into albums.
        self._asig = library.connect('added', self.__added)
        self._rsig = library.connect('removed', self.__removed)
        self._csig = library.connect('changed', self.__changed)
        # Seed from existing songs without firing signals.
        self.__added(library, library.values(), signal=False)

    def load(self):
        # deprecated
        pass

    def destroy(self):
        # Disconnect from the watched library so we stop receiving events.
        for sig in [self._asig, self._rsig, self._csig]:
            self._library.disconnect(sig)

    def _get(self, item):
        # Lookup by album key; returns None when absent.
        return self._contents.get(item)

    def __add(self, items):
        """Sort songs into existing or newly created albums.

        Returns (changed_albums, new_albums); the two sets are disjoint.
        """
        changed = set()
        new = set()
        for song in items:
            key = song.album_key
            if key in self._contents:
                changed.add(self._contents[key])
            else:
                album = Album(song)
                self._contents[key] = album
                new.add(album)
            self._contents[key].songs.add(song)

        # An album created in this call is "new", not "changed".
        changed -= new
        return changed, new

    def __added(self, library, items, signal=True):
        changed, new = self.__add(items)

        for album in changed:
            album.finalize()

        if signal:
            if new:
                self.emit('added', new)
            if changed:
                self.emit('changed', changed)

    def __removed(self, library, items):
        changed = set()
        removed = set()
        for song in items:
            key = song.album_key
            album = self._contents[key]
            album.songs.remove(song)
            changed.add(album)
            if not album.songs:
                # Last song gone: drop the album entirely.
                removed.add(album)
                del self._contents[key]

        changed -= removed

        for album in changed:
            album.finalize()

        if removed:
            self.emit('removed', removed)
        if changed:
            self.emit('changed', changed)

    def __changed(self, library, items):
        """Album keys could change between already existing ones.. so we
        have to do it the hard way and search by id."""
        print_d("Updating affected albums for %d items" % len(items))
        changed = set()
        removed = set()
        to_add = []
        for song in items:
            # in case the key hasn't changed
            key = song.album_key
            if key in self._contents and song in self._contents[key].songs:
                changed.add(self._contents[key])
            else:  # key changed.. look for it in each album
                to_add.append(song)
                for key, album in self._contents.items():
                    if song in album.songs:
                        album.songs.remove(song)
                        if not album.songs:
                            removed.add(album)
                        else:
                            changed.add(album)
                        break

        # get new albums and changed ones because keys could have changed
        add_changed, new = self.__add(to_add)
        changed |= add_changed

        # check if albums that were empty at some point are still empty
        for album in removed:
            if not album.songs:
                del self._contents[album.key]
                changed.discard(album)

        for album in changed:
            album.finalize()

        if removed:
            self.emit("removed", removed)
        if changed:
            self.emit("changed", changed)
        if new:
            self.emit("added", new)
class SongLibrary(PicklingLibrary):
    """A library for songs.

    Items in this kind of library must support (roughly) the AudioFile
    interface.
    """

    def __init__(self, *args, **kwargs):
        super(SongLibrary, self).__init__(*args, **kwargs)

    @util.cached_property
    def albums(self):
        # Created lazily on first access; destroy() only tears it down if
        # it was ever instantiated (see the __dict__ check below).
        return AlbumLibrary(self)

    def destroy(self):
        super(SongLibrary, self).destroy()
        if "albums" in self.__dict__:
            self.albums.destroy()

    def tag_values(self, tag):
        """Return a set of all values for the given tag."""
        return {value for song in self.values()
                for value in song.list(tag)}

    def rename(self, song, newname, changed=None):
        """Rename a song.

        This requires a special method because it can change the
        song's key.

        The 'changed' signal may fire for this library or the changed
        song is added to the passed changed set().

        If the song exists in multiple libraries you cannot use this
        method. Instead, use the librarian.
        """
        print_d("Renaming %r to %r" % (song.key, newname), self)
        # Remove under the old key, rename, re-insert under the new key.
        del(self._contents[song.key])
        song.rename(newname)
        self._contents[song.key] = song
        if changed is not None:
            # Caller batches signals: just record the song for later.
            print_d("%s: Delaying changed signal." % (type(self).__name__,))
            changed.add(song)
        else:
            self.changed({song})

    def query(self, text, sort=None, star=Query.STAR):
        """Query the library and return matching songs."""
        if isinstance(text, bytes):
            text = text.decode('utf-8')
        songs = self.values()
        if text != "":
            songs = list(filter(Query(text, star).search, songs))
        return songs
def iter_paths(root, exclude=(), skip_hidden=True):
    """yields paths contained in root (symlinks dereferenced)

    Any path starting with any of the path parts included in exclude
    are ignored (before and after dereferencing symlinks)

    Directory symlinks are not followed (except root itself)

    Args:
        root (fsnative): absolute directory to walk
        exclude (Iterable[fsnative]): path prefixes to skip
        skip_hidden (bool): Ignore files which are hidden or where any
            of the parent directories are hidden.
    Yields:
        fsnative: absolute dereferenced paths
    """
    assert isinstance(root, fsnative)
    assert all(isinstance(p, fsnative) for p in exclude)
    # Bug fix: this was `assert os.path.abspath(root)`, which can never fail
    # because abspath() returns a non-empty string; isabs() is the intended
    # "root must be absolute" check.
    assert os.path.isabs(root)

    def skip(path):
        # Excluded or (optionally) hidden paths are filtered out.
        if skip_hidden and ishidden(path):
            return True
        # FIXME: normalize paths..
        return any(path.startswith(p) for p in exclude)

    if skip_hidden and ishidden(root):
        return

    for path, dnames, fnames in os.walk(root):
        if skip_hidden:
            # Prune hidden directories in place so os.walk won't descend.
            dnames[:] = [d for d in dnames
                         if not ishidden(os.path.join(path, d))]
        for filename in fnames:
            fullfilename = os.path.join(path, filename)
            if skip(fullfilename):
                continue
            # Dereference symlinks, then re-check the exclusion rules
            # against the real location.
            fullfilename = os.path.realpath(fullfilename)
            if skip(fullfilename):
                continue
            yield fullfilename
class FileLibrary(PicklingLibrary):
    """A library containing items on a local(-ish) filesystem.

    These must support the valid, exists, mounted, and reload methods,
    and have a mountpoint attribute.
    """

    def __init__(self, name=None):
        super(FileLibrary, self).__init__(name)
        # Maps mountpoint -> {key: item} for items whose mount is unavailable.
        self._masked = {}

    def _load_init(self, items):
        """Add many items to the library, check if the
        mountpoints are available and mark items as masked if not.

        Does not check if items are valid.
        """
        mounts = {}
        contents = self._contents
        masked = self._masked

        for item in items:
            mountpoint = item.mountpoint

            if mountpoint not in mounts:
                is_mounted = ismount(mountpoint)

                # In case mountpoint is mounted through autofs we need to
                # access a sub path for it to mount
                # https://github.com/quodlibet/quodlibet/issues/2146
                if not is_mounted:
                    item.exists()
                    is_mounted = ismount(mountpoint)

                mounts[mountpoint] = is_mounted
                # at least one not mounted, make sure masked has an entry
                if not is_mounted:
                    masked.setdefault(mountpoint, {})

            if mounts[mountpoint]:
                contents[item.key] = item
            else:
                masked[mountpoint][item.key] = item

    def _load_item(self, item, force=False):
        """Add an item, or refresh it if it's already in the library.
        No signals will be fired.

        Return a tuple of booleans: (changed, removed)
        """
        print_d("Loading %r." % item.key, self)
        valid = item.valid()

        # The item is fine; add it if it's not present.
        if not force and valid:
            print_d("%r is valid." % item.key, self)
            self._contents[item.key] = item
            return False, False
        else:
            # Either we should force a load, or the item is not okay.
            # We're going to reload; this could change the key. So
            # remove the item if it's currently in.
            try:
                del(self._contents[item.key])
            except KeyError:
                present = False
            else:
                present = True
            # If the item still exists, reload it.
            if item.exists():
                try:
                    item.reload()
                except AudioFileError:
                    print_d("Error reloading %r." % item.key, self)
                    util.print_exc()
                    return False, True
                else:
                    print_d("Reloaded %r." % item.key, self)
                    self._contents[item.key] = item
                    return True, False
            elif not item.mounted():
                # We don't know if the item is okay or not, since
                # it's not not mounted. If the item was present
                # we need to mark it as removed.
                print_d("Masking %r." % item.key, self)
                self._masked.setdefault(item.mountpoint, {})
                self._masked[item.mountpoint][item.key] = item
                return False, present
            else:
                # The item doesn't exist at all anymore. Mark it as
                # removed if it was present, otherwise nothing.
                print_d("Ignoring (so removing) %r." % item.key, self)
                return False, present

    def reload(self, item, changed=None, removed=None):
        """Reload a song, possibly noting its status.

        If sets are given, it assumes the caller will handle signals,
        and only updates the sets. Otherwise, it handles signals
        itself. It *always* handles library contents, so do not
        try to remove (again) a song that appears in the removed set.
        """
        was_changed, was_removed = self._load_item(item, force=True)
        assert not (was_changed and was_removed)

        if was_changed:
            if changed is None:
                self.emit('changed', {item})
            else:
                changed.add(item)
        elif was_removed:
            if removed is None:
                self.emit('removed', {item})
            else:
                removed.add(item)

    def rebuild(self, paths, force=False, exclude=(), cofuncid=None):
        """Reload or remove songs if they have changed or been deleted.

        This generator rebuilds the library over the course of iteration.

        Any paths given will be scanned for new files, using the 'scan'
        method.

        Only items present in the library when the rebuild is started
        will be checked.

        If this function is copooled, set "cofuncid" to enable pause/stop
        buttons in the UI.
        """
        print_d("Rebuilding, force is %s." % force, self)

        # First, unmask items whose mountpoints have become available again.
        task = Task(_("Library"), _("Checking mount points"))
        if cofuncid:
            task.copool(cofuncid)
        for i, (point, items) in task.list(enumerate(self._masked.items())):
            if ismount(point):
                self._contents.update(items)
                del(self._masked[point])
                self.emit('added', list(items.values()))
                yield True

        task = Task(_("Library"), _("Scanning library"))
        if cofuncid:
            task.copool(cofuncid)
        changed, removed = set(), set()
        for i, (key, item) in task.list(enumerate(sorted(self.items()))):
            # NOTE(review): precedence here is `(key in contents and force)
            # or not item.valid()` — i.e. invalid items are reloaded even if
            # they were removed during iteration. Possibly intended to be
            # `key in contents and (force or not item.valid())`; kept as-is
            # to preserve behavior.
            if key in self._contents and force or not item.valid():
                self.reload(item, changed, removed)
            # These numbers are pretty empirical. We should yield more
            # often than we emit signals; that way the main loop stays
            # interactive and doesn't get bogged down in updates.
            if len(changed) > 100:
                self.emit('changed', changed)
                changed = set()
            if len(removed) > 100:
                self.emit('removed', removed)
                removed = set()
            if len(changed) > 5 or i % 100 == 0:
                yield True
        print_d("Removing %d, changing %d." % (len(removed), len(changed)),
                self)
        if removed:
            self.emit('removed', removed)
        if changed:
            self.emit('changed', changed)

        for value in self.scan(paths, exclude, cofuncid):
            yield value

    def add_filename(self, filename, add=True):
        """Add a file based on its filename.

        Subclasses must override this to open the file correctly.
        """
        raise NotImplementedError

    def contains_filename(self, filename):
        """Returns if a song for the passed filename is in the library.

        Returns:
            bool
        """
        raise NotImplementedError

    def scan(self, paths, exclude=(), cofuncid=None):

        def need_yield(last_yield=[0]):
            # Mutable default acts as persistent state: rate-limit yields
            # to roughly every 15ms so the UI stays responsive.
            current = time.time()
            if abs(current - last_yield[0]) > 0.015:
                last_yield[0] = current
                return True
            return False

        def need_added(last_added=[0]):
            # Same trick: batch 'added' emissions to at most once per second.
            current = time.time()
            if abs(current - last_added[0]) > 1.0:
                last_added[0] = current
                return True
            return False

        # first scan each path for new files
        paths_to_load = []
        for scan_path in paths:
            print_d("Scanning %r." % scan_path)
            desc = _("Scanning %s") % (fsn2text(unexpand(scan_path)))
            with Task(_("Library"), desc) as task:
                if cofuncid:
                    task.copool(cofuncid)

                for real_path in iter_paths(scan_path, exclude=exclude):
                    if need_yield():
                        task.pulse()
                        yield
                    # skip unknown file extensions
                    if not formats.filter(real_path):
                        continue
                    # already loaded
                    if self.contains_filename(real_path):
                        continue
                    paths_to_load.append(real_path)

        yield

        # then (try to) load all new files
        with Task(_("Library"), _("Loading files")) as task:
            if cofuncid:
                task.copool(cofuncid)

            added = []
            for real_path in task.gen(paths_to_load):
                item = self.add_filename(real_path, False)
                if item is not None:
                    added.append(item)
                    if len(added) > 100 or need_added():
                        self.add(added)
                        added = []
                if added and need_yield():
                    yield
            if added:
                self.add(added)
                added = []
                yield True

    def get_content(self):
        """Return visible and masked items"""
        items = list(self.values())
        for masked in self._masked.values():
            items.extend(masked.values())

        # Item keys are often based on filenames, in which case
        # sorting takes advantage of the filesystem cache when we
        # reload/rescan the files.
        items.sort(key=lambda item: item.key)
        return items

    def masked(self, item):
        """Return true if the item is in the library but masked."""
        try:
            point = item.mountpoint
        except AttributeError:
            # Checking a key.
            for point in self._masked.values():
                if item in point:
                    return True
            # Bug fix: previously fell off the end and returned None here;
            # the documented contract is a boolean.
            return False
        else:
            # Checking a full item.
            return item in self._masked.get(point, {}).values()

    def unmask(self, point):
        print_d("Unmasking %r." % point, self)
        items = self._masked.pop(point, {})
        if items:
            self.add(items.values())

    def mask(self, point):
        print_d("Masking %r." % point, self)
        removed = {}
        for item in self.values():
            if item.mountpoint == point:
                removed[item.key] = item
        if removed:
            self.remove(removed.values())
            self._masked.setdefault(point, {}).update(removed)

    @property
    def masked_mount_points(self):
        """List of mount points that contain masked items"""
        return list(self._masked.keys())

    def get_masked(self, mount_point):
        """List of items for a mount point"""
        return list(self._masked.get(mount_point, {}).values())

    def remove_masked(self, mount_point):
        """Remove all songs for a masked point"""
        self._masked.pop(mount_point, {})
class SongFileLibrary(SongLibrary, FileLibrary):
"""A library containing song files.
Pickles contents to disk as `FileLibrary`"""
def __init__(self, name=None):
print_d("Initializing SongFileLibrary \"%s\"." % name)
super(SongFileLibrary, self).__init__(name)
def contains_filename(self, filename):
key = normalize_path(filename, True)
return key in self._contents
def get_filename(self, filename):
key = normalize_path(filename, True)
return self._contents.get(key)
def add_filename(self, filename, add=True):
"""Add a song to the library based on filename.
If 'add' is true, the song will be added and the 'added' signal
may be fired.
Example (add=False):
load many songs and call Library.add(songs) to add all in one go.
The song is returned if it is in the library after this call.
Otherwise, None is returned.
"""
key = normalize_path(filename, True)
song = None
if key not in self._contents:
song = MusicFile(filename)
if song and add:
self.add([song])
else:
print_d("Already got file %r." % filename)
song = self._contents[key]
return song<|fim▁end|>
| |
<|file_name|>StatisticConfigurationCacheByUuidLoaderWriter.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Crown Copyright
*
* This file is part of Stroom-Stats.
*
* Stroom-Stats is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Stroom-Stats is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Stroom-Stats. If not, see <http://www.gnu.org/licenses/>.
*/
package stroom.stats.configuration;
import org.ehcache.spi.loaderwriter.CacheLoaderWriter;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.context.internal.ManagedSessionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.Map;
public class StatisticConfigurationCacheByUuidLoaderWriter implements CacheLoaderWriter<String,StatisticConfiguration>{
private static final Logger LOGGER = LoggerFactory.getLogger(StatisticConfigurationCacheByUuidLoaderWriter.class);
private final StroomStatsStoreEntityDAO stroomStatsStoreEntityDAO;
private final SessionFactory sessionFactory;
@Inject
public StatisticConfigurationCacheByUuidLoaderWriter(final StroomStatsStoreEntityDAO stroomStatsStoreEntityDAO,
final SessionFactory sessionFactory) {
this.stroomStatsStoreEntityDAO = stroomStatsStoreEntityDAO;
this.sessionFactory = sessionFactory;
}
@Override
public StatisticConfiguration load(final String key) throws Exception {
LOGGER.trace("load called for key {}", key);
//EHCache doesn't cache null values so if we can't find a stat config for this uuid,
//just return null
try (Session session = sessionFactory.openSession()) {
ManagedSessionContext.bind(session);
session.beginTransaction();
StatisticConfiguration statisticConfiguration = stroomStatsStoreEntityDAO.loadByUuid(key).orElse(null);
LOGGER.trace("Returning statisticConfiguration {}", statisticConfiguration);
return statisticConfiguration;
} catch (Exception e) {
throw new RuntimeException(String.format("Error loading stat store entity by uuid %s", key), e);
}
}
@Override
public Map<String, StatisticConfiguration> loadAll(final Iterable<? extends String> keys)
throws Exception {
throw new UnsupportedOperationException("loadAll (getAll) is not currently supported on this cache");
}
@Override
public void write(final String key, final StatisticConfiguration value) throws Exception {
throw new UnsupportedOperationException("CRUD operations are not currently supported on this cache");
}
<|fim▁hole|>
@Override
public void delete(final String key) throws Exception {
throw new UnsupportedOperationException("CRUD operations are not currently supported on this cache");
}
@Override
public void deleteAll(final Iterable<? extends String> keys) throws Exception {
throw new UnsupportedOperationException("CRUD operations are not currently supported on this cache");
}
}<|fim▁end|>
|
@Override
public void writeAll(final Iterable<? extends Map.Entry<? extends String, ? extends StatisticConfiguration>> entries) throws Exception {
throw new UnsupportedOperationException("CRUD operations are not currently supported on this cache");
}
|
<|file_name|>TestSchedule.java<|end_file_name|><|fim▁begin|>package testing;
/**
* Copyright (C) 2015 Matthew Mussomele
*
* This file is part of ChoiceOptimizationAlgorithm
*
* ChoiceOptimizationAlgorithm is free software: you can redistribute it
* and/or modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import duty_scheduler.RA;
import duty_scheduler.RA.RABuilder;
import duty_scheduler.Duty;
import duty_scheduler.Schedule;
import duty_scheduler.Schedule.ScheduleBuilder;
import java.util.ArrayList;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
/**
* JUnit Testing Class for the duty_scheduler.Schedule class.
*
* @author Matthew Mussomele
*/
public class TestSchedule {
private static final int THIS_YEAR = 2015;
private ScheduleBuilder testBuilder;
private Schedule test;
private ArrayList<RA> raList;
private ArrayList<Duty> dutyList;
/**
* Fill the four instance variables with some simple test instances before every test.
*/
@Before public void setUp() {
dutyList = new ArrayList<Duty>();
for (int i = 0; i < 6; i += 1) {
dutyList.add(new Duty(THIS_YEAR, 1, i + 1, "/"));
}
raList = new ArrayList<RA>();
for (int i = 0; i < 2; i += 1) {
RABuilder builder = new RABuilder(String.format("RA%d", i), 6, 3);
for (int j = 0; j < 6; j += 1) {
if (i == 0) {
builder.putPreference(dutyList.get(j), j);
} else {
builder.putPreference(dutyList.get(j), 5 - j);
}
}
raList.add(builder.build());
}
ScheduleBuilder builder = new ScheduleBuilder(raList.size(), dutyList.size());
for (int i = 0; i < 3; i += 1) {
builder.putAssignment(raList.get(0), dutyList.get(5 - i));
}
for (int i = 0; i < 3; i += 1) {
builder.putAssignment(raList.get(1), dutyList.get(i));
}
testBuilder = builder;
test = builder.build();
}
/**
* Tests that the ScheduleBuilder works and properly returns a Schedule instance
*/
@Test public void testBuild() {
for (int i = 0; i < raList.size(); i += 1) {
assertTrue(testBuilder.doneAssigning(raList.get(i)));<|fim▁hole|> }
/**
* Tests that the built Schedule's basic functions perform to the requirements of the package.
* These tests are to ensure that these methods are not changed by a programmer in such a way
* that would cause Schedules to be lost in a Hash or Tree based structure.
*/
@Test public void testBasics() {
for (int i = 0; i < raList.size(); i += 1) {
for (Duty duty : test.getAssignments(raList.get(i))) {
assertTrue(dutyList.contains(duty));
}
}
assertTrue(test.equals(test));
assertEquals(0, test.compareTo(test));
}
}<|fim▁end|>
|
}
assertNotNull(test);
|
<|file_name|>italianhello.py<|end_file_name|><|fim▁begin|>def italianhello():
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(105,78)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,144,60,75)
i01.moveHand("left",112,111,105,102,81,10)<|fim▁hole|> for w in range(0,3):
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.60)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 0.60, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(83,98)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,157,47,75)
i01.moveHand("left",112,111,105,102,81,10)
i01.moveHand("right",3,0,62,41,117,94)
if w==1:
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.60)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 0.65, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(83,70)
i01.mouth.speakBlocking("ciao , il mio nome e inmoov one")
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",57,145,50,68)
i01.moveHand("left",100,90,85,80,71,15)
i01.moveHand("right",3,0,31,12,26,45)
sleep(1)
i01.moveHead(83,98)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,157,47,75)
i01.moveHand("left",112,111,105,102,81,10)
i01.moveHand("right",3,0,62,41,117,94)
sleep(1)
i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85)
i01.setHandSpeed("right", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85)
i01.setArmSpeed("right", 0.75, 0.85, 0.95, 0.85)
i01.setArmSpeed("left", 0.95, 0.65, 0.75, 0.75)
i01.setHeadSpeed(0.75, 0.75)
i01.moveHead(79,100)
i01.moveArm("left",5,94,28,15)
i01.moveArm("right",5,82,28,15)
i01.moveHand("left",42,58,42,55,71,35)
i01.moveHand("right",81,50,82,60,105,113)
ear.resumeListening()<|fim▁end|>
|
i01.moveHand("right",0,0,0,50,82,180)
ear.pauseListening()
sleep(1)
|
<|file_name|>util.go<|end_file_name|><|fim▁begin|>package command
import (
"fmt"
"io/ioutil"
"os"
"path"
"runtime"
"time"
getter "github.com/hashicorp/go-getter"
"github.com/hashicorp/nomad/helper/discover"
)
// fetchBinary fetches the nomad binary and returns the temporary directory where it exists
func fetchBinary(bin string) (string, error) {
nomadBinaryDir, err := ioutil.TempDir("", "")
if err != nil {
return "", fmt.Errorf("failed to create temp dir: %v", err)
}
if bin == "" {<|fim▁hole|> if err != nil {
return "", fmt.Errorf("failed to discover nomad binary: %v", err)
}
}
dest := path.Join(nomadBinaryDir, "nomad")
if runtime.GOOS == "windows" {
dest = dest + ".exe"
}
if err = getter.GetFile(dest, bin); err != nil {
return "", fmt.Errorf("failed to get nomad binary: %v", err)
}
return nomadBinaryDir, nil
}
func procWaitTimeout(p *os.Process, d time.Duration) error {
stop := make(chan struct{})
go func() {
p.Wait()
stop <- struct{}{}
}()
select {
case <-stop:
return nil
case <-time.NewTimer(d).C:
return fmt.Errorf("timeout waiting for process %d to exit", p.Pid)
}
}<|fim▁end|>
|
bin, err = discover.NomadExecutable()
|
<|file_name|>resolver.js<|end_file_name|><|fim▁begin|>'use strict';
var fetchUrl = require('fetch').fetchUrl;<|fim▁hole|>var httpStatusCodes = require('./http.json');
var urllib = require('url');
var mime = require('mime');
// Expose to the world
module.exports.resolve = resolve;
module.exports.removeParams = removeParams;
/**
* Resolves an URL by stepping through all redirects
*
* @param {String} url The URL to be checked
* @param {Object} options Optional options object
* @param {Function} callback Callback function with error and url
*/
function resolve(url, options, callback) {
var urlOptions = {};
if (typeof options == 'function' && !callback) {
callback = options;
options = undefined;
}
options = options || {};
urlOptions.method = options.method || 'HEAD';
urlOptions.disableGzip = true; // no need for gzipping with HEAD
urlOptions.asyncDnsLoookup = true;
urlOptions.timeout = options.timeout || 10000;
urlOptions.userAgent = options.userAgent || (packageInfo.name + '/' + packageInfo.version + ' (+' + packageInfo.homepage + ')');
urlOptions.removeParams = [].concat(options.removeParams || [/^utm_/, 'ref']);
urlOptions.agent = options.agent || false;
urlOptions.rejectUnauthorized = false;
urlOptions.headers = options.headers || {};
urlOptions.maxRedirects = options.maxRedirects || 10;
fetchUrl(url, urlOptions, function(error, meta) {
var err, url;
if (error) {
err = new Error(error.message || error);
err.statusCode = 0;
return callback(err);
}
if (meta.status != 200) {
err = new Error('Server responded with ' + meta.status + ' ' + (httpStatusCodes[meta.status] || 'Invalid request'));
err.statusCode = meta.status;
return callback(err);
}
url = meta.finalUrl;
if (urlOptions.removeParams && urlOptions.removeParams.length) {
url = removeParams(url, urlOptions.removeParams);
}
var fileParams = detectFileParams(meta);
return callback(null, url, fileParams.filename, fileParams.contentType);
});
}
function detectFileParams(meta) {
var urlparts = urllib.parse(meta.finalUrl);
var filename = (urlparts.pathname || '').split('/').pop();
var contentType = (meta.responseHeaders['content-type'] || 'application/octet-stream').toLowerCase().split(';').shift().trim();
var fileParts;
var extension = '';
var contentExtension;
(meta.responseHeaders['content-disposition'] || '').split(';').forEach(function(line) {
var parts = line.trim().split('='),
key = parts.shift().toLowerCase().trim();
if (key == 'filename') {
filename = parts.join('=').trim();
}
});
if (contentType == 'application/octet-stream') {
contentType = mime.lookup(filename) || 'application/octet-stream';
} else {
fileParts = filename.split('.');
if (fileParts.length > 1) {
extension = fileParts.pop().toLowerCase();
}
contentExtension = mime.extension(contentType);
if (contentExtension && extension != contentExtension) {
extension = contentExtension;
}
if (extension) {
if (!fileParts.length || (fileParts.length == 1 && !fileParts[0])) {
fileParts = ['index'];
}
fileParts.push(extension);
}
filename = fileParts.join('.');
}
return {
filename: filename,
contentType: contentType
};
}
/**
* Removes matching GET params from an URL
*
* @param {String} url URL to be checked
* @param {Array} params An array of key matches to be removed
* @return {String} URL
*/
function removeParams(url, params) {
var parts, query = {},
deleted = false;
parts = urllib.parse(url, true, true);
delete parts.search;
if (parts.query) {
Object.keys(parts.query).forEach(function(key) {
for (var i = 0, len = params.length; i < len; i++) {
if (params[i] instanceof RegExp && key.match(params[i])) {
deleted = true;
return;
} else if (key == params[i]) {
deleted = true;
return;
}
}
query[key] = parts.query[key];
});
parts.query = query;
}
return deleted ? urllib.format(parts) : url;
}<|fim▁end|>
|
var packageInfo = require('../package.json');
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.