Columns:
prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
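Each row below pairs a prompt with a completion. A prompt is one source file wrapped in fill-in-the-middle (FIM) sentinels: <|file_name|>...<|end_file_name|> carries the file name, <|fim▁begin|> opens the file body, <|fim▁hole|> marks the span the model must fill, and <|fim▁end|> closes it. The completion is the text belonging in the hole and may be empty (the minimum completion length above is 0). As a minimal sketch of how a row decomposes — using only the sentinel strings visible in the rows themselves; the function and field names are illustrative, not part of the dataset schema:

def split_fim_row(prompt: str, completion: str) -> dict:
    """Decompose one FIM-formatted row into its parts.

    `completion` is the target text for the hole; it may be the
    empty string, since some rows in this dump have no middle.
    """
    # str.partition never raises: if a sentinel is missing, the
    # "after" part is simply empty, which keeps the sketch robust.
    _, _, rest = prompt.partition("<|file_name|>")
    file_name, _, body = rest.partition("<|end_file_name|>")
    _, _, body = body.partition("<|fim▁begin|>")   # drop everything before the file body
    body, _, _ = body.partition("<|fim▁end|>")     # drop the trailing end sentinel
    prefix, _, suffix = body.partition("<|fim▁hole|>")
    return {
        "file_name": file_name,
        "prefix": prefix,      # text before the hole
        "suffix": suffix,      # text after the hole
        "middle": completion,  # text the model is trained to produce
    }

For example, applied to the first row below, this yields file_name "shipbonussupercarrierc2shieldresists.py", a prefix ending at the hole marker, the remainder of the file as the suffix, and an empty middle.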
<|file_name|>shipbonussupercarrierc2shieldresists.py<|end_file_name|><|fim▁begin|># shipBonusSupercarrierC2ShieldResists # # Used by: # Ship: Wyvern type = "passive" <|fim▁hole|>def handler(fit, src, context): fit.ship.boostItemAttr("shieldThermalDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"), skill="Caldari Carrier") fit.ship.boostItemAttr("shieldEmDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"), skill="Caldari Carrier") fit.ship.boostItemAttr("shieldKineticDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"), skill="Caldari Carrier") fit.ship.boostItemAttr("shieldExplosiveDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"), skill="Caldari Carrier")<|fim▁end|>
<|file_name|>record-pat.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. enum t1 { a(int), b(uint), } struct T2 {x: t1, y: int} enum t3 { c(T2, uint), } fn m(input: t3) -> int { match input { t3::c(T2 {x: t1::a(m), ..}, _) => { return m; } t3::c(T2 {x: t1::b(m), y: y}, z) => { return ((m + z) as int) + y; } } } pub fn main() { assert_eq!(m(t3::c(T2 {x: t1::a(10), y: 5}, 4u)), 10); assert_eq!(m(t3::c(T2 {x: t1::b(10u), y: 5}, 4u)), 19);<|fim▁hole|><|fim▁end|>
}
<|file_name|>using_similarity.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from similaritymeasures import Similarity def main(): """ main function to create Similarity class instance and get use of it """ measures = Similarity() print measures.euclidean_distance([0,3,4,5],[7,6,3,-1]) print measures.jaccard_similarity([0,1,2,5,6],[0,2,3,5,7,9]) if __name__ == "__main__":<|fim▁hole|><|fim▁end|>
main()
<|file_name|>ec2instanceinfo.py<|end_file_name|><|fim▁begin|>############################################################################## #copyright 2013, Hamid MEDJAHED ([email protected]) Prologue # #Licensed under the Apache License, Version 2.0 (the "License"); # #you may not use this file except in compliance with the License. # #You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # #Unless required by applicable law or agreed to in writing, software # #distributed under the License is distributed on an "AS IS" BASIS, # #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # #See the License for the specific language governing permissions and # #limitations under the License. # ############################################################################## import HTMLParser data = ''' <table cellspacing="0" class="table table-bordered table-hover table-condensed" id="data"> <thead> <tr> <th class="name">Name</th> <th class="memory">Memory</th> <th class="computeunits"> <abbr title="One EC2 Compute Unit provides the equivalent CPU capacity of a 1.0-1.2 GHz 2007 Opteron or 2007 Xeon processor.">Compute Units</abbr> </th> <th class="storage">Storage</th> <th class="architecture">Architecture</th> <th class="ioperf">I/O Performance</th> <th class="maxips"> <abbr title="Adding additional IPs requires launching the instance in a VPC.">Max IPs</abbr> </th> <th class="apiname">API Name</th> <th class="cost">Linux cost</th> <th class="cost">Windows cost</th> </tr> </thead> <tbody> <tr> <td class="name">M1 Small</td> <td class="memory"><span sort="1.7">1.70 GB</span></td> <td class="computeunits"><span sort="1">1</span></td> <td class="storage"><span sort="160">160 GB</span></td> <td class="architecture">32/64-bit</td> <td class="ioperf"><span sort="1">Moderate</span></td> <td class="maxips">8</td> <td class="apiname">m1.small</td> <td class="cost" hour_cost="0.060">$0.060 per hour</td> <td class="cost" hour_cost="0.115">$0.115 per hour</td> </tr> <tr> <td class="name">M1 Medium</td> <td class="memory"><span sort="3.75">3.75 GB</span></td> <td class="computeunits"><span sort="2">2</span></td> <td class="storage"><span sort="410">410 GB</span></td> <td class="architecture">32/64-bit</td> <td class="ioperf"><span sort="1">Moderate</span></td> <td class="maxips">12</td> <td class="apiname">m1.medium</td> <td class="cost" hour_cost="0.12">$0.12 per hour</td> <td class="cost" hour_cost="0.23">$0.23 per hour</td> </tr> <tr> <td class="name">M1 Large</td> <td class="memory"><span sort="7.5">7.50 GB</span></td> <td class="computeunits"><span sort="4">4</span></td> <td class="storage"><span sort="850">850 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="3">High</span></td> <td class="maxips">30</td> <td class="apiname">m1.large</td> <td class="cost" hour_cost="0.24">$0.24 per hour</td> <td class="cost" hour_cost="0.46">$0.46 per hour</td> </tr> <tr> <td class="name">M1 Extra Large</td> <td class="memory"><span sort="15">15.00 GB</span></td> <td class="computeunits"><span sort="8">8</span></td> <td class="storage"><span sort="1690">1690 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="3">High</span></td> <td class="maxips">60</td> <td class="apiname">m1.xlarge</td> <td class="cost" hour_cost="0.48">$0.48 per hour</td> <td class="cost" hour_cost="0.92">$0.92 per hour</td> </tr> <tr> <td class="name">Micro</td> <td class="memory"><span sort="0.6">0.60 GB</span></td> <td 
class="computeunits"><span sort="2">2</span></td> <td class="storage"><span sort="0">0 GB</span></td> <td class="architecture">32/64-bit</td> <td class="ioperf"><span sort="0">Low</span></td> <td class="maxips">1</td> <td class="apiname">t1.micro</td> <td class="cost" hour_cost="0.02">$0.02 per hour</td> <td class="cost" hour_cost="0.02">$0.02 per hour</td> </tr> <tr> <td class="name">High-Memory Extra Large</td> <td class="memory"><span sort="17.10">17.10 GB</span></td> <td class="computeunits"><span sort="6.5">6.5</span></td> <td class="storage"><span sort="420">420 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="1">Moderate</span></td> <td class="maxips">60</td> <td class="apiname">m2.xlarge</td> <td class="cost" hour_cost="0.41">$0.41 per hour</td> <td class="cost" hour_cost="0.57">$0.57 per hour</td> </tr> <tr> <td class="name">High-Memory Double Extra Large</td> <td class="memory"><span sort="34.2">34.20 GB</span></td> <td class="computeunits"><span sort="13">13</span></td> <td class="storage"><span sort="850">850 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="2">High</span></td> <td class="maxips">120</td> <td class="apiname">m2.2xlarge</td> <td class="cost" hour_cost="0.82">$0.82 per hour</td> <td class="cost" hour_cost="1.14">$1.14 per hour</td> </tr> <tr> <td class="name">High-Memory Quadruple Extra Large</td> <td class="memory"><span sort="68.4">68.40 GB</span></td> <td class="computeunits"><span sort="26">26</span></td> <td class="storage"><span sort="1690">1690 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="3">High</span></td> <td class="maxips">240</td> <td class="apiname">m2.4xlarge</td> <td class="cost" hour_cost="1.64">$1.64 per hour</td> <td class="cost" hour_cost="2.28">$2.28 per hour</td> </tr> <tr> <td class="name">M3 Extra Large</td> <td class="memory"><span sort="15">15.00 GB</span></td> <td class="computeunits"><span sort="13">13</span></td> <td class="storage"><span sort="0">0 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="1">Moderate</span></td> <td class="maxips">60</td> <td class="apiname">m3.xlarge</td> <td class="cost" hour_cost="0.50">$0.50 per hour</td> <td class="cost" hour_cost="0.98">$0.98 per hour</td> </tr> <tr> <td class="name">M3 Double Extra Large</td> <td class="memory"><span sort="30">30.00 GB</span></td> <td class="computeunits"><span sort="26">26</span></td> <td class="storage"><span sort="0">0 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="2">High</span></td> <td class="maxips">120</td> <td class="apiname">m3.2xlarge</td> <td class="cost" hour_cost="1.00">$1.00 per hour</td> <td class="cost" hour_cost="1.96">$1.96 per hour</td> </tr> <tr> <td class="name">High-CPU Medium</td> <td class="memory"><span sort="1.7">1.70 GB</span></td> <td class="computeunits"><span sort="5">5</span></td> <td class="storage"><span sort="350">350 GB</span></td> <td class="architecture">32_64-bit</td> <td class="ioperf"><span sort="1">Moderate</span></td> <td class="maxips">12</td> <td class="apiname">c1.medium</td> <td class="cost" hour_cost="0.145">$0.145 per hour</td> <td class="cost" hour_cost="0.285">$0.285 per hour</td> </tr> <tr> <td class="name">High-CPU Extra Large</td> <td class="memory"><span sort="7">7.00 GB</span></td> <td class="computeunits"><span sort="20">20</span></td> <td class="storage"><span sort="1690">1690 GB</span></td> <td class="architecture">64-bit</td> <td 
class="ioperf"><span sort="2">High</span></td> <td class="maxips">60</td> <td class="apiname">c1.xlarge</td> <td class="cost" hour_cost="0.58">$0.58 per hour</td> <td class="cost" hour_cost="1.14">$1.14 per hour</td> </tr> <tr> <td class="name">Cluster Compute Quadruple Extra Large</td> <td class="memory"><span sort="23">23.00 GB</span></td> <td class="computeunits"><span sort="33.5">33.5</span></td> <td class="storage"><span sort="1690">1690 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">1</td> <td class="apiname">cc1.4xlarge</td><|fim▁hole|> <tr> <td class="name">Cluster Compute Eight Extra Large</td> <td class="memory"><span sort="60.5">60.50 GB</span></td> <td class="computeunits"><span sort="88">88</span></td> <td class="storage"><span sort="3370">3370 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">240</td> <td class="apiname">cc2.8xlarge</td> <td class="cost" hour_cost="2.40">$2.40 per hour</td> <td class="cost" hour_cost="2.97">$2.97 per hour</td> </tr> <tr> <td class="name">Cluster GPU Quadruple Extra Large</td> <td class="memory"><span sort="22">22.00 GB</span></td> <td class="computeunits"><span sort="33.5">33.5</span></td> <td class="storage"><span sort="1690">1690 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">1</td> <td class="apiname">cg1.4xlarge</td> <td class="cost" hour_cost="2.10">$2.10 per hour</td> <td class="cost" hour_cost="2.60">$2.60 per hour</td> </tr> <tr> <td class="name">High I/O Quadruple Extra Large</td> <td class="memory"><span sort="60.5">60.50 GB</span></td> <td class="computeunits"><span sort="35">35</span></td> <td class="storage"><span sort="2048">2048 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">1</td> <td class="apiname">hi1.4xlarge</td> <td class="cost" hour_cost="3.10">$3.10 per hour</td> <td class="cost" hour_cost="3.58">$3.58 per hour</td> </tr> <tr> <td class="name">High Storage Eight Extra Large</td> <td class="memory"><span sort="117.00">117.00 GB</span></td> <td class="computeunits"><span sort="35">35</span></td> <td class="storage"><span sort="49152">48 TB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">1</td> <td class="apiname">hs1.8xlarge</td> <td class="cost" hour_cost="4.600">$4.600 per hour</td> <td class="cost" hour_cost="4.931">$4.931 per hour</td> </tr> <tr> <td class="name">High Memory Cluster Eight Extra Large</td> <td class="memory"><span sort="244.00">244.00 GB</span></td> <td class="computeunits"><span sort="88">88</span></td> <td class="storage"><span sort="240">240 GB</span></td> <td class="architecture">64-bit</td> <td class="ioperf"><span sort="4">Very High</sort></td> <td class="maxips">1</td> <td class="apiname">cr1.8xlarge</td> <td class="cost" hour_cost="3.500">$3.500 per hour</td> <td class="cost" hour_cost="3.831">$3.831 per hour</td> </tr> </tbody> </table> ''' class TableParser(HTMLParser.HTMLParser): def __init__(self): HTMLParser.HTMLParser.__init__(self) self.in_td = False self.flavors = [] def handle_starttag(self, tag, attrs): if tag == 'td': self.in_td = True def handle_data(self, data): if self.in_td: self.flavors.append(data) def handle_endtag(self, tag): self.in_td = False<|fim▁end|>
<td class="cost" hour_cost="1.30">$1.30 per hour</td> <td class="cost" hour_cost="1.61">$1.61 per hour</td> </tr>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Copyright (c) 2015, Philipp Klaus. All rights reserved. License: GPLv3 """ from distutils.core import setup setup(name='netio230a', version = '1.1.9', description = 'Python package to control the Koukaam NETIO-230A', long_description = 'Python software to access the Koukaam NETIO-230A and NETIO-230B: power distribution units / controllable power outlets with Ethernet interface', author = 'Philipp Klaus', author_email = '[email protected]', url = 'https://github.com/pklaus/netio230a', license = 'GPL3+',<|fim▁hole|> zip_safe = True, platforms = 'any', keywords = 'Netio230A Koukaam PDU', classifiers = [ 'Development Status :: 4 - Beta', 'Operating System :: OS Independent', 'License :: OSI Approved :: GPL License', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ] )<|fim▁end|>
packages = ['netio230a'], scripts = ['scripts/netio230a_cli', 'scripts/netio230a_discovery', 'scripts/netio230a_fakeserver'],
<|file_name|>static.py<|end_file_name|><|fim▁begin|>from django import template from django.apps import apps from django.utils.encoding import iri_to_uri from django.utils.six.moves.urllib.parse import urljoin register = template.Library() class PrefixNode(template.Node): def __repr__(self): return "<PrefixNode for %r>" % self.name def __init__(self, varname=None, name=None): if name is None: raise template.TemplateSyntaxError( "Prefix nodes must be given a name to return.") self.varname = varname self.name = name @classmethod def handle_token(cls, parser, token, name): """ Class method to parse prefix node and return a Node. """ # token.split_contents() isn't useful here because tags using this method don't accept variable as arguments tokens = token.contents.split() if len(tokens) > 1 and tokens[1] != 'as': raise template.TemplateSyntaxError( "First argument in '%s' must be 'as'" % tokens[0]) if len(tokens) > 1: varname = tokens[2] else: varname = None return cls(varname, name) @classmethod def handle_simple(cls, name): try: from django.conf import settings except ImportError: prefix = '' else: prefix = iri_to_uri(getattr(settings, name, '')) return prefix def render(self, context): prefix = self.handle_simple(self.name)<|fim▁hole|> if self.varname is None: return prefix context[self.varname] = prefix return '' @register.tag def get_static_prefix(parser, token): """ Populates a template variable with the static prefix, ``settings.STATIC_URL``. Usage:: {% get_static_prefix [as varname] %} Examples:: {% get_static_prefix %} {% get_static_prefix as static_prefix %} """ return PrefixNode.handle_token(parser, token, "STATIC_URL") @register.tag def get_media_prefix(parser, token): """ Populates a template variable with the media prefix, ``settings.MEDIA_URL``. Usage:: {% get_media_prefix [as varname] %} Examples:: {% get_media_prefix %} {% get_media_prefix as media_prefix %} """ return PrefixNode.handle_token(parser, token, "MEDIA_URL") class StaticNode(template.Node): def __init__(self, varname=None, path=None): if path is None: raise template.TemplateSyntaxError( "Static template nodes must be given a path to return.") self.path = path self.varname = varname def url(self, context): path = self.path.resolve(context) return self.handle_simple(path) def render(self, context): url = self.url(context) if self.varname is None: return url context[self.varname] = url return '' @classmethod def handle_simple(cls, path): if apps.is_installed('django.contrib.staticfiles'): from django.contrib.staticfiles.storage import staticfiles_storage return staticfiles_storage.url(path) else: return urljoin(PrefixNode.handle_simple("STATIC_URL"), path) @classmethod def handle_token(cls, parser, token): """ Class method to parse prefix node and return a Node. """ bits = token.split_contents() if len(bits) < 2: raise template.TemplateSyntaxError( "'%s' takes at least one argument (path to file)" % bits[0]) path = parser.compile_filter(bits[1]) if len(bits) >= 2 and bits[-2] == 'as': varname = bits[3] else: varname = None return cls(varname, path) @register.tag('static') def do_static(parser, token): """ Joins the given path with the STATIC_URL setting. Usage:: {% static path [as varname] %} Examples:: {% static "myapp/css/base.css" %} {% static variable_with_path %} {% static "myapp/css/base.css" as admin_base_css %} {% static variable_with_path as varname %} """ return StaticNode.handle_token(parser, token) def static(path): """ Given a relative path to a static asset, return the absolute path to the asset. 
""" return StaticNode.handle_simple(path)<|fim▁end|>
<|file_name|>prune.rs<|end_file_name|><|fim▁begin|>use super::node::*; use super::verification::{is_node_list_serial, is_node_serial}; use super::NodeCellRef; use super::MIN_ENTRY_KEY; use super::*; use itertools::Itertools; use std::borrow::Borrow; use std::collections::{HashMap, HashSet}; use std::fmt::Debug; use std::iter::Iterator; type AlterPair = (EntryKey, NodeCellRef); #[derive(Debug)] pub struct AlteredNodes { pub removed: Vec<AlterPair>, pub key_modified: Vec<AlterPair>, } impl AlteredNodes { fn summary(&self) -> String { format!( "Removed {}, modified {}", self.removed.len(), self.key_modified.len() ) } fn is_empty(&self) -> bool { self.removed.is_empty() && self.key_modified.is_empty() } } pub fn prune<'a, KS, PS>( node: &NodeCellRef, altered: AlteredNodes, level: usize, ) -> (AlteredNodes, Vec<NodeWriteGuard<KS, PS>>) where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { let mut level_page_altered = AlteredNodes { removed: vec![], key_modified: vec![], }; if altered.is_empty() { debug!("Nothing to do at level {}, skipped", level); return (level_page_altered, vec![]); } debug!("Start prune at level {}", level); // Follwing procedures will scan the pages from left to right in key order, we need to sort it to // make sure the keys we are probing are also sorted // altered.removed.sort_by(|a, b| a.0.cmp(&b.0)); // altered.key_modified.sort_by(|a, b| a.0.cmp(&b.0)); // altered.added.sort_by(|a, b| a.0.cmp(&b.0)); let mut all_pages = probe_key_range(node, &altered, level); debug!( "Prune had selected {} pages at level {}", all_pages.len(), level ); if all_pages.is_empty() { trace!("No node to prune at this level - {}", level); return (level_page_altered, vec![]); } if cfg!(debug_assertions) && !is_node_list_serial(&all_pages) { error!("Node list not serial after selection. 
Dumping"); for (i, page) in all_pages.iter().enumerate() { println!( "{}\t{}: {:?} - [{:?} keys] -> {:?}", i, page.type_name(), page.node_ref().address(), page.keys().len(), page.right_bound() ); } panic!(); } if !altered.removed.is_empty() { // Locating refrences in the pages to be removed let page_children_to_be_retained = ref_to_be_retained(&mut all_pages, &altered, level); trace!( "Prune retains {:?} pages {}, items {}, empty {}", page_children_to_be_retained, page_children_to_be_retained.len(), page_children_to_be_retained .iter() .map(|p| p.len()) .sum::<usize>(), page_children_to_be_retained .iter() .filter(|p| p.is_empty()) .count() ); if cfg!(debug_assertions) { for (pid, page) in page_children_to_be_retained.iter().enumerate() { for entry in page { let node = read_unchecked::<KS, PS>(&entry.1); debug_assert!( !node.is_empty_node(), "{} - {:?}, {}", pid, entry, node.type_name() ); } } } // make all the necessary changes in current level pages according to is living children all_pages = filter_retained( all_pages, page_children_to_be_retained, &mut level_page_altered, level, ); debug!("Sub altred {}", altered.summary()); debug!("Cur altered {}", level_page_altered.summary()); } debug!( "Prune had selected {} living pages at level {}", all_pages.len(), level ); // alter keys if altered.key_modified.len() > 0 { update_and_mark_altered_keys(&mut all_pages, &altered, &mut level_page_altered); } debug!("Sub altred {}", altered.summary()); debug!("Cur altered {}", level_page_altered.summary()); if level != 0 { all_pages = update_right_nodes(all_pages); debug!( "Prune had updated right nodes for {} living pages at level {}", all_pages.len(), level ); debug_assert!( is_node_list_serial(&all_pages), "node not serial before checking corner cases" ); if merge_single_ref_pages(&mut all_pages, &mut level_page_altered) { all_pages = update_right_nodes(all_pages); } debug!( "Prune had merged all single ref pages, now have {} living pages at level {}", all_pages.len(), level ); debug_assert!( is_node_list_serial(&all_pages), "node not serial after checked corner cases" ); } debug!("Sub altred {}", altered.summary()); debug!("Cur altered {}", level_page_altered.summary()); debug!("Prune completed at level {}", level); (level_page_altered, all_pages) } fn removed_iter<'a>(altered_keys: &AlteredNodes) -> impl Iterator<Item = &AlterPair> { altered_keys.removed.iter() } fn altered_iter<'a>(altered_keys: &AlteredNodes) -> impl Iterator<Item = &AlterPair> { altered_keys.key_modified.iter() } fn probe_key_range<KS, PS>( node: &NodeCellRef, altered: &AlteredNodes, level: usize, ) -> Vec<NodeWriteGuard<KS, PS>> where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { trace!("Acquiring first prune node"); let mut all_pages = vec![write_node::<KS, PS>(node)]; // collect all pages in bound and in this level let max_key = { let removed = removed_iter(&altered) .map(|(e, _)| e) .last() .unwrap_or(&*MIN_ENTRY_KEY); let alted = altered_iter(&altered) .map(|(e, _)| e) .last() .unwrap_or(&*MIN_ENTRY_KEY); std::cmp::max(removed, alted) }; debug!("Max key to prune to is {:?}", max_key); // This process will probe pages by all alter node types in this level to select the right pages // which contains those entries to work with loop { let (next, ends) = { let last_page = all_pages.last().unwrap().borrow(); if last_page.is_ref_none() { break; } debug_assert!(!last_page.is_empty_node(), "at {:?}", last_page.node_ref()); let last_innode = last_page.innode(); debug_assert!( is_node_serial(last_page), 
"node not serial on fetching pages {:?} - {:?} | {}", last_page.keys(), last_page.innode().ptrs.as_slice_immute(), level ); let next_node_ref = &last_innode.right; if cfg!(debug_assertions) { let next_node = read_unchecked::<KS, PS>(next_node_ref); // right refercing nodes are out of order, need to investigate debug_assert!(!next_node.node_ref().ptr_eq(last_page.node_ref())); for p in &all_pages { if p.node_ref().ptr_eq(next_node.node_ref()) { panic!("Duplicated unordered node from right referecing"); } if !next_node.is_none() { assert!(p.right_bound() < next_node.right_bound()); } } } debug!("Obtain node lock for {:?}...", next_node_ref); let next_node = write_node::<KS, PS>(next_node_ref); debug!("Obtained node lock for {:?}", next_node_ref); let ends = &last_innode.right_bound > max_key; debug!("Collecting node {:?}", next_node.node_ref()); (next_node, ends) }; // all_pages contains all of the entry keys we need to work for remove, add and modify all_pages.push(next); if ends { break; } } debug!( "Prune selected at level {}, {} pages", level, all_pages.len() ); return all_pages; } fn ref_to_be_retained<'a, KS, PS>( all_pages: &mut Vec<NodeWriteGuard<KS, PS>>, altered: &AlteredNodes, level: usize, ) -> Vec<Vec<(usize, NodeCellRef)>> where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { let removed = altered .removed .iter() .map(|(_, r)| r.address()) .collect::<HashSet<_>>(); debug!( "Remove set is {:?}, size {} at level {}", removed, altered.removed.len(), level ); debug_assert!(all_pages.first().unwrap().first_key() <= &altered.removed.first().unwrap().0); let mut remove_count = 0; let matching_refs = all_pages .iter() .map(|page| { // removed is a sequential external nodes that have been removed and have been set to empty // nodes are ordered so we can iterate them while scanning the reference in upper levels. 
debug_assert!( is_node_serial(page), "node not serial before live selection - {}", level ); if page.is_ref_none() { return vec![]; } page.innode().ptrs.as_slice_immute()[..page.len() + 1] .iter() .enumerate() .filter_map(|(i, sub_level)| { if removed.contains(&sub_level.address()) { remove_count += 1; return None; } Some((i, sub_level.clone())) }) .collect_vec() }) .collect_vec(); debug!( "Remove count is {}, should be {} at level {}", remove_count, removed.len(), level ); if cfg!(debug_assertions) && remove_count != removed.len() { warn!("Remove set and actual removed numner does not match"); } matching_refs } fn filter_retained<KS, PS>( all_pages: Vec<NodeWriteGuard<KS, PS>>, retained: Vec<Vec<(usize, NodeCellRef)>>, level_page_altered: &mut AlteredNodes, level: usize, ) -> Vec<NodeWriteGuard<KS, PS>> where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { return all_pages .into_iter() .zip(retained) .filter_map(|(mut page, retained_refs)| { let is_not_none = !page.is_ref_none(); if is_not_none && retained_refs.len() == 0 { // check if all the children ptr in this page have been removed // if yes mark it and upper level will handel it level_page_altered .removed .push((page.right_bound().clone(), page.node_ref().clone())); page.make_empty_node(false); None } else { if is_not_none { // extract all live child ptrs and construct a new page from them let mut new_keys = KS::init(); let mut new_ptrs = PS::init(); let ptr_len = retained_refs.len(); // Copy retained keys and refs to target page for (i, &(oi, _)) in retained_refs.iter().skip(1).enumerate() { new_keys.as_slice()[i] = page.keys()[oi - 1].clone(); } for (i, (_, ptr)) in retained_refs.into_iter().enumerate() { new_ptrs.as_slice()[i] = ptr; } { debug_assert!( is_node_serial(&page), "node not serial before update - {}", level ); let mut innode = page.innode_mut(); innode.len = ptr_len - 1; innode.keys = new_keys; innode.ptrs = new_ptrs; trace!( "Found non-empty node, new ptr length {}, node len {}", ptr_len, innode.len ); } debug_assert!( is_node_serial(&page), "node not serial after update - {}", level ); } Some(page) } }) .collect_vec(); } fn update_and_mark_altered_keys<'a, KS, PS>( pages: &mut Vec<NodeWriteGuard<KS, PS>>, to_be_altered: &AlteredNodes, next_level_altered: &mut AlteredNodes, ) where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { let mut modified_set = HashMap::new(); for modified_items in &to_be_altered.key_modified { modified_set.insert(modified_items.1.address(), modified_items.0.clone()); } let mut modified = 0; debug!("Doing key modification, {} items", modified_set.len()); for page in pages { for (i, ptr) in page.innode().ptrs.as_slice_immute().iter().enumerate() { if let Some(modified_key) = modified_set.get_mut(&ptr.address()) { let modify_to = mem::take(modified_key); let alter_index = i + 1; debug!("Alter key at {} with {:?}", alter_index, modify_to); if alter_index < page.len() { debug!("Modify key to {:?}", modify_to); page.innode_mut().keys.as_slice()[alter_index] = modify_to; } else { debug!("Modify key on boundary, postpone for {:?}", modify_to); // This page have changed its boundary, should postpone modification to upper level *page.right_bound_mut() = modify_to.clone(); next_level_altered .key_modified .push((modify_to, page.node_ref().clone())); } modified += 1; break; } } } debug!( "Done key modification, {}, expect {}", modified, modified_set.len() ); } fn update_right_nodes<KS, PS>(all_pages: Vec<NodeWriteGuard<KS, PS>>) -> 
Vec<NodeWriteGuard<KS, PS>> where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { // This procedure will also remove empty nodes if all_pages.is_empty() { trace!("No nodes available to update right node"); return all_pages; } let mut non_emptys = all_pages .into_iter() .filter(|p| p.is_ref_none() || !p.is_empty_node()) .collect_vec(); let right_refs = non_emptys .iter() .enumerate() .map(|(i, p)| { if p.is_ref_none() { return NodeCellRef::default(); } let right_ref = if i == non_emptys.len() - 1 { non_emptys[i].right_ref().unwrap().clone() } else { non_emptys[i + 1].node_ref().clone() }; debug_assert!(!p.is_none()); if cfg!(debug_assertions) { let right = read_unchecked::<KS, PS>(&right_ref); if !right.is_none() { let left = p.right_bound(); let right = right.right_bound(); assert!(left < right, "failed on checking left right page right bound, expecting {:?} less than {:?}", left, right); } } right_ref }) .collect_vec(); non_emptys .iter_mut() .zip(right_refs.into_iter()) .for_each(|(p, r)| { if !p.is_ref_none() { debug_assert!(!p.node_ref().ptr_eq(&r)); *p.right_ref_mut().unwrap() = r } }); return non_emptys; } // Return true if the case is handled fn merge_single_ref_pages<KS, PS>( all_pages: &mut Vec<NodeWriteGuard<KS, PS>>, level_page_altered: &mut AlteredNodes, ) -> bool where KS: Slice<EntryKey> + Debug + 'static, PS: Slice<NodeCellRef> + 'static, { // dealing with corner cases // here, a page may have one ptr and no keys, then the remaining ptr need to be merge with right page let num_pages = all_pages.len(); debug!("Checking single ptr node, {} pages", num_pages); let mut index = 0; let mut corner_case_handled = false; while index < num_pages - 1 { if all_pages[index].is_ref_none() || all_pages[index + 1].is_ref_none() { break; } let left_node_len = all_pages[index].len(); let right_node_len = all_pages[index + 1].len(); if left_node_len == 0 || right_node_len == 0 { debug!("Found single ref node, prep to merge"); corner_case_handled = true; // left or right node (or both) have only one ptr and length 0 // We are not going to use any complicated approach, will take it slow // but easy to understand and debug // Here, we use one vec for keys and one for ptrs let mut keys = vec![]; let mut ptrs = vec![]; // Ensure node serial debug_assert!(verification::is_node_serial(&all_pages[index])); debug_assert!(verification::is_node_serial(&all_pages[index + 1])); debug_assert!(all_pages[index].right_bound() <= all_pages[index + 1].right_bound()); // Collect all left node keys for key in all_pages[index].keys() { keys.push(key.clone()); } // Collect left node right bound key keys.push(all_pages[index].right_bound().clone()); // Collect right node keys for key in all_pages[index + 1].keys() { keys.push(key.clone()); } // Ensure all keys are serial after keys are collected if cfg!(debug_assertions) { debug_assert!(verification::are_keys_serial(keys.as_slice())); } // Collect all left node ptrs for ptr in all_pages[index].ptrs() { ptrs.push(ptr.clone()); } // Collect all right node ptrs for ptr in all_pages[index + 1].ptrs() { ptrs.push(ptr.clone()); } let num_keys = keys.len(); // Ensure key and value numbers matches debug_assert_eq!(num_keys + 1, ptrs.len()); // Here we have two strategy to deal with single node problem // When both left and right node have no keys and one ptr, we should // combine two nodes to right and remove the left node; // this can be hazardous for node search in only one direction // When only one of the node has more than one keys and two 
ptrs, we can split the // combined vec in the half and distribute them let left_keys = if num_keys < 3 { num_keys } else { num_keys / 2 }; if num_keys >= KS::slice_len() { debug!( "Merge single node by rebalancing with right, (l:{}, r{}) of {} at {}",<|fim▁hole|> right_node_len, KS::slice_len(), index ); // Move and setup left node for i in 0..left_keys { all_pages[index].innode_mut().keys.as_slice()[i] = mem::take(&mut keys[i]); all_pages[index].innode_mut().ptrs.as_slice()[i] = mem::take(&mut ptrs[i]); } all_pages[index].innode_mut().ptrs.as_slice()[left_keys] = mem::take(&mut ptrs[left_keys]); all_pages[index].innode_mut().len = left_keys; // Update the right boundary of the left node let left_node_right_bound = mem::take(&mut keys[left_keys]); all_pages[index].innode_mut().right_bound = left_node_right_bound.clone(); // The right boundary of left node has been changed, put the left node to alter list level_page_altered .key_modified .push((left_node_right_bound, all_pages[index].node_ref().clone())); // Move and setup right node let right_keys_offset = left_keys + 1; for i in right_keys_offset..num_keys { let right_index = i - right_keys_offset; all_pages[index + 1].innode_mut().keys.as_slice()[right_index] = mem::take(&mut keys[i]); all_pages[index + 1].innode_mut().ptrs.as_slice()[right_index] = mem::take(&mut ptrs[i]); } all_pages[index + 1].innode_mut().ptrs.as_slice()[num_keys] = mem::take(&mut ptrs[num_keys]); all_pages[index + 1].innode_mut().len = num_keys - right_keys_offset; } else { debug!( "Merge single node by eliminating left node with {} keys", num_keys ); let first_key = keys[0].clone(); for i in 0..num_keys { all_pages[index + 1].innode_mut().keys.as_slice()[i] = mem::take(&mut keys[i]); all_pages[index + 1].innode_mut().ptrs.as_slice()[i] = mem::take(&mut ptrs[i]); } all_pages[index + 1].innode_mut().ptrs.as_slice()[num_keys] = mem::take(&mut ptrs[num_keys]); all_pages[index + 1].innode_mut().len = num_keys; // make left node empty all_pages[index].make_empty_node(false); level_page_altered .removed .push((first_key, all_pages[index].node_ref().clone())); } } index += 1; } debug!("Single ptr processed: {}", corner_case_handled); return corner_case_handled; }<|fim▁end|>
left_node_len,
<|file_name|>vecset.rs<|end_file_name|><|fim▁begin|>/** * A set implemented as a binary vector. */ use std::cmp::Ordering; use std::iter::{FromIterator, IntoIterator, range_step}; use std::default::Default; use std::slice; use std::fmt; use test::Bencher; /** * A set implemented using a sorted vector. Due to memory locality and * caching, this set implementation will generally be faster than the * HashSet for moderately-sized sets with smallish keys. * * Benchmarks show that it outperforms the HashSet up to around * 10,000 items; but your mileage may vary depending on the size of * your cache and the sixe of the elements you store. */ #[derive(Clone, PartialEq, Eq)] pub struct VecSet<T> { items: Vec<T> } /** * It is assumed that comparing T's for ordering is cheap. */ impl<T: Ord> VecSet<T> { pub fn new() -> VecSet<T> { VecSet { items: Vec::new() } } pub fn with_capacity(n: usize) -> VecSet<T> { VecSet { items: Vec::with_capacity(n) } } pub fn len(&self) -> usize { self.items.len() } /** * Inserts an element into the set. Returns true if the element was added, * or false if it already existed in the set. */ pub fn insert(&mut self, v: T) -> bool {; if self.items.is_empty() || (v > self.items[self.items.len()-1]) { self.items.push(v); true } else { let index = lower_bound(&self.items[..], &v); if self.items[index] == v { false } else { self.items.insert(index, v); true } } } /** * Removes an element from the set. Returns true if the element was removed<|fim▁hole|> * and false if the element was not in the set to remove. */ pub fn remove(&mut self, v: &T) -> bool { if !self.items.is_empty() { let index = lower_bound(&self.items[..], v); if (index < self.items.len()) && (self.items[index] == *v) { self.items.remove(index); return true; } } return false; } pub fn contains(&self, v: &T) -> bool { if self.items.is_empty() { return false } let index = lower_bound(&self.items[..], v); if (index < self.items.len()) && (self.items[index] == *v) { true } else { false } } /** * */ pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn iter(&self) -> slice::Iter<T> { self.items.iter() } pub fn vec<'a>(&'a self) -> &'a Vec<T> { &self.items } pub fn clear(&mut self) { self.items = Vec::new(); } pub fn reserve(&mut self, n: usize) { self.items.reserve(n) } } #[test] fn new_vecset_is_empty() { let s = VecSet::<isize>::new(); assert!(s.is_empty()); assert!(s.len() == 0); } #[test] fn inserting_elements_affects_size() { let mut s : VecSet<usize> = VecSet::new(); for x in (0 .. 
100) { s.insert(x); assert_eq!(x+1, s.len()); } } #[test] fn inserting_duplicates_does_not_affect_size() { let mut s : VecSet<isize> = FromIterator::from_iter(1..5); s.insert(1); s.insert(2); s.insert(3); s.insert(4); assert_eq!(4, s.len()); } #[test] fn inserting_in_random_order_creates_valid_set() { let mut s : VecSet<isize> = FromIterator::from_iter( [3, 1, 2, 6, 5, 4, 0].iter().map(|& i| i)); s.insert(1); s.insert(2); s.insert(3); s.insert(4); assert_eq!(7, s.len()); } #[test] fn removing_items_from_empty_set_returns_false(){ let mut set : VecSet<isize> = VecSet::new(); assert_eq!(false, set.remove(&42)) } #[test] fn removing_item_returns_true() { let mut set : VecSet<isize> = FromIterator::from_iter((0..5)); assert_eq!(5, set.len()); assert_eq!(true, set.remove(&3)); assert_eq!(4, set.len()); assert_eq!(false, set.remove(&3)); assert_eq!(4, set.len()); } #[test] fn removing_first_item_returns_true() { let mut set : VecSet<isize> = FromIterator::from_iter((0..5)); assert_eq!(5, set.len()); assert_eq!(true, set.remove(&0)); assert_eq!(4, set.len()); assert_eq!(false, set.remove(&0)); assert_eq!(4, set.len()); } #[test] fn removing_last_item_returns_true() { let mut set : VecSet<isize> = FromIterator::from_iter((0..5)); assert_eq!(5, set.len()); assert_eq!(true, set.remove(&4)); assert_eq!(4, set.len()); assert_eq!(false, set.remove(&4)); assert_eq!(4, set.len()); } #[test] fn removing_non_existant_items_returns_false(){ let mut set : VecSet<isize> = FromIterator::from_iter((0..5)); assert_eq!(5, set.len()); assert_eq!(false, set.remove(&42)); assert_eq!(5, set.len()); } #[cfg(test)] static RANDOM_TEST_DATA : &'static [usize] = &[ 57, 84, 22, 88, 21, 71, 71, 10, 3, 56, 9, 81, 78, 46, 84, 73, 28, 54, 40, 70, 9, 86, 6, 7, 53, 52, 5, 6, 68, 78, 20, 13, 91, 6, 57, 50, 95, 18, 64, 95, 78, 39, 56, 91, 43, 20, 98, 87, 46, 10, 44, 20, 90, 10, 49, 51, 93, 9, 41, 13, 5, 53, 83, 39, 46, 99, 14, 66, 94, 77, 76, 91, 52, 67, 41, 12, 58, 11, 76, 72, 88, 63, 7, 82, 8, 68, 78, 46, 4, 25, 44, 3, 82, 6, 2, 32, 7, 100, 94, 87, ]; #[bench] fn insert_1000_32_vecset(b: &mut Bencher) { b.iter(|| { let mut v : VecSet<(u64, u64)> = VecSet::new(); for i in (0 .. 10) { for x in RANDOM_TEST_DATA.iter() { let val : u64 = (*x as u64) * 100 * (i+1) as u64; v.insert((val, val)); } } }) } //Comparison benchmark using HashSet // #[bench] // fn insert_1000_32_hashset(b: &mut Bencher) { // use std::collections::HashSet; // // b.iter(|| { // let mut v : HashSet<(u64, u64)> = HashSet::new(); // for i in (0 .. 10) { // for x in RandomTestData.iter() { // let val : u64 = (*x as u64) * 100 * (i+1) as u64; // v.insert((val, val)); // } // } // }) // } #[bench] fn remove_1000_32_vecset(b: &mut Bencher) { let mut s : VecSet<(u64, u64)> = VecSet::new(); for i in (0 .. 50) { for x in RANDOM_TEST_DATA.iter() { let val : u64 = (*x as u64) * 100 * (i+1) as u64; s.insert((val, val)); } } b.iter(|| { let mut v = s.clone(); for i in (0 .. 50) { for x in RANDOM_TEST_DATA.iter() { let val : u64 = (*x as u64) * 100 * (i+1) as u64; v.remove(&(val, val)); } } }) } // #[bench] // fn remove_1000_32_hashset(b: &mut Bencher) { // use std::collections::HashSet; // // let mut s : HashSet<(u64, u64)> = HashSet::new(); // for i in (0 .. 50) { // for x in RANDOM_TEST_DATA.iter() { // let val : u64 = (*x as u64) * 100 * (i+1) as u64; // s.insert((val, val)); // } // } // // b.iter(|| { // let mut v = s.clone(); // for i in (0 .. 
50) { // for x in RANDOM_TEST_DATA.iter() { // let val : u64 = (*x as u64) * 100 * (i+1) as u64; // v.remove(&(val, val)); // } // } // }) // } // ---------------------------------------------------------------------------- // Default Trait // ---------------------------------------------------------------------------- impl<T: Ord> Default for VecSet<T> { #[inline] fn default() -> VecSet<T> { VecSet::new() } } #[test] fn default_vecset_is_empty() { let s : VecSet<usize> = Default::default(); assert!(s.is_empty()); assert!(s.len() == 0); } // ---------------------------------------------------------------------------- // FromIterator trait // ---------------------------------------------------------------------------- impl<T: Ord> FromIterator<T> for VecSet<T> { fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> VecSet<T> { let mut result = VecSet::new(); result.extend(iter); result } } // ---------------------------------------------------------------------------- // Extend trait // ---------------------------------------------------------------------------- impl<T: Ord> Extend<T> for VecSet<T> { fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) { for v in iter { self.insert(v); } } } #[test] fn extending_inserts_elements() { let mut s = VecSet::<usize>::new(); s.extend(0 .. 100); assert_eq!(100, s.len()); } #[test] fn extending_does_not_insert_duplicates_elements() { let mut s = VecSet::<usize>::new(); s.extend(0..100); s.extend(0..100); assert_eq!(100, s.len()); } // ---------------------------------------------------------------------------- // // ---------------------------------------------------------------------------- impl<T: fmt::Debug> fmt::Debug for VecSet<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.items.fmt(f) } } // ---------------------------------------------------------------------------- // A map stored in a vector // ---------------------------------------------------------------------------- /** * A map stored in a vector. Faster than a HashMap for small map sizes and * small values. */ pub struct VecMap<K, V> { keys: Vec<K>, values: Vec<V> } impl<K: Ord, V: Clone> VecMap<K, V> { pub fn new() -> VecMap<K, V> { VecMap { keys: Vec::new(), values: Vec::new() } } pub fn with_capacity(n: usize) -> VecMap<K, V> { VecMap { keys: Vec::with_capacity(n), values: Vec::with_capacity(n) } } /** * Inserts a key-value pair into the map. If the key already had a value * present in the map, that value is returned. Returns None otherwise. 
*/ pub fn insert(&mut self, k: K, v: V) -> Option<V> { let n = self.keys.len(); let mut rval = None; if self.keys.is_empty() || (self.keys[n-1] < k) { self.keys.push(k); self.values.push(v); } else { let index = lower_bound(&self.keys[..], &k); if self.keys[index] == k { rval = Some(self.values[index].clone()); self.values[index] = v; } else { self.keys.insert(index, k); self.values.insert(index, v); } } rval } pub fn remove(&mut self, k: &K) -> Option<V> { let n = self.keys.len(); let mut rval = None; if !self.keys.is_empty() && *k <= self.keys[n-1] { let index = lower_bound(&self.keys[..], &k); if self.keys[index] == *k { rval = Some(self.values[index].clone()); self.keys.remove(index); self.values.remove(index); } } rval } pub fn get(&self, k: &K) -> Option<&V> { let n = self.keys.len(); let mut rval = None; if !self.keys.is_empty() && *k <= self.keys[n-1] { let index = lower_bound(&self.keys[..], &k); if self.keys[index] == *k { rval = Some(&self.values[index]) } } rval } pub fn is_empty(&self) -> bool { self.keys.is_empty() } pub fn len(&self) -> usize { self.keys.len() } } #[test] fn new_vecmap_is_empty() { let m : VecMap<usize,String> = VecMap::new(); assert!(m.is_empty()); assert_eq!(0, m.len()); } #[test] fn inserting_elements_affects_vecmap_size() { let mut m : VecMap<usize,String> = VecMap::new(); for x in (0 .. 100) { m.insert(x, format!("{:?}", x)); assert_eq!(x+1, m.len()); } } #[test] fn inserting_items_are_findable() { let mut m : VecMap<usize,String> = VecMap::new(); for x in (0 .. 100) { m.insert(x, format!("{:?}", x)); } for x in (0 .. 100) { let text = format!("{:?}", x); match m.get(&x) { Some(s) => assert!(*s == text), _ => panic!("Expected Some(x), got None") } } } #[test] fn inserting_duplicates_does_not_affect_vecmap_size() { let mut m : VecMap<usize,String> = VecMap::new(); m.insert(42, "first".to_string()); m.insert(42, "second".to_string()); assert_eq!(1, m.len()); } #[test] fn removing_items_from_empty_map_returns_none() { let mut m : VecMap<usize, String> = VecMap::new(); assert!(m.remove(&42) == None); } #[test] fn removing_item_from_vecmap_returns_item() { let mut m : VecMap<usize,String> = VecMap::new(); m.insert(42, "the answer".to_string()); assert!(m.remove(&42) == Some("the answer".to_string())); assert_eq!(0, m.len()); } #[test] fn removing_non_existant_item_form_map_returns_none() { let mut m : VecMap<usize,String> = VecMap::new(); m.insert(42, "the answer".to_string()); assert!(m.remove(&7) == None); } // ---------------------------------------------------------------------------- // VecMap Default Trait // ---------------------------------------------------------------------------- impl<K: Ord, V: Clone> Default for VecMap<K, V> { #[inline] fn default() -> VecMap<K,V> { VecMap::new() } } #[test] fn default_vecmap_is_empty() { let s : VecSet<usize> = Default::default(); assert!(s.is_empty()); assert!(s.len() == 0); } // ---------------------------------------------------------------------------- // VecMap Extend trait // ---------------------------------------------------------------------------- impl<K: Ord, V: Clone> Extend<(K, V)> for VecMap<K, V> { fn extend<I: IntoIterator<Item=(K, V)>>(&mut self, iter: I) { for (k, v) in iter { self.insert(k, v); } } } #[test] fn extending_vecmap_inserts_elements() { let mut s = VecSet::<usize>::new(); s.extend(0 .. 
100); assert_eq!(100, s.len()); } // ---------------------------------------------------------------------------- // Helper functions // ---------------------------------------------------------------------------- /** * Analogue of the C++ std::lower_bound algorithm. * * Searches a slice for the smallest item not less than the supplied value. The * slice is assumed to be non-empty and sorted. Uses a binary search to make * things a bit faster. * * Behaviour is undefied for an unsorted slice. */ fn lower_bound<T:Ord>(items: &[T], val: &T) -> usize { if items.is_empty() { panic!("Empty slice passed to lower_bound"); } let mut limit = items.len(); let mut base = 0; while limit > 0 { let index = base + (limit >> 1); match items[index].cmp(val) { Ordering::Equal => return index, Ordering::Greater => (), Ordering::Less => { base = index + 1; limit -= 1; } } limit = limit >> 1; } base } #[test] fn lower_bound_finds_single_element() { assert!(lower_bound(&[42], &42) == 0); } #[test] fn lower_bound_gives_sensible_bound_on_smaller_single_element() { assert!(lower_bound(&[42], &41) == 0); } #[test] fn lower_bound_gives_sensible_bound_on_larger_than_largest_element() { assert!(lower_bound(&[42], &43) == 1); } #[test] fn lower_bound_finds_existing_elements() { let data : Vec<usize> = FromIterator::from_iter((0..100)); for x in (0..100) { assert_eq!(x, lower_bound(&data[..], &x)); } } #[test] fn lower_bound_finds_appropriate_bound() { let data : Vec<usize> = FromIterator::from_iter(range_step(0, 100, 3)); for x in (0 .. 100) { assert_eq!((2 + x)/3, lower_bound(&data[..], &x)); } }<|fim▁end|>
<|file_name|>test_game.py<|end_file_name|><|fim▁begin|>from unittest2 import TestCase, main from hadafuna.core.game import KoikoiGame <|fim▁hole|> pass if __name__ == '__main__': main(verbosity=2)<|fim▁end|>
class KoikoiGameTest(TestCase):
<|file_name|>python.py<|end_file_name|><|fim▁begin|>""" A Python "serializer". Doesn't do much serializing per se -- just converts to and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for other serializers. """ from __future__ import unicode_literals from collections import OrderedDict from django.apps import apps from django.conf import settings from django.core.serializers import base from django.db import DEFAULT_DB_ALIAS, models from django.utils import six from django.utils.encoding import force_text, is_protected_type class Serializer(base.Serializer): """ Serializes a QuerySet to basic Python objects. """ internal_use_only = True def start_serialization(self): self._current = None self.objects = [] def end_serialization(self): pass def start_object(self, obj): self._current = OrderedDict() def end_object(self, obj): self.objects.append(self.get_dump_object(obj)) self._current = None def get_dump_object(self, obj): model = obj._meta.proxy_for_model if obj._deferred else obj.__class__<|fim▁hole|> data['fields'] = self._current return data def handle_field(self, obj, field): value = field._get_val_from_obj(obj) # Protected types (i.e., primitives like None, numbers, dates, # and Decimals) are passed through as is. All other values are # converted to string first. if is_protected_type(value): self._current[field.name] = value else: self._current[field.name] = field.value_to_string(obj) def handle_fk_field(self, obj, field): if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'): related = getattr(obj, field.name) if related: value = related.natural_key() else: value = None else: value = getattr(obj, field.get_attname()) if not is_protected_type(value): value = field.value_to_string(obj) self._current[field.name] = value def handle_m2m_field(self, obj, field): if field.remote_field.through._meta.auto_created: if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'): m2m_value = lambda value: value.natural_key() else: m2m_value = lambda value: force_text(value._get_pk_val(), strings_only=True) self._current[field.name] = [m2m_value(related) for related in getattr(obj, field.name).iterator()] def getvalue(self): return self.objects def Deserializer(object_list, **options): """ Deserialize simple Python objects back into Django ORM instances. It's expected that you pass the Python objects themselves (instead of a stream or a string) to the constructor """ db = options.pop('using', DEFAULT_DB_ALIAS) ignore = options.pop('ignorenonexistent', False) for d in object_list: # Look up the model and starting build a dict of data for it. 
try: Model = _get_model(d["model"]) except base.DeserializationError: if ignore: continue else: raise data = {} if 'pk' in d: try: data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk')) except Exception as e: raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None) m2m_data = {} field_names = {f.name for f in Model._meta.get_fields()} # Handle each field for (field_name, field_value) in six.iteritems(d["fields"]): if ignore and field_name not in field_names: # skip fields no longer on model continue if isinstance(field_value, str): field_value = force_text( field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True ) field = Model._meta.get_field(field_name) # Handle M2M relations if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel): if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'): def m2m_convert(value): if hasattr(value, '__iter__') and not isinstance(value, six.text_type): return field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*value).pk else: return force_text(field.remote_field.model._meta.pk.to_python(value), strings_only=True) else: m2m_convert = lambda v: force_text(field.remote_field.model._meta.pk.to_python(v), strings_only=True) try: m2m_data[field.name] = [] for pk in field_value: m2m_data[field.name].append(m2m_convert(pk)) except Exception as e: raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), pk) # Handle FK fields elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel): if field_value is not None: try: if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'): if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type): obj = field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*field_value) value = getattr(obj, field.remote_field.field_name) # If this is a natural foreign key to an object that # has a FK/O2O as the foreign key, use the FK value if field.remote_field.model._meta.pk.remote_field: value = value.pk else: value = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value) data[field.attname] = value else: data[field.attname] = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value) except Exception as e: raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value) else: data[field.attname] = None # Handle all other fields else: try: data[field.name] = field.to_python(field_value) except Exception as e: raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value) obj = base.build_instance(Model, data, db) yield base.DeserializedObject(obj, m2m_data) def _get_model(model_identifier): """ Helper to look up a model from an "app_label.model_name" string. """ try: return apps.get_model(model_identifier) except (LookupError, TypeError): raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)<|fim▁end|>
data = OrderedDict([('model', force_text(model._meta))]) if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'): data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
<|file_name|>model_spinnaker_plugin_info.go<|end_file_name|><|fim▁begin|>/* * Spinnaker API * * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * API version: 1.0.0 * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) */ package swagger type SpinnakerPluginInfo struct { ProjectUrl string `json:"projectUrl,omitempty"` Description string `json:"description,omitempty"` Provider string `json:"provider,omitempty"`<|fim▁hole|> RepositoryId string `json:"repositoryId,omitempty"` }<|fim▁end|>
Name string `json:"name,omitempty"` Releases []SpinnakerPluginRelease `json:"releases"` Id string `json:"id,omitempty"`
<|file_name|>yaml.rs<|end_file_name|><|fim▁begin|>//! Conveniently read from a YAML file, e.g. for loading configs. use std::io::Read; use std::fs::File; use std::path::Path; use yaml_rust::{YamlLoader, Yaml}; /// Easily load a YAML file by filename. pub fn load_from_yaml(fname: &str) -> Yaml { let path = Path::new(fname); let mut file = File::open(&path).unwrap(); let mut s = String::new(); file.read_to_string(&mut s).unwrap();<|fim▁hole|> // just return the first yaml doc let mut docs = YamlLoader::load_from_str(&s).unwrap(); docs.remove(0) }<|fim▁end|>
<|file_name|>htmlstyleelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use cssparser::{Parser as CssParser, ParserInput}; use dom::bindings::cell::DomRefCell; use dom::bindings::codegen::Bindings::HTMLStyleElementBinding; use dom::bindings::codegen::Bindings::HTMLStyleElementBinding::HTMLStyleElementMethods; use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use dom::bindings::inheritance::Castable; use dom::bindings::root::{DomRoot, MutNullableDom}; use dom::cssstylesheet::CSSStyleSheet; use dom::document::Document; use dom::element::{Element, ElementCreator}; use dom::eventtarget::EventTarget; use dom::htmlelement::HTMLElement; use dom::node::{ChildrenMutation, Node, UnbindContext, document_from_node, window_from_node}; use dom::stylesheet::StyleSheet as DOMStyleSheet; use dom::virtualmethods::VirtualMethods; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; use net_traits::ReferrerPolicy; use servo_arc::Arc; use std::cell::Cell; use style::media_queries::parse_media_query_list; use style::parser::ParserContext as CssParserContext; use style::stylesheets::{CssRuleType, Stylesheet, Origin}; use style_traits::PARSING_MODE_DEFAULT; use stylesheet_loader::{StylesheetLoader, StylesheetOwner}; #[dom_struct] pub struct HTMLStyleElement { htmlelement: HTMLElement, #[ignore_heap_size_of = "Arc"] stylesheet: DomRefCell<Option<Arc<Stylesheet>>>, cssom_stylesheet: MutNullableDom<CSSStyleSheet>, /// https://html.spec.whatwg.org/multipage/#a-style-sheet-that-is-blocking-scripts parser_inserted: Cell<bool>, in_stack_of_open_elements: Cell<bool>, pending_loads: Cell<u32>, any_failed_load: Cell<bool>, line_number: u64, } impl HTMLStyleElement { fn new_inherited(local_name: LocalName, prefix: Option<Prefix>, document: &Document, creator: ElementCreator) -> HTMLStyleElement { HTMLStyleElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document), stylesheet: DomRefCell::new(None), cssom_stylesheet: MutNullableDom::new(None), parser_inserted: Cell::new(creator.is_parser_created()), in_stack_of_open_elements: Cell::new(creator.is_parser_created()), pending_loads: Cell::new(0), any_failed_load: Cell::new(false), line_number: creator.return_line_number(), } } #[allow(unrooted_must_root)] pub fn new(local_name: LocalName, prefix: Option<Prefix>, document: &Document, creator: ElementCreator) -> DomRoot<HTMLStyleElement> { Node::reflect_node(box HTMLStyleElement::new_inherited(local_name, prefix, document, creator), document, HTMLStyleElementBinding::Wrap) } pub fn parse_own_css(&self) { let node = self.upcast::<Node>();<|fim▁hole|> let window = window_from_node(node); let doc = document_from_node(self); let mq_attribute = element.get_attribute(&ns!(), &local_name!("media")); let mq_str = match mq_attribute { Some(a) => String::from(&**a.value()), None => String::new(), }; let data = node.GetTextContent().expect("Element.textContent must be a string"); let url = window.get_url(); let context = CssParserContext::new_for_cssom(&url, Some(CssRuleType::Media), PARSING_MODE_DEFAULT, doc.quirks_mode()); let shared_lock = node.owner_doc().style_shared_lock().clone(); let mut input = ParserInput::new(&mq_str); let css_error_reporter = window.css_error_reporter(); let mq = Arc::new(shared_lock.wrap(parse_media_query_list(&context, &mut CssParser::new(&mut input), css_error_reporter))); 
let loader = StylesheetLoader::for_element(self.upcast()); let sheet = Stylesheet::from_str(&data, window.get_url(), Origin::Author, mq, shared_lock, Some(&loader), css_error_reporter, doc.quirks_mode(), self.line_number as u32); let sheet = Arc::new(sheet); // No subresource loads were triggered, just fire the load event now. if self.pending_loads.get() == 0 { self.upcast::<EventTarget>().fire_event(atom!("load")); } self.set_stylesheet(sheet); } // FIXME(emilio): This is duplicated with HTMLLinkElement::set_stylesheet. pub fn set_stylesheet(&self, s: Arc<Stylesheet>) { let doc = document_from_node(self); if let Some(ref s) = *self.stylesheet.borrow() { doc.remove_stylesheet(self.upcast(), s) } *self.stylesheet.borrow_mut() = Some(s.clone()); self.cssom_stylesheet.set(None); doc.add_stylesheet(self.upcast(), s); } pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> { self.stylesheet.borrow().clone() } pub fn get_cssom_stylesheet(&self) -> Option<DomRoot<CSSStyleSheet>> { self.get_stylesheet().map(|sheet| { self.cssom_stylesheet.or_init(|| { CSSStyleSheet::new(&window_from_node(self), self.upcast::<Element>(), "text/css".into(), None, // todo handle location None, // todo handle title sheet) }) }) } } impl VirtualMethods for HTMLStyleElement { fn super_type(&self) -> Option<&VirtualMethods> { Some(self.upcast::<HTMLElement>() as &VirtualMethods) } fn children_changed(&self, mutation: &ChildrenMutation) { self.super_type().unwrap().children_changed(mutation); // https://html.spec.whatwg.org/multipage/#update-a-style-block // Handles the case when: // "The element is not on the stack of open elements of an HTML parser or XML parser, // and one of its child nodes is modified by a script." // TODO: Handle Text child contents being mutated. if self.upcast::<Node>().is_in_doc() && !self.in_stack_of_open_elements.get() { self.parse_own_css(); } } fn bind_to_tree(&self, tree_in_doc: bool) { self.super_type().unwrap().bind_to_tree(tree_in_doc); // https://html.spec.whatwg.org/multipage/#update-a-style-block // Handles the case when: // "The element is not on the stack of open elements of an HTML parser or XML parser, // and it becomes connected or disconnected." if tree_in_doc && !self.in_stack_of_open_elements.get() { self.parse_own_css(); } } fn pop(&self) { self.super_type().unwrap().pop(); // https://html.spec.whatwg.org/multipage/#update-a-style-block // Handles the case when: // "The element is popped off the stack of open elements of an HTML parser or XML parser." 
self.in_stack_of_open_elements.set(false); if self.upcast::<Node>().is_in_doc() { self.parse_own_css(); } } fn unbind_from_tree(&self, context: &UnbindContext) { if let Some(ref s) = self.super_type() { s.unbind_from_tree(context); } if context.tree_in_doc { if let Some(s) = self.stylesheet.borrow_mut().take() { document_from_node(self).remove_stylesheet(self.upcast(), &s) } } } } impl StylesheetOwner for HTMLStyleElement { fn increment_pending_loads_count(&self) { self.pending_loads.set(self.pending_loads.get() + 1) } fn load_finished(&self, succeeded: bool) -> Option<bool> { assert!(self.pending_loads.get() > 0, "What finished?"); if !succeeded { self.any_failed_load.set(true); } self.pending_loads.set(self.pending_loads.get() - 1); if self.pending_loads.get() != 0 { return None; } let any_failed = self.any_failed_load.get(); self.any_failed_load.set(false); Some(any_failed) } fn parser_inserted(&self) -> bool { self.parser_inserted.get() } fn referrer_policy(&self) -> Option<ReferrerPolicy> { None } fn set_origin_clean(&self, origin_clean: bool) { if let Some(stylesheet) = self.get_cssom_stylesheet() { stylesheet.set_origin_clean(origin_clean); } } } impl HTMLStyleElementMethods for HTMLStyleElement { // https://drafts.csswg.org/cssom/#dom-linkstyle-sheet fn GetSheet(&self) -> Option<DomRoot<DOMStyleSheet>> { self.get_cssom_stylesheet().map(DomRoot::upcast) } }<|fim▁end|>
let element = self.upcast::<Element>(); assert!(node.is_in_doc());
<|file_name|>update_interface_machine.py<|end_file_name|><|fim▁begin|># -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # ex: set expandtab softtabstop=4 shiftwidth=4: # # Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor<|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains the logic for `aq update interface --machine`.""" from aquilon.exceptions_ import ArgumentError, AquilonError from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611 from aquilon.worker.dbwrappers.interface import (verify_port_group, choose_port_group, assign_address, rename_interface) from aquilon.worker.locks import lock_queue from aquilon.worker.templates.machine import PlenaryMachineInfo from aquilon.worker.processes import DSDBRunner from aquilon.aqdb.model import Machine, Interface, Model from aquilon.utils import first_of class CommandUpdateInterfaceMachine(BrokerCommand): required_parameters = ["interface", "machine"] def render(self, session, logger, interface, machine, mac, model, vendor, boot, pg, autopg, comments, master, clear_master, default_route, rename_to, **arguments): """This command expects to locate an interface based only on name and machine - all other fields, if specified, are meant as updates. If the machine has a host, dsdb may need to be updated. The boot flag can *only* be set to true. This is mostly technical, as at this point in the interface it is difficult to tell if the flag was unset or set to false. However, it also vastly simplifies the dsdb logic - we never have to worry about a user trying to remove the boot flag from a host in dsdb. """ audit_results = [] dbhw_ent = Machine.get_unique(session, machine, compel=True) dbinterface = Interface.get_unique(session, hardware_entity=dbhw_ent, name=interface, compel=True) oldinfo = DSDBRunner.snapshot_hw(dbhw_ent) if arguments.get('hostname', None): # Hack to set an intial interface for an aurora host... dbhost = dbhw_ent.host if dbhost.archetype.name == 'aurora' and \ dbhw_ent.primary_ip and not dbinterface.addresses: assign_address(dbinterface, dbhw_ent.primary_ip, dbhw_ent.primary_name.network) # We may need extra IP verification (or an autoip option)... # This may also throw spurious errors if attempting to set the # port_group to a value it already has. if pg is not None and dbinterface.port_group != pg.lower().strip(): dbinterface.port_group = verify_port_group( dbinterface.hardware_entity, pg) elif autopg: dbinterface.port_group = choose_port_group( session, logger, dbinterface.hardware_entity) audit_results.append(('pg', dbinterface.port_group)) if master: if dbinterface.addresses: # FIXME: as a special case, if the only address is the # primary IP, then we could just move it to the master # interface. However this can be worked around by bonding # the interface before calling "add host", so don't bother # for now. 
raise ArgumentError("Can not enslave {0:l} because it has " "addresses.".format(dbinterface)) dbmaster = Interface.get_unique(session, hardware_entity=dbhw_ent, name=master, compel=True) if dbmaster in dbinterface.all_slaves(): raise ArgumentError("Enslaving {0:l} would create a circle, " "which is not allowed.".format(dbinterface)) dbinterface.master = dbmaster if clear_master: if not dbinterface.master: raise ArgumentError("{0} is not a slave.".format(dbinterface)) dbinterface.master = None if comments: dbinterface.comments = comments if boot: # Should we also transfer the primary IP to the new boot interface? # That could get tricky if the new interface already has an IP # address... for i in dbhw_ent.interfaces: if i == dbinterface: i.bootable = True i.default_route = True else: i.bootable = False i.default_route = False if default_route is not None: dbinterface.default_route = default_route if not first_of(dbhw_ent.interfaces, lambda x: x.default_route): logger.client_info("Warning: {0:l} has no default route, hope " "that's ok.".format(dbhw_ent)) #Set this mac address last so that you can update to a bootable #interface *before* adding a mac address. This is so the validation #that takes place in the interface class doesn't have to be worried #about the order of update to bootable=True and mac address if mac: q = session.query(Interface).filter_by(mac=mac) other = q.first() if other and other != dbinterface: raise ArgumentError("MAC address {0} is already in use by " "{1:l}.".format(mac, other)) dbinterface.mac = mac if model or vendor: if not dbinterface.model_allowed: raise ArgumentError("Model/vendor can not be set for a {0:lc}." .format(dbinterface)) dbmodel = Model.get_unique(session, name=model, vendor=vendor, machine_type='nic', compel=True) dbinterface.model = dbmodel if rename_to: rename_interface(session, dbinterface, rename_to) session.flush() session.refresh(dbhw_ent) plenary_info = PlenaryMachineInfo(dbhw_ent, logger=logger) key = plenary_info.get_write_key() try: lock_queue.acquire(key) plenary_info.write(locked=True) if dbhw_ent.host and dbhw_ent.host.archetype.name != "aurora": dsdb_runner = DSDBRunner(logger=logger) dsdb_runner.update_host(dbhw_ent, oldinfo) dsdb_runner.commit_or_rollback() except AquilonError, err: plenary_info.restore_stash() raise ArgumentError(err) except: plenary_info.restore_stash() raise finally: lock_queue.release(key) for name, value in audit_results: self.audit_result(session, name, value, **arguments) return<|fim▁end|>
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
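A standalone sketch of the boot-interface invariant the command above enforces when boot is set: marking one interface bootable clears the bootable and default_route flags on every sibling, so a machine never carries two boot NICs. The Interface class here is an illustrative stand-in, not Aquilon's SQLAlchemy model.

class Interface(object):
    def __init__(self, name):
        self.name = name
        self.bootable = False
        self.default_route = False

def set_boot_interface(interfaces, chosen):
    # Mirrors the loop in render(): exactly one interface wins both flags.
    for iface in interfaces:
        iface.bootable = iface is chosen
        iface.default_route = iface is chosen

nics = [Interface("eth0"), Interface("eth1")]
set_boot_interface(nics, nics[1])
assert [n.bootable for n in nics] == [False, True]
assert [n.default_route for n in nics] == [False, True]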
<|file_name|>rule.ts<|end_file_name|><|fim▁begin|>import {Expression} from 'aurelia-binding'; export interface RuleProperty { /** * The property name. null indicates the rule targets the object itself. */ name: string|null; /** * The displayName of the property (or object). */<|fim▁hole|> property: RuleProperty; condition: (value: TValue, object?: TObject) => boolean|Promise<boolean>; config: Object; when: { (object: TObject): boolean }|null; messageKey: string; message: Expression|null; tag?: string; }<|fim▁end|>
displayName: string|null; } export interface Rule<TObject, TValue> {
<|file_name|>game.py<|end_file_name|><|fim▁begin|>from merc.actor import Actor from merc.collision import Collision from merc.player import Player from pprint import pprint import sys import json import numpy import itertools from PIL import Image, ImageDraw, ImageColor class Game: NUM_TWEENS = 10 def __init__(self, data): """ `data` should contain the JSON output of Octane """ self.__dict__ = data self.frame = None self.tween = 0 self.seconds_remaining = 0 self.actors = {} self.players = {} self.ball_actor = None self.grouped_actors = {} def processFrames(self): """ Step through the frames one by one. Build the actors, update the game state, link the actors, and generate stats. """ for frame in self.Frames: self.frame = frame for id, data in frame['Spawned'].items(): self.actors[id] = Actor(id, data) for id, data in frame['Updated'].items(): self.actors[id].update(data, self.frame['Number']) self.updateState() self.linkActors() #self.checkCollisions() def updateState(self): """ Update the game state. Creates a sort of cache to help find commonly used stuff. """ self.ball_actor = None self.grouped_actors = {} for actor in self.actors.values(): actor_class = actor.getClass() if actor_class == 'TAGame.GameEvent_Soccar_TA': # shortcut for the time remaining s = actor.getProp('TAGame.GameEvent_Soccar_TA:SecondsRemaining', -1) if s >= 0: self.seconds_remaining = s elif actor_class == 'TAGame.Ball_TA': # shortcut to find the ball actor self.ball_actor = actor else: # group similar actors together if not actor_class in self.grouped_actors: self.grouped_actors[actor_class] = [] self.grouped_actors[actor_class].append(actor) def linkActors(self): """ Some actors have relationships with each other, so we set those relationships here. """ ''' components -> car -> pri -> team ''' # link pri -> team if 'TAGame.PRI_TA' in self.grouped_actors: for pri_actor in self.grouped_actors['TAGame.PRI_TA']: if hasattr(pri_actor, 'team'): continue team_prop = pri_actor.getProp('Engine.PlayerReplicationInfo:Team') if not team_prop: continue pri_actor.team = self.findActor(team_prop[1]) # link components to car components = [ 'TAGame.CarComponent_Boost_TA', 'TAGame.CarComponent_Jump_TA', 'TAGame.CarComponent_DoubleJump_TA', 'TAGame.CarComponent_Dodge_TA', 'TAGame.CarComponent_FlipCar_TA', ] for component in components: if component in self.grouped_actors: for component_actor in self.grouped_actors[component]: if hasattr(component_actor, 'car'): continue car_prop = component_actor.getProp('TAGame.CarComponent_TA:Vehicle') if not car_prop: continue component_actor.car = self.findActor(car_prop[1]) if not component_actor.car:<|fim▁hole|> component_actor.car.components = [] if 'TAGame.Car_TA' in self.grouped_actors: # link car -> pri for car_actor in self.grouped_actors['TAGame.Car_TA']: if hasattr(car_actor, 'pri'): continue pri_prop = car_actor.getProp('Engine.Pawn:PlayerReplicationInfo') if not pri_prop: continue car_actor.pri = self.findActor(pri_prop[1]) # create / update players for car_actor in self.grouped_actors['TAGame.Car_TA']: player_id = car_actor.getPlayerId() if not player_id: continue if player_id not in self.players: self.players[player_id] = Player(player_id) self.players[player_id].update(car_actor) def findActor(self, find_actor_id): """ Attempts to find and return an actor with the given `find_actor_id`. Returns None when the actor cannot be found. 
""" find_actor_id = int(find_actor_id) for actor_id, actor in self.actors.items(): if int(actor_id) == find_actor_id: return actor return None def checkCollisions(self): """ Determine when and where each collision happened during this game. Save the collision data in `self.players`. """ if 'TAGame.Car_TA' not in self.grouped_actors: # no need to check collisions when no cars exist return # each frame, we only want to check actors that are within Collisions.FRAME_CHECK_RADIUS # units of each other # each frame, we only want to tween if anyone is within Collisions.FRAME_CHECK_RADIUS # units of each other # create tuples of actors that we want to check this frame pairs = [] ball = self.ball_actor for car in self.grouped_actors['TAGame.Car_TA']: # we dont want to check cars that arent linked with players yet player_id = car.getPlayerId() if not player_id: continue player = self.players[player_id] # check if the last collision with the ball was within a certain number of frames # if it is, we should skip this pair last_collision = player.getLastCollisionWithActor(ball.id) if last_collision and last_collision.frame_number > self.frame['Number'] - Collision.MIN_FRAMES_BETWEEN: continue # skip if the distance is over the limit dist = self.distance(ball, car) if not dist: continue if dist > Collision.FRAME_CHECK_RADIUS: continue pairs.append((ball, car)) if len(pairs) <= 0: # only tween if any pairs need to be checked return self.tween = 0 # save which actors have collided collided = [] for i in range(self.NUM_TWEENS): for actor1, actor2 in pairs: # combine actor ids into a key for faster lookup key = actor1.id + actor2.id * 1024 if key in collided: # dont allow multiple collisions between the same actors per frame continue # determine the check radius check_radius = Collision.CAR_AND_BALL_RADIUS if actor1.isClass('TAGame.Car_TA'): if actor2.isClass('TAGame.Car_TA'): check_radius = Collision.CAR_AND_CAR_RADIUS else: check_radius = Collision.CAR_AND_BALL_RADIUS collision = self.collides(actor1, actor2, check_radius) if collision: self.handleCollision(actor1, actor2, collision) collided.append(key) self.tween += 1 self.tween = 0 def handleCollision(self, actor1, actor2, collision): """ Handles a single collision between two actors. """ if (actor1.isClass('TAGame.Car_TA')): player_id = actor1.getPlayerId() if player_id: self.players[player_id].addCollision(collision) if (actor2.isClass('TAGame.Car_TA')): player_id = actor2.getPlayerId() if player_id: self.players[player_id].addCollision(collision) print("*** Collision! ***", self.seconds_remaining, self.frame['Number'], self.tween, "[{0}] x [{1}]".format(actor1.getName(), actor2.getName()), collision.point) def distance(self, actor1, actor2, return_midpoint=False): """ Returns the distance between two actors. Optionally also returns the midpoint between those two actors. """ rval = False if return_midpoint: rval = (False, False) rb1 = actor1.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS) rb2 = actor2.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS) if not rb1 or not rb2: return rval p1 = numpy.array(rb1['Position']) p2 = numpy.array(rb2['Position']) dist = numpy.linalg.norm(p1 - p2) if return_midpoint: return (dist, numpy.median([p1, p2], axis=0)) return dist def collides(self, actor1, actor2, check_radius): """ Returns a Collision if the two actors intersect. Otherwise returns False. 
""" (dist, midpoint) = self.distance(actor1, actor2, True) if not dist: return False if dist > check_radius + Collision.TOLERANCE: return False shape1 = actor1.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS) shape2 = actor2.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS) if not shape1 or not shape2: return False if shape1.intersects(shape2, Collision.TOLERANCE): return Collision(midpoint, self.frame['Number'], actor1.id, actor2.id) return False<|fim▁end|>
continue if not hasattr(component_actor.car, 'components'):
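The distance() method in the record above reduces to a Euclidean norm plus a per-axis median for the midpoint; for two points the median is just the elementwise mean. A self-contained sketch of that arithmetic, using illustrative positions rather than real replay data:

import numpy

def distance_and_midpoint(pos1, pos2):
    p1 = numpy.array(pos1)
    p2 = numpy.array(pos2)
    dist = numpy.linalg.norm(p1 - p2)          # straight-line distance
    midpoint = numpy.median([p1, p2], axis=0)  # elementwise, as in distance()
    return dist, midpoint

d, m = distance_and_midpoint([0, 0, 0], [3, 4, 0])
assert d == 5.0
assert list(m) == [1.5, 2.0, 0.0]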
<|file_name|>testing.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package testing provides support for automated testing of Go packages. // It is intended to be used in concert with the ``go test'' command, which automates // execution of any function of the form // func TestXxx(*testing.T) // where Xxx can be any alphanumeric string (but the first letter must not be in // [a-z]) and serves to identify the test routine. // These TestXxx routines should be declared within the package they are testing. // // Tests and benchmarks may be skipped if not applicable like this: // func TestTimeConsuming(t *testing.T) { // if testing.Short() { // t.Skip("skipping test in short mode.") // } // ... // } // // Functions of the form // func BenchmarkXxx(*testing.B) // are considered benchmarks, and are executed by the "go test" command when // the -test.bench flag is provided. Benchmarks are run sequentially. // // For a description of the testing flags, see // http://golang.org/cmd/go/#Description_of_testing_flags. // // A sample benchmark function looks like this: // func BenchmarkHello(b *testing.B) { // for i := 0; i < b.N; i++ { // fmt.Sprintf("hello") // } // } // // The benchmark function must run the target code b.N times. // The benchmark package will vary b.N until the benchmark function lasts // long enough to be timed reliably. The output // BenchmarkHello 10000000 282 ns/op // means that the loop ran 10000000 times at a speed of 282 ns per loop. // // If a benchmark needs some expensive setup before running, the timer // may be reset: // func BenchmarkBigLen(b *testing.B) { // big := NewBig() // b.ResetTimer() // for i := 0; i < b.N; i++ { // big.Len() // } // } // // The package also runs and verifies example code. Example functions may // include a concluding line comment that begins with "Output:" and is compared with // the standard output of the function when the tests are run. (The comparison // ignores leading and trailing space.) These are examples of an example: // // func ExampleHello() { // fmt.Println("hello") // // Output: hello // } // // func ExampleSalutations() { // fmt.Println("hello, and") // fmt.Println("goodbye") // // Output: // // hello, and // // goodbye // } // // Example functions without output comments are compiled but not executed. // // The naming convention to declare examples for a function F, a type T and // method M on type T are: // // func ExampleF() { ... } // func ExampleT() { ... } // func ExampleT_M() { ... } // // Multiple example functions for a type/function/method may be provided by // appending a distinct suffix to the name. The suffix must start with a // lower-case letter. // // func ExampleF_suffix() { ... } // func ExampleT_suffix() { ... } // func ExampleT_M_suffix() { ... } // // The entire test file is presented as the example when it contains a single // example function, at least one other function, type, variable, or constant // declaration, and no test or benchmark functions. package testing import ( "bytes" "flag" "fmt" "os" "runtime" "runtime/pprof"<|fim▁hole|> "sync" "time" ) var ( // The short flag requests that tests run more quickly, but its functionality // is provided by test writers themselves. The testing package is just its // home. 
The all.bash installation script sets it to make installation more // efficient, but by default the flag is off so a plain "go test" will do a // full test of the package. short = flag.Bool("test.short", false, "run smaller test suite to save time") // Report as tests are run; default is silent for success. chatty = flag.Bool("test.v", false, "verbose: print additional output") match = flag.String("test.run", "", "regular expression to select tests and examples to run") memProfile = flag.String("test.memprofile", "", "write a memory profile to the named file after execution") memProfileRate = flag.Int("test.memprofilerate", 0, "if >=0, sets runtime.MemProfileRate") cpuProfile = flag.String("test.cpuprofile", "", "write a cpu profile to the named file during execution") blockProfile = flag.String("test.blockprofile", "", "write a goroutine blocking profile to the named file after execution") blockProfileRate = flag.Int("test.blockprofilerate", 1, "if >= 0, calls runtime.SetBlockProfileRate()") timeout = flag.Duration("test.timeout", 0, "if positive, sets an aggregate time limit for all tests") cpuListStr = flag.String("test.cpu", "", "comma-separated list of number of CPUs to use for each test") parallel = flag.Int("test.parallel", runtime.GOMAXPROCS(0), "maximum test parallelism") haveExamples bool // are there examples? cpuList []int ) // common holds the elements common between T and B and // captures common methods such as Errorf. type common struct { mu sync.RWMutex // guards output and failed output []byte // Output generated by test or benchmark. failed bool // Test or benchmark has failed. skipped bool // Test of benchmark has been skipped. start time.Time // Time test or benchmark started duration time.Duration self interface{} // To be sent on signal channel when done. signal chan interface{} // Output for serial tests. } // Short reports whether the -test.short flag is set. func Short() bool { return *short } // Verbose reports whether the -test.v flag is set. func Verbose() bool { return *chatty } // decorate prefixes the string with the file and line of the call site // and inserts the final newline if needed and indentation tabs for formatting. func decorate(s string) string { _, file, line, ok := runtime.Caller(3) // decorate + log + public function. if ok { // Truncate file name at last file name separator. if index := strings.LastIndex(file, "/"); index >= 0 { file = file[index+1:] } else if index = strings.LastIndex(file, "\\"); index >= 0 { file = file[index+1:] } } else { file = "???" line = 1 } buf := new(bytes.Buffer) // Every line is indented at least one tab. buf.WriteByte('\t') fmt.Fprintf(buf, "%s:%d: ", file, line) lines := strings.Split(s, "\n") if l := len(lines); l > 1 && lines[l-1] == "" { lines = lines[:l-1] } for i, line := range lines { if i > 0 { // Second and subsequent lines are indented an extra tab. buf.WriteString("\n\t\t") } buf.WriteString(line) } buf.WriteByte('\n') return buf.String() } // T is a type passed to Test functions to manage test state and support formatted test logs. // Logs are accumulated during execution and dumped to standard error when done. type T struct { common name string // Name of test. startParallel chan bool // Parallel tests will wait on this. } // Fail marks the function as having failed but continues execution. func (c *common) Fail() { c.mu.Lock() defer c.mu.Unlock() c.failed = true } // Failed reports whether the function has failed. 
func (c *common) Failed() bool { c.mu.RLock() defer c.mu.RUnlock() return c.failed } // FailNow marks the function as having failed and stops its execution. // Execution will continue at the next test or benchmark. // FailNow must be called from the goroutine running the // test or benchmark function, not from other goroutines // created during the test. Calling FailNow does not stop // those other goroutines. func (c *common) FailNow() { c.Fail() // Calling runtime.Goexit will exit the goroutine, which // will run the deferred functions in this goroutine, // which will eventually run the deferred lines in tRunner, // which will signal to the test loop that this test is done. // // A previous version of this code said: // // c.duration = ... // c.signal <- c.self // runtime.Goexit() // // This previous version duplicated code (those lines are in // tRunner no matter what), but worse the goroutine teardown // implicit in runtime.Goexit was not guaranteed to complete // before the test exited. If a test deferred an important cleanup // function (like removing temporary files), there was no guarantee // it would run on a test failure. Because we send on c.signal during // a top-of-stack deferred function now, we know that the send // only happens after any other stacked defers have completed. runtime.Goexit() } // log generates the output. It's always at the same stack depth. func (c *common) log(s string) { c.mu.Lock() defer c.mu.Unlock() c.output = append(c.output, decorate(s)...) } // Log formats its arguments using default formatting, analogous to Println, // and records the text in the error log. func (c *common) Log(args ...interface{}) { c.log(fmt.Sprintln(args...)) } // Logf formats its arguments according to the format, analogous to Printf, // and records the text in the error log. func (c *common) Logf(format string, args ...interface{}) { c.log(fmt.Sprintf(format, args...)) } // Error is equivalent to Log followed by Fail. func (c *common) Error(args ...interface{}) { c.log(fmt.Sprintln(args...)) c.Fail() } // Errorf is equivalent to Logf followed by Fail. func (c *common) Errorf(format string, args ...interface{}) { c.log(fmt.Sprintf(format, args...)) c.Fail() } // Fatal is equivalent to Log followed by FailNow. func (c *common) Fatal(args ...interface{}) { c.log(fmt.Sprintln(args...)) c.FailNow() } // Fatalf is equivalent to Logf followed by FailNow. func (c *common) Fatalf(format string, args ...interface{}) { c.log(fmt.Sprintf(format, args...)) c.FailNow() } // Skip is equivalent to Log followed by SkipNow. func (c *common) Skip(args ...interface{}) { c.log(fmt.Sprintln(args...)) c.SkipNow() } // Skipf is equivalent to Logf followed by SkipNow. func (c *common) Skipf(format string, args ...interface{}) { c.log(fmt.Sprintf(format, args...)) c.SkipNow() } // SkipNow marks the test as having been skipped and stops its execution. // Execution will continue at the next test or benchmark. See also FailNow. // SkipNow must be called from the goroutine running the test, not from // other goroutines created during the test. Calling SkipNow does not stop // those other goroutines. func (c *common) SkipNow() { c.skip() runtime.Goexit() } func (c *common) skip() { c.mu.Lock() defer c.mu.Unlock() c.skipped = true } // Skipped reports whether the test was skipped. func (c *common) Skipped() bool { c.mu.RLock() defer c.mu.RUnlock() return c.skipped } // Parallel signals that this test is to be run in parallel with (and only with) // other parallel tests. 
func (t *T) Parallel() { t.signal <- (*T)(nil) // Release main testing loop <-t.startParallel // Wait for serial tests to finish } // An internal type but exported because it is cross-package; part of the implementation // of the "go test" command. type InternalTest struct { Name string F func(*T) } func tRunner(t *T, test *InternalTest) { t.start = time.Now() // When this goroutine is done, either because test.F(t) // returned normally or because a test failure triggered // a call to runtime.Goexit, record the duration and send // a signal saying that the test is done. defer func() { t.duration = time.Now().Sub(t.start) // If the test panicked, print any test output before dying. if err := recover(); err != nil { t.Fail() t.report() panic(err) } t.signal <- t }() test.F(t) } // An internal function but exported because it is cross-package; part of the implementation // of the "go test" command. func Main(matchString func(pat, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample) { flag.Parse() parseCpuList() before() startAlarm() haveExamples = len(examples) > 0 testOk := RunTests(matchString, tests) exampleOk := RunExamples(matchString, examples) if !testOk || !exampleOk { fmt.Println("FAIL") os.Exit(1) } fmt.Println("PASS") stopAlarm() RunBenchmarks(matchString, benchmarks) after() } func (t *T) report() { tstr := fmt.Sprintf("(%.2f seconds)", t.duration.Seconds()) format := "--- %s: %s %s\n%s" if t.Failed() { fmt.Printf(format, "FAIL", t.name, tstr, t.output) } else if *chatty { if t.Skipped() { fmt.Printf(format, "SKIP", t.name, tstr, t.output) } else { fmt.Printf(format, "PASS", t.name, tstr, t.output) } } } func RunTests(matchString func(pat, str string) (bool, error), tests []InternalTest) (ok bool) { ok = true if len(tests) == 0 && !haveExamples { fmt.Fprintln(os.Stderr, "testing: warning: no tests to run") return } for _, procs := range cpuList { runtime.GOMAXPROCS(procs) // We build a new channel tree for each run of the loop. // collector merges in one channel all the upstream signals from parallel tests. // If all tests pump to the same channel, a bug can occur where a test // kicks off a goroutine that Fails, yet the test still delivers a completion signal, // which skews the counting. var collector = make(chan interface{}) numParallel := 0 startParallel := make(chan bool) for i := 0; i < len(tests); i++ { matched, err := matchString(*match, tests[i].Name) if err != nil { fmt.Fprintf(os.Stderr, "testing: invalid regexp for -test.run: %s\n", err) os.Exit(1) } if !matched { continue } testName := tests[i].Name if procs != 1 { testName = fmt.Sprintf("%s-%d", tests[i].Name, procs) } t := &T{ common: common{ signal: make(chan interface{}), }, name: testName, startParallel: startParallel, } t.self = t if *chatty { fmt.Printf("=== RUN %s\n", t.name) } go tRunner(t, &tests[i]) out := (<-t.signal).(*T) if out == nil { // Parallel run. go func() { collector <- <-t.signal }() numParallel++ continue } t.report() ok = ok && !out.Failed() } running := 0 for numParallel+running > 0 { if running < *parallel && numParallel > 0 { startParallel <- true running++ numParallel-- continue } t := (<-collector).(*T) t.report() ok = ok && !t.Failed() running-- } } return } // before runs before all testing. 
func before() { if *memProfileRate > 0 { runtime.MemProfileRate = *memProfileRate } if *cpuProfile != "" { f, err := os.Create(*cpuProfile) if err != nil { fmt.Fprintf(os.Stderr, "testing: %s", err) return } if err := pprof.StartCPUProfile(f); err != nil { fmt.Fprintf(os.Stderr, "testing: can't start cpu profile: %s", err) f.Close() return } // Could save f so after can call f.Close; not worth the effort. } if *blockProfile != "" && *blockProfileRate >= 0 { runtime.SetBlockProfileRate(*blockProfileRate) } } // after runs after all testing. func after() { if *cpuProfile != "" { pprof.StopCPUProfile() // flushes profile to disk } if *memProfile != "" { f, err := os.Create(*memProfile) if err != nil { fmt.Fprintf(os.Stderr, "testing: %s", err) return } if err = pprof.WriteHeapProfile(f); err != nil { fmt.Fprintf(os.Stderr, "testing: can't write %s: %s", *memProfile, err) } f.Close() } if *blockProfile != "" && *blockProfileRate >= 0 { f, err := os.Create(*blockProfile) if err != nil { fmt.Fprintf(os.Stderr, "testing: %s", err) return } if err = pprof.Lookup("block").WriteTo(f, 0); err != nil { fmt.Fprintf(os.Stderr, "testing: can't write %s: %s", *blockProfile, err) } f.Close() } } var timer *time.Timer // startAlarm starts an alarm if requested. func startAlarm() { if *timeout > 0 { timer = time.AfterFunc(*timeout, alarm) } } // stopAlarm turns off the alarm. func stopAlarm() { if *timeout > 0 { timer.Stop() } } // alarm is called if the timeout expires. func alarm() { panic("test timed out") } func parseCpuList() { if len(*cpuListStr) == 0 { cpuList = append(cpuList, runtime.GOMAXPROCS(-1)) } else { for _, val := range strings.Split(*cpuListStr, ",") { cpu, err := strconv.Atoi(val) if err != nil || cpu <= 0 { fmt.Fprintf(os.Stderr, "testing: invalid value %q for -test.cpu", val) os.Exit(1) } cpuList = append(cpuList, cpu) } } }<|fim▁end|>
"strconv" "strings"
<|file_name|>Membership.LoginRequest.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>namespace Mervalito.Membership { export interface LoginRequest extends Serenity.ServiceRequest { Username?: string; Password?: string; } }<|fim▁end|>
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use prelude::v1::*; use ffi::{CStr, CString}; use fmt; use io::{self, Error, ErrorKind}; use libc::{self, c_int, c_char, c_void, socklen_t}; use mem; use net::{SocketAddr, Shutdown, IpAddr}; use str::from_utf8; use sys::c; use sys::net::{cvt, cvt_r, cvt_gai, Socket, init, wrlen_t}; use sys_common::{AsInner, FromInner, IntoInner}; use time::Duration; //////////////////////////////////////////////////////////////////////////////// // sockaddr and misc bindings //////////////////////////////////////////////////////////////////////////////// pub fn setsockopt<T>(sock: &Socket, opt: c_int, val: c_int, payload: T) -> io::Result<()> { unsafe { let payload = &payload as *const T as *const c_void; try!(cvt(libc::setsockopt(*sock.as_inner(), opt, val, payload, mem::size_of::<T>() as socklen_t))); Ok(()) } } pub fn getsockopt<T: Copy>(sock: &Socket, opt: c_int, val: c_int) -> io::Result<T> { unsafe { let mut slot: T = mem::zeroed(); let mut len = mem::size_of::<T>() as socklen_t; try!(cvt(c::getsockopt(*sock.as_inner(), opt, val, &mut slot as *mut _ as *mut _, &mut len))); assert_eq!(len as usize, mem::size_of::<T>()); Ok(slot) } } fn sockname<F>(f: F) -> io::Result<SocketAddr> where F: FnOnce(*mut libc::sockaddr, *mut socklen_t) -> c_int { unsafe { let mut storage: libc::sockaddr_storage = mem::zeroed(); let mut len = mem::size_of_val(&storage) as socklen_t; try!(cvt(f(&mut storage as *mut _ as *mut _, &mut len))); sockaddr_to_addr(&storage, len as usize) } } fn sockaddr_to_addr(storage: &libc::sockaddr_storage, len: usize) -> io::Result<SocketAddr> { match storage.ss_family as libc::c_int { libc::AF_INET => { assert!(len as usize >= mem::size_of::<libc::sockaddr_in>()); Ok(SocketAddr::V4(FromInner::from_inner(unsafe { *(storage as *const _ as *const libc::sockaddr_in) }))) } libc::AF_INET6 => { assert!(len as usize >= mem::size_of::<libc::sockaddr_in6>()); Ok(SocketAddr::V6(FromInner::from_inner(unsafe { *(storage as *const _ as *const libc::sockaddr_in6) }))) } _ => { Err(Error::new(ErrorKind::InvalidInput, "invalid argument")) } } } //////////////////////////////////////////////////////////////////////////////// // get_host_addresses //////////////////////////////////////////////////////////////////////////////// extern "system" {<|fim▁hole|>} pub struct LookupHost { original: *mut libc::addrinfo, cur: *mut libc::addrinfo, } impl Iterator for LookupHost { type Item = io::Result<SocketAddr>; fn next(&mut self) -> Option<io::Result<SocketAddr>> { unsafe { if self.cur.is_null() { return None } let ret = sockaddr_to_addr(mem::transmute((*self.cur).ai_addr), (*self.cur).ai_addrlen as usize); self.cur = (*self.cur).ai_next as *mut libc::addrinfo; Some(ret) } } } impl Drop for LookupHost { fn drop(&mut self) { unsafe { freeaddrinfo(self.original) } } } pub fn lookup_host(host: &str) -> io::Result<LookupHost> { init(); let c_host = try!(CString::new(host)); let mut res = 0 as *mut _; unsafe { try!(cvt_gai(getaddrinfo(c_host.as_ptr(), 0 as *const _, 0 as *const _, &mut res))); Ok(LookupHost { 
original: res, cur: res }) } } //////////////////////////////////////////////////////////////////////////////// // lookup_addr //////////////////////////////////////////////////////////////////////////////// extern "system" { fn getnameinfo(sa: *const libc::sockaddr, salen: socklen_t, host: *mut c_char, hostlen: libc::size_t, serv: *mut c_char, servlen: libc::size_t, flags: c_int) -> c_int; } const NI_MAXHOST: usize = 1025; pub fn lookup_addr(addr: &IpAddr) -> io::Result<String> { init(); let saddr = SocketAddr::new(*addr, 0); let (inner, len) = saddr.into_inner(); let mut hostbuf = [0 as c_char; NI_MAXHOST]; let data = unsafe { try!(cvt_gai(getnameinfo(inner, len, hostbuf.as_mut_ptr(), NI_MAXHOST as libc::size_t, 0 as *mut _, 0, 0))); CStr::from_ptr(hostbuf.as_ptr()) }; match from_utf8(data.to_bytes()) { Ok(name) => Ok(name.to_string()), Err(_) => Err(io::Error::new(io::ErrorKind::Other, "failed to lookup address information")) } } //////////////////////////////////////////////////////////////////////////////// // TCP streams //////////////////////////////////////////////////////////////////////////////// pub struct TcpStream { inner: Socket, } impl TcpStream { pub fn connect(addr: &SocketAddr) -> io::Result<TcpStream> { init(); let sock = try!(Socket::new(addr, libc::SOCK_STREAM)); let (addrp, len) = addr.into_inner(); try!(cvt_r(|| unsafe { libc::connect(*sock.as_inner(), addrp, len) })); Ok(TcpStream { inner: sock }) } pub fn socket(&self) -> &Socket { &self.inner } pub fn into_socket(self) -> Socket { self.inner } pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.inner.set_timeout(dur, libc::SO_RCVTIMEO) } pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.inner.set_timeout(dur, libc::SO_SNDTIMEO) } pub fn read_timeout(&self) -> io::Result<Option<Duration>> { self.inner.timeout(libc::SO_RCVTIMEO) } pub fn write_timeout(&self) -> io::Result<Option<Duration>> { self.inner.timeout(libc::SO_SNDTIMEO) } pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> { self.inner.read(buf) } pub fn write(&self, buf: &[u8]) -> io::Result<usize> { let ret = try!(cvt(unsafe { libc::send(*self.inner.as_inner(), buf.as_ptr() as *const c_void, buf.len() as wrlen_t, 0) })); Ok(ret as usize) } pub fn peer_addr(&self) -> io::Result<SocketAddr> { sockname(|buf, len| unsafe { libc::getpeername(*self.inner.as_inner(), buf, len) }) } pub fn socket_addr(&self) -> io::Result<SocketAddr> { sockname(|buf, len| unsafe { libc::getsockname(*self.inner.as_inner(), buf, len) }) } pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { use libc::consts::os::bsd44::SHUT_RDWR; let how = match how { Shutdown::Write => libc::SHUT_WR, Shutdown::Read => libc::SHUT_RD, Shutdown::Both => SHUT_RDWR, }; try!(cvt(unsafe { libc::shutdown(*self.inner.as_inner(), how) })); Ok(()) } pub fn duplicate(&self) -> io::Result<TcpStream> { self.inner.duplicate().map(|s| TcpStream { inner: s }) } } impl FromInner<Socket> for TcpStream { fn from_inner(socket: Socket) -> TcpStream { TcpStream { inner: socket } } } impl fmt::Debug for TcpStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut res = f.debug_struct("TcpStream"); if let Ok(addr) = self.socket_addr() { res.field("addr", &addr); } if let Ok(peer) = self.peer_addr() { res.field("peer", &peer); } let name = if cfg!(windows) {"socket"} else {"fd"}; res.field(name, &self.inner.as_inner()) .finish() } } //////////////////////////////////////////////////////////////////////////////// // TCP listeners 
//////////////////////////////////////////////////////////////////////////////// pub struct TcpListener { inner: Socket, } impl TcpListener { pub fn bind(addr: &SocketAddr) -> io::Result<TcpListener> { init(); let sock = try!(Socket::new(addr, libc::SOCK_STREAM)); // On platforms with Berkeley-derived sockets, this allows // to quickly rebind a socket, without needing to wait for // the OS to clean up the previous one. if !cfg!(windows) { try!(setsockopt(&sock, libc::SOL_SOCKET, libc::SO_REUSEADDR, 1 as c_int)); } // Bind our new socket let (addrp, len) = addr.into_inner(); try!(cvt(unsafe { libc::bind(*sock.as_inner(), addrp, len) })); // Start listening try!(cvt(unsafe { libc::listen(*sock.as_inner(), 128) })); Ok(TcpListener { inner: sock }) } pub fn socket(&self) -> &Socket { &self.inner } pub fn into_socket(self) -> Socket { self.inner } pub fn socket_addr(&self) -> io::Result<SocketAddr> { sockname(|buf, len| unsafe { libc::getsockname(*self.inner.as_inner(), buf, len) }) } pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> { let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() }; let mut len = mem::size_of_val(&storage) as socklen_t; let sock = try!(self.inner.accept(&mut storage as *mut _ as *mut _, &mut len)); let addr = try!(sockaddr_to_addr(&storage, len as usize)); Ok((TcpStream { inner: sock, }, addr)) } pub fn duplicate(&self) -> io::Result<TcpListener> { self.inner.duplicate().map(|s| TcpListener { inner: s }) } } impl FromInner<Socket> for TcpListener { fn from_inner(socket: Socket) -> TcpListener { TcpListener { inner: socket } } } impl fmt::Debug for TcpListener { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut res = f.debug_struct("TcpListener"); if let Ok(addr) = self.socket_addr() { res.field("addr", &addr); } let name = if cfg!(windows) {"socket"} else {"fd"}; res.field(name, &self.inner.as_inner()) .finish() } } //////////////////////////////////////////////////////////////////////////////// // UDP //////////////////////////////////////////////////////////////////////////////// pub struct UdpSocket { inner: Socket, } impl UdpSocket { pub fn bind(addr: &SocketAddr) -> io::Result<UdpSocket> { init(); let sock = try!(Socket::new(addr, libc::SOCK_DGRAM)); let (addrp, len) = addr.into_inner(); try!(cvt(unsafe { libc::bind(*sock.as_inner(), addrp, len) })); Ok(UdpSocket { inner: sock }) } pub fn socket(&self) -> &Socket { &self.inner } pub fn into_socket(self) -> Socket { self.inner } pub fn socket_addr(&self) -> io::Result<SocketAddr> { sockname(|buf, len| unsafe { libc::getsockname(*self.inner.as_inner(), buf, len) }) } pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() }; let mut addrlen = mem::size_of_val(&storage) as socklen_t; let n = try!(cvt(unsafe { libc::recvfrom(*self.inner.as_inner(), buf.as_mut_ptr() as *mut c_void, buf.len() as wrlen_t, 0, &mut storage as *mut _ as *mut _, &mut addrlen) })); Ok((n as usize, try!(sockaddr_to_addr(&storage, addrlen as usize)))) } pub fn send_to(&self, buf: &[u8], dst: &SocketAddr) -> io::Result<usize> { let (dstp, dstlen) = dst.into_inner(); let ret = try!(cvt(unsafe { libc::sendto(*self.inner.as_inner(), buf.as_ptr() as *const c_void, buf.len() as wrlen_t, 0, dstp, dstlen) })); Ok(ret as usize) } pub fn duplicate(&self) -> io::Result<UdpSocket> { self.inner.duplicate().map(|s| UdpSocket { inner: s }) } pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.inner.set_timeout(dur, 
libc::SO_RCVTIMEO) } pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.inner.set_timeout(dur, libc::SO_SNDTIMEO) } pub fn read_timeout(&self) -> io::Result<Option<Duration>> { self.inner.timeout(libc::SO_RCVTIMEO) } pub fn write_timeout(&self) -> io::Result<Option<Duration>> { self.inner.timeout(libc::SO_SNDTIMEO) } } impl FromInner<Socket> for UdpSocket { fn from_inner(socket: Socket) -> UdpSocket { UdpSocket { inner: socket } } } impl fmt::Debug for UdpSocket { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut res = f.debug_struct("UdpSocket"); if let Ok(addr) = self.socket_addr() { res.field("addr", &addr); } let name = if cfg!(windows) {"socket"} else {"fd"}; res.field(name, &self.inner.as_inner()) .finish() } }<|fim▁end|>
fn getaddrinfo(node: *const c_char, service: *const c_char, hints: *const libc::addrinfo, res: *mut *mut libc::addrinfo) -> c_int; fn freeaddrinfo(res: *mut libc::addrinfo);
<|file_name|>index-server.js<|end_file_name|><|fim▁begin|>// Run only by vendor node. // In an ideal world this would be run in the same process/context of // atom-shell but there are many hurdles atm, see // https://github.com/atom/atom-shell/issues/533 // increase the libuv threadpool size to 1.5x the number of logical CPUs. process.env.UV_THREADPOOL_SIZE = Math.ceil(Math.max(4, require('os').cpus().length * 1.5)); process.title = 'mapbox-studio'; if (process.platform === 'win32') { // HOME is undefined on windows process.env.HOME = process.env.USERPROFILE; // NULL out PATH to avoid potential conflicting dlls process.env.PATH = ''; } var tm = require('./lib/tm'); var path = require('path'); var getport = require('getport'); var package_json = require('./package.json'); var server; var config = require('minimist')(process.argv.slice(2)); config.shell = config.shell || false; config.port = config.port || undefined; config.test = config.test || false;<|fim▁hole|> var usage = function usage() { var str = [ '' , ' Usage: mbstudio [options]' , '' , ' where [options] is any of:' , ' --version - Returns running version then exits' , ' --port - Port to run on (default: ' + config.port + ')' , ' --cwd - Working directory to run within (default: ' + config.cwd + ')' // TODO - are these used? , ' --shell - (default: ' + config.shell + ')' , ' --test - (default: ' + config.test + ')' , '' , 'mbstudio@' + package_json.version + ' ' + path.resolve(__dirname, '..') , 'node@' + process.versions.node ].join('\n') return str } if (config.version) { logger.debug(package_json.version); process.exit(0); } if (config.help || config.h) { logger.debug(usage()); process.exit(0); } if (!config.port) { getport(3000, 3999, configure); } else { configure(); } function configure(err, port) { if (err) throw err; config.port = config.port || port; tm.config(config, listen); } function listen(err) { if (err) throw err; server = require('./lib/server'); if (config.shell) { server.listen(tm.config().port, '127.0.0.1', finish); } else { server.listen(tm.config().port, finish); } } function finish(err) { if (err) throw err; server.emit('ready'); logger.debug('Mapbox Studio @ http://localhost:'+tm.config().port+'/'); }<|fim▁end|>
config.cwd = path.resolve(config.cwd || process.env.HOME); var logger = require('fastlog')('', 'debug', '<${timestamp}>');
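The Node entry point above sizes the libuv threadpool as ceil(max(4, logical_cpus * 1.5)). The same sizing rule, checked in isolation:

import math

def threadpool_size(cpus):
    return math.ceil(max(4, cpus * 1.5))

assert threadpool_size(2) == 4    # never below the 4-thread floor
assert threadpool_size(8) == 12   # 1.5x scaling above it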
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import sys import pytest py3 = sys.version_info[0] >= 3 class DummyCollector(pytest.collect.File): def collect(self):<|fim▁hole|> def pytest_pycollect_makemodule(path, parent): bn = path.basename if "py3" in bn and not py3 or ("py2" in bn and py3): return DummyCollector(path, parent=parent)<|fim▁end|>
return []
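The conftest above drops version-specific test modules from collection by returning an empty DummyCollector. Its predicate relies on "and" binding tighter than "or"; extracted here for clarity, with hypothetical file names:

def skip_module(basename, running_py3):
    return "py3" in basename and not running_py3 or ("py2" in basename and running_py3)

assert skip_module("test_syntax_py2.py", running_py3=True)
assert not skip_module("test_syntax_py3.py", running_py3=True)
assert not skip_module("test_common.py", running_py3=True)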
<|file_name|>edit_detail.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This file is part of Shoop. # # Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals import six from django.utils.translation import ugettext_lazy as _ from django.views.generic.detail import DetailView from shoop.core.models import PaymentMethod, ShippingMethod from shoop.utils.excs import Problem from shoop.utils.importing import load class _BaseMethodDetailView(DetailView): model = None # Overridden below title = _(u"Edit Details") <|fim▁hole|> # This view only dispatches further to the method module's own detail view class object = self.get_object() module = object.module if not module.admin_detail_view_class: raise Problem("Module %s has no admin detail view" % module.name) if isinstance(module.admin_detail_view_class, six.text_type): view_class = load(module.admin_detail_view_class) else: view_class = module.admin_detail_view_class kwargs["object"] = object return view_class(model=self.model).dispatch(request, *args, **kwargs) class ShippingMethodEditDetailView(_BaseMethodDetailView): model = ShippingMethod class PaymentMethodEditDetailView(_BaseMethodDetailView): model = PaymentMethod<|fim▁end|>
def dispatch(self, request, *args, **kwargs):
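The dispatch above accepts admin_detail_view_class either as a class object or as a dotted-path string resolved through shoop.utils.importing.load. A minimal sketch of that string-or-class pattern using only the standard library; importlib stands in for Shoop's load helper, whose exact path syntax isn't shown in this file:

import importlib

def resolve_view_class(view_class):
    if isinstance(view_class, str):
        module_path, _, attr = view_class.rpartition(".")
        return getattr(importlib.import_module(module_path), attr)
    return view_class

assert resolve_view_class("collections.OrderedDict").__name__ == "OrderedDict"
assert resolve_view_class(dict) is dict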
<|file_name|>set.py<|end_file_name|><|fim▁begin|># set command to set global variables from lib.utils import * def _help(): usage = ''' Usage: set [options] (var) [value] [options]: -h Print this help. -del (var) Delete variable (var) if defined. where (var) is a valid global variable if [value] is not given, current value is returned ''' print(usage) def main(argv): if '-h' in argv: _help() return # The shell doesn't send the # command name in the arg list # so the next line is not needed # anymore # argv.pop(0) #remove arg # to show all vars if len(argv) < 1: for i in prop.vars(): print(i, ' = ', prop.get(i)) return if '-del' in argv: try: var = argv[1] # detect system vars if var == 'save_state' or var == 'c_char': err(4, add='Cant delete system variable "' + var + '"') return prop.delete(var) return except IndexError: err(4, add='variable name was missing') return var = argv[0] if len(argv) < 2: val = prop.get(var) if val == NULL: err(4, var) return print(val) return # remove name of var<|fim▁hole|>
argv.pop(0)
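Taken together, the prompt and its completion pop the variable name off argv and hand the remainder to make_s to build the value. make_s itself lives in the elided lib.utils; assuming it joins the remaining arguments with spaces, which is how the usage text reads, the flow is equivalent to:

def parse_set_args(argv):
    var = argv[0]
    rest = argv[1:]                  # same effect as argv.pop(0) on a copy
    return var, " ".join(rest)       # assumed behaviour of make_s(argv)

assert parse_set_args(["greeting", "hello", "world"]) == ("greeting", "hello world")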
<|file_name|>pmi.py<|end_file_name|><|fim▁begin|># pylint: disable=C0103 """Wrapper module for libpcp_import - Performace Co-Pilot Log Import API # # Copyright (C) 2012-2015 Red Hat. # # This file is part of the "pcp" module, the python interfaces for the # Performance Co-Pilot toolkit. # # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # # Example use of this module for creating a PCP archive: import math import time import pmapi from pcp import pmi # Create a new archive log = pmi.pmiLogImport("loadtest") log.pmiSetHostname("www.abc.com") log.pmiSetTimezone("EST-10") # Add a metric with an instance domain domain = 60 # Linux kernel pmid = log.pmiID(domain, 2, 0) indom = log.pmiInDom(domain, 2) units = log.pmiUnits(0, 0, 0, 0, 0, 0) log.pmiAddMetric("kernel.all.load", pmid, pmapi.PM_TYPE_FLOAT, indom, pmapi.PM_SEM_INSTANT, units) log.pmiAddInstance(indom, "1 minute", 1) log.pmiAddInstance(indom, "5 minute", 5) log.pmiAddInstance(indom, "15 minute", 15) # Create a record with a timestamp log.pmiPutValue("kernel.all.load", "1 minute", "%f" % 0.01) log.pmiPutValue("kernel.all.load", "5 minute", "%f" % 0.05) log.pmiPutValue("kernel.all.load", "15 minute", "%f" % 0.15) timetuple = math.modf(time.time()) useconds = int(timetuple[0] * 1000000) seconds = int(timetuple[1]) log.pmiWrite(seconds, useconds) del log """ from pcp.pmapi import pmID, pmInDom, pmUnits, pmResult from cpmi import pmiErrSymDict, PMI_MAXERRMSGLEN import ctypes from ctypes import cast, c_int, c_char_p, POINTER # Performance Co-Pilot PMI library (C) LIBPCP_IMPORT = ctypes.CDLL(ctypes.util.find_library("pcp_import")) ## # PMI Log Import Services LIBPCP_IMPORT.pmiDump.restype = None LIBPCP_IMPORT.pmiDump.argtypes = None LIBPCP_IMPORT.pmiID.restype = pmID LIBPCP_IMPORT.pmiID.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_int] LIBPCP_IMPORT.pmiInDom.restype = pmInDom LIBPCP_IMPORT.pmiInDom.argtypes = [ctypes.c_int, ctypes.c_int] LIBPCP_IMPORT.pmiUnits.restype = pmUnits LIBPCP_IMPORT.pmiUnits.argtypes = [ ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int] LIBPCP_IMPORT.pmiErrStr_r.restype = c_char_p LIBPCP_IMPORT.pmiErrStr_r.argtypes = [c_int, c_char_p, c_int] LIBPCP_IMPORT.pmiStart.restype = c_int LIBPCP_IMPORT.pmiStart.argtypes = [c_char_p, c_int] LIBPCP_IMPORT.pmiUseContext.restype = c_int LIBPCP_IMPORT.pmiUseContext.argtypes = [c_int] LIBPCP_IMPORT.pmiEnd.restype = c_int LIBPCP_IMPORT.pmiEnd.argtypes = None LIBPCP_IMPORT.pmiSetHostname.restype = c_int LIBPCP_IMPORT.pmiSetHostname.argtypes = [c_char_p] LIBPCP_IMPORT.pmiSetTimezone.restype = c_int LIBPCP_IMPORT.pmiSetTimezone.argtypes = [c_char_p] LIBPCP_IMPORT.pmiAddMetric.restype = c_int LIBPCP_IMPORT.pmiAddMetric.argtypes = [ c_char_p, pmID, c_int, pmInDom, c_int, pmUnits] LIBPCP_IMPORT.pmiAddInstance.restype = c_int LIBPCP_IMPORT.pmiAddInstance.argtypes = [pmInDom, c_char_p, c_int] LIBPCP_IMPORT.pmiPutValue.restype = c_int LIBPCP_IMPORT.pmiPutValue.argtypes = [c_char_p, c_char_p, c_char_p] LIBPCP_IMPORT.pmiGetHandle.restype = c_int LIBPCP_IMPORT.pmiGetHandle.argtypes = [c_char_p, c_char_p] 
LIBPCP_IMPORT.pmiPutValueHandle.restype = c_int LIBPCP_IMPORT.pmiPutValueHandle.argtypes = [c_int, c_char_p] LIBPCP_IMPORT.pmiWrite.restype = c_int LIBPCP_IMPORT.pmiWrite.argtypes = [c_int, c_int] LIBPCP_IMPORT.pmiPutResult.restype = c_int LIBPCP_IMPORT.pmiPutResult.argtypes = [POINTER(pmResult)] # # definition of exception classes # class pmiErr(Exception): ''' Encapsulation for PMI interface error code ''' def __str__(self): error_code = self.args[0] try: error_symbol = pmiErrSymDict[error_code] error_string = ctypes.create_string_buffer(PMI_MAXERRMSGLEN) error_string = LIBPCP_IMPORT.pmiErrStr_r(error_code, error_string, PMI_MAXERRMSGLEN) except KeyError: error_symbol = error_string = "" return "%s %s" % (error_symbol, error_string) # # class LogImport # # This class wraps the PMI (Log Import) library functions # class pmiLogImport(object): """Defines a PCP Log Import archive context This is used to create a PCP archive from an external source """ ## # property read methods def read_path(self): """ Property for archive path """ return self._path def read_ctx(self): """ Property for log import context """ return self._ctx ##<|fim▁hole|> ctx = property(read_ctx, None, None, None) ## # overloads def __init__(self, path, inherit = 0): if type(path) != type(b''): path = path.encode('utf-8') self._path = path # the archive path (file name) self._ctx = LIBPCP_IMPORT.pmiStart(c_char_p(path), inherit) if self._ctx < 0: raise pmiErr(self._ctx) def __del__(self): if LIBPCP_IMPORT: LIBPCP_IMPORT.pmiUseContext(self._ctx) LIBPCP_IMPORT.pmiEnd() self._ctx = -1 ## # PMI Log Import Services def pmiSetHostname(self, hostname): """PMI - set the source host name for a Log Import archive """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(hostname) != type(b''): hostname = hostname.encode('utf-8') status = LIBPCP_IMPORT.pmiSetHostname(c_char_p(hostname)) if status < 0: raise pmiErr(status) return status def pmiSetTimezone(self, timezone): """PMI - set the source timezone for a Log Import archive """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(timezone) != type(b''): timezone = timezone.encode('utf-8') status = LIBPCP_IMPORT.pmiSetTimezone(c_char_p(timezone)) if status < 0: raise pmiErr(status) return status @staticmethod def pmiID(domain, cluster, item): """PMI - construct a pmID data structure (helper routine) """ return LIBPCP_IMPORT.pmiID(domain, cluster, item) @staticmethod def pmiInDom(domain, serial): """PMI - construct a pmInDom data structure (helper routine) """ return LIBPCP_IMPORT.pmiInDom(domain, serial) @staticmethod def pmiUnits(dim_space, dim_time, dim_count, scale_space, scale_time, scale_count): # pylint: disable=R0913 """PMI - construct a pmiUnits data structure (helper routine) """ return LIBPCP_IMPORT.pmiUnits(dim_space, dim_time, dim_count, scale_space, scale_time, scale_count) def pmiAddMetric(self, name, pmid, typed, indom, sem, units): # pylint: disable=R0913 """PMI - add a new metric definition to a Log Import context """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(name) != type(b''): name = name.encode('utf-8') status = LIBPCP_IMPORT.pmiAddMetric(c_char_p(name), pmid, typed, indom, sem, units) if status < 0: raise pmiErr(status) return status def pmiAddInstance(self, indom, instance, instid): """PMI - add element to an instance domain in a Log Import context """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) 
if type(instance) != type(b''): instance = instance.encode('utf-8') status = LIBPCP_IMPORT.pmiAddInstance(indom, c_char_p(instance), instid) if status < 0: raise pmiErr(status) return status def pmiPutValue(self, name, inst, value): """PMI - add a value for a metric-instance pair """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(name) != type(b''): name = name.encode('utf-8') if type(inst) != type(b''): inst = inst.encode('utf-8') if type(value) != type(b''): value = value.encode('utf-8') status = LIBPCP_IMPORT.pmiPutValue(c_char_p(name), c_char_p(inst), c_char_p(value)) if status < 0: raise pmiErr(status) return status def pmiGetHandle(self, name, inst): """PMI - define a handle for a metric-instance pair """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(name) != type(b''): name = name.encode('utf-8') if type(inst) != type(b''): inst = inst.encode('utf-8') status = LIBPCP_IMPORT.pmiGetHandle(c_char_p(name), c_char_p(inst)) if status < 0: raise pmiErr(status) return status def pmiPutValueHandle(self, handle, value): """PMI - add a value for a metric-instance pair via a handle """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) if type(value) != type(b''): value = value.encode('utf-8') status = LIBPCP_IMPORT.pmiPutValueHandle(handle, c_char_p(value)) if status < 0: raise pmiErr(status) return status def pmiWrite(self, sec, usec): """PMI - flush data to a Log Import archive """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) status = LIBPCP_IMPORT.pmiWrite(sec, usec) if status < 0: raise pmiErr(status) return status def put_result(self, result): """PMI - add a data record to a Log Import archive """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) status = LIBPCP_IMPORT.pmiPutResult(cast(result, POINTER(pmResult))) if status < 0: raise pmiErr(status) return status @staticmethod def pmiDump(): """PMI - dump the current Log Import contexts (diagnostic) """ LIBPCP_IMPORT.pmiDump() def pmiEnd(self): """PMI - close current context and finish a Log Import archive """ status = LIBPCP_IMPORT.pmiUseContext(self._ctx) if status < 0: raise pmiErr(status) status = LIBPCP_IMPORT.pmiEnd() self._ctx = -1 if status < 0: raise pmiErr(status) return status<|fim▁end|>
# property definitions path = property(read_path, None, None, None)
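# A minimal usage sketch for the pmiLogImport wrapper in the sample above.
# It assumes PCP's Python bindings are installed ("from pcp import pmi") and
# that the type/semantics constants live in the cpmapi module, as in PCP's
# own examples; the metric name, domain numbers and values here are made up.
import cpmapi as c_api
from pcp import pmi

log = pmi.pmiLogImport("example-archive")   # writes example-archive.{0,meta,index}
log.pmiSetHostname("app-host")
log.pmiSetTimezone("UTC")

indom = log.pmiInDom(245, 1)                # arbitrary private domain/serial
log.pmiAddInstance(indom, "worker0", 0)
units = log.pmiUnits(0, 0, 1, 0, 0, c_api.PM_COUNT_ONE)
log.pmiAddMetric("example.requests", log.pmiID(245, 0, 1),
                 c_api.PM_TYPE_U32, indom, c_api.PM_SEM_COUNTER, units)

handle = log.pmiGetHandle("example.requests", "worker0")
log.pmiPutValueHandle(handle, "42")
log.pmiWrite(1500000000, 0)                 # timestamp: seconds, microseconds
log.pmiEnd()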
<|file_name|>TimeShift.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jmeter.functions; import java.time.Duration; import java.time.Instant; import java.time.Year; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Locale; import org.apache.commons.lang3.LocaleUtils; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.engine.util.CompoundVariable; import org.apache.jmeter.samplers.SampleResult; import org.apache.jmeter.samplers.Sampler; import org.apache.jmeter.threads.JMeterVariables; import org.apache.jmeter.util.JMeterUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; /** * timeShifting Function permit to shift a date * <p> * Parameters: * <ul> * <li>format date @see * https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html * (optional - defaults to epoch time in millisecond)</li> * <li>date to shift formatted * as first param (optional - defaults now)</li> * <li>amount of (seconds, minutes, hours, days ) to add (optional - default nothing is add)</li> * <li>a string of the locale for the format ( optional )</li> * <li>variable name ( optional )</li> * </ul> * Returns:<|fim▁hole|> * @since 3.3 */ public class TimeShift extends AbstractFunction { private static final Logger log = LoggerFactory.getLogger(TimeShift.class); private static final String KEY = "__timeShift"; // $NON-NLS-1$ private static final List<String> desc = Arrays.asList(JMeterUtils.getResString("time_format_shift"), JMeterUtils.getResString("date_to_shift"), JMeterUtils.getResString("value_to_shift"), JMeterUtils.getResString("locale_format"), JMeterUtils.getResString("function_name_paropt")); // Ensure that these are set, even if no parameters are provided private String format = ""; //$NON-NLS-1$ private CompoundVariable dateToShiftCompound; // $NON-NLS-1$ private CompoundVariable amountToShiftCompound; // $NON-NLS-1$ private Locale locale = JMeterUtils.getLocale(); // $NON-NLS-1$ private String variableName = ""; //$NON-NLS-1$ private ZoneId systemDefaultZoneID = ZoneId.systemDefault(); private static final class LocaleFormatObject { private String format; private Locale locale; public LocaleFormatObject(String format, Locale locale) { this.format = format; this.locale = locale; } public String getFormat() { return format; } public Locale getLocale() { return locale; } @Override public int hashCode() { return 
format.hashCode() + locale.hashCode(); } @Override public boolean equals(Object other) { if (!(other instanceof LocaleFormatObject)) { return false; } LocaleFormatObject otherError = (LocaleFormatObject) other; return format.equals(otherError.getFormat()) && locale.getDisplayName().equals(otherError.getLocale().getDisplayName()); } /** * @see java.lang.Object#toString() */ @Override public String toString() { return "LocaleFormatObject [format=" + format + ", locale=" + locale + "]"; } } /** Date time format cache handler **/ private Cache<LocaleFormatObject, DateTimeFormatter> dateTimeFormatterCache = null; public TimeShift() { super(); } /** {@inheritDoc} */ @Override public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException { String amountToShift = amountToShiftCompound.execute().trim(); String dateToShift = dateToShiftCompound.execute().trim(); ZonedDateTime zonedDateTimeToShift = ZonedDateTime.now(systemDefaultZoneID); DateTimeFormatter formatter = null; if (!StringUtils.isEmpty(format)) { try { LocaleFormatObject lfo = new LocaleFormatObject(format, locale); formatter = dateTimeFormatterCache.get(lfo, this::createFormatter); } catch (IllegalArgumentException ex) { log.error("Format date pattern '{}' is invalid " + "(see https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html)", format, ex); // $NON-NLS-1$ return ""; } } if (!dateToShift.isEmpty()) { try { if (formatter != null) { zonedDateTimeToShift = ZonedDateTime.parse(dateToShift, formatter); } else { zonedDateTimeToShift = ZonedDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(dateToShift)), systemDefaultZoneID); } } catch (DateTimeParseException | NumberFormatException ex) { log.error("Failed to parse the date '{}' to shift with formatter '{}'", dateToShift, formatter, ex); // $NON-NLS-1$ } } // Check amount value to shift if (!StringUtils.isEmpty(amountToShift)) { try { Duration duration = Duration.parse(amountToShift); zonedDateTimeToShift = zonedDateTimeToShift.plus(duration); } catch (DateTimeParseException ex) { log.error( "Failed to parse the amount duration '{}' to shift " + "(see https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html#parse-java.lang.CharSequence-) ", amountToShift, ex); // $NON-NLS-1$ } } String dateString; if (formatter != null) { dateString = zonedDateTimeToShift.format(formatter); } else { dateString = String.valueOf(zonedDateTimeToShift.toInstant().toEpochMilli()); } if (!StringUtils.isEmpty(variableName)) { JMeterVariables vars = getVariables(); if (vars != null) {// vars will be null on TestPlan vars.put(variableName, dateString); } } return dateString; } private DateTimeFormatter createFormatter(LocaleFormatObject format) { log.debug("Create a new instance of DateTimeFormatter for format '{}' in the cache", format); return new DateTimeFormatterBuilder().appendPattern(format.getFormat()) .parseDefaulting(ChronoField.NANO_OF_SECOND, 0) .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0) .parseDefaulting(ChronoField.HOUR_OF_DAY, 0) .parseDefaulting(ChronoField.DAY_OF_MONTH, 1) .parseDefaulting(ChronoField.MONTH_OF_YEAR, 1) .parseDefaulting(ChronoField.YEAR_OF_ERA, Year.now().getValue()) .parseDefaulting(ChronoField.OFFSET_SECONDS, ZonedDateTime.now().getOffset().getTotalSeconds()) .toFormatter(format.getLocale()); } /** {@inheritDoc} */ @Override public void setParameters(Collection<CompoundVariable> parameters) 
throws InvalidVariableException { checkParameterCount(parameters, 4, 5); Object[] values = parameters.toArray(); format = ((CompoundVariable) values[0]).execute().trim(); dateToShiftCompound = (CompoundVariable) values[1]; amountToShiftCompound = (CompoundVariable) values[2]; if (values.length == 4) { variableName = ((CompoundVariable) values[3]).execute().trim(); } else { String localeAsString = ((CompoundVariable) values[3]).execute().trim(); if (!localeAsString.trim().isEmpty()) { locale = LocaleUtils.toLocale(localeAsString); } variableName = ((CompoundVariable) values[4]).execute().trim(); } // Create the cache if (dateTimeFormatterCache == null) { dateTimeFormatterCache = Caffeine.newBuilder() .maximumSize(100).build(); } } /** {@inheritDoc} */ @Override public String getReferenceKey() { return KEY; } /** {@inheritDoc} */ @Override public List<String> getArgumentDesc() { return desc; } }<|fim▁end|>
* <p>a formatted date with the specified number of (seconds, minutes, * hours, days or months ) added. Value is also saved in the variable for * later re-use. *
<|file_name|>domparser.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::DOMParserBinding; use dom::bindings::utils::{CacheableWrapper, WrapperCache}; use dom::bindings::utils::{BindingObject, DerivedWrapper}; use dom::domparser::DOMParser;<|fim▁hole|> impl CacheableWrapper for DOMParser { fn get_wrappercache(&mut self) -> &mut WrapperCache { unsafe { cast::transmute(&self.wrapper) } } fn wrap_object_shared(@mut self, cx: *JSContext, scope: *JSObject) -> *JSObject { let mut unused = false; DOMParserBinding::Wrap(cx, scope, self, &mut unused) } } impl BindingObject for DOMParser { fn GetParentObject(&self, _cx: *JSContext) -> @mut CacheableWrapper { return self.owner as @mut CacheableWrapper; } } impl DerivedWrapper for DOMParser { fn wrap(&mut self, _cx: *JSContext, _scope: *JSObject, _vp: *mut JSVal) -> i32 { fail!(~"nyi") } fn wrap_shared(@mut self, cx: *JSContext, scope: *JSObject, vp: *mut JSVal) -> i32 { let obj = self.wrap_object_shared(cx, scope); if obj.is_null() { return 0; } else { unsafe { *vp = RUST_OBJECT_TO_JSVAL(obj) }; return 1; } } }<|fim▁end|>
use js::jsapi::{JSContext, JSObject, JSVal}; use js::glue::bindgen::{RUST_OBJECT_TO_JSVAL};
<|file_name|>wallet_listsinceblock.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2017 The Starwels developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the listsincelast RPC.""" from test_framework.test_framework import StarwelsTestFramework from test_framework.util import assert_equal, assert_array_result, assert_raises_rpc_error class ListSinceBlockTest (StarwelsTestFramework): def set_test_params(self): self.num_nodes = 4 self.setup_clean_chain = True def run_test(self): self.nodes[2].generate(101) self.sync_all() self.test_no_blockhash() self.test_invalid_blockhash() self.test_reorg() self.test_double_spend() self.test_double_send() def test_no_blockhash(self): txid = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) blockhash, = self.nodes[2].generate(1) self.sync_all() txs = self.nodes[0].listtransactions() assert_array_result(txs, {"txid": txid}, { "category": "receive", "amount": 1, "blockhash": blockhash, "confirmations": 1, }) assert_equal( self.nodes[0].listsinceblock(), {"lastblock": blockhash, "removed": [], "transactions": txs}) assert_equal( self.nodes[0].listsinceblock(""), {"lastblock": blockhash, "removed": [], "transactions": txs}) def test_invalid_blockhash(self): assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock, "42759cde25462784395a337460bde75f58e73d3f08bd31fdc3507cbac856a2c4") assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock, "0000000000000000000000000000000000000000000000000000000000000000") assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock, "invalid-hex") def test_reorg(self): ''' `listsinceblock` did not behave correctly when handed a block that was no longer in the main chain: ab0 / \ aa1 [tx0] bb1 | | aa2 bb2 | | aa3 bb3 | bb4 Consider a client that has only seen block `aa3` above. It asks the node to `listsinceblock aa3`. But at some point prior the main chain switched to the bb chain. Previously: listsinceblock would find height=4 for block aa3 and compare this to height=5 for the tip of the chain (bb4). It would then return results restricted to bb3-bb4. Now: listsinceblock finds the fork at ab0 and returns results in the range bb1-bb4. This test only checks that [tx0] is present. ''' # Split network into two self.split_network() # send to nodes[0] from nodes[2] senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) # generate on both sides lastblockhash = self.nodes[1].generate(6)[5] self.nodes[2].generate(7) self.log.info('lastblockhash=%s' % (lastblockhash)) self.sync_all([self.nodes[:2], self.nodes[2:]]) self.join_network() # listsinceblock(lastblockhash) should now include tx, as seen from nodes[0] lsbres = self.nodes[0].listsinceblock(lastblockhash) found = False for tx in lsbres['transactions']: if tx['txid'] == senttx: found = True break assert found def test_double_spend(self): ''' This tests the case where the same UTXO is spent twice on two separate blocks as part of a reorg. ab0 / \ aa1 [tx1] bb1 [tx2] | | aa2 bb2 | | aa3 bb3 | bb4 Problematic case: 1. User 1 receives USDH in tx1 from utxo1 in block aa1. 2. User 2 receives USDH in tx2 from utxo1 (same) in block bb1 3. User 1 sees 2 confirmations at block aa3. 4. Reorg into bb chain. 5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now invalidated. 
Currently the solution to this is to detect that a reorg'd block is asked for in listsinceblock, and to iterate back over existing blocks up until the fork point, and to include all transactions that relate to the node wallet. ''' self.sync_all() # Split network into two self.split_network() # share utxo between nodes[1] and nodes[2] utxos = self.nodes[2].listunspent() utxo = utxos[0] privkey = self.nodes[2].dumpprivkey(utxo['address'])<|fim▁hole|> # send from nodes[1] using utxo to nodes[0] change = '%.8f' % (float(utxo['amount']) - 1.0003) recipientDict = { self.nodes[0].getnewaddress(): 1, self.nodes[1].getnewaddress(): change, } utxoDicts = [{ 'txid': utxo['txid'], 'vout': utxo['vout'], }] txid1 = self.nodes[1].sendrawtransaction( self.nodes[1].signrawtransaction( self.nodes[1].createrawtransaction(utxoDicts, recipientDict))['hex']) # send from nodes[2] using utxo to nodes[3] recipientDict2 = { self.nodes[3].getnewaddress(): 1, self.nodes[2].getnewaddress(): change, } self.nodes[2].sendrawtransaction( self.nodes[2].signrawtransaction( self.nodes[2].createrawtransaction(utxoDicts, recipientDict2))['hex']) # generate on both sides lastblockhash = self.nodes[1].generate(3)[2] self.nodes[2].generate(4) self.join_network() self.sync_all() # gettransaction should work for txid1 assert self.nodes[0].gettransaction(txid1)['txid'] == txid1, "gettransaction failed to find txid1" # listsinceblock(lastblockhash) should now include txid1, as seen from nodes[0] lsbres = self.nodes[0].listsinceblock(lastblockhash) assert any(tx['txid'] == txid1 for tx in lsbres['removed']) # but it should not include 'removed' if include_removed=false lsbres2 = self.nodes[0].listsinceblock(blockhash=lastblockhash, include_removed=False) assert 'removed' not in lsbres2 def test_double_send(self): ''' This tests the case where the same transaction is submitted twice on two separate blocks as part of a reorg. The former will vanish and the latter will appear as the true transaction (with confirmations dropping as a result). ab0 / \ aa1 [tx1] bb1 | | aa2 bb2 | | aa3 bb3 [tx1] | bb4 Asserted: 1. tx1 is listed in listsinceblock. 2. It is included in 'removed' as it was removed, even though it is now present in a different block. 3. It is listed with a confirmations count of 2 (bb3, bb4), not 3 (aa1, aa2, aa3). 
''' self.sync_all() # Split network into two self.split_network() # create and sign a transaction utxos = self.nodes[2].listunspent() utxo = utxos[0] change = '%.8f' % (float(utxo['amount']) - 1.0003) recipientDict = { self.nodes[0].getnewaddress(): 1, self.nodes[2].getnewaddress(): change, } utxoDicts = [{ 'txid': utxo['txid'], 'vout': utxo['vout'], }] signedtxres = self.nodes[2].signrawtransaction( self.nodes[2].createrawtransaction(utxoDicts, recipientDict)) assert signedtxres['complete'] signedtx = signedtxres['hex'] # send from nodes[1]; this will end up in aa1 txid1 = self.nodes[1].sendrawtransaction(signedtx) # generate bb1-bb2 on right side self.nodes[2].generate(2) # send from nodes[2]; this will end up in bb3 txid2 = self.nodes[2].sendrawtransaction(signedtx) assert_equal(txid1, txid2) # generate on both sides lastblockhash = self.nodes[1].generate(3)[2] self.nodes[2].generate(2) self.join_network() self.sync_all() # gettransaction should work for txid1 self.nodes[0].gettransaction(txid1) # listsinceblock(lastblockhash) should now include txid1 in transactions # as well as in removed lsbres = self.nodes[0].listsinceblock(lastblockhash) assert any(tx['txid'] == txid1 for tx in lsbres['transactions']) assert any(tx['txid'] == txid1 for tx in lsbres['removed']) # find transaction and ensure confirmations is valid for tx in lsbres['transactions']: if tx['txid'] == txid1: assert_equal(tx['confirmations'], 2) # the same check for the removed array; confirmations should STILL be 2 for tx in lsbres['removed']: if tx['txid'] == txid1: assert_equal(tx['confirmations'], 2) if __name__ == '__main__': ListSinceBlockTest().main()<|fim▁end|>
self.nodes[1].importprivkey(privkey)
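# The fix described in the docstrings above boils down to finding the fork
# point between the caller's (possibly reorged-away) block and the current
# tip, then reporting wallet transactions from the fork onwards.  A toy
# sketch with hypothetical block objects carrying hash/parent attributes --
# not Bitcoin Core's actual implementation:
def fork_point(stale_tip, main_tip):
    seen = set()
    block = stale_tip
    while block is not None:            # collect the stale branch (aa3..ab0)
        seen.add(block.hash)
        block = block.parent
    block = main_tip
    while block is not None and block.hash not in seen:
        block = block.parent            # walk the main chain back to ab0
    return block                        # deepest common ancestor
# listsinceblock then returns transactions in blocks after fork_point(),
# which is how tx1/tx2 become visible despite the reorg.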
<|file_name|>ResponseCookiesTestCase.java<|end_file_name|><|fim▁begin|>/* * JBoss, Home of Professional Open Source. * Copyright 2019 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|> package io.undertow.servlet.test.response.cookies; import java.util.Arrays; import java.util.Comparator; import javax.servlet.ServletException; import io.undertow.servlet.api.ServletInfo; import io.undertow.servlet.test.util.DeploymentUtils; import io.undertow.testutils.DefaultServer; import io.undertow.testutils.HttpClientUtils; import io.undertow.testutils.TestHttpClient; import io.undertow.util.StatusCodes; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Test for response.addCookie * * @author Flavia Rainone */ @RunWith(DefaultServer.class) public class ResponseCookiesTestCase { @BeforeClass public static void setup() throws ServletException { DeploymentUtils.setupServlet( new ServletInfo("add-cookies", AddCookiesServlet.class) .addMapping("/add-cookies"), new ServletInfo("duplicate-cookies", DuplicateCookiesServlet.class) .addMapping("/duplicate-cookies"), new ServletInfo("overwrite-cookies", OverwriteCookiesServlet.class) .addMapping("/overwrite-cookies"), new ServletInfo("jsessionid-cookies", JSessionIDCookiesServlet.class) .addMapping("/jsessionid-cookies")); } @Test public void addCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/add-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(2, setCookieHeaders.length); assertEquals("test1=test1; path=/test", setCookieHeaders[0].getValue()); assertEquals("test2=test2", setCookieHeaders[1].getValue()); } finally { client.getConnectionManager().shutdown(); } } @Test public void duplicateCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/duplicate-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(7, setCookieHeaders.length); Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString)); assertEquals("test1=test1; path=/test1_1", setCookieHeaders[0].getValue()); assertEquals("test1=test1; path=/test1_2", setCookieHeaders[1].getValue()); assertEquals("test2=test2; path=/test2", setCookieHeaders[2].getValue()); 
assertEquals("test2=test2; path=/test2; domain=www.domain2.com", setCookieHeaders[3].getValue()); assertEquals("test3=test3", setCookieHeaders[4].getValue()); assertEquals("test3=test3; domain=www.domain3-1.com", setCookieHeaders[5].getValue()); assertEquals("test3=test3; domain=www.domain3-2.com", setCookieHeaders[6].getValue()); } finally { client.getConnectionManager().shutdown(); } } @Test public void overwriteCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/overwrite-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(5, setCookieHeaders.length); Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString)); assertTrue("Header " + setCookieHeaders[0] + "didn't match expected regex", setCookieHeaders[0].getValue().matches("JSESSIONID=.*; path=/servletContext")); assertEquals("test=test10; domain=www.domain.com", setCookieHeaders[1].getValue()); assertEquals("test=test2; path=/test", setCookieHeaders[2].getValue()); assertEquals("test=test5", setCookieHeaders[3].getValue()); assertEquals("test=test8; path=/test; domain=www.domain.com", setCookieHeaders[4].getValue()); } finally { client.getConnectionManager().shutdown(); } } @Test public void jsessionIdCookies() throws Exception { final TestHttpClient client = new TestHttpClient(); try { final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/jsessionid-cookies"); final HttpResponse result = client.execute(get); assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode()); final String response = HttpClientUtils.readResponse(result); assertEquals("Served at: /servletContext", response); final Header[] setCookieHeaders = result.getHeaders("Set-Cookie"); assertEquals(3, setCookieHeaders.length); assertTrue("Header " + setCookieHeaders[0] + "didn't start with expected prefix", setCookieHeaders[0].getValue().startsWith("JSESSIONID=_bug_fix; path=/path3; Max-Age=500; Expires=")); assertTrue("Header " + setCookieHeaders[1] + "didn't start with expected prefix", setCookieHeaders[1].getValue().startsWith("JSESSIONID=_bug_fix; path=/path4; Max-Age=1000; Expires=")); assertTrue("Header " + setCookieHeaders[2] + "didn't match expected regex", setCookieHeaders[2].getValue().matches("JSESSIONID=.*; path=/servletContext")); } finally { client.getConnectionManager().shutdown(); } } }<|fim▁end|>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
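# The duplicate/overwrite expectations in the tests above hinge on response
# cookies being keyed by (name, path, domain): the same name with a
# different path or domain yields an additional Set-Cookie header instead of
# replacing the first.  A toy model of that bookkeeping (not Undertow's
# implementation):
jar = {}
for c in ({"name": "test", "value": "test2", "path": "/test"},
          {"name": "test", "value": "test5"},
          {"name": "test", "value": "test10", "domain": "www.domain.com"}):
    jar[(c["name"], c.get("path"), c.get("domain"))] = c
assert len(jar) == 3    # three distinct entries, hence three headers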
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use rand::Rng; use std::cmp::Ordering; use std::io; fn main() { println!("Guess the number!");<|fim▁hole|> println!("The secret number is: {}", secret_number); loop { println!("Please input your guess."); let mut guess = String::new(); // ANCHOR: here // --snip-- io::stdin() .read_line(&mut guess) .expect("Failed to read line"); // ANCHOR: ch19 let guess: u32 = match guess.trim().parse() { Ok(num) => num, Err(_) => continue, }; // ANCHOR_END: ch19 println!("You guessed: {}", guess); // --snip-- // ANCHOR_END: here match guess.cmp(&secret_number) { Ordering::Less => println!("Too small!"), Ordering::Greater => println!("Too big!"), Ordering::Equal => { println!("You win!"); break; } } } }<|fim▁end|>
let secret_number = rand::thread_rng().gen_range(1..101);
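# The `Err(_) => continue` parse idiom in the Rust loop above is the
# counterpart of try/except-continue; a rough Python sketch of the same
# guessing game:
import random

secret = random.randint(1, 100)         # like gen_range(1..101)
while True:
    try:
        guess = int(input("Please input your guess. "))
    except ValueError:
        continue                        # ignore non-numeric input, like Err(_)
    if guess < secret:
        print("Too small!")
    elif guess > secret:
        print("Too big!")
    else:
        print("You win!")
        break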
<|file_name|>test_node.py<|end_file_name|><|fim▁begin|>import unittest from golem.network.p2p.node import Node def is_ip_address(address): """ Check if @address is correct IP address :param address: Address to be checked :return: True if is correct, false otherwise """ from ipaddress import ip_address, AddressValueError try: # will raise error in case of incorrect address ip_address(unicode(address)) return True except (ValueError, AddressValueError): return False class TestNode(unittest.TestCase): def test_str(self): n = Node(node_name="Blabla", key="ABC") self.assertNotIn("at", str(n)) self.assertNotIn("at", "{}".format(n)) self.assertIn("Blabla", str(n)) self.assertIn("Blabla", "{}".format(n)) self.assertIn("ABC", str(n)) self.assertIn("ABC", "{}".format(n)) def test_collect_network_info(self): """ Test configuring Node object """<|fim▁hole|> assert is_ip_address(node.pub_addr) assert is_ip_address(node.prv_addr) for address in node.prv_addresses: assert is_ip_address(address)<|fim▁end|>
node = Node() node.collect_network_info()
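# The `unicode()` call in is_ip_address() above is a Python 2 leftover from
# the original codebase; a Python 3 rendering of the same validity check:
from ipaddress import ip_address

def is_ip(addr):
    try:
        ip_address(str(addr))           # raises ValueError when malformed
        return True
    except ValueError:
        return False

assert is_ip("192.168.0.1") and not is_ip("999.0.0.1")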
<|file_name|>sized_unsized_cast.rs<|end_file_name|><|fim▁begin|>use crate::structured_errors::StructuredDiagnostic; use rustc_errors::{DiagnosticBuilder, DiagnosticId}; use rustc_middle::ty::{Ty, TypeFoldable}; use rustc_session::Session; use rustc_span::Span; pub struct SizedUnsizedCast<'tcx> { pub sess: &'tcx Session, pub span: Span, pub expr_ty: Ty<'tcx>, pub cast_ty: String, } impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCast<'tcx> { fn session(&self) -> &Session { self.sess } fn code(&self) -> DiagnosticId { rustc_errors::error_code!(E0607) } fn diagnostic_common(&self) -> DiagnosticBuilder<'tcx> { if self.expr_ty.references_error() { self.sess.diagnostic().struct_dummy() } else { self.sess.struct_span_fatal_with_code( self.span, &format!( "cannot cast thin pointer `{}` to fat pointer `{}`", self.expr_ty, self.cast_ty ), self.code(), ) } } fn diagnostic_extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { err.help( "Thin pointers are \"simple\" pointers: they are purely a reference to a memory address. Fat pointers are pointers referencing \"Dynamically Sized Types\" (also called DST). DST don't have a statically known size, therefore they can only exist behind some kind of pointers that contain additional information. Slices and trait objects are DSTs. In the case of slices, the additional information the fat pointer holds is their size. To fix this error, don't try to cast directly between thin and fat pointers. For more information about casts, take a look at The Book: https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions", ); err<|fim▁hole|>}<|fim▁end|>
}
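# A loose ctypes illustration of the thin/fat distinction from the help
# text above.  Python has no real pointers, so this only mirrors the idea
# that the fat form carries size metadata the thin form lacks:
import ctypes

buf = ctypes.create_string_buffer(b"12345678", 8)
thin = ctypes.addressof(buf)            # "thin": an address and nothing else
fat = (thin, ctypes.sizeof(buf))        # "fat": address + length, like a slice
assert ctypes.string_at(*fat) == b"12345678"   # reading needs the length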
<|file_name|>test_presigned.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 SwiftStack, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import requests from swift.common.middleware.s3api.etree import fromstring import test.functional as tf from test.functional.s3api import S3ApiBase from test.functional.s3api.utils import get_error_code, get_error_msg def setUpModule(): tf.setup_package() def tearDownModule(): tf.teardown_package() class TestS3ApiPresignedUrls(S3ApiBase): def test_bucket(self): bucket = 'test-bucket' req_objects = ('object', 'object2') max_bucket_listing = tf.cluster_info['s3api'].get( 'max_bucket_listing', 1000) # GET Bucket (Without Object) status, _junk, _junk = self.conn.make_request('PUT', bucket) self.assertEqual(status, 200) url, headers = self.conn.generate_url_and_headers('GET', bucket) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) self.assertCommonResponseHeaders(resp.headers) self.assertIsNotNone(resp.headers['content-type']) self.assertEqual(resp.headers['content-length'], str(len(resp.content))) elem = fromstring(resp.content, 'ListBucketResult') self.assertEqual(elem.find('Name').text, bucket) self.assertIsNone(elem.find('Prefix').text) self.assertIsNone(elem.find('Marker').text) self.assertEqual(elem.find('MaxKeys').text, str(max_bucket_listing)) self.assertEqual(elem.find('IsTruncated').text, 'false') objects = elem.findall('./Contents') self.assertEqual(list(objects), []) # GET Bucket (With Object) for obj in req_objects: status, _junk, _junk = self.conn.make_request('PUT', bucket, obj) self.assertEqual( status, 200, 'Got %d response while creating %s' % (status, obj)) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) self.assertCommonResponseHeaders(resp.headers) self.assertIsNotNone(resp.headers['content-type']) self.assertEqual(resp.headers['content-length'], str(len(resp.content))) elem = fromstring(resp.content, 'ListBucketResult') self.assertEqual(elem.find('Name').text, bucket) self.assertIsNone(elem.find('Prefix').text) self.assertIsNone(elem.find('Marker').text) self.assertEqual(elem.find('MaxKeys').text, str(max_bucket_listing)) self.assertEqual(elem.find('IsTruncated').text, 'false') resp_objects = elem.findall('./Contents') self.assertEqual(len(list(resp_objects)), 2) for o in resp_objects: self.assertIn(o.find('Key').text, req_objects) self.assertIsNotNone(o.find('LastModified').text) self.assertRegexpMatches( o.find('LastModified').text, r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$') self.assertIsNotNone(o.find('ETag').text) self.assertEqual(o.find('Size').text, '0') self.assertIsNotNone(o.find('StorageClass').text is not None) self.assertEqual(o.find('Owner/ID').text, self.conn.user_id) self.assertEqual(o.find('Owner/DisplayName').text, self.conn.user_id) # DELETE Bucket for obj in req_objects: self.conn.make_request('DELETE', bucket, obj) url, headers = 
self.conn.generate_url_and_headers('DELETE', bucket) resp = requests.delete(url, headers=headers) self.assertEqual(resp.status_code, 204, 'Got %d %s' % (resp.status_code, resp.content)) def test_expiration_limits(self): if os.environ.get('S3_USE_SIGV4'): self._test_expiration_limits_v4() else: self._test_expiration_limits_v2() def _test_expiration_limits_v2(self): bucket = 'test-bucket' # Expiration date is too far in the future url, headers = self.conn.generate_url_and_headers( 'GET', bucket, expires_in=2 ** 32) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 403, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(get_error_code(resp.content), 'AccessDenied') self.assertIn('Invalid date (should be seconds since epoch)', get_error_msg(resp.content)) <|fim▁hole|> bucket = 'test-bucket' # Expiration is negative url, headers = self.conn.generate_url_and_headers( 'GET', bucket, expires_in=-1) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 400, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(get_error_code(resp.content), 'AuthorizationQueryParametersError') self.assertIn('X-Amz-Expires must be non-negative', get_error_msg(resp.content)) # Expiration date is too far in the future for exp in (7 * 24 * 60 * 60 + 1, 2 ** 63 - 1): url, headers = self.conn.generate_url_and_headers( 'GET', bucket, expires_in=exp) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 400, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(get_error_code(resp.content), 'AuthorizationQueryParametersError') self.assertIn('X-Amz-Expires must be less than 604800 seconds', get_error_msg(resp.content)) # Expiration date is *way* too far in the future, or isn't a number for exp in (2 ** 63, 'foo'): url, headers = self.conn.generate_url_and_headers( 'GET', bucket, expires_in=2 ** 63) resp = requests.get(url, headers=headers) self.assertEqual(resp.status_code, 400, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(get_error_code(resp.content), 'AuthorizationQueryParametersError') self.assertEqual('X-Amz-Expires should be a number', get_error_msg(resp.content)) def test_object(self): bucket = 'test-bucket' obj = 'object' status, _junk, _junk = self.conn.make_request('PUT', bucket) self.assertEqual(status, 200) # HEAD/missing object head_url, headers = self.conn.generate_url_and_headers( 'HEAD', bucket, obj) resp = requests.head(head_url, headers=headers) self.assertEqual(resp.status_code, 404, 'Got %d %s' % (resp.status_code, resp.content)) # Wrong verb resp = requests.get(head_url) self.assertEqual(resp.status_code, 403, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(get_error_code(resp.content), 'SignatureDoesNotMatch') # PUT empty object put_url, headers = self.conn.generate_url_and_headers( 'PUT', bucket, obj) resp = requests.put(put_url, data='', headers=headers) self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) # GET empty object get_url, headers = self.conn.generate_url_and_headers( 'GET', bucket, obj) resp = requests.get(get_url, headers=headers) self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(resp.content, '') # PUT over object resp = requests.put(put_url, data='foobar', headers=headers) self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) # GET non-empty object resp = requests.get(get_url, headers=headers) 
self.assertEqual(resp.status_code, 200, 'Got %d %s' % (resp.status_code, resp.content)) self.assertEqual(resp.content, 'foobar') # DELETE Object delete_url, headers = self.conn.generate_url_and_headers( 'DELETE', bucket, obj) resp = requests.delete(delete_url, headers=headers) self.assertEqual(resp.status_code, 204, 'Got %d %s' % (resp.status_code, resp.content)) # Final cleanup status, _junk, _junk = self.conn.make_request('DELETE', bucket) self.assertEqual(status, 204) class TestS3ApiPresignedUrlsSigV4(TestS3ApiPresignedUrls): @classmethod def setUpClass(cls): os.environ['S3_USE_SIGV4'] = "True" @classmethod def tearDownClass(cls): del os.environ['S3_USE_SIGV4'] def setUp(self): super(TestS3ApiPresignedUrlsSigV4, self).setUp()<|fim▁end|>
def _test_expiration_limits_v4(self):
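# The SigV4 limits exercised above, restated as a standalone validator
# sketch.  It mirrors the error strings the tests assert and is simplified:
# the real middleware also rejects values at or beyond 2**63 as non-numbers.
def check_expires(raw):
    try:
        exp = int(raw)
    except (TypeError, ValueError):
        return 'X-Amz-Expires should be a number'
    if exp < 0:
        return 'X-Amz-Expires must be non-negative'
    if exp > 7 * 24 * 60 * 60:
        return 'X-Amz-Expires must be less than 604800 seconds'
    return None                          # acceptable expiry

assert check_expires('foo') and check_expires(-1) and check_expires(604801)
assert check_expires(3600) is None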
<|file_name|>milestone.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package setmilestone implements the `/milestone` command which allows members of the milestone // maintainers team to specify a milestone to be applied to an Issue or PR. package milestone import ( "fmt"<|fim▁hole|> "github.com/sirupsen/logrus" "k8s.io/test-infra/prow/github" "k8s.io/test-infra/prow/pluginhelp" "k8s.io/test-infra/prow/plugins" ) const pluginName = "milestone" var ( milestoneRegex = regexp.MustCompile(`(?m)^/milestone\s+(.+?)\s*$`) mustBeSigLead = "You must be a member of the [%s/%s](https://github.com/orgs/%s/teams/%s/members) github team to set the milestone." invalidMilestone = "The provided milestone is not valid for this repository. Milestones in this repository: [%s]\n\nUse `/milestone %s` to clear the milestone." milestoneTeamMsg = "The milestone maintainers team is the Github team with ID: %d." clearKeyword = "clear" ) type githubClient interface { CreateComment(owner, repo string, number int, comment string) error ClearMilestone(org, repo string, num int) error SetMilestone(org, repo string, issueNum, milestoneNum int) error ListTeamMembers(id int, role string) ([]github.TeamMember, error) ListMilestones(org, repo string) ([]github.Milestone, error) } func init() { plugins.RegisterGenericCommentHandler(pluginName, handleGenericComment, helpProvider) } func helpProvider(config *plugins.Configuration, enabledRepos []string) (*pluginhelp.PluginHelp, error) { pluginHelp := &pluginhelp.PluginHelp{ Description: "The milestone plugin allows members of a configurable GitHub team to set the milestone on an issue or pull request.", Config: func(repos []string) map[string]string { configMap := make(map[string]string) for _, repo := range repos { team, exists := config.RepoMilestone[repo] if exists { configMap[repo] = fmt.Sprintf(milestoneTeamMsg, team) } } configMap[""] = fmt.Sprintf(milestoneTeamMsg, config.RepoMilestone[""]) return configMap }(enabledRepos), } pluginHelp.AddCommand(pluginhelp.Command{ Usage: "/milestone <version> or /milestone clear", Description: "Updates the milestone for an issue or PR", Featured: false, WhoCanUse: "Members of the milestone maintainers GitHub team can use the '/milestone' command.", Examples: []string{"/milestone v1.10", "/milestone v1.9", "/milestone clear"}, }) return pluginHelp, nil } func handleGenericComment(pc plugins.PluginClient, e github.GenericCommentEvent) error { return handle(pc.GitHubClient, pc.Logger, &e, pc.PluginConfig.RepoMilestone) } func buildMilestoneMap(milestones []github.Milestone) map[string]int { m := make(map[string]int) for _, ms := range milestones { m[ms.Title] = ms.Number } return m } func handle(gc githubClient, log *logrus.Entry, e *github.GenericCommentEvent, repoMilestone map[string]plugins.Milestone) error { if e.Action != github.GenericCommentActionCreated { return nil } milestoneMatch := milestoneRegex.FindStringSubmatch(e.Body) if len(milestoneMatch) != 2 { return nil } org := 
e.Repo.Owner.Login repo := e.Repo.Name milestone, exists := repoMilestone[fmt.Sprintf("%s/%s", org, repo)] if !exists { // fallback default milestone = repoMilestone[""] } milestoneMaintainers, err := gc.ListTeamMembers(milestone.MaintainersID, github.RoleAll) if err != nil { return err } found := false for _, person := range milestoneMaintainers { login := github.NormLogin(e.User.Login) if github.NormLogin(person.Login) == login { found = true break } } if !found { // not in the milestone maintainers team msg := fmt.Sprintf(mustBeSigLead, org, milestone.MaintainersTeam, org, milestone.MaintainersTeam) return gc.CreateComment(org, repo, e.Number, plugins.FormatResponseRaw(e.Body, e.HTMLURL, e.User.Login, msg)) } milestones, err := gc.ListMilestones(org, repo) if err != nil { log.WithError(err).Errorf("Error listing the milestones in the %s/%s repo", org, repo) return err } proposedMilestone := milestoneMatch[1] // special case, if the clear keyword is used if proposedMilestone == clearKeyword { if err := gc.ClearMilestone(org, repo, e.Number); err != nil { log.WithError(err).Errorf("Error clearing the milestone for %s/%s#%d.", org, repo, e.Number) } return nil } milestoneMap := buildMilestoneMap(milestones) milestoneNumber, ok := milestoneMap[proposedMilestone] if !ok { slice := make([]string, 0, len(milestoneMap)) for k := range milestoneMap { slice = append(slice, fmt.Sprintf("`%s`", k)) } sort.Strings(slice) msg := fmt.Sprintf(invalidMilestone, strings.Join(slice, ", "), clearKeyword) return gc.CreateComment(org, repo, e.Number, plugins.FormatResponseRaw(e.Body, e.HTMLURL, e.User.Login, msg)) } if err := gc.SetMilestone(org, repo, e.Number, milestoneNumber); err != nil { log.WithError(err).Errorf("Error adding the milestone %s to %s/%s#%d.", proposedMilestone, org, repo, e.Number) } return nil }<|fim▁end|>
"regexp" "sort" "strings"
<|file_name|>shared_settings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Common/Shared code related to the Settings dialog # Copyright (C) 2010-2018 Filipe Coelho <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # For a full copy of the GNU General Public License see the COPYING file # ------------------------------------------------------------------------------------------------------------ # Imports (Global) if True: from PyQt5.QtCore import pyqtSlot, QSettings from PyQt5.QtWidgets import QDialog, QDialogButtonBox else: from PyQt4.QtCore import pyqtSlot, QSettings from PyQt4.QtGui import QDialog, QDialogButtonBox # ------------------------------------------------------------------------------------------------------------ # Imports (Custom Stuff) import ui_settings_app from shared import * from patchcanvas_theme import * # ------------------------------------------------------------------------------------------------------------ # Global variables # Tab indexes TAB_INDEX_MAIN = 0 TAB_INDEX_CANVAS = 1 TAB_INDEX_LADISH = 2 TAB_INDEX_NONE = 3 # PatchCanvas defines CANVAS_ANTIALIASING_SMALL = 1 CANVAS_EYECANDY_SMALL = 1 # LADISH defines LADISH_CONF_KEY_DAEMON_NOTIFY = "/org/ladish/daemon/notify" LADISH_CONF_KEY_DAEMON_SHELL = "/org/ladish/daemon/shell" LADISH_CONF_KEY_DAEMON_TERMINAL = "/org/ladish/daemon/terminal" LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART = "/org/ladish/daemon/studio_autostart" LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY = "/org/ladish/daemon/js_save_delay" # LADISH defaults LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT = True LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT = "sh" LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT = "x-terminal-emulator" LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT = True LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY_DEFAULT = 0 # Internal defaults global SETTINGS_DEFAULT_PROJECT_FOLDER SETTINGS_DEFAULT_PROJECT_FOLDER = HOME # ------------------------------------------------------------------------------------------------------------ # Change internal defaults def setDefaultProjectFolder(folder): global SETTINGS_DEFAULT_PROJECT_FOLDER SETTINGS_DEFAULT_PROJECT_FOLDER = folder # ------------------------------------------------------------------------------------------------------------ # Settings Dialog class SettingsW(QDialog): def __init__(self, parent, appName, hasOpenGL=False): QDialog.__init__(self, parent) self.ui = ui_settings_app.Ui_SettingsW() self.ui.setupUi(self) # ------------------------------------------------------------- # Set default settings self.fRefreshInterval = 120 self.fAutoHideGroups = True self.fUseSystemTray = True self.fCloseToTray = False # ------------------------------------------------------------- # Set app-specific settings if appName == "catarina": self.fAutoHideGroups = False self.ui.lw_page.hideRow(TAB_INDEX_MAIN) self.ui.lw_page.hideRow(TAB_INDEX_LADISH) self.ui.lw_page.setCurrentCell(TAB_INDEX_CANVAS, 0) elif appName == "catia": self.fUseSystemTray = False self.ui.group_main_paths.setEnabled(False) self.ui.group_main_paths.setVisible(False) 
self.ui.group_tray.setEnabled(False) self.ui.group_tray.setVisible(False) self.ui.lw_page.hideRow(TAB_INDEX_LADISH) self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0) elif appName == "claudia": self.ui.cb_jack_port_alias.setEnabled(False) self.ui.cb_jack_port_alias.setVisible(False) self.ui.label_jack_port_alias.setEnabled(False) self.ui.label_jack_port_alias.setVisible(False) self.ui.lw_page.setCurrentCell(TAB_INDEX_MAIN, 0) else: self.ui.lw_page.hideRow(TAB_INDEX_MAIN) self.ui.lw_page.hideRow(TAB_INDEX_CANVAS) self.ui.lw_page.hideRow(TAB_INDEX_LADISH) self.ui.stackedWidget.setCurrentIndex(TAB_INDEX_NONE) return # ------------------------------------------------------------- # Load settings self.loadSettings() # ------------------------------------------------------------- # Set-up GUI if not hasOpenGL: self.ui.cb_canvas_use_opengl.setChecked(False) self.ui.cb_canvas_use_opengl.setEnabled(False) self.ui.lw_page.item(0, 0).setIcon(getIcon(appName, 48)) self.ui.label_icon_main.setPixmap(getIcon(appName, 48).pixmap(48, 48)) # ------------------------------------------------------------- # Set-up connections self.accepted.connect(self.slot_saveSettings) self.ui.buttonBox.button(QDialogButtonBox.Reset).clicked.connect(self.slot_resetSettings) self.ui.b_main_def_folder_open.clicked.connect(self.slot_getAndSetProjectPath) def loadSettings(self): settings = QSettings() if not self.ui.lw_page.isRowHidden(TAB_INDEX_MAIN): self.ui.le_main_def_folder.setText(settings.value("Main/DefaultProjectFolder", SETTINGS_DEFAULT_PROJECT_FOLDER, type=str)) self.ui.cb_tray_enable.setChecked(settings.value("Main/UseSystemTray", self.fUseSystemTray, type=bool)) self.ui.cb_tray_close_to.setChecked(settings.value("Main/CloseToTray", self.fCloseToTray, type=bool)) self.ui.sb_gui_refresh.setValue(settings.value("Main/RefreshInterval", self.fRefreshInterval, type=int)) self.ui.cb_jack_port_alias.setCurrentIndex(settings.value("Main/JackPortAlias", 2, type=int)) # --------------------------------------- if not self.ui.lw_page.isRowHidden(TAB_INDEX_CANVAS): self.ui.cb_canvas_hide_groups.setChecked(settings.value("Canvas/AutoHideGroups", self.fAutoHideGroups, type=bool)) self.ui.cb_canvas_bezier_lines.setChecked(settings.value("Canvas/UseBezierLines", True, type=bool)) self.ui.cb_canvas_eyecandy.setCheckState(settings.value("Canvas/EyeCandy", CANVAS_EYECANDY_SMALL, type=int)) self.ui.cb_canvas_use_opengl.setChecked(settings.value("Canvas/UseOpenGL", False, type=bool)) self.ui.cb_canvas_render_aa.setCheckState(settings.value("Canvas/Antialiasing", CANVAS_ANTIALIASING_SMALL, type=int)) self.ui.cb_canvas_render_hq_aa.setChecked(settings.value("Canvas/HighQualityAntialiasing", False, type=bool)) themeName = settings.value("Canvas/Theme", getDefaultThemeName(), type=str) for i in range(Theme.THEME_MAX): thisThemeName = getThemeName(i) self.ui.cb_canvas_theme.addItem(thisThemeName) if thisThemeName == themeName: self.ui.cb_canvas_theme.setCurrentIndex(i) # --------------------------------------- if not self.ui.lw_page.isRowHidden(TAB_INDEX_LADISH): self.ui.cb_ladish_notify.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_NOTIFY, LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT, type=bool)) self.ui.le_ladish_shell.setText(settings.value(LADISH_CONF_KEY_DAEMON_SHELL, LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT, type=str)) self.ui.le_ladish_terminal.setText(settings.value(LADISH_CONF_KEY_DAEMON_TERMINAL, LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT, type=str)) 
self.ui.cb_ladish_studio_autostart.setChecked(settings.value(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART, LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT, type=bool))<|fim▁hole|> settings = QSettings() if not self.ui.lw_page.isRowHidden(TAB_INDEX_MAIN): settings.setValue("Main/RefreshInterval", self.ui.sb_gui_refresh.value()) if self.ui.group_tray.isEnabled(): settings.setValue("Main/UseSystemTray", self.ui.cb_tray_enable.isChecked()) settings.setValue("Main/CloseToTray", self.ui.cb_tray_close_to.isChecked()) if self.ui.group_main_paths.isEnabled(): settings.setValue("Main/DefaultProjectFolder", self.ui.le_main_def_folder.text()) if self.ui.cb_jack_port_alias.isEnabled(): settings.setValue("Main/JackPortAlias", self.ui.cb_jack_port_alias.currentIndex()) # --------------------------------------- if not self.ui.lw_page.isRowHidden(TAB_INDEX_CANVAS): settings.setValue("Canvas/Theme", self.ui.cb_canvas_theme.currentText()) settings.setValue("Canvas/AutoHideGroups", self.ui.cb_canvas_hide_groups.isChecked()) settings.setValue("Canvas/UseBezierLines", self.ui.cb_canvas_bezier_lines.isChecked()) settings.setValue("Canvas/UseOpenGL", self.ui.cb_canvas_use_opengl.isChecked()) settings.setValue("Canvas/HighQualityAntialiasing", self.ui.cb_canvas_render_hq_aa.isChecked()) # 0, 1, 2 match their enum variants settings.setValue("Canvas/EyeCandy", self.ui.cb_canvas_eyecandy.checkState()) settings.setValue("Canvas/Antialiasing", self.ui.cb_canvas_render_aa.checkState()) # --------------------------------------- if not self.ui.lw_page.isRowHidden(TAB_INDEX_LADISH): settings.setValue(LADISH_CONF_KEY_DAEMON_NOTIFY, self.ui.cb_ladish_notify.isChecked()) settings.setValue(LADISH_CONF_KEY_DAEMON_SHELL, self.ui.le_ladish_shell.text()) settings.setValue(LADISH_CONF_KEY_DAEMON_TERMINAL, self.ui.le_ladish_terminal.text()) settings.setValue(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART, self.ui.cb_ladish_studio_autostart.isChecked()) settings.setValue(LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY, self.ui.sb_ladish_jsdelay.value()) @pyqtSlot() def slot_resetSettings(self): if self.ui.lw_page.currentRow() == TAB_INDEX_MAIN: self.ui.le_main_def_folder.setText(SETTINGS_DEFAULT_PROJECT_FOLDER) self.ui.cb_tray_enable.setChecked(self.fUseSystemTray) self.ui.cb_tray_close_to.setChecked(self.fCloseToTray) self.ui.sb_gui_refresh.setValue(self.fRefreshInterval) self.ui.cb_jack_port_alias.setCurrentIndex(2) elif self.ui.lw_page.currentRow() == TAB_INDEX_CANVAS: self.ui.cb_canvas_theme.setCurrentIndex(0) self.ui.cb_canvas_hide_groups.setChecked(self.fAutoHideGroups) self.ui.cb_canvas_bezier_lines.setChecked(True) self.ui.cb_canvas_eyecandy.setCheckState(Qt.PartiallyChecked) self.ui.cb_canvas_use_opengl.setChecked(False) self.ui.cb_canvas_render_aa.setCheckState(Qt.PartiallyChecked) self.ui.cb_canvas_render_hq_aa.setChecked(False) elif self.ui.lw_page.currentRow() == TAB_INDEX_LADISH: self.ui.cb_ladish_notify.setChecked(LADISH_CONF_KEY_DAEMON_NOTIFY_DEFAULT) self.ui.cb_ladish_studio_autostart.setChecked(LADISH_CONF_KEY_DAEMON_STUDIO_AUTOSTART_DEFAULT) self.ui.le_ladish_shell.setText(LADISH_CONF_KEY_DAEMON_SHELL_DEFAULT) self.ui.le_ladish_terminal.setText(LADISH_CONF_KEY_DAEMON_TERMINAL_DEFAULT) @pyqtSlot() def slot_getAndSetProjectPath(self): getAndSetPath(self, self.ui.le_main_def_folder.text(), self.ui.le_main_def_folder) def done(self, r): QDialog.done(self, r) self.close()<|fim▁end|>
self.ui.sb_ladish_jsdelay.setValue(settings.value(LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY, LADISH_CONF_KEY_DAEMON_JS_SAVE_DELAY_DEFAULT, type=int)) @pyqtSlot() def slot_saveSettings(self):
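# The load/save pattern used throughout the dialog above, in isolation:
# QSettings.value() takes a key, a default and a type= coercion, and
# setValue() persists it.  Sketch only -- the dialog relies on QSettings()
# picking up the application-wide organization/name, which are passed
# explicitly here so the snippet stands alone:
from PyQt5.QtCore import QSettings

settings = QSettings("ExampleOrg", "ExampleApp")
refresh = settings.value("Main/RefreshInterval", 120, type=int)
settings.setValue("Main/RefreshInterval", refresh)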
<|file_name|>TestPatternSetFilter.java<|end_file_name|><|fim▁begin|>/* * Copyright 2010-2012 Ning, Inc. * * Ning licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.ning.metrics.collector.filtering; import com.ning.metrics.collector.endpoint.ParsedRequest; import org.testng.Assert; import org.testng.annotations.Test; import java.util.Collections; import java.util.HashSet; import java.util.Set;<|fim▁hole|>{ @Test(groups = "fast") public void testNullValue() throws Exception { final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor(null), createPatternSet("pattern1", "pattern2")); Assert.assertEquals(filter.passesFilter(null, null), false); } @Test(groups = "fast") public void testEmptySetPatternEventRESTRequestFilter() throws Exception { final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor("test-host"), Collections.<Pattern>emptySet()); Assert.assertEquals(filter.passesFilter(null, null), false); } @Test(groups = "fast") public void testSinglePatternEventRESTRequestFilter() throws Exception { final Filter<ParsedRequest> filterShouldMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host")); Assert.assertEquals(filterShouldMatch.passesFilter(null, null), true); final Filter<ParsedRequest> filterDoesNotMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen")); Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), false); } @Test(groups = "fast") public void testMultiplePatternEventRESTRequestFilter() throws Exception { final Filter<ParsedRequest> trueFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host", "nothing")); Assert.assertTrue(trueFilter.passesFilter(null, null)); final Filter<ParsedRequest> falseFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen", "nothing")); Assert.assertFalse(falseFilter.passesFilter(null, null)); } @Test(groups = "fast") public void testSinglePatternEventInclusionFilter() throws Exception { final Filter<ParsedRequest> filterShouldMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("test-host")); Assert.assertEquals(filterShouldMatch.passesFilter(null, null), false); final Filter<ParsedRequest> filterDoesNotMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("mugen")); Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), true); } private Set<Pattern> createPatternSet(final String... patterns) { final Set<Pattern> patternSet = new HashSet<Pattern>(); for (final String str : patterns) { patternSet.add(Pattern.compile(str)); } return patternSet; } private FieldExtractor createFieldExtractor(final String value) { return new FieldExtractor() { @Override public String getField(final String eventName, final ParsedRequest annotation) { return value; } }; } }<|fim▁end|>
import java.util.regex.Pattern; public class TestPatternSetFilter
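# The semantics the tests above pin down: a PatternSetFilter passes when
# any pattern in the set matches the extracted field, and a null field
# never passes (EventInclusionFilter then inverts the result).  A compact
# sketch, with re.fullmatch standing in for java.util.regex matching:
import re

def passes(field, patterns):
    if field is None:
        return False
    return any(re.fullmatch(p, field) for p in patterns)

assert passes("test-host", {"test-host", "nothing"})
assert not passes("test-host", {"mugen", "nothing"})
assert not passes(None, {"test-host"})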
<|file_name|>cpu_backend_threadpool_test.cc<|end_file_name|><|fim▁begin|>/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/lite/kernels/cpu_backend_threadpool.h" #include <vector> #include <gtest/gtest.h> #include "tensorflow/lite/kernels/cpu_backend_context.h" namespace tflite { <|fim▁hole|>namespace { class TestGenerateArrayOfIncrementingIntsTask : public cpu_backend_threadpool::Task { public: TestGenerateArrayOfIncrementingIntsTask(int* buffer, int start, int end) : buffer_(buffer), start_(start), end_(end) {} void Run() override { for (int i = start_; i < end_; i++) { buffer_[i] = i; } } private: int* buffer_; int start_; int end_; }; void TestGenerateArrayOfIncrementingInts(int num_threads, int size) { // The buffer that our threads will write to. std::vector<int> buffer(size); // The tasks that our threads will run. std::vector<TestGenerateArrayOfIncrementingIntsTask> tasks; // Create task objects. int rough_size_per_thread = size / num_threads; int start = 0; for (int thread = 0; thread < num_threads; thread++) { int end = start + rough_size_per_thread; if (thread == num_threads - 1) { end = size; } tasks.emplace_back(buffer.data(), start, end); start = end; } ASSERT_EQ(num_threads, tasks.size()); CpuBackendContext context; // This SetMaxNumThreads is only to satisfy an assertion in Execute. // What actually determines the number of threads used is the parameter // passed to Execute, since Execute does 1:1 mapping of tasks to threads. context.SetMaxNumThreads(num_threads); // Execute tasks on the threadpool. cpu_backend_threadpool::Execute(tasks.size(), tasks.data(), &context); // Check contents of the generated buffer. for (int i = 0; i < size; i++) { ASSERT_EQ(buffer[i], i); } } TEST(CpuBackendThreadpoolTest, OneThreadSize100) { TestGenerateArrayOfIncrementingInts(1, 100); } TEST(CpuBackendThreadpoolTest, ThreeThreadsSize1000000) { TestGenerateArrayOfIncrementingInts(3, 1000000); } TEST(CpuBackendThreadpoolTest, TenThreadsSize1234567) { TestGenerateArrayOfIncrementingInts(10, 1234567); } } // namespace } // namespace tflite int main(int argc, char** argv) { ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); }<|fim▁end|>
<|file_name|>others.rs<|end_file_name|><|fim▁begin|>use super::prelude::*; use stencila_schema::*; replaceable_struct!(Date, value); patchable_struct!( Organization, // All properties except `id` (as at 2021-11-18) // Commented out properties have types that do not yet have a `impl Patchable`. //address, alternate_names, //brands, //contact_points, departments, //description, //funders, id, //identifiers, //images, legal_name, //logo, //members, name, parent_organization, url ); patchable_struct!( Person, // All properties except `id` (as at 2021-11-18) // Commented out properties have types that do not yet have a `impl Patchable`. //address, affiliations, alternate_names, //description, emails, family_names, //funders, given_names, honorific_prefix, honorific_suffix, id, //identifiers, //images, job_title, member_of,<|fim▁hole|>);<|fim▁end|>
name, telephone_numbers, url
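// FIM seam: the completion above supplies the trailing Person fields, so the
// reassembled macro invocation ends:
//
//     job_title,
//     member_of,
//     name,
//     telephone_numbers,
//     url
// );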
<|file_name|>BooleanIndexingTest.java<|end_file_name|><|fim▁begin|>package org.nd4j.linalg.indexing; import com.google.common.base.Function; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.nd4j.linalg.BaseNd4jTest; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.ops.impl.accum.MatchCondition; import org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndReplace; import org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.factory.Nd4jBackend; import org.nd4j.linalg.indexing.conditions.AbsValueGreaterThan; import org.nd4j.linalg.indexing.conditions.Condition; import org.nd4j.linalg.indexing.conditions.Conditions; import org.nd4j.linalg.indexing.functions.Value; import java.util.Arrays; import static org.junit.Assert.*; /** * @author [email protected] */ @RunWith(Parameterized.class) public class BooleanIndexingTest extends BaseNd4jTest { public BooleanIndexingTest(Nd4jBackend backend) { super(backend); } /* 1D array checks */ @Test public void testAnd1() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertTrue(BooleanIndexing.and(array, Conditions.greaterThan(0.5f))); } @Test public void testAnd2() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertTrue(BooleanIndexing.and(array, Conditions.lessThan(6.0f))); } @Test public void testAnd3() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertFalse(BooleanIndexing.and(array, Conditions.lessThan(5.0f))); } @Test public void testAnd4() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertFalse(BooleanIndexing.and(array, Conditions.greaterThan(4.0f))); } @Test public void testAnd5() throws Exception { INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f}); assertTrue(BooleanIndexing.and(array, Conditions.greaterThanOrEqual(1e-5f))); } @Test public void testAnd6() throws Exception { INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f}); assertFalse(BooleanIndexing.and(array, Conditions.lessThan(1e-5f))); } @Test public void testAnd7() throws Exception { INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f}); assertTrue(BooleanIndexing.and(array, Conditions.equals(1e-5f))); } @Test public void testOr1() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertTrue(BooleanIndexing.or(array, Conditions.greaterThan(3.0f))); } @Test public void testOr2() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f}); assertTrue(BooleanIndexing.or(array, Conditions.lessThan(3.0f))); } @Test public void testOr3() throws Exception { INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});<|fim▁hole|> @Test public void testApplyWhere1() throws Exception { INDArray array = Nd4j.create(new float[] {-1f, -1f, -1f, -1f, -1f}); BooleanIndexing.applyWhere(array, Conditions.lessThan(Nd4j.EPS_THRESHOLD), new Value(Nd4j.EPS_THRESHOLD)); //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat())); assertTrue(BooleanIndexing.and(array, Conditions.equals(Nd4j.EPS_THRESHOLD))); } @Test public void testApplyWhere2() throws Exception { INDArray array = Nd4j.create(new float[] {0f, 0f, 0f, 0f, 0f}); BooleanIndexing.applyWhere(array, 
Conditions.lessThan(1.0f), new Value(1.0f)); assertTrue(BooleanIndexing.and(array, Conditions.equals(1.0f))); } @Test public void testApplyWhere3() throws Exception { INDArray array = Nd4j.create(new float[] {1e-18f, 1e-18f, 1e-18f, 1e-18f, 1e-18f}); BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-12f), new Value(1e-12f)); //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat())); assertTrue(BooleanIndexing.and(array, Conditions.equals(1e-12f))); } @Test public void testApplyWhere4() throws Exception { INDArray array = Nd4j.create(new float[] {1e-18f, Float.NaN, 1e-18f, 1e-18f, 1e-18f}); BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-12f), new Value(1e-12f)); //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat())); BooleanIndexing.applyWhere(array, Conditions.isNan(), new Value(1e-16f)); System.out.println("Array contains: " + Arrays.toString(array.data().asFloat())); assertFalse(BooleanIndexing.or(array, Conditions.isNan())); assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-12f))); assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-16f))); } /* 2D array checks */ @Test public void test2dAnd1() throws Exception { INDArray array = Nd4j.zeros(10, 10); assertTrue(BooleanIndexing.and(array, Conditions.equals(0f))); } @Test public void test2dAnd2() throws Exception { INDArray array = Nd4j.zeros(10, 10); array.slice(4).putScalar(2, 1e-5f); System.out.println(array); assertFalse(BooleanIndexing.and(array, Conditions.equals(0f))); } @Test public void test2dAnd3() throws Exception { INDArray array = Nd4j.zeros(10, 10); array.slice(4).putScalar(2, 1e-5f); assertFalse(BooleanIndexing.and(array, Conditions.greaterThan(0f))); } @Test public void test2dAnd4() throws Exception { INDArray array = Nd4j.zeros(10, 10); array.slice(4).putScalar(2, 1e-5f); assertTrue(BooleanIndexing.or(array, Conditions.greaterThan(1e-6f))); } @Test public void test2dApplyWhere1() throws Exception { INDArray array = Nd4j.ones(4, 4); array.slice(3).putScalar(2, 1e-5f); //System.out.println("Array before: " + Arrays.toString(array.data().asFloat())); BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-4f), new Value(1e-12f)); //System.out.println("Array after 1: " + Arrays.toString(array.data().asFloat())); assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-12f))); assertTrue(BooleanIndexing.or(array, Conditions.equals(1.0f))); assertFalse(BooleanIndexing.and(array, Conditions.equals(1e-12f))); } /** * This test fails, because it highlights current mechanics on SpecifiedIndex stuff. 
* Internally there's * * @throws Exception */ @Test public void testSliceAssign1() throws Exception { INDArray array = Nd4j.zeros(4, 4); INDArray patch = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f}); INDArray slice = array.slice(1); int[] idx = new int[] {0, 1, 3}; INDArrayIndex[] range = new INDArrayIndex[] {new SpecifiedIndex(idx)}; INDArray subarray = slice.get(range); System.out.println("Subarray: " + Arrays.toString(subarray.data().asFloat()) + " isView: " + subarray.isView()); slice.put(range, patch); System.out.println("Array after being patched: " + Arrays.toString(array.data().asFloat())); assertFalse(BooleanIndexing.and(array, Conditions.equals(0f))); } @Test public void testConditionalAssign1() throws Exception { INDArray array1 = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7}); INDArray array2 = Nd4j.create(new double[] {7, 6, 5, 4, 3, 2, 1}); INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 3, 2, 1}); BooleanIndexing.replaceWhere(array1, array2, Conditions.greaterThan(4)); assertEquals(comp, array1); } @Test public void testCaSTransform1() throws Exception { INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndSet(array, 3, Conditions.equals(0))); assertEquals(comp, array); } @Test public void testCaSTransform2() throws Exception { INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray comp = Nd4j.create(new double[] {3, 2, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndSet(array, 3.0, Conditions.lessThan(2))); assertEquals(comp, array); } @Test public void testCaSPairwiseTransform1() throws Exception { INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndSet(array, comp, Conditions.lessThan(5))); assertEquals(comp, array); } @Test public void testCaRPairwiseTransform1() throws Exception { INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndReplace(array, comp, Conditions.lessThan(1))); assertEquals(comp, array); } @Test public void testCaSPairwiseTransform2() throws Exception { INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray y = Nd4j.create(new double[] {2, 4, 3, 0, 5}); INDArray comp = Nd4j.create(new double[] {2, 4, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndSet(x, y, Conditions.epsNotEquals(0.0))); assertEquals(comp, x); } @Test public void testCaRPairwiseTransform2() throws Exception { INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5}); INDArray comp = Nd4j.create(new double[] {2, 4, 0, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.epsNotEquals(0.0))); assertEquals(comp, x); } @Test public void testCaSPairwiseTransform3() throws Exception { INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5}); INDArray comp = Nd4j.create(new double[] {2, 4, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.lessThan(4))); assertEquals(comp, x); } @Test public void testCaRPairwiseTransform3() throws Exception { INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5}); INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5}); INDArray comp = Nd4j.create(new double[] {2, 2, 3, 4, 5}); Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, 
Conditions.lessThan(2))); assertEquals(comp, x); } @Test public void testMatchConditionAllDimensions1() throws Exception { INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); int val = (int) Nd4j.getExecutioner().exec(new MatchCondition(array, Conditions.lessThan(5)), Integer.MAX_VALUE) .getDouble(0); assertEquals(5, val); } @Test public void testMatchConditionAllDimensions2() throws Exception { INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, Double.NaN, 5, 6, 7, 8, 9}); int val = (int) Nd4j.getExecutioner().exec(new MatchCondition(array, Conditions.isNan()), Integer.MAX_VALUE) .getDouble(0); assertEquals(1, val); } @Test public void testMatchConditionAllDimensions3() throws Exception { INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, Double.NEGATIVE_INFINITY, 5, 6, 7, 8, 9}); int val = (int) Nd4j.getExecutioner() .exec(new MatchCondition(array, Conditions.isInfinite()), Integer.MAX_VALUE).getDouble(0); assertEquals(1, val); } @Test public void testAbsValueGreaterThan() { final double threshold = 2; Condition absValueCondition = new AbsValueGreaterThan(threshold); Function<Number, Number> clipFn = new Function<Number, Number>() { @Override public Number apply(Number number) { System.out.println("Number: " + number.doubleValue()); return (number.doubleValue() > threshold ? threshold : -threshold); } }; Nd4j.getRandom().setSeed(12345); INDArray orig = Nd4j.rand(1, 20).muli(6).subi(3); //Random numbers: -3 to 3 INDArray exp = orig.dup(); INDArray after = orig.dup(); for (int i = 0; i < exp.length(); i++) { double d = exp.getDouble(i); if (d > threshold) { exp.putScalar(i, threshold); } else if (d < -threshold) { exp.putScalar(i, -threshold); } } BooleanIndexing.applyWhere(after, absValueCondition, clipFn); System.out.println(orig); System.out.println(exp); System.out.println(after); assertEquals(exp, after); } @Test public void testMatchConditionAlongDimension1() throws Exception { INDArray array = Nd4j.ones(3, 10); array.getRow(2).assign(0.0); boolean result[] = BooleanIndexing.and(array, Conditions.equals(0.0), 1); boolean comp[] = new boolean[] {false, false, true}; System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, result); } @Test public void testMatchConditionAlongDimension2() throws Exception { INDArray array = Nd4j.ones(3, 10); array.getRow(2).assign(0.0).putScalar(0, 1.0); System.out.println("Array: " + array); boolean result[] = BooleanIndexing.or(array, Conditions.lessThan(0.9), 1); boolean comp[] = new boolean[] {false, false, true}; System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, result); } @Test public void testMatchConditionAlongDimension3() throws Exception { INDArray array = Nd4j.ones(3, 10); array.getRow(2).assign(0.0).putScalar(0, 1.0); boolean result[] = BooleanIndexing.and(array, Conditions.lessThan(0.0), 1); boolean comp[] = new boolean[] {false, false, false}; System.out.println("Result: " + Arrays.toString(result)); assertArrayEquals(comp, result); } @Test public void testConditionalUpdate() { INDArray arr = Nd4j.linspace(-2, 2, 5); INDArray ones = Nd4j.ones(5); INDArray exp = Nd4j.create(new double[] {1, 1, 0, 1, 1}); Nd4j.getExecutioner().exec(new CompareAndSet(ones, arr, ones, Conditions.equals(0.0))); assertEquals(exp, ones); } @Test public void testFirstIndex1() { INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0}); INDArray result = BooleanIndexing.firstIndex(arr, Conditions.greaterThanOrEqual(3)); assertEquals(2, result.getDouble(0), 0.0); } 
@Test public void testFirstIndex2() { INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0}); INDArray result = BooleanIndexing.firstIndex(arr, Conditions.lessThan(3)); assertEquals(0, result.getDouble(0), 0.0); } @Test public void testLastIndex1() { INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0}); INDArray result = BooleanIndexing.lastIndex(arr, Conditions.greaterThanOrEqual(3)); assertEquals(8, result.getDouble(0), 0.0); } @Test public void testFirstIndex2D() { INDArray arr = Nd4j.create(new double[] {1, 2, 3, 0, 1, 3, 7, 8, 9}).reshape('c', 3, 3); INDArray result = BooleanIndexing.firstIndex(arr, Conditions.greaterThanOrEqual(2), 1); INDArray exp = Nd4j.create(new double[] {1, 2, 0}); assertEquals(exp, result); } @Test public void testLastIndex2D() { INDArray arr = Nd4j.create(new double[] {1, 2, 3, 0, 1, 3, 7, 8, 0}).reshape('c', 3, 3); INDArray result = BooleanIndexing.lastIndex(arr, Conditions.greaterThanOrEqual(2), 1); INDArray exp = Nd4j.create(new double[] {2, 2, 1}); assertEquals(exp, result); } @Test public void testEpsEquals1() throws Exception { INDArray array = Nd4j.create(new double[]{-1, -1, -1e-8, 1e-8, 1, 1}); MatchCondition condition = new MatchCondition(array, Conditions.epsEquals(0.0)); int numZeroes = Nd4j.getExecutioner().exec(condition, Integer.MAX_VALUE).getInt(0); assertEquals(2, numZeroes); } @Override public char ordering() { return 'c'; } }<|fim▁end|>
assertFalse(BooleanIndexing.or(array, Conditions.greaterThan(6.0f))); }
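// FIM seam: the completion above closes testOr3() in the prompt:
//
//     INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
//     assertFalse(BooleanIndexing.or(array, Conditions.greaterThan(6.0f)));
//
// No element exceeds 6.0f, so the condition matches nothing and or() returns
// false, mirroring testOr1/testOr2, which assert the true cases.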
<|file_name|>package_bundles.py<|end_file_name|><|fim▁begin|>import os import re import shutil import zipfile import requests import json from shutil import copy2 from urllib.request import urlretrieve, urlopen # Input parameters version_param = os.environ.get('RELEASE_VERSION') is_latest_param = True if version_param == "master" else False # build constants m2repo_path = '/m2repo' tmp_path = './tmp/%s' % version_param policies_path = "%s/policies" % tmp_path resources_path = "%s/resources" % tmp_path fetchers_path = "%s/fetchers" % tmp_path services_path = "%s/services" % tmp_path reporters_path = "%s/reporters" % tmp_path repositories_path = "%s/repositories" % tmp_path connectors_path = "%s/connectors" % tmp_path snapshotPattern = re.compile('.*-SNAPSHOT') def clean(): if os.path.exists(tmp_path): shutil.rmtree(tmp_path) os.makedirs(tmp_path, exist_ok=True) os.makedirs(policies_path, exist_ok=True) os.makedirs(fetchers_path, exist_ok=True) os.makedirs(resources_path, exist_ok=True) os.makedirs(services_path, exist_ok=True) os.makedirs(reporters_path, exist_ok=True) os.makedirs(repositories_path, exist_ok=True) os.makedirs(connectors_path, exist_ok=True) def get_policies(release_json): components = release_json['components'] search_pattern = re.compile('gravitee-policy-.*') policies = [] for component in components: if search_pattern.match(component['name']) and 'gravitee-policy-api' != component['name']: policies.append(component) if "gravitee-policy-ratelimit" == component['name']: policies.append({"name": "gravitee-policy-quota", "version": component['version']}) if int(component['version'].replace(".", "").replace("-SNAPSHOT", "")) >= 1100: policies.append({"name": "gravitee-policy-spikearrest", "version": component['version']}) return policies def get_resources(release_json): components_name = [ "gravitee-resource-cache", "gravitee-resource-oauth2-provider-generic", "gravitee-resource-oauth2-provider-am" ] resources = [] for component_name in components_name: resources.append(get_component_by_name(release_json, component_name)) return resources def get_fetchers(release_json): components = release_json['components'] search_pattern = re.compile('gravitee-fetcher-.*') fetchers = [] for component in components: if search_pattern.match(component['name']) and 'gravitee-fetcher-api' != component['name']: fetchers.append(component) return fetchers def get_reporters(release_json): components_name = [ "gravitee-reporter-file", "gravitee-reporter-tcp", "gravitee-elasticsearch" ] reporters = [] for component_name in components_name: reporters.append(get_component_by_name(release_json, component_name)) return reporters def get_repositories(release_json): components_name = [ "gravitee-repository-mongodb", "gravitee-repository-jdbc", "gravitee-elasticsearch", "gravitee-repository-gateway-bridge-http" ] repositories = [] for component_name in components_name: repositories.append(get_component_by_name(release_json, component_name)) return repositories def get_services(release_json): components_name = [ "gravitee-service-discovery-consul" ] components = release_json['components'] search_pattern = re.compile('gravitee-policy-ratelimit') services = [] for component in components: if search_pattern.match(component['name']): service = component.copy() service['name'] = 'gravitee-gateway-services-ratelimit' services.append(service) break for component_name in components_name: services.append(get_component_by_name(release_json, component_name)) return services def get_connectors(release_json): components = 
release_json['components'] search_pattern = re.compile('gravitee-.*-connectors-ws') connectors = [] for component in components: if search_pattern.match(component['name']): connectors.append(component) return connectors def get_component_by_name(release_json, component_name): components = release_json['components'] search_pattern = re.compile(component_name) for component in components: if search_pattern.match(component['name']): return component def get_download_url(group_id, artifact_id, version, t): m2path = "%s/%s/%s/%s/%s-%s.%s" % (m2repo_path, group_id.replace(".", "/"), artifact_id, version, artifact_id, version, t) if os.path.exists(m2path): return m2path else: sonatypeUrl = "https://oss.sonatype.org/service/local/artifact/maven/redirect?r=%s&g=%s&a=%s&v=%s&e=%s" % ( ("snapshots" if snapshotPattern.match(version) else "releases"), group_id.replace(".", "/"), artifact_id, version, t) f = urlopen(sonatypeUrl) return f.geturl() def get_suffix_path_by_name(name): if name.find("policy") == -1: suffix = name[name.find('-') + 1:name.find('-', name.find('-') + 1)] if suffix == "gateway": return "services" if suffix == "repository": return "repositories" if suffix == "cockpit": return "connectors" return suffix + "s" else: return "policies" def download(name, filename_path, url): print('\nDowloading %s\n%s' % (name, url)) if url.startswith("http"): filename_path = tmp_path + "/" + get_suffix_path_by_name(name) + url[url.rfind('/'):] urlretrieve(url, filename_path) else: copy2(url, filename_path) print('\nDowloaded in %s' % filename_path) return filename_path def unzip(files): unzip_dirs = []<|fim▁hole|> zip_file.extractall("%s/%s" % (tmp_path, dist_dir)) unzip_dir = "%s/%s/%s" % (tmp_path, dist_dir, sorted(zip_file.namelist())[0]) unzip_dirs.append(unzip_dir) preserve_permissions(unzip_dir) return sorted(unzip_dirs) def preserve_permissions(d): search_bin_pattern = re.compile(".*/bin$") search_gravitee_pattern = re.compile("gravitee(\.bat)?") perm = 0o0755 for dirname, subdirs, files in os.walk(d): if search_bin_pattern.match(dirname): for file in files: if search_gravitee_pattern.match(file): file_path = "%s/%s" % (dirname, file) print(" set permission %o to %s" % (perm, file_path)) os.chmod(file_path, perm) def copy_files_into(src_dir, dest_dir, exclude_pattern=None): if exclude_pattern is None: exclude_pattern = [] filenames = [os.path.join(src_dir, fn) for fn in next(os.walk(src_dir))[2]] print(" copy") print(" %s" % filenames) print(" into") print(" %s" % dest_dir) for file in filenames: to_exclude = False for pattern in exclude_pattern: search_pattern = re.compile(pattern) if search_pattern.match(file): to_exclude = True break if to_exclude: print("[INFO] %s is excluded from files." 
% file) continue copy2(file, dest_dir) def download_policies(policies): paths = [] for policy in policies: if policy['name'] != "gravitee-policy-core": url = get_download_url("io.gravitee.policy", policy['name'], policy['version'], "zip") paths.append( download(policy['name'], '%s/%s-%s.zip' % (policies_path, policy['name'], policy['version']), url)) return paths def download_management_api(mgmt_api, default_version): v = default_version if 'version' not in mgmt_api else mgmt_api['version'] url = get_download_url("io.gravitee.management.standalone", "gravitee-management-api-standalone-distribution-zip", v, "zip") return download(mgmt_api['name'], '%s/%s-%s.zip' % (tmp_path, mgmt_api['name'], v), url) def download_managementV3_api(mgmt_api, default_version): v = default_version if 'version' not in mgmt_api else mgmt_api['version'] url = get_download_url("io.gravitee.rest.api.standalone.distribution", "gravitee-rest-api-standalone-distribution-zip", v, "zip") return download(mgmt_api['name'], '%s/%s-%s.zip' % (tmp_path, mgmt_api['name'], v), url) def download_gateway(gateway, default_version): v = default_version if 'version' not in gateway else gateway['version'] url = get_download_url("io.gravitee.gateway.standalone", "gravitee-gateway-standalone-distribution-zip", v, "zip") return download(gateway['name'], '%s/%s-%s.zip' % (tmp_path, gateway['name'], v), url) def download_fetchers(fetchers): paths = [] for fetcher in fetchers: url = get_download_url("io.gravitee.fetcher", fetcher['name'], fetcher['version'], "zip") paths.append( download(fetcher['name'], '%s/%s-%s.zip' % (fetchers_path, fetcher['name'], fetcher['version']), url)) return paths def download_resources(resources): paths = [] for resource in resources: url = get_download_url("io.gravitee.resource", resource['name'], resource['version'], "zip") paths.append( download(resource['name'], '%s/%s-%s.zip' % (resources_path, resource['name'], resource['version']), url)) return paths def download_services(services): paths = [] for service in services: # for release < 1.22 if service is not None: if service['name'] == "gravitee-gateway-services-ratelimit": url = get_download_url("io.gravitee.policy", service['name'], service['version'], "zip") else: url = get_download_url("io.gravitee.discovery", service['name'], service['version'], "zip") paths.append( download(service['name'], '%s/%s-%s.zip' % (services_path, service['name'], service['version']), url)) return paths def download_connectors(connectors): paths = [] for connector in connectors: url = get_download_url("io.gravitee.cockpit", connector['name'], connector['version'], "zip") paths.append( download(connector['name'], '%s/%s-%s.zip' % (resources_path, connector['name'], connector['version']), url)) return paths def download_ui(ui, default_version): v = default_version if 'version' not in ui else ui['version'] url = get_download_url("io.gravitee.management", ui['name'], v, "zip") return download(ui['name'], '%s/%s-%s.zip' % (tmp_path, ui['name'], v), url) def download_portal_ui(ui, default_version): v = default_version if 'version' not in ui else ui['version'] url = get_download_url("io.gravitee.portal", ui['name'], v, "zip") return download(ui['name'], '%s/%s-%s.zip' % (tmp_path, ui['name'], v), url) def download_reporters(reporters): paths = [] for reporter in reporters: name = "gravitee-reporter-elasticsearch" if "gravitee-elasticsearch" == reporter['name'] else reporter['name'] url = get_download_url("io.gravitee.reporter", name, reporter['version'], "zip") paths.append( 
download(name, '%s/%s-%s.zip' % (reporters_path, name, reporter['version']), url)) return paths def download_repositories(repositories): paths = [] for repository in repositories: if repository['name'] != "gravitee-repository-gateway-bridge-http": name = "gravitee-repository-elasticsearch" if "gravitee-elasticsearch" == repository['name'] else repository['name'] url = get_download_url("io.gravitee.repository", name, repository['version'], "zip") paths.append(download(name, '%s/%s-%s.zip' % (repositories_path, name, repository['version']), url)) else: for name in ["gravitee-repository-gateway-bridge-http-client", "gravitee-repository-gateway-bridge-http-server"]: url = get_download_url("io.gravitee.gateway", name, repository['version'], "zip") paths.append(download(name, '%s/%s-%s.zip' % (repositories_path, name, repository['version']), url)) return paths def prepare_gateway_bundle(gateway): print("==================================") print("Prepare %s" % gateway) bundle_path = unzip([gateway])[0] print(" bundle_path: %s" % bundle_path) copy_files_into(policies_path, bundle_path + "plugins") copy_files_into(resources_path, bundle_path + "plugins") copy_files_into(repositories_path, bundle_path + "plugins", [".*gravitee-repository-elasticsearch.*"]) copy_files_into(reporters_path, bundle_path + "plugins") copy_files_into(services_path, bundle_path + "plugins") copy_files_into(connectors_path, bundle_path + "plugins") os.makedirs(bundle_path + "plugins/ext/repository-jdbc", exist_ok=True) def prepare_ui_bundle(ui): print("==================================") print("Prepare %s" % ui) bundle_path = unzip([ui])[0] print(" bundle_path: %s" % bundle_path) def prepare_mgmt_bundle(mgmt): print("==================================") print("Prepare %s" % mgmt) bundle_path = unzip([mgmt])[0] print(" bundle_path: %s" % bundle_path) copy_files_into(policies_path, bundle_path + "plugins") copy_files_into(resources_path, bundle_path + "plugins") copy_files_into(fetchers_path, bundle_path + "plugins") copy_files_into(repositories_path, bundle_path + "plugins", [".*gravitee-repository-ehcache.*", ".*gravitee-repository-gateway-bridge-http-client.*", ".*gravitee-repository-gateway-bridge-http-server.*"]) copy_files_into(services_path, bundle_path + "plugins", [".*gravitee-gateway-services-ratelimit.*"]) copy_files_into(connectors_path, bundle_path + "plugins") os.makedirs(bundle_path + "plugins/ext/repository-jdbc", exist_ok=True) def prepare_policies(version): print("==================================") print("Prepare Policies") dist_dir = get_dist_dir_name() policies_dist_path = "%s/%s/gravitee-policies-%s" % (tmp_path, dist_dir, version) os.makedirs(policies_dist_path, exist_ok=True) copy_files_into(policies_path, policies_dist_path) copy_files_into(services_path, policies_dist_path) def package(version, release_json): print("==================================") print("Packaging") packages = [] exclude_from_full_zip_list = [re.compile(".*graviteeio-policies.*")] dist_dir = get_dist_dir_name() full_zip_name = "graviteeio-full-%s" % version # how to create a symbolic link ? 
#if jdbc: # full_zip_name = "graviteeio-full-jdbc-%s" % version full_zip_path = "%s/%s/%s.zip" % (tmp_path, dist_dir, full_zip_name) dirs = [os.path.join("%s/%s/" % (tmp_path, dist_dir), fn) for fn in next(os.walk("%s/%s/" % (tmp_path, dist_dir)))[1]] # add release.json jsonfile_name = "release.json" jsonfile_absname = os.path.join("%s/%s/%s" % (tmp_path, dist_dir, jsonfile_name)) jsonfile = open(jsonfile_absname, "w") jsonfile.write("%s" % json.dumps(release_json, indent=4)) jsonfile.close() with zipfile.ZipFile(full_zip_path, "w", zipfile.ZIP_DEFLATED) as full_zip: print("Create %s" % full_zip_path) packages.append(full_zip_path) full_zip.write(jsonfile_absname, jsonfile_name) for d in dirs: with zipfile.ZipFile("%s.zip" % d, "w", zipfile.ZIP_DEFLATED) as bundle_zip: print("Create %s.zip" % d) packages.append("%s.zip" % d) dir_abs_path = os.path.abspath(d) dir_name = os.path.split(dir_abs_path)[1] for dirname, subdirs, files in os.walk(dir_abs_path): exclude_from_full_zip = False for pattern in exclude_from_full_zip_list: if pattern.match(d): exclude_from_full_zip = True break for filename in files: absname = os.path.abspath(os.path.join(dirname, filename)) arcname = absname[len(dir_abs_path) - len(dir_name):] bundle_zip.write(absname, arcname) if exclude_from_full_zip is False: full_zip.write(absname, "%s/%s" % (full_zip_name, arcname)) if len(files) == 0: absname = os.path.abspath(dirname) arcname = absname[len(dir_abs_path) - len(dir_name):] bundle_zip.write(absname, arcname) if exclude_from_full_zip is False: full_zip.write(absname, "%s/%s" % (full_zip_name, arcname)) return packages def rename(string): return string.replace("gravitee", "graviteeio") \ .replace("management-standalone", "management-api") \ .replace("management-webui", "management-ui") \ .replace("portal-webui", "portal-ui") \ .replace("standalone-", "") def clean_dir_names(): print("==================================") print("Clean directory names") dirs = [os.path.join("%s/%s/" % (tmp_path, get_dist_dir_name()), fn) for fn in next(os.walk("%s/%s/" % (tmp_path, get_dist_dir_name())))[1]] for d in dirs: os.rename(d, rename(d)) def response_pretty_print(r): print("###########################################################") print("STATUS %s" % r.status_code) print("HEADERS \n%s" % r.headers) print("RESPONSE \n%s" % r.text) print("###########################################################\n\n") r.raise_for_status() def get_dist_dir_name(): dist_dir = "dist" return dist_dir def main(): if is_latest_param: release_json_url = "https://raw.githubusercontent.com/gravitee-io/release/master/release.json" else: release_json_url = "https://raw.githubusercontent.com/gravitee-io/release/%s/release.json" % version_param print(release_json_url) release_json = requests.get(release_json_url) print(release_json) release_json = release_json.json() version = release_json['version'] print("Create bundles for Gravitee.io v%s" % version) clean() v3 = int(version[0]) > 1 if v3: portal_ui = download_portal_ui(get_component_by_name(release_json, "gravitee-portal-webui"), version) mgmt_api = download_managementV3_api(get_component_by_name(release_json, "gravitee-management-rest-api"), version) else: mgmt_api = download_management_api(get_component_by_name(release_json, "gravitee-management-rest-api"), version) ui = download_ui(get_component_by_name(release_json, "gravitee-management-webui"), version) gateway = download_gateway(get_component_by_name(release_json, "gravitee-gateway"), version) download_policies(get_policies(release_json)) 
download_resources(get_resources(release_json)) download_fetchers(get_fetchers(release_json)) download_services(get_services(release_json)) download_reporters(get_reporters(release_json)) download_repositories(get_repositories(release_json)) if int(version.replace(".", "").replace("-SNAPSHOT", "")) > 354: download_connectors(get_connectors(release_json)) if v3: prepare_ui_bundle(portal_ui) prepare_gateway_bundle(gateway) prepare_ui_bundle(ui) prepare_mgmt_bundle(mgmt_api) prepare_policies(version) clean_dir_names() package(version, release_json) main()<|fim▁end|>
dist_dir = get_dist_dir_name() for file in files: with zipfile.ZipFile(file) as zip_file:
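# The completion above is cut off inside the `with` block. A plausible
# continuation, sketched only from the standard zipfile API and names already
# in the prompt (tmp_path, dist_dir, unzip_dirs) -- the exact upstream body is
# an assumption, not shown in this row:
#
#     zip_file.extractall("%s/%s" % (tmp_path, dist_dir))
#     unzip_dirs.append(...)   # collected for the prepare_* steps, which
#                              # consume the list via unzip([...])[0]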
<|file_name|>poi_flag_email.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python ### ### in poiFlagEmail() below, write code that returns a boolean<|fim▁hole|>import sys import reader import poi_emails def getToFromStrings(f): f.seek(0) to_string, from_string, cc_string = reader.getAddresses(f) to_emails = reader.parseAddresses( to_string ) from_emails = reader.parseAddresses( from_string ) cc_emails = reader.parseAddresses( cc_string ) return to_emails, from_emails, cc_emails ### POI flag an email def poiFlagEmail(f): """ given an email file f, return a trio of booleans for whether that email is to, from, or cc'ing a poi """ to_emails, from_emails, cc_emails = getToFromStrings(f) ### list of email addresses of all the POIs poi_email_list = poi_emails.poiEmails() to_poi = False from_poi = False cc_poi = False ### to_poi and cc_poi are related functions, which flag whether ### the email under inspection is addressed to a POI, or if a POI is in cc ### you don't have to change this code at all ### there can be many "to" emails, but only one "from", so the ### "to" processing needs to be a little more complicated if to_emails: ctr = 0 while not to_poi and ctr < len(to_emails): if to_emails[ctr] in poi_email_list: to_poi = True ctr += 1 if cc_emails: ctr = 0 while not to_poi and ctr < len(cc_emails): if cc_emails[ctr] in poi_email_list: cc_poi = True ctr += 1 ################################# ######## your code below ######## ### set from_poi to True if ##### ### the email is from a POI ##### ################################# if from_emails and from_emails[0] in poi_email_list: from_poi = True ################################# return to_poi, from_poi, cc_poi<|fim▁end|>
### indicating if a given email is from a POI ###
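# The completion above is the second line of the task comment at the top of
# the prompt ("write code that returns a boolean / indicating if a given
# email is from a POI"). A hypothetical call (the path is invented):
#
#     with open("maildir/message_1.txt") as f:
#         to_poi, from_poi, cc_poi = poiFlagEmail(f)
#
# Note a quirk visible in the prompt: the cc loop guards on `not to_poi`
# rather than `not cc_poi`, so it keeps scanning cc addresses after cc_poi is
# set, and skips them entirely when to_poi is already True.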
<|file_name|>le.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { // pub trait FixedSizeArray<T> { // /// Converts the array to immutable slice // fn as_slice(&self) -> &[T]; // /// Converts the array to mutable slice // fn as_mut_slice(&mut self) -> &mut [T]; // } // macro_rules! array_impls { // ($($N:expr)+) => { // $( // #[unstable(feature = "core")] // impl<T> FixedSizeArray<T> for [T; $N] { // #[inline] // fn as_slice(&self) -> &[T] { // &self[..] // } // #[inline] // fn as_mut_slice(&mut self) -> &mut [T] { // &mut self[..] // } // } // // #[unstable(feature = "array_as_ref",<|fim▁hole|> // fn as_ref(&self) -> &[T] { // &self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsMut<[T]> for [T; $N] { // #[inline] // fn as_mut(&mut self) -> &mut [T] { // &mut self[..] // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Copy> Clone for [T; $N] { // fn clone(&self) -> [T; $N] { // *self // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: Hash> Hash for [T; $N] { // fn hash<H: hash::Hasher>(&self, state: &mut H) { // Hash::hash(&self[..], state) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: fmt::Debug> fmt::Debug for [T; $N] { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // fmt::Debug::fmt(&&self[..], f) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a [T; $N] { // type Item = &'a T; // type IntoIter = Iter<'a, T>; // // fn into_iter(self) -> Iter<'a, T> { // self.iter() // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a mut [T; $N] { // type Item = &'a mut T; // type IntoIter = IterMut<'a, T>; // // fn into_iter(self) -> IterMut<'a, T> { // self.iter_mut() // } // } // // // NOTE: some less important impls are omitted to reduce code bloat // __impl_slice_eq1! { [A; $N], [B; $N] } // __impl_slice_eq2! { [A; $N], [B] } // __impl_slice_eq2! { [A; $N], &'b [B] } // __impl_slice_eq2! { [A; $N], &'b mut [B] } // // __impl_slice_eq2! { [A; $N], &'b [B; $N] } // // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Eq> Eq for [T; $N] { } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:PartialOrd> PartialOrd for [T; $N] { // #[inline] // fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> { // PartialOrd::partial_cmp(&&self[..], &&other[..]) // } // #[inline] // fn lt(&self, other: &[T; $N]) -> bool { // PartialOrd::lt(&&self[..], &&other[..]) // } // #[inline] // fn le(&self, other: &[T; $N]) -> bool { // PartialOrd::le(&&self[..], &&other[..]) // } // #[inline] // fn ge(&self, other: &[T; $N]) -> bool { // PartialOrd::ge(&&self[..], &&other[..]) // } // #[inline] // fn gt(&self, other: &[T; $N]) -> bool { // PartialOrd::gt(&&self[..], &&other[..]) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Ord> Ord for [T; $N] { // #[inline] // fn cmp(&self, other: &[T; $N]) -> Ordering { // Ord::cmp(&&self[..], &&other[..]) // } // } // )+ // } // } // array_impls! 
{ // 0 1 2 3 4 5 6 7 8 9 // 10 11 12 13 14 15 16 17 18 19 // 20 21 22 23 24 25 26 27 28 29 // 30 31 32 // } type T = i32; type A = T; type B = T; #[test] fn le_test1() { let array_a: [A; 21] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 ]; let array_b: [B; 21] = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21 ]; assert_eq!(array_a.lt(&array_b), true); assert_eq!(array_a <= array_b, true); } #[test] fn le_test2() { let array_a: [A; 21] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 ]; let array_b: [B; 21] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 ]; assert_eq!(array_a.le(&array_b), true); assert_eq!(array_a <= array_b, true); } #[test] fn le_test3() { let array_a: [A; 21] = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21 ]; let array_b: [B; 21] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 ]; assert_eq!(array_a.le(&array_b), false); assert_eq!(array_a <= array_b, false); } }<|fim▁end|>
// reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsRef<[T]> for [T; $N] { // #[inline]
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>""" Authors: Tim Hessels UNESCO-IHE 2016 Contact: [email protected] Repository: https://github.com/wateraccounting/wa Module: Collect/MOD17 Description: This module downloads MOD17 GPP data from http://e4ftl01.cr.usgs.gov/. Use the MOD17.GPP_8daily function to download and create 8 daily GPP images in Gtiff format. The data is available between 2000-02-18 till present. Examples: from wa.Collect import MOD17 MOD17.GPP_8daily(Dir='C:/Temp3/', Startdate='2003-12-01', Enddate='2003-12-20', latlim=[41, 45], lonlim=[-8, -5]) MOD17.NPP_yearly(Dir='C:/Temp3/', Startdate='2003-12-01', Enddate='2003-12-20', latlim=[41, 45], lonlim=[-8, -5]) """ from .GPP_8daily import main as GPP_8daily from .NPP_yearly import main as NPP_yearly __all__ = ['GPP_8daily', 'NPP_yearly'] __version__ = '0.1'<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>tabId.js<|end_file_name|><|fim▁begin|>/* -------------------------------------------------------------------------- */ /* Copyright 2002-2020, OpenNebula Project, OpenNebula Systems */ /* */<|fim▁hole|>/* Licensed under the Apache License, Version 2.0 (the "License"); you may */ /* not use this file except in compliance with the License. You may obtain */ /* a copy of the License at */ /* */ /* http://www.apache.org/licenses/LICENSE-2.0 */ /* */ /* Unless required by applicable law or agreed to in writing, software */ /* distributed under the License is distributed on an "AS IS" BASIS, */ /* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ /* See the License for the specific language governing permissions and */ /* limitations under the License. */ /* -------------------------------------------------------------------------- */ define(function(require){ return 'acls-tab'; });<|fim▁end|>
<|file_name|>security_related.py<|end_file_name|><|fim▁begin|># (C) Copyright 2016 Vit Mojzis, [email protected] # # This program is distributed under the terms of the GNU General Public License # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sepolicyanalysis.domain_grouping as grouping import sys # read "security_related.conf" and return corresponding types # returns (domain_types, resource_types) def get_security_types(): try: packages = set() types = set() exclude = set() txt = open("/etc/sepolicyanalysis/security_related.conf", "r") packages = {} for line in txt: if (len(line) < 1) or (line[0] == '#'):<|fim▁hole|> continue if line.startswith("packages="): packages = set([x.strip() for x in line[9:].split(",")]) if line.startswith("types=="): types = set([x.strip() for x in line[6:].split(",")]) if line.startswith("exclude="): exclude = set([x.strip() for x in line[8:].split(",")]) #all types given in "types=" are treated as domains ! domain_grouping = grouping.group_types_cil() groups = set() for name in packages: group = domain_grouping.get(name, None) if group: groups.add(group) #get types corresponding to given packages domains, resources = grouping.get_types(groups) domains = domains | types # remove excluded types domains = domains - exclude resources = resources - exclude return domains, resources except IOError as e: print('Could not read "security_related.conf"!', file=sys.stderr) return set(), set()<|fim▁end|>
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.contrib.auth.models import django.utils.timezone from django.conf import settings import django.core.validators import forum.models class Migration(migrations.Migration): dependencies = [ ('auth', '0006_require_contenttypes_0002'), ] operations = [ migrations.CreateModel( name='ForumUser', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('password', models.CharField(max_length=128, verbose_name='password')), ('last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True)), ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, max_length=30, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, verbose_name='username')), ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)), ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)), ('email', models.EmailField(max_length=254, verbose_name='email address', blank=True)), ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), ('nickname', models.CharField(max_length=200, null=True, blank=True)), ('avatar', models.CharField(max_length=200, null=True, blank=True)), ('signature', models.CharField(max_length=500, null=True, blank=True)), ('location', models.CharField(max_length=200, null=True, blank=True)), ('website', models.URLField(null=True, blank=True)), ('company', models.CharField(max_length=200, null=True, blank=True)), ('role', models.IntegerField(null=True, blank=True)), ('balance', models.IntegerField(null=True, blank=True)), ('reputation', models.IntegerField(null=True, blank=True)), ('self_intro', models.CharField(max_length=500, null=True, blank=True)), ('updated', models.DateTimeField(null=True, blank=True)), ('twitter', models.CharField(max_length=200, null=True, blank=True)), ('github', models.CharField(max_length=200, null=True, blank=True)), ('douban', models.CharField(max_length=200, null=True, blank=True)), ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', verbose_name='groups')), ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')), ], options={ 'abstract': False, 'verbose_name': 'user', 'verbose_name_plural': 'users', }, managers=[ ('objects', django.contrib.auth.models.UserManager()), ], ), migrations.CreateModel( name='Favorite', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('involved_type', models.IntegerField(null=True, blank=True)), ('created', models.DateTimeField(null=True, blank=True)), ], ), migrations.CreateModel( name='Node', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=200, null=True, blank=True)), ('slug', models.SlugField(max_length=200, null=True, blank=True)), ('thumb', models.CharField(max_length=200, null=True, blank=True)), ('introduction', models.CharField(max_length=500, null=True, blank=True)), ('created', models.DateTimeField(null=True, blank=True)), ('updated', models.DateTimeField(null=True, blank=True)), ('topic_count', models.IntegerField(null=True, blank=True)), ('custom_style', forum.models.NormalTextField(null=True, blank=True)), ('limit_reputation', models.IntegerField(null=True, blank=True)), ], ), migrations.CreateModel( name='Notification', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('content', forum.models.NormalTextField(null=True, blank=True)), ('status', models.IntegerField(null=True, blank=True)), ('involved_type', models.IntegerField(null=True, blank=True)), ('occurrence_time', models.DateTimeField(null=True, blank=True)), ], ), migrations.CreateModel( name='Plane', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=200, null=True, blank=True)), ('created', models.DateTimeField(null=True, blank=True)), ('updated', models.DateTimeField(null=True, blank=True)), ], ), migrations.CreateModel( name='Reply', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('content', forum.models.NormalTextField(null=True, blank=True)), ('created', models.DateTimeField(null=True, blank=True)), ('updated', models.DateTimeField(null=True, blank=True)), ('up_vote', models.IntegerField(null=True, blank=True)), ('down_vote', models.IntegerField(null=True, blank=True)), ('last_touched', models.DateTimeField(null=True, blank=True)), ('author', models.ForeignKey(related_name='reply_author', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ], ), migrations.CreateModel( name='Topic', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=200, null=True, blank=True)), ('slug', models.SlugField(max_length=200, null=True, blank=True)), ('content', forum.models.NormalTextField(null=True, blank=True)), ('status', models.IntegerField(null=True, blank=True)), ('hits', models.IntegerField(null=True, blank=True)), ('created', models.DateTimeField(null=True, blank=True)), ('updated', models.DateTimeField(null=True, blank=True)), ('reply_count', models.IntegerField(null=True, blank=True)), ('last_replied_time', models.DateTimeField(null=True, 
blank=True)), ('up_vote', models.IntegerField(null=True, blank=True)),<|fim▁hole|> ('last_touched', models.DateTimeField(null=True, blank=True)), ('author', models.ForeignKey(related_name='topic_author', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ('last_replied_by', models.ForeignKey(related_name='topic_last', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ('node', models.ForeignKey(blank=True, to='forum.Node', null=True)), ], ), migrations.CreateModel( name='Transaction', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('type', models.IntegerField(null=True, blank=True)), ('reward', models.IntegerField(null=True, blank=True)), ('current_balance', models.IntegerField(null=True, blank=True)), ('occurrence_time', models.DateTimeField(null=True, blank=True)), ('involved_reply', models.ForeignKey(related_name='trans_reply', blank=True, to='forum.Reply', null=True)), ('involved_topic', models.ForeignKey(related_name='trans_topic', blank=True, to='forum.Topic', null=True)), ('involved_user', models.ForeignKey(related_name='trans_involved', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ('user', models.ForeignKey(related_name='trans_user', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ], ), migrations.CreateModel( name='Vote', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('status', models.IntegerField(null=True, blank=True)), ('involved_type', models.IntegerField(null=True, blank=True)), ('occurrence_time', models.DateTimeField(null=True, blank=True)), ('involved_reply', models.ForeignKey(related_name='vote_reply', blank=True, to='forum.Reply', null=True)), ('involved_topic', models.ForeignKey(related_name='vote_topic', blank=True, to='forum.Topic', null=True)), ('involved_user', models.ForeignKey(related_name='vote_user', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ('trigger_user', models.ForeignKey(related_name='vote_trigger', blank=True, to=settings.AUTH_USER_MODEL, null=True)), ], ), migrations.AddField( model_name='reply', name='topic', field=models.ForeignKey(blank=True, to='forum.Topic', null=True), ), migrations.AddField( model_name='notification', name='involved_reply', field=models.ForeignKey(related_name='notify_reply', blank=True, to='forum.Reply', null=True), ), migrations.AddField( model_name='notification', name='involved_topic', field=models.ForeignKey(related_name='notify_topic', blank=True, to='forum.Topic', null=True), ), migrations.AddField( model_name='notification', name='involved_user', field=models.ForeignKey(related_name='notify_user', blank=True, to=settings.AUTH_USER_MODEL, null=True), ), migrations.AddField( model_name='notification', name='trigger_user', field=models.ForeignKey(related_name='notify_trigger', blank=True, to=settings.AUTH_USER_MODEL, null=True), ), migrations.AddField( model_name='node', name='plane', field=models.ForeignKey(blank=True, to='forum.Plane', null=True), ), migrations.AddField( model_name='favorite', name='involved_reply', field=models.ForeignKey(related_name='fav_reply', blank=True, to='forum.Reply', null=True), ), migrations.AddField( model_name='favorite', name='involved_topic', field=models.ForeignKey(related_name='fav_topic', blank=True, to='forum.Topic', null=True), ), migrations.AddField( model_name='favorite', name='owner_user', field=models.ForeignKey(related_name='fav_user', blank=True, to=settings.AUTH_USER_MODEL, null=True), ), ]<|fim▁end|>
('down_vote', models.IntegerField(null=True, blank=True)),
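# Note on the hole above: in the Topic model the vote counters form a
# symmetric pair -- ('up_vote', ...) is immediately followed by
# ('down_vote', ...), both nullable IntegerFields, ahead of the
# ('last_touched', ...) DateTimeField.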
<|file_name|>feed_parse_extract17LiterarycornerWordpressCom.py<|end_file_name|><|fim▁begin|>def extract17LiterarycornerWordpressCom(item): ''' Parser for '17literarycorner.wordpress.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('King Of Hell\'s Genius Pampered Wife', 'King Of Hell\'s Genius Pampered Wife', 'translated'), ('KOH', 'King Of Hell\'s Genius Pampered Wife', 'translated'), ('Addicted to Boundlessly Pampering You', 'Addicted to Boundlessly Pampering You', 'translated'), ('ATBPY', 'Addicted to Boundlessly Pampering You', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) if item['tags'] == ['Uncategorized']: titlemap = [ ('KOH Chapter ', 'King Of Hell\'s Genius Pampered Wife', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel'), ] for titlecomponent, name, tl_type in titlemap: if titlecomponent.lower() in item['title'].lower(): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) <|fim▁hole|><|fim▁end|>
return False
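# Dispatch sketch for the parser above (item contents invented):
#
#     item = {'title': 'KOH Chapter 12', 'tags': ['KOH']}
#
# 'KOH' is in the tagmap, so this routes to "King Of Hell's Genius Pampered
# Wife" as a 'translated' release; items tagged only ['Uncategorized'] fall
# through to the title-based map, and anything else returns False.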
<|file_name|>Declare.java<|end_file_name|><|fim▁begin|>package modulo0.tree; public class Declare extends Stat { public Declare(Var x, Expr e, Stat s) { super("Declare", new M0Node[] { x, e, s }); } @Override<|fim▁hole|> } }<|fim▁end|>
protected Stat copy() { return new Declare((Var) getChild(0), (Expr) getChild(1), (Stat) getChild(2));
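// Design note: copy() rebuilds the node from its own children in positional
// order -- getChild(0) is the Var, getChild(1) the initializer Expr, and
// getChild(2) the body Stat -- matching the constructor, which registers
// them as new M0Node[] { x, e, s }.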
<|file_name|>yeast.cpp<|end_file_name|><|fim▁begin|>/* * yeast.cpp is part of Brewtarget, and is Copyright the following * authors 2009-2014 * - marker5a * - Philip Greggory Lee <[email protected]> * - plut0nium * - Samuel Östling <[email protected]> * * Brewtarget is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Brewtarget is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License<|fim▁hole|>#include <QDomNode> #include <QDomElement> #include <QDomText> #include <QObject> #include "yeast.h" #include "brewtarget.h" QStringList Yeast::types = QStringList() << "Ale" << "Lager" << "Wheat" << "Wine" << "Champagne"; QStringList Yeast::forms = QStringList() << "Liquid" << "Dry" << "Slant" << "Culture"; QStringList Yeast::flocculations = QStringList() << "Low" << "Medium" << "High" << "Very High"; QHash<QString,QString> Yeast::tagToProp = Yeast::tagToPropHash(); QHash<QString,QString> Yeast::tagToPropHash() { QHash<QString,QString> propHash; propHash["NAME"] = "name"; //propHash["TYPE"] = "type"; //propHash["FORM"] = "form"; propHash["AMOUNT"] = "amount"; propHash["INVENTORY"] = "inventory"; propHash["AMOUNT_IS_WEIGHT"] = "amountIsWeight"; propHash["LABORATORY"] = "laboratory"; propHash["PRODUCT_ID"] = "productID"; propHash["MIN_TEMPERATURE"] = "minTemperature_c"; propHash["MAX_TEMPERATURE"] = "maxTemperature_c"; //propHash["FLOCCULATION"] = "flocculation"; propHash["ATTENUATION"] = "attenuation_pct"; propHash["NOTES"] = "notes"; propHash["BEST_FOR"] = "bestFor"; propHash["TIMES_CULTURED"] = "timesCultured"; propHash["MAX_REUSE"] = "maxReuse"; propHash["ADD_TO_SECONDARY"] = "addToSecondary"; return propHash; } bool operator<(Yeast &y1, Yeast &y2) { return y1.name() < y2.name(); } bool operator==(Yeast &y1, Yeast &y2) { return y1.name() == y2.name(); } //============================CONSTRUCTORS====================================== Yeast::Yeast() : BeerXMLElement() { } Yeast::Yeast(Yeast const& other) : BeerXMLElement(other) { } //============================="GET" METHODS==================================== QString Yeast::laboratory() const { return get("laboratory").toString();; } QString Yeast::productID() const { return get("product_id").toString(); } QString Yeast::notes() const { return get("notes").toString(); } QString Yeast::bestFor() const { return get("best_for").toString(); } const QString Yeast::typeString() const { return types.at(type()); } const QString Yeast::formString() const { return forms.at(form()); } const QString Yeast::flocculationString() const { return flocculations.at(flocculation()); } double Yeast::amount() const { return get("amount").toDouble(); } double Yeast::minTemperature_c() const { return get("min_temperature").toDouble(); } double Yeast::maxTemperature_c() const { return get("max_temperature").toDouble(); } double Yeast::attenuation_pct() const { return get("attenuation").toDouble(); } int Yeast::inventory() const { return getInventory("quanta").toInt(); } int Yeast::timesCultured() const { return get("times_cultured").toInt(); } int Yeast::maxReuse() const { return get("max_reuse").toInt(); } bool Yeast::addToSecondary() const { return 
get("add_to_secondary").toBool(); } bool Yeast::amountIsWeight() const { return get("amount_is_weight").toBool(); } Yeast::Form Yeast::form() const { return static_cast<Yeast::Form>( forms.indexOf(get("form").toString())); } Yeast::Flocculation Yeast::flocculation() const { return static_cast<Yeast::Flocculation>( flocculations.indexOf(get("flocculation").toString())); } Yeast::Type Yeast::type() const { return static_cast<Yeast::Type>( types.indexOf(get("ytype").toString())); } const QString Yeast::typeStringTr() const { static QStringList typesTr = QStringList() << QObject::tr("Ale") << QObject::tr("Lager") << QObject::tr("Wheat") << QObject::tr("Wine") << QObject::tr("Champagne"); return typesTr.at(type()); } const QString Yeast::formStringTr() const { static QStringList formsTr = QStringList() << QObject::tr("Liquid") << QObject::tr("Dry") << QObject::tr("Slant") << QObject::tr("Culture"); return formsTr.at(form()); } const QString Yeast::flocculationStringTr() const { static QStringList flocculationsTr = QStringList() << QObject::tr("Low") << QObject::tr("Medium") << QObject::tr("High") << QObject::tr("Very High"); return flocculationsTr.at(flocculation()); } //============================="SET" METHODS==================================== void Yeast::setType( Yeast::Type t ) { set("type", "ytype", types.at(t)); } void Yeast::setForm( Yeast::Form f ) { set("form", "form", forms.at(f)); } void Yeast::setAmount( double var ) { if( var < 0.0 ) Brewtarget::logW( QString("Yeast: amount < 0: %1").arg(var) ); else set("amount", "amount", var); } void Yeast::setInventoryQuanta( int var ) { if( var < 0.0 ) Brewtarget::logW( QString("Yeast: inventory < 0: %1").arg(var) ); else setInventory("inventory", "quanta", var); } void Yeast::setAmountIsWeight( bool var ) { set("amountIsWeight", "amount_is_weight", var); } void Yeast::setLaboratory( const QString& var ) { set("laboratory", "laboratory", var); } void Yeast::setProductID( const QString& var ) { set("productID", "product_id", var); } void Yeast::setMinTemperature_c( double var ) { if( var < -273.15 ) return; else set("minTemperature_c", "min_temperature", var); } void Yeast::setMaxTemperature_c( double var ) { if( var < -273.15 ) return; else set("maxTemperature_c", "max_temperature", var); } void Yeast::setFlocculation( Yeast::Flocculation f ) { set("flocculation", "flocculation", flocculations.at(f)); } void Yeast::setAttenuation_pct( double var ) { if( var < 0.0 || var > 100.0 ) return; else set("attenuation", "attenuation", var); } void Yeast::setNotes( const QString& var ) { set("notes", "notes", var); } void Yeast::setBestFor( const QString& var ) { set("bestFor", "best_for", var); } void Yeast::setTimesCultured( int var ) { if( var < 0 ) return; else set("timesCultured", "times_cultured", var); } void Yeast::setMaxReuse( int var ) { if( var < 0 ) return; else set("maxReuse", "max_reuse", var); } void Yeast::setAddToSecondary( bool var ) { set("addToSecondary", "add_to_secondary", var); } //========================OTHER METHODS========================================= bool Yeast::isValidType(const QString& str) const { static const QString types[] = {"Ale", "Lager", "Wheat", "Wine", "Champagne"}; unsigned int i, size = 5; for( i = 0; i < size; ++i ) if( str == types[i] ) return true; return false; } bool Yeast::isValidForm(const QString& str) const { static const QString forms[] = {"Liquid", "Dry", "Slant", "Culture"}; unsigned int i, size=4; for( i = 0; i < size; ++i ) if( str == forms[i] ) return true; return false; } bool 
Yeast::isValidFlocculation(const QString& str) const { static const QString floc[] = {"Low", "Medium", "High", "Very High"}; unsigned int i, size=4; for( i = 0; i < size; ++i ) if( str == floc[i] ) return true; return false; }<|fim▁end|>
* along with this program. If not, see <http://www.gnu.org/licenses/>. */
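A minimal sketch of how a prompt/completion pair in this dump splices back into the original file, assuming the marker strings used above; the reassemble() helper is hypothetical, added for illustration only:

    def reassemble(prompt, completion):
        # prompt layout: <|fim▁begin|>prefix<|fim▁hole|>suffix<|fim▁end|>
        body = prompt.split('<|fim▁begin|>', 1)[1].rsplit('<|fim▁end|>', 1)[0]
        prefix, suffix = body.split('<|fim▁hole|>', 1)
        # splicing the completion into the hole restores the file, e.g. the
        # missing GPL line of yeast.cpp in the record above
        return prefix + completion + suffix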
<|file_name|>EZModule.py<|end_file_name|><|fim▁begin|>#################################################################################################### # Copyright (C) 2016 by Ingo Keller, Katrin Lohan # # <[email protected]> # # # # This file is part of pyJD (Python/Yarp Tools for the JD robot). # # # # pyJD is free software: you can redistribute it and/or modify it under the terms of the # # GNU Affero General Public License as published by the Free Software Foundation, either # # version 3 of the License, or (at your option) any later version. # # # # pyJD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; #<|fim▁hole|># # # You should have received a copy of the GNU Affero General Public License # # along with pyJD. If not, see <http://www.gnu.org/licenses/>. # #################################################################################################### import argparse import socket import time import yarp EMSG_YARP_NOT_FOUND = "Could not connect to the yarp server. Try running 'yarp detect'." EMSG_ROBOT_NOT_FOUND = 'Could not connect to the robot at %s:%s' class EZModule(yarp.RFModule): """ The EZBModule class provides a base class for developing modules for the JD robot. """ # Default IP Address and Port for the JD Humanoid Robot. TCP_IP = '192.168.1.1' TCP_PORT = 23 # Existing motor ID's are D0-D9, D12-D14 and D16-D18 there are more limits LIMITS = [ (30, 180), (70, 170), (0, 170), (0, 170), (0, 60), (0, 180), (0, 90), (0, 60), (0, 180), (0, 180), (0, 180), (0, 160), (0, 180), (0, 130), (0, 180), (0, 160), (0, 180), (50, 130), (0, 180), (0, 180), (0, 180) ] def __init__(self, ip, port, prefix): yarp.RFModule.__init__(self) self.ip = ip self.port = int(port) self.prefix = prefix # self.last_pos = [-1] * len(EZModule.LIMITS) def configure(self, rf): name = self.__class__.__name__ if self.prefix: name = self.prefix + '/' + name self.setName(name) # RPC Port self.rpc_port = yarp.RpcServer() # name settings port_name = '/%s/%s' % (name, 'rpc') if not self.rpc_port.open(port_name): raise RuntimeError, EMSG_YARP_NOT_FOUND self.attach_rpc_server(self.rpc_port) return True def interruptModule(self): self.rpc_port.interrupt() for x in dir(self): if x.endswith('Port') and 'interrupt' in dir(getattr(self, x)): getattr(self, x).interrupt() return True def close(self): self.rpc_port.close() for x in dir(self): if x.endswith('Port') and 'close' in dir(getattr(self, x)): getattr(self, x).close() return True def getPeriod(self): return 0.1 def updateModule(self): # XXX: I do not know why we need that, but if method is empty the module gets stuck time.sleep(0.000001) return True def createInputPort(self, name, mode = 'unbuffered'): """ This method returns an input port. @param obj - the object that the port is created for @param name - if a name is provided it gets appended to the modules name @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. @result port """ return self.__createPort(name + ':i', None, mode) def __createPort(self, name, target = None, mode = 'unbuffered'): """ This method returns a port object. @param name - yarp name for the port @param obj - object for which the port is created @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. 
@result port """ # create port if mode == 'buffered': port = yarp.BufferedPortBottle() elif mode == 'rpcclient': port = yarp.RpcClient() elif mode == 'rpcserver': port = yarp.RpcServer() else: port = yarp.Port() # build port name port_name = [''] # prefix handling if hasattr(self, 'prefix') and self.prefix: port_name.append(self.prefix) port_name.append(self.__class__.__name__) port_name.append(name) # open port if not port.open('/'.join(port_name)): raise RuntimeError, EMSG_YARP_NOT_FOUND # add output if given if target: port.addOutput(target) if hasattr(self, '_ports'): self._ports.append(port) return port def createOutputPort(self, name, target = None, mode = 'unbuffered'): """ This method returns an output port. @param obj - the object that the port is created for @param name - if a name is provided it gets appended to the modules name @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. @result port """ return self.__createPort(name + ':o', target, mode) #################################################################################################### # # Default methods for running the modules standalone # #################################################################################################### def createArgParser(): """ This method creates a base argument parser. @return Argument Parser object """ parser = argparse.ArgumentParser(description='Create a JDModule to control the JD robot.') parser.add_argument( '-i', '--ip', dest = 'ip', default = str(EZModule.TCP_IP), help = 'IP address for the JD robot.') parser.add_argument( '-p', '--port', dest = 'port', default = str(EZModule.TCP_PORT), help = 'Port for the JD robot') parser.add_argument( '-n', '--name', dest = 'name', default = '', help = 'Name prefix for Yarp port names') return parser.parse_args() def main(module_cls): """ This is a main method to run a module from command line. @param module_cls - an EZModule based class that can be started as a standalone module. """ args = createArgParser() yarp.Network.init() resource_finder = yarp.ResourceFinder() resource_finder.setVerbose(True) # resource_finder.configure(argc,argv); module = module_cls(args.ip, args.port, args.name) module.runModule(resource_finder) yarp.Network.fini()<|fim▁end|>
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. #
# See the GNU Affero General Public License for more details. #
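EZModule.__createPort above joins an optional prefix, the module class name and the port name into a YARP port path; the same naming rule in isolation (pure Python, no yarp import, the port_name helper is assumed for illustration):

    def port_name(prefix, cls_name, name):
        # mirrors __createPort: leading '/', optional prefix, class, port name
        parts = ['']
        if prefix:
            parts.append(prefix)
        parts.extend([cls_name, name])
        return '/'.join(parts)

    assert port_name('', 'EZModule', 'rpc') == '/EZModule/rpc'
    assert port_name('jd', 'EZModule', 'cmd:i') == '/jd/EZModule/cmd:i'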
<|file_name|>bootstrap-collapse-3387a8b21abb7862bae6aac8337d6b26.js<|end_file_name|><|fim▁begin|>/* ============================================================= * bootstrap-collapse.js v2.3.2 * http://getbootstrap.com/2.3.2/javascript.html#collapse * ============================================================= * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ============================================================ */ !function ($) { "use strict"; // jshint ;_; /* COLLAPSE PUBLIC CLASS DEFINITION * ================================ */ var Collapse = function (element, options) { this.$element = $(element) this.options = $.extend({}, $.fn.collapse.defaults, options) if (this.options.parent) { this.$parent = $(this.options.parent) } this.options.toggle && this.toggle() } Collapse.prototype = { constructor: Collapse , dimension: function () { var hasWidth = this.$element.hasClass('width') return hasWidth ? 'width' : 'height' } , show: function () { var dimension , scroll , actives , hasData if (this.transitioning || this.$element.hasClass('in')) return dimension = this.dimension() scroll = $.camelCase(['scroll', dimension].join('-')) actives = this.$parent && this.$parent.find('> .accordion-group > .in') if (actives && actives.length) { hasData = actives.data('collapse')<|fim▁hole|> } this.$element[dimension](0) this.transition('addClass', $.Event('show'), 'shown') $.support.transition && this.$element[dimension](this.$element[0][scroll]) } , hide: function () { var dimension if (this.transitioning || !this.$element.hasClass('in')) return dimension = this.dimension() this.reset(this.$element[dimension]()) this.transition('removeClass', $.Event('hide'), 'hidden') this.$element[dimension](0) } , reset: function (size) { var dimension = this.dimension() this.$element .removeClass('collapse') [dimension](size || 'auto') [0].offsetWidth this.$element[size !== null ? 'addClass' : 'removeClass']('collapse') return this } , transition: function (method, startEvent, completeEvent) { var that = this , complete = function () { if (startEvent.type == 'show') that.reset() that.transitioning = 0 that.$element.trigger(completeEvent) } this.$element.trigger(startEvent) if (startEvent.isDefaultPrevented()) return this.transitioning = 1 this.$element[method]('in') $.support.transition && this.$element.hasClass('collapse') ? this.$element.one($.support.transition.end, complete) : complete() } , toggle: function () { this[this.$element.hasClass('in') ? 
'hide' : 'show']() } } /* COLLAPSE PLUGIN DEFINITION * ========================== */ var old = $.fn.collapse $.fn.collapse = function (option) { return this.each(function () { var $this = $(this) , data = $this.data('collapse') , options = $.extend({}, $.fn.collapse.defaults, $this.data(), typeof option == 'object' && option) if (!data) $this.data('collapse', (data = new Collapse(this, options))) if (typeof option == 'string') data[option]() }) } $.fn.collapse.defaults = { toggle: true } $.fn.collapse.Constructor = Collapse /* COLLAPSE NO CONFLICT * ==================== */ $.fn.collapse.noConflict = function () { $.fn.collapse = old return this } /* COLLAPSE DATA-API * ================= */ $(document).on('click.collapse.data-api', '[data-toggle=collapse]', function (e) { var $this = $(this), href , target = $this.attr('data-target') || e.preventDefault() || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7 , option = $(target).data('collapse') ? 'toggle' : $this.data() $this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed') $(target).collapse(option) }) }(window.jQuery);<|fim▁end|>
if (hasData && hasData.transitioning) return
actives.collapse('hide')
hasData || actives.data('collapse', null)
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url <|fim▁hole|> # Examples: url(r'^$', 'index', name='index'), # url(r'^api/user/$', 'api_user'), url(r'^skin_config/$', 'skin_config', name='skin_config'), url(r'^login/$', 'Login', name='login'), url(r'^logout/$', 'Logout', name='logout'), url(r'^exec_cmd/$', 'exec_cmd', name='exec_cmd'), url(r'^file/upload/$', 'upload', name='file_upload'), url(r'^file/download/$', 'download', name='file_download'), url(r'^setting', 'setting', name='setting'), url(r'^terminal/$', 'web_terminal', name='terminal'), url(r'^mylog/$', 'mylog', name='mylog'), url(r'^juser/', include('juser.urls')), url(r'^jasset/', include('jasset.urls')), url(r'^jlog/', include('jlog.urls')), url(r'^jperm/', include('jperm.urls')), url(r'^dbtool/', include('dbtool.urls')), url(r'^cachemanage/', include('cachemanage.urls')), )<|fim▁end|>
urlpatterns = patterns('jumpserver.views',
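The completion above opens an old-style Django patterns() call whose first argument is a view-module prefix, so string view names in the url() entries resolve against jumpserver.views; roughly the lookup it implies (resolve() is an illustrative stand-in, not Django's actual resolver):

    def resolve(prefix, view_name):
        # 'Login' with prefix 'jumpserver.views' -> jumpserver.views.Login
        module = __import__(prefix, fromlist=[view_name])
        return getattr(module, view_name)

    import os.path
    assert resolve('os.path', 'join') is os.path.join  # same rule, stdlib example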
<|file_name|>SimpleCacheManager.java<|end_file_name|><|fim▁begin|>package com.icfcc.cache.support; import com.icfcc.cache.Cache; import java.util.Collection; /** * Simple cache manager working against a given collection of caches. * Useful for testing or simple caching declarations. * * @author Costin Leau * @since 3.1 */ public class SimpleCacheManager extends AbstractCacheManager { private Collection<? extends Cache> caches; /** * Specify the collection of Cache instances to use for this CacheManager. */<|fim▁hole|> public void setCaches(Collection<? extends Cache> caches) { this.caches = caches; } @Override protected Collection<? extends Cache> loadCaches() { return this.caches; } }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod vk;<|fim▁end|>
#![feature(associated_consts)] extern crate libc;
<|file_name|>Money.js<|end_file_name|><|fim▁begin|>/** * @file * Money is a value object representing a monetary value. It does not use * floating point numbers, so it avoids rounding errors. * The only operation that may cause stray cents is split, it assures that no * cents vanish by distributing as evenly as possible among the parts it splits into. * * Money has no notion of currency, so working in a single currency -- no matter * which one -- is appropriate usage. * * In lack of better terminology, Money uses "dollars" and "cents". * * One dollar is assumed to be 100 cents. * * The cent number is guaranteed to be below 100. */ Module("Cactus.Data", function (m) { /** * @param natural dollars * @param natural cents * if > 100 then dollars are added until cents < 100. */ var Money = Class("Money", { has : { /** * @type int */ amount : null }, methods : { BUILD : function (dollars, cents) { var dollars = parseInt(dollars, 10); var cents = parseInt(cents, 10); if (dollars !== 0 && cents < 0) { throw new Error("Money: cents < 0"); } if (isNaN(dollars)) { throw new Error("Money: dollars is NaN"); } if (isNaN(cents)) { throw new Error("Money: cents is NaN"); } return { amount : dollars * 100 + (dollars < 0 ? -1 * cents : cents) }; }, /** * @return int */ _getAmount : function () { return this.amount; }, /** * @return natural */ getDollars : function () { if (this.amount < 0) { return Math.ceil(this.amount / 100); } else { return Math.floor(this.amount / 100); } }, /** * @return natural */ getCents : function () { if (this.amount < 0 && this.getDollars() === 0) { return this.amount % 100; } else { return Math.abs(this.amount % 100); } }, /** * The value with zero padded cents if < 100, and . used as the decimal * separator. * * @return string */ toString : function () { if (this.isNegative()) { if (this.gt(new Money(-1, 0))) { var cents = (-this.getCents()) < 10 ? "0" + (-this.getCents()) : (-this.getCents()); return "-0." + cents; } } var cents = this.getCents() < 10 ? "0" + this.getCents() : this.getCents(); return this.getDollars() + "." + cents; }, /** * @param Money money * @return Money */ add : function (money) { return Money._fromAmount(this._getAmount() + money._getAmount()); }, /** * @param Money money * @return Money */ sub : function (money) { return Money._fromAmount(this._getAmount() - money._getAmount()); }, /** * @param number multiplier * @return Money */ mult : function (multiplier) { return Money._fromAmount(this._getAmount() * multiplier); }, /** * @return Boolean */ isPositive : function () { return this._getAmount() > 0; }, /** * @return Boolean */ isNegative : function () { return this._getAmount() < 0; }, /** * @return Boolean */ isZero : function () { return this._getAmount() === 0; }, /** * @param Money money * @return Boolean */ equals : function (money) { return this._getAmount() === money._getAmount(); }, /** * @param Money money * @return Boolean */ gt : function (money) { return this._getAmount() > money._getAmount();<|fim▁hole|> * @param Money money * @return Boolean */ lt : function (money) { return money.gt(this); }, /** * @param Money money * @return Money */ negate : function () { return Money._fromAmount(-this._getAmount()); }, /** * Splits the object into parts, the remaining cents are distributed from * the first element of the result and onward. 
* * @param int divisor * @return Array<Money> */ split : function (divisor) { var dividend = this._getAmount(); var quotient = Math.floor(dividend / divisor); var remainder = dividend - quotient * divisor; var res = []; var moneyA = Money._fromAmount(quotient + 1); var moneyB = Money._fromAmount(quotient); for (var i = 0; i < remainder; i++) { res.push(moneyA); } for (i = 0; i < divisor - remainder; i++) { res.push(moneyB); } return res; }, /** * @return Hash{ * dollars : int, * cents : int * } */ serialize : function () { return { dollars : this.getDollars(), cents : this.getCents() }; } } }); /** * @param String s * @return Money */ Money.fromString = function (s) { if (s === null) { throw new Error("Money.fromString: String was null."); } if (s === "") { throw new Error("Money.fromString: String was empty."); } if (!/^-?\d+(?:\.\d{2})?$/.test(s)) { throw new Error("Money.fromString: Invalid format, got: " + s); } var a = s.split("."); if (a.length === 1) { return new Money(parseInt(a[0], 10), 0); } else if (a.length === 2) { return new Money(parseInt(a[0], 10), parseInt(a[1], 10)); } else { throw new Error("Money:fromString: BUG: RegExp should have prevent this from happening."); } }; /** * @param int amount * @return Money */ Money._fromAmount = function (amount) { if (amount > 0) { return new Money(Math.floor(amount / 100), amount % 100); } else { return new Money(Math.ceil(amount / 100), Math.abs(amount) < 100 ? (amount % 100) : -(amount % 100)); } }; /** * @param Array<Money> ms * @return Money */ Money.sum = function (ms) { var sum = new Money(0, 0); for (var i = 0; i < ms.length; i++) { sum = sum.add(ms[i]); } return sum; }; m.Money = Money; });<|fim▁end|>
}, /**
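Money.split in the record above hands out the leftover cents one at a time starting from the first part, so the parts always sum back to the original amount; the same arithmetic as a short sketch (amounts in integer cents, as the class stores them):

    def split(amount_cents, divisor):
        q, r = divmod(amount_cents, divisor)
        # the first r parts carry one extra cent, as in Money.split
        return [q + 1] * r + [q] * (divisor - r)

    assert split(100, 3) == [34, 33, 33]
    assert sum(split(100, 3)) == 100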
<|file_name|>Invocation.ts<|end_file_name|><|fim▁begin|>import * as _ from "lodash"; import * as common from "../Common/_all"; import { ICallContext, CallType, ProxyType } from "./ICallContext"; export enum InvocationType { NONE, SETUP, EXECUTE } export abstract class BaseInvocation implements ICallContext { returnValue: any; invocationType = InvocationType.NONE; constructor(public readonly proxyType: ProxyType, public callType: CallType) { } abstract get args(): IArguments; abstract set args(value: IArguments); abstract get property(): IPropertyInfo; abstract invokeBase(): void; get isAnUnknownDynamicCallAtExecution(): boolean { return this.proxyType == ProxyType.DYNAMIC && this.callType == CallType.UNKNOWN && this.invocationType == InvocationType.EXECUTE; }; } export class MethodInvocation extends BaseInvocation { private _args: IArguments; constructor( private readonly _that: Object, private readonly _property: MethodInfo, args?: IArguments, proxyType = ProxyType.STATIC, callType = CallType.FUNCTION) { super(proxyType, callType); if (args) { this._args = <any>_.cloneDeep(args); this._args.length = args.length; this._args.callee = args.callee; } } get args(): IArguments { return this._args || <any>{ length: 0, callee: null }; } set args(value: IArguments) { this._args = value; } get property(): IPropertyInfo { return this._property; } invokeBase(): void { let thatClone = {}; if (this._that) common.Utils.clone(thatClone, this._that); else thatClone = this._property.obj; this.returnValue = this._property.toFunc.apply(thatClone, this._args); } toString(): string { const res = `${this.property}(${common.Utils.argsName(this.args)})`; return res; } } export class ValueGetterInvocation extends BaseInvocation { constructor( private readonly _property: IPropertyInfo, readonly value: any, proxyType = ProxyType.STATIC, callType = CallType.PROPERTY) { super(proxyType, callType); this.returnValue = _.cloneDeep(value); } get args(): IArguments { let args: any[] = []; Object.defineProperty(args, "callee", { configurable: true, enumerable: true, writable: false, value: null }); return <any>args; } set args(value: IArguments) { } get property(): IPropertyInfo { return this._property; } invokeBase(): void { this.returnValue = (<any>this._property.obj)[this._property.name]; } toString(): string { const res = `${this.property}`; return res; } } export class DynamicGetInvocation extends ValueGetterInvocation { constructor( property: IPropertyInfo, value: any) { super(property, value, ProxyType.DYNAMIC, CallType.UNKNOWN); this.returnValue = _.cloneDeep(value); } } export class ValueSetterInvocation extends BaseInvocation { private _args: IArguments; constructor( private readonly _property: IPropertyInfo, args: IArguments, proxyType = ProxyType.STATIC, callType = CallType.PROPERTY) { super(proxyType, callType); this._args = <any>_.cloneDeep(args); this._args.length = args.length; this._args.callee = args.callee; } get args(): IArguments { return this._args; } set args(value: IArguments) { this._args = value; } get property(): IPropertyInfo { return this._property; } invokeBase(): void { (<any>this._property.obj)[this._property.name] = this._args[0]; this.returnValue = (<any>this._property.obj)[this._property.name]; } toString(): string { const res = `${this.property} = ${common.Utils.argsName(this.args[0])}`; return res; } } export class MethodGetterInvocation extends BaseInvocation { constructor( private readonly _property: IPropertyInfo, private readonly _getter: () => any, proxyType = ProxyType.STATIC, 
callType = CallType.FUNCTION) { super(proxyType, callType); } get args(): IArguments { let args: any[] = []; Object.defineProperty(args, "callee", { configurable: true, enumerable: true, writable: false, value: null }); return <any>args; } set args(value: IArguments) { } get property(): IPropertyInfo { return this._property; } invokeBase(): void { this.returnValue = (<any>this._property.obj)[this._property.name]; } toString(): string { const res = `${this.property}`; return res; } } export class MethodSetterInvocation extends BaseInvocation { private _args: IArguments; constructor( private readonly _property: IPropertyInfo, private readonly _setter: (v: any) => void, args: IArguments, proxyType = ProxyType.STATIC, callType = CallType.FUNCTION) { super(proxyType, callType); this._args = <any>_.cloneDeep(args); this._args.length = args.length; this._args.callee = args.callee; } get args(): IArguments { return this._args; } set args(value: IArguments) { this._args = value; } get property(): IPropertyInfo { return this._property; } invokeBase(): void { (<any>this._property.obj)[this._property.name] = this._args[0]; this.returnValue = (<any>this._property.obj)[this._property.name]; } toString(): string { const res = `${this.property}(${common.Utils.argsName(this.args[0])})`; return res; } } export class MethodInfo implements IPropertyInfo { readonly desc: common.PropDescriptor; constructor( public readonly obj: any, public readonly name: string, desc?: common.PropDescriptor) { if (desc) this.desc = _.cloneDeep(desc); } get toFunc(): Function { const func = _.isFunction(this.obj) ? <Function>this.obj : <Function>this.obj[this.name]; return func; } <|fim▁hole|> } } export class PropertyInfo implements IPropertyInfo { readonly desc: common.PropDescriptor; constructor( public readonly obj: Object, public readonly name: string, desc?: common.PropDescriptor) { if (desc) this.desc = _.cloneDeep(desc); } toString(): string { const objName = common.Utils.objectName(this.obj); const res = `${objName}.${this.name}`; return res; } } export interface IPropertyInfo { obj: Object; name: string; desc?: common.PropDescriptor; }<|fim▁end|>
toString(): string { const objName = common.Utils.objectName(this.obj); const res = _.isFunction(this.obj) ? `${objName}` : `${objName}.${this.name}`; return res;
<|file_name|>testing.py<|end_file_name|><|fim▁begin|>""" Mixins for setting up particular course structures (such as split tests or cohorted content) """ from datetime import datetime from pytz import UTC from openedx.core.djangoapps.course_groups.models import CourseUserGroupPartitionGroup from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory from openedx.core.djangoapps.user_api.tests.factories import UserCourseTagFactory from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.partitions.partitions import UserPartition, Group from student.tests.factories import CourseEnrollmentFactory, UserFactory class ContentGroupTestCase(ModuleStoreTestCase): """ Sets up discussion modules visible to content groups 'Alpha' and 'Beta', as well as a module visible to all students. Creates a staff user, users with access to Alpha/Beta (by way of cohorts), and a non-cohorted user with no special access. """ def setUp(self): super(ContentGroupTestCase, self).setUp() self.course = CourseFactory.create( org='org', number='number', run='run', # This test needs to use a course that has already started -- # discussion topics only show up if the course has already started, # and the default start date for courses is Jan 1, 2030. start=datetime(2012, 2, 3, tzinfo=UTC), user_partitions=[ UserPartition( 0, 'Content Group Configuration', '', [Group(1, 'Alpha'), Group(2, 'Beta')], scheme_id='cohort' ) ], grading_policy={ "GRADER": [{ "type": "Homework", "min_count": 1, "drop_count": 0, "short_label": "HW", "passing_grade": 0, "weight": 1.0 }] }, cohort_config={'cohorted': True}, discussion_topics={} ) self.staff_user = UserFactory.create(is_staff=True) self.alpha_user = UserFactory.create() self.beta_user = UserFactory.create() self.non_cohorted_user = UserFactory.create()<|fim▁hole|> for user in [self.staff_user, self.alpha_user, self.beta_user, self.non_cohorted_user]: CourseEnrollmentFactory.create(user=user, course_id=self.course.id) alpha_cohort = CohortFactory( course_id=self.course.id, name='Cohort Alpha', users=[self.alpha_user] ) beta_cohort = CohortFactory( course_id=self.course.id, name='Cohort Beta', users=[self.beta_user] ) CourseUserGroupPartitionGroup.objects.create( course_user_group=alpha_cohort, partition_id=self.course.user_partitions[0].id, group_id=self.course.user_partitions[0].groups[0].id ) CourseUserGroupPartitionGroup.objects.create( course_user_group=beta_cohort, partition_id=self.course.user_partitions[0].id, group_id=self.course.user_partitions[0].groups[1].id ) self.alpha_module = ItemFactory.create( parent_location=self.course.location, category='discussion', discussion_id='alpha_group_discussion', discussion_target='Visible to Alpha', group_access={self.course.user_partitions[0].id: [self.course.user_partitions[0].groups[0].id]} ) self.beta_module = ItemFactory.create( parent_location=self.course.location, category='discussion', discussion_id='beta_group_discussion', discussion_target='Visible to Beta', group_access={self.course.user_partitions[0].id: [self.course.user_partitions[0].groups[1].id]} ) self.global_module = ItemFactory.create( parent_location=self.course.location, category='discussion', discussion_id='global_group_discussion', discussion_target='Visible to Everyone' ) self.course = self.store.get_item(self.course.location) class TestConditionalContent(ModuleStoreTestCase): """ Construct a course with graded problems that exist within a split test. 
""" TEST_SECTION_NAME = 'Problem' def setUp(self): """ Set up a course with graded problems within a split test. Course hierarchy is as follows (modeled after how split tests are created in studio): -> course -> chapter -> sequential (graded) -> vertical -> split_test -> vertical (Group A) -> problem -> vertical (Group B) -> problem """ super(TestConditionalContent, self).setUp() # Create user partitions self.user_partition_group_a = 0 self.user_partition_group_b = 1 self.partition = UserPartition( 0, 'first_partition', 'First Partition', [ Group(self.user_partition_group_a, 'Group A'), Group(self.user_partition_group_b, 'Group B') ] ) # Create course with group configurations and grading policy self.course = CourseFactory.create( user_partitions=[self.partition], grading_policy={ "GRADER": [{ "type": "Homework", "min_count": 1, "drop_count": 0, "short_label": "HW", "passing_grade": 0, "weight": 1.0 }] } ) chapter = ItemFactory.create(parent_location=self.course.location, display_name='Chapter') # add a sequence to the course to which the problems can be added self.problem_section = ItemFactory.create(parent_location=chapter.location, category='sequential', metadata={'graded': True, 'format': 'Homework'}, display_name=self.TEST_SECTION_NAME) # Create users and partition them self.student_a = UserFactory.create(username='student_a', email='[email protected]') CourseEnrollmentFactory.create(user=self.student_a, course_id=self.course.id) self.student_b = UserFactory.create(username='student_b', email='[email protected]') CourseEnrollmentFactory.create(user=self.student_b, course_id=self.course.id) UserCourseTagFactory( user=self.student_a, course_id=self.course.id, key='xblock.partition_service.partition_{0}'.format(self.partition.id), # pylint: disable=no-member value=str(self.user_partition_group_a) ) UserCourseTagFactory( user=self.student_b, course_id=self.course.id, key='xblock.partition_service.partition_{0}'.format(self.partition.id), # pylint: disable=no-member value=str(self.user_partition_group_b) ) # Create a vertical to contain our split test problem_vertical = ItemFactory.create( parent_location=self.problem_section.location, category='vertical', display_name='Problem Unit' ) # Create the split test and child vertical containers vertical_a_url = self.course.id.make_usage_key('vertical', 'split_test_vertical_a') vertical_b_url = self.course.id.make_usage_key('vertical', 'split_test_vertical_b') self.split_test = ItemFactory.create( parent_location=problem_vertical.location, category='split_test', display_name='Split Test', user_partition_id=self.partition.id, # pylint: disable=no-member group_id_to_child={str(index): url for index, url in enumerate([vertical_a_url, vertical_b_url])} ) self.vertical_a = ItemFactory.create( parent_location=self.split_test.location, category='vertical', display_name='Group A problem container', location=vertical_a_url ) self.vertical_b = ItemFactory.create( parent_location=self.split_test.location, category='vertical', display_name='Group B problem container', location=vertical_b_url )<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import absolute_import, unicode_literals from qproject.celery import app as celery_app __all__ = ['celery_app']<|fim▁end|>
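TestConditionalContent above pins each student to a split-test group through a user course tag whose key embeds the partition id; the key/value convention on its own (ids taken from the fixture, the partition_tag helper is assumed):

    def partition_tag(partition_id, group_id):
        # key format used with UserCourseTagFactory in the fixture
        return ('xblock.partition_service.partition_%d' % partition_id,
                str(group_id))

    assert partition_tag(0, 1) == ('xblock.partition_service.partition_0', '1')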
<|file_name|>module3.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python #coding:utf-8<|fim▁hole|> print 'start to load...' import sys name = 42 def func(): pass class kclass : pass print 'done loading.'<|fim▁end|>
<|file_name|>get_keyword_feature.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------- # coding=utf8 # Name: 模块1 # Purpose: # # Author: zhx # # Created: 10/05/2016 # Copyright: (c) zhx 2016 # Licence: <your licence> #------------------------------------------------------------------------------- import openpyxl import jieba threshold = 2140 popular = 0 def main(): cctv_data = openpyxl.load_workbook("cctv.xlsx") cctv_keywords = openpyxl.load_workbook("cctv_keywords.xlsx") cctv_new = openpyxl.Workbook() new_sheet = cctv_new.active #print cctv_data.get_sheet_names() sheet1 = cctv_keywords["Sheet"] sheet2 = cctv_data["Sheet"] words = {} for r in xrange(1,36003): word = sheet1.cell(row=r,column=1).value word_min = sheet1.cell(row=r,column=2).value word_max = sheet1.cell(row=r,column=3).value word_mean = sheet1.cell(row=r,column=4).value words[word] = [word_min,word_max,word_mean] for r in xrange(2,4749): print r<|fim▁hole|> like = sheet2.cell(row=r,column=5).value repost = sheet2.cell(row=r,column=6).value if like == '赞': like = '0' if repost =='转发': repost = '0' like_repost = int(like)+int(repost) if like_repost>threshold: popular =1 else: popular =0 hour = int(time[1:3]) minute =int (time[4:]) time = hour*60 + minute new_sheet.cell(row=r,column=10).value = time new_sheet.cell(row=r,column=11).value = like_repost if content ==None: continue print r seg_list = jieba.cut(content, cut_all = True) wordsplite = ' '.join(seg_list) wordsplite = wordsplite.split(' ') maxlike = 0 max_word ='' min_word ='' mean_word='' minlike = 9999999 tmplist = [] tmpdic ={} for w in wordsplite: if words.has_key(w): tmpdic[w] =int(words[w][2]) tmplist.append(int(words[w][2])) likes = int(words[w][2]) if likes<minlike: minlike = likes min_word = w if likes>maxlike: maxlike = likes max_word = w else: continue if len(tmplist)!=0: tmplist.sort() mean = tmplist[int(len(tmplist)/2)] for w in tmpdic: if tmpdic[w]==mean: mean_word =w if min_word!='': new_sheet.cell(row=r,column=1).value = words[min_word][0] new_sheet.cell(row=r,column=2).value = words[min_word][1] new_sheet.cell(row=r,column=3).value = words[min_word][2] if max_word!='': new_sheet.cell(row=r,column=4).value = words[max_word][0] new_sheet.cell(row=r,column=5).value = words[max_word][1] new_sheet.cell(row=r,column=6).value = words[max_word][2] if mean_word!='': new_sheet.cell(row=r,column=7).value = words[mean_word][0] new_sheet.cell(row=r,column=8).value = words[mean_word][1] new_sheet.cell(row=r,column=9).value = words[mean_word][2] cctv_new.save("train_feature_keyword_reg.xlsx") main()<|fim▁end|>
content = sheet2.cell(row=r,column=3).value time = sheet2.cell(row=r,column=11).value
<|file_name|>gamekings.py<|end_file_name|><|fim▁begin|>import re from .common import InfoExtractor class GamekingsIE(InfoExtractor): _VALID_URL = r'http://www\.gamekings\.tv/videos/(?P<name>[0-9a-z\-]+)' _TEST = { u"url": u"http://www.gamekings.tv/videos/phoenix-wright-ace-attorney-dual-destinies-review/", u'file': u'20130811.mp4', # MD5 is flaky, seems to change regularly #u'md5': u'2f32b1f7b80fdc5cb616efb4f387f8a3', u'info_dict': { u"title": u"Phoenix Wright: Ace Attorney \u2013 Dual Destinies Review", u"description": u"Melle en Steven hebben voor de review een week in de rechtbank doorbracht met Phoenix Wright: Ace Attorney - Dual Destinies.", } } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) name = mobj.group('name') webpage = self._download_webpage(url, name) video_url = self._og_search_video_url(webpage) video = re.search(r'[0-9]+', video_url) video_id = video.group(0) # Todo: add medium format video_url = video_url.replace(video_id, 'large/' + video_id)<|fim▁hole|> 'id': video_id, 'ext': 'mp4', 'url': video_url, 'title': self._og_search_title(webpage), 'description': self._og_search_description(webpage), }<|fim▁end|>
return {
<|file_name|>epa.rs<|end_file_name|><|fim▁begin|>#[cfg(test)] #[path="../../../../tests/collisions/detection/gjkepa/epa_test.rs"] mod tests; use {NEG_INFINITY, Scalar}; use maths::{ApproxEq, Approximations, CoordinateTransform, Vec3D}; use maths::_2d::Vec2D; use utils::compute_surfaces_for_convex_hull; use algorithms::IterativeAlgorithm; use collisions::{CollisionData, ContactSet, SupportMap}; use collisions::shapes::{Direction, Intersection, Plane, Ray}; use collisions::shapes::_2d::{Line2D, Polygon}; use collisions::shapes::convex_shapes::ConvexPolyhedron; use collisions::detection::gjkepa::{GJKSimplex, MinkowskiDifference}; pub struct EPA<'a> { diff: MinkowskiDifference<'a>, vertices: Vec<Vec3D>, has_converged: bool, triangulated_faces: Vec<[usize; 3]>, } impl<'a> EPA<'a> { pub fn new(simplex: &GJKSimplex, data_0: &'a CollisionData, data_1: &'a CollisionData) -> EPA<'a> { let diff = MinkowskiDifference(data_0, data_1); let vertices: Vec<Vec3D> = simplex.vertices().iter().cloned().collect(); let triangulated_faces_original = [ [0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3], ]; let centroid = 0.25 * vertices.iter() .fold(Vec3D::zero(), |total, vertex| total + vertex); let triangulated_faces: Vec<[usize; 3]> = triangulated_faces_original.iter() .map(|triangulation| { let plane = Plane::from_counter_clockwise_points( vertices[triangulation[0]], vertices[triangulation[1]], vertices[triangulation[2]], ); if plane.normal_projection_of(centroid) < 0.0 { *triangulation } else { [triangulation[0], triangulation[2], triangulation[1]] } })<|fim▁hole|> EPA { diff: diff, vertices: vertices, has_converged: false, triangulated_faces: triangulated_faces, } } } impl<'a> IterativeAlgorithm for EPA<'a> { type Result = EPAPolyhedron<'a>; fn result(self) -> Self::Result { EPAPolyhedron { diff: self.diff, polyhedron: unsafe { ConvexPolyhedron::from_triangulation(self.vertices, self.triangulated_faces) }, } } fn has_converged(&self) -> bool { self.has_converged } fn next_iteration(&mut self) { if self.has_converged { return; } // TODO the triangulated faces never change order, so we can skip work // by filtered values in the next iteration let candidate = self.triangulated_faces.iter() .filter_map(|triangulation| { let plane = Plane::from_counter_clockwise_points( self.vertices[triangulation[0]], self.vertices[triangulation[1]], self.vertices[triangulation[2]], ); // TODO this should only return points on the boundary support // points self.diff.support_points_iter(Direction::from(plane.normal())) .filter(|&point| { // I don’t know why, but the uniqueness test is still // required !self.vertices.iter().any(|vertex| vertex.approx_eq(point)) && plane.normal_projection_of(point).is_strictly_positive() }) .next() }) .next(); match candidate { Some(new_support_point) => { self.vertices.push(new_support_point); let mid_point = self.vertices.iter() .fold(Vec3D::zero(), |total, vertex| { total + vertex }) / self.vertices.len() as Scalar; let surfaces = compute_surfaces_for_convex_hull(&self.vertices); self.triangulated_faces = surfaces.into_iter() .map(|surface| { let mut node_guess = surface.nodes; let plane = Plane::from_counter_clockwise_points( self.vertices[node_guess[0]], self.vertices[node_guess[1]], self.vertices[node_guess[2]], ); if plane.normal_projection_of(mid_point).is_strictly_positive() { node_guess = [node_guess[0], node_guess[2], node_guess[1]]; } node_guess }) .collect(); }, None => self.has_converged = true, } } } pub struct EPAPolyhedron<'a> { diff: MinkowskiDifference<'a>, polyhedron: 
ConvexPolyhedron, } impl<'a> EPAPolyhedron<'a> { pub fn compute_contact_set(self) -> ContactSet { let mut iterator = self.polyhedron.faces_iter(); let initial_face = iterator.next() .expect("expected polyhedron to have at least one face, but it did not"); let (penetration_depth, closest_face) = iterator .fold((NEG_INFINITY, initial_face), |(origin_to_closest_face_offset, closest_face), face| { let offset = face.normal_projection_of_origin(); let diff = offset - origin_to_closest_face_offset; if diff.is_strictly_positive() { (offset, face) } else { (origin_to_closest_face_offset, closest_face) } }); let contact_normal = -closest_face.normal(); let feature_0 = { let vertices = self.diff.0.support_points_iter(-Direction::from(contact_normal)) .collect::<Vec<Vec3D>>(); Feature::from_vertices(vertices) }; let feature_1 = { let vertices = self.diff.1.support_points_iter( Direction::from(contact_normal)) .collect::<Vec<Vec3D>>(); Feature::from_vertices(vertices) }; return match (feature_0, feature_1) { (Feature::Vertex(vertex), _other) => { let correction = contact_normal * penetration_depth / 2.0; let contact_point = vertex - correction; ContactSet::new( Plane::new(contact_point - contact_normal * penetration_depth, contact_normal), vec!(contact_point), ) }, (_other, Feature::Vertex(vertex)) => { let correction = contact_normal * penetration_depth / 2.0; let contact_point = vertex + correction; ContactSet::new( Plane::new(contact_point - contact_normal * penetration_depth, contact_normal), vec!(contact_point), ) }, (Feature::Edge(vertex_00, vertex_01), Feature::Edge(vertex_10, vertex_11)) => { let ray_0 = Ray::from_points(vertex_00, vertex_01); let ray_1 = Ray::from_points(vertex_10, vertex_11); // TODO does this need a depth correction? let contact_point = Ray::closest_point_to_rays(&ray_0, &ray_1); ContactSet::new( Plane::new(contact_point - contact_normal * penetration_depth, contact_normal), vec!(contact_point), ) }, (Feature::Edge(edge_vertex_0, edge_vertex_1), Feature::Face(face_vertices)) => { EPAPolyhedron::compute_contact_set_for_edge_face((edge_vertex_0, edge_vertex_1), face_vertices, Plane::from(closest_face)) }, (Feature::Face(face_vertices), Feature::Edge(edge_vertex_0, edge_vertex_1)) => { EPAPolyhedron::compute_contact_set_for_edge_face((edge_vertex_0, edge_vertex_1), face_vertices, Plane::from(closest_face).reversed()) }, (Feature::Face(face_vertices_0), Feature::Face(face_vertices_1)) => { EPAPolyhedron::compute_contact_set_for_face_face(face_vertices_0, face_vertices_1, Plane::from(closest_face)) }, }; } fn compute_contact_set_for_edge_face(edge_vertices: (Vec3D, Vec3D), face_vertices: Vec<Vec3D>, contact_plane: Plane) -> ContactSet { let coordinates = CoordinateTransform::from_plane(&contact_plane); let start = coordinates.transform(edge_vertices.0); let end = coordinates.transform(edge_vertices.1); let average_z_0 = (start.z + end.z) / 2.0; let edge_points = Line2D::new( Vec2D::new(start.x, start.y), Vec2D::new(end.x, end.y), ); let (polygon, average_z_1) = project_to_polygon_2d(face_vertices, &coordinates); let average_z = (average_z_0 + average_z_1) / 2.0; let intersection = polygon.intersection(&edge_points).unwrap(); let contact_point_0 = coordinates.transform_with_inverse(Vec3D::new(intersection.start.x, intersection.start.y, average_z)); let contact_point_1 = coordinates.transform_with_inverse(Vec3D::new(intersection.end.x, intersection.end.y, average_z)); return ContactSet::new( Plane::new(contact_point_0, contact_plane.normal()), vec!(contact_point_0, 
contact_point_1), ); } fn compute_contact_set_for_face_face(face_vertices_0: Vec<Vec3D>, face_vertices_1: Vec<Vec3D>, contact_plane: Plane) -> ContactSet { let coordinates = CoordinateTransform::from_plane(&contact_plane); let (polygon_0, average_z_0) = project_to_polygon_2d(face_vertices_0, &coordinates); let (polygon_1, average_z_1) = project_to_polygon_2d(face_vertices_1, &coordinates); let average_z = (average_z_0 + average_z_1) / 2.0; let intersection = polygon_0.intersection(&polygon_1) .expect("expected an intersection for face-face features, but none was found"); let points: Vec<Vec3D> = intersection.points().iter() .map(|point| coordinates.transform_with_inverse(Vec3D::new(point.x, point.y, average_z))) .collect(); return ContactSet::new( Plane::new(points[0], contact_plane.normal()), points, ); } pub fn polyhedron(&self) -> &ConvexPolyhedron { &self.polyhedron } } fn project_to_polygon_2d(vertices: Vec<Vec3D>, coordinates: &CoordinateTransform) -> (Polygon, Scalar) { let points: Vec<Vec3D> = vertices.into_iter() .map(|vertex| coordinates.transform(vertex)) .collect(); let average_z = points.iter().fold(0.0, |total, point| total + point.z) / points.len() as Scalar; let flat_projected_points: Vec<Vec2D> = points.into_iter() .map(|point| Vec2D::new(point.x, point.y)) .collect(); let polygon = Polygon::convex_hull_from_points(&flat_projected_points) // TODO can we avoid needing to validate this? .expect("A valid face always has enough points"); return (polygon, average_z); } enum Feature { Vertex(Vec3D), Edge(Vec3D, Vec3D), Face(Vec<Vec3D>), } impl Feature { fn from_vertices(colliding_vertices: Vec<Vec3D>) -> Feature { let length = colliding_vertices.len(); match length { 0 => panic!("no vertices found to describe the contact feature!"), 1 => Feature::Vertex(colliding_vertices[0]), 2 => Feature::Edge(colliding_vertices[0], colliding_vertices[1]), _otherwise => Feature::Face(colliding_vertices), } } }<|fim▁end|>
.collect();
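compute_contact_set above folds over the expanded polytope's faces keeping the plane that sits closest to the origin, which gives the penetration depth; reduced to plain numbers the selection is an argmax over signed plane offsets (illustrative sketch only):

    def closest_face(offsets):
        # offsets: signed distance of the origin from each face plane;
        # EPA keeps the largest (least negative) -> smallest penetration
        best = max(range(len(offsets)), key=lambda i: offsets[i])
        return best, offsets[best]

    assert closest_face([-0.5, -0.1, -0.9]) == (1, -0.1)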
<|file_name|>redis_queues.py<|end_file_name|><|fim▁begin|># This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; only version 2 of the License is applicable. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # This plugin is to monitor queue lengths in Redis. Based on redis_info.py by # Garret Heaton <powdahound at gmail.com>, hence the GPL at the top. import collectd from contextlib import closing, contextmanager import socket # Host to connect to. Override in config by specifying 'Host'. REDIS_HOST = 'localhost' # Port to connect on. Override in config by specifying 'Port'. REDIS_PORT = 6379 # Verbose logging on/off. Override in config by specifying 'Verbose'. VERBOSE_LOGGING = False # Queue names to monitor. Override in config by specifying 'Queues'. QUEUE_NAMES = [] def fetch_queue_lengths(queue_names): """Connect to Redis server and request queue lengths. Return a dictionary from queue names to integers. """ try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((REDIS_HOST, REDIS_PORT)) log_verbose('Connected to Redis at %s:%s' % (REDIS_HOST, REDIS_PORT)) except socket.error, e: collectd.error('redis_queues plugin: Error connecting to %s:%d - %r' % (REDIS_HOST, REDIS_PORT, e)) return None queue_lengths = {} with closing(s) as redis_socket: for queue_name in queue_names: log_verbose('Requesting length of queue %s' % queue_name) redis_socket.sendall('llen %s\r\n' % queue_name) with closing(redis_socket.makefile('r')) as response_file: response = response_file.readline() if response.startswith(':'): try: queue_lengths[queue_name] = int(response[1:-1]) except ValueError: log_verbose('Invalid response: %r' % response) else:<|fim▁hole|> def configure_callback(conf): """Receive configuration block""" global REDIS_HOST, REDIS_PORT, VERBOSE_LOGGING, QUEUE_NAMES for node in conf.children: if node.key == 'Host': REDIS_HOST = node.values[0] elif node.key == 'Port': REDIS_PORT = int(node.values[0]) elif node.key == 'Verbose': VERBOSE_LOGGING = bool(node.values[0]) elif node.key == 'Queues': QUEUE_NAMES = list(node.values) else: collectd.warning('redis_queues plugin: Unknown config key: %s.' % node.key) log_verbose('Configured with host=%s, port=%s' % (REDIS_HOST, REDIS_PORT)) for queue in QUEUE_NAMES: log_verbose('Watching queue %s' % queue) if not QUEUE_NAMES: log_verbose('Not watching any queues') def read_callback(): log_verbose('Read callback called') queue_lengths = fetch_queue_lengths(QUEUE_NAMES) if queue_lengths is None: # An earlier error, reported to collectd by fetch_queue_lengths return for queue_name, queue_length in queue_lengths.items(): log_verbose('Sending value: %s=%s' % (queue_name, queue_length)) val = collectd.Values(plugin='redis_queues') val.type = 'gauge' val.type_instance = queue_name val.values = [queue_length] val.dispatch() def log_verbose(msg): if not VERBOSE_LOGGING: return collectd.info('redis plugin [verbose]: %s' % msg) # register callbacks collectd.register_config(configure_callback) collectd.register_read(read_callback)<|fim▁end|>
log_verbose('Invalid response: %r' % response)
return queue_lengths
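fetch_queue_lengths above talks raw RESP: it writes 'llen <name>\r\n' and expects an integer reply of the form ':N\r\n'; the parse step in isolation (parse_integer_reply is an illustrative name, the plugin inlines this via response[1:-1]):

    def parse_integer_reply(line):
        # RESP integer replies look like ':42\r\n'
        if not line.startswith(':'):
            raise ValueError('not an integer reply: %r' % line)
        return int(line[1:].rstrip('\r\n'))

    assert parse_integer_reply(':42\r\n') == 42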
<|file_name|>context.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ The MIT License (MIT) Copyright (c) 2015-2016 Rapptz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import asyncio class Context: """Represents the context in which a command is being invoked under. This class contains a lot of meta data to help you understand more about the invocation context. This class is not created manually and is instead<|fim▁hole|> Attributes ----------- message : :class:`discord.Message` The message that triggered the command being executed. bot : :class:`Bot` The bot that contains the command being executed. args : list The list of transformed arguments that were passed into the command. If this is accessed during the :func:`on_command_error` event then this list could be incomplete. kwargs : dict A dictionary of transformed arguments that were passed into the command. Similar to :attr:`args`\, if this is accessed in the :func:`on_command_error` event then this dict could be incomplete. prefix : str The prefix that was used to invoke the command. command The command (i.e. :class:`Command` or its superclasses) that is being invoked currently. invoked_with : str The command name that triggered this invocation. Useful for finding out which alias called the command. invoked_subcommand The subcommand (i.e. :class:`Command` or its superclasses) that was invoked. If no valid subcommand was invoked then this is equal to `None`. subcommand_passed : Optional[str] The string that was attempted to call a subcommand. This does not have to point to a valid registered subcommand and could just point to a nonsense string. If nothing was passed to attempt a call to a subcommand then this is set to `None`. """ def __init__(self, **attrs): self.message = attrs.pop('message', None) self.bot = attrs.pop('bot', None) self.args = attrs.pop('args', []) self.kwargs = attrs.pop('kwargs', {}) self.prefix = attrs.pop('prefix') self.command = attrs.pop('command', None) self.view = attrs.pop('view', None) self.invoked_with = attrs.pop('invoked_with', None) self.invoked_subcommand = attrs.pop('invoked_subcommand', None) self.subcommand_passed = attrs.pop('subcommand_passed', None) @asyncio.coroutine def invoke(self, command, *args, **kwargs): """|coro| Calls a command with the arguments given. This is useful if you want to just call the callback that a :class:`Command` holds internally. Note ------ You do not pass in the context as it is done for you. 
Parameters ----------- command : :class:`Command` A command or superclass of a command that is going to be called. \*args The arguments to to use. \*\*kwargs The keyword arguments to use. """ arguments = [] if command.instance is not None: arguments.append(command.instance) if command.pass_context: arguments.append(self) arguments.extend(args) ret = yield from command.callback(*arguments, **kwargs) return ret @property def cog(self): """Returns the cog associated with this context's command. None if it does not exist.""" if self.command is None: return None return self.command.instance<|fim▁end|>
passed around to commands by passing in :attr:`Command.pass_context`.
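Context.invoke above builds the callback's argument list by prepending the cog instance and, when pass_context is set, the context itself; the ordering rule alone (stand-in values, the build_args helper is assumed):

    def build_args(instance, pass_context, ctx, args):
        # mirrors Context.invoke: instance first, then ctx, then user args
        out = []
        if instance is not None:
            out.append(instance)
        if pass_context:
            out.append(ctx)
        out.extend(args)
        return out

    assert build_args('cog', True, 'ctx', (1, 2)) == ['cog', 'ctx', 1, 2]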
<|file_name|>Adapter_PLC_Base.cc<|end_file_name|><|fim▁begin|>/* Lehrstuhl fuer Energietransport und -speicherung UNIVERSITAET DUISBURG-ESSEN ef.Ruhr E-DeMa AP-2 Wissenschaftlicher Mitarbeiter: Dipl.-Ing. Holger Kellerbauer Das Linklayer-Paket "powerline" umfasst eine Sammlung von Modulen, die zur Simulation von Powerline- Uebertragungsstrecken in intelligenten Energieverteilsystemen programmiert worden sind. Dieser Quellcode wurde erstellt von Dipl.-Ing. Holger Kellerbauer - er basiert auf dem INET Framework-Modul "Linklayer/Ethernet" von Andras Varga (c) 2003. Er ist gesitiges Eigentum des Lehrstuhles fuer Energietransport und -speicherung der Universitaet Duisburg-Essen, und darf ohne Genehmigung weder weitergegeben, noch verwendet werden. */ #include <stdio.h> #include <string.h> #include <omnetpp.h> #include "Adapter_PLC_Base.h" #include "IPassiveQueue.h" #include "IInterfaceTable.h" #include "InterfaceTableAccess.h" //static const double SPEED_OF_LIGHT = 200000000.0; // TODO: Changed by Ramon; already defined in INETDefs.h Adapter_PLC_Base::Adapter_PLC_Base() { nb = NULL; queueModule = NULL; interfaceEntry = NULL; endTxMsg = endIFGMsg = endPauseMsg = NULL; } Adapter_PLC_Base::~Adapter_PLC_Base() { cancelAndDelete(endTxMsg); cancelAndDelete(endIFGMsg); cancelAndDelete(endPauseMsg); } void Adapter_PLC_Base::initialize() { physOutGate = gate("phys$o"); initializeFlags(); initializeTxrate(); WATCH(txrate); initializeMACAddress(); initializeQueueModule(); initializeNotificationBoard(); initializeStatistics(); /* The adapter has no higher layer! */ // registerInterface(txrate); // needs MAC address // initialize queue txQueue.setName("txQueue"); // initialize self messages endTxMsg = new cMessage("EndTransmission", ENDTRANSMISSION); endIFGMsg = new cMessage("EndIFG", ENDIFG); endPauseMsg = new cMessage("EndPause", ENDPAUSE); // initialize states transmitState = TX_IDLE_STATE; receiveState = RX_IDLE_STATE; WATCH(transmitState); WATCH(receiveState); // initalize pause pauseUnitsRequested = 0; WATCH(pauseUnitsRequested); // initialize queue limit txQueueLimit = par("txQueueLimit"); WATCH(txQueueLimit); } void Adapter_PLC_Base::initializeQueueModule() { if (par("queueModule").stringValue()[0]) { cModule *module = getParentModule()->getSubmodule(par("queueModule").stringValue()); queueModule = check_and_cast<IPassiveQueue *>(module); if (COMMENTS_ON) EV << "Requesting first frame from queue module\n"; queueModule->requestPacket(); } } void Adapter_PLC_Base::initializeMACAddress() { const char *addrstr = par("address"); if (!strcmp(addrstr,"auto")) { // assign automatic address address = MACAddress::generateAutoAddress(); // change module parameter from "auto" to concrete address par("address").setStringValue(address.str().c_str()); } else { address.setAddress(addrstr); } } void Adapter_PLC_Base::initializeNotificationBoard() { hasSubscribers = false; if (interfaceEntry) { nb = NotificationBoardAccess().getIfExists(); notifDetails.setInterfaceEntry(interfaceEntry); nb->subscribe(this, NF_SUBSCRIBERLIST_CHANGED); updateHasSubcribers(); } } void Adapter_PLC_Base::initializeFlags() { // initialize connected flag connected = physOutGate->getPathEndGate()->isConnected(); if (!connected) if (COMMENTS_ON) EV << "MAC not connected to a network.\n"; WATCH(connected); // TODO: this should be settable from the gui // initialize disabled flag // Note: it is currently not supported to enable a disabled MAC at runtime. 
// Difficulties: (1) autoconfig (2) how to pick up channel state (free, tx, collision etc) disabled = false; WATCH(disabled); // initialize promiscuous flag promiscuous = par("promiscuous"); WATCH(promiscuous); } void Adapter_PLC_Base::initializeStatistics() { framesSentInBurst = 0; bytesSentInBurst = 0; numFramesSent = numFramesReceivedOK = numBytesSent = numBytesReceivedOK = 0; numFramesPassedToHL = numDroppedBitError = numDroppedNotForUs = 0; numFramesFromHL = numDroppedIfaceDown = 0; numPauseFramesRcvd = numPauseFramesSent = 0; WATCH(framesSentInBurst); WATCH(bytesSentInBurst); WATCH(numFramesSent); WATCH(numFramesReceivedOK); WATCH(numBytesSent); WATCH(numBytesReceivedOK); WATCH(numFramesFromHL); WATCH(numDroppedIfaceDown); WATCH(numDroppedBitError); WATCH(numDroppedNotForUs); WATCH(numFramesPassedToHL); WATCH(numPauseFramesRcvd); WATCH(numPauseFramesSent); /* numFramesSentVector.setName("framesSent"); numFramesReceivedOKVector.setName("framesReceivedOK"); numBytesSentVector.setName("bytesSent"); numBytesReceivedOKVector.setName("bytesReceivedOK"); numDroppedIfaceDownVector.setName("framesDroppedIfaceDown"); numDroppedBitErrorVector.setName("framesDroppedBitError"); numDroppedNotForUsVector.setName("framesDroppedNotForUs"); numFramesPassedToHLVector.setName("framesPassedToHL"); numPauseFramesRcvdVector.setName("pauseFramesRcvd"); numPauseFramesSentVector.setName("pauseFramesSent"); */ } void Adapter_PLC_Base::registerInterface(double txrate) { IInterfaceTable *ift = InterfaceTableAccess().getIfExists(); if (!ift) return; // interfaceEntry = new InterfaceEntry(); interfaceEntry = new InterfaceEntry(NULL); // TODO: Changed by Ramon // interface name: our module name without special characters ([]) char *interfaceName = new char[strlen(getParentModule()->getFullName())+1]; char *d=interfaceName; for (const char *s=getParentModule()->getFullName(); *s; s++) if (isalnum(*s)) *d++ = *s; *d = '\0'; interfaceEntry->setName(interfaceName); delete [] interfaceName; // data rate interfaceEntry->setDatarate(txrate); // generate a link-layer address to be used as interface token for IPv6 interfaceEntry->setMACAddress(address); interfaceEntry->setInterfaceToken(address.formInterfaceIdentifier()); //InterfaceToken token(0, simulation.getUniqueNumber(), 64); //interfaceEntry->setInterfaceToken(token); // MTU: typical values are 576 (Internet de facto), 1500 (PLC-friendly), // 4000 (on some point-to-point links), 4470 (Cisco routers default, FDDI compatible) interfaceEntry->setMtu(par("mtu")); // capabilities interfaceEntry->setMulticast(true); interfaceEntry->setBroadcast(true); // add // ift->addInterface(interfaceEntry, this); ift->addInterface(interfaceEntry); // TODO: Changed by Ramon } bool Adapter_PLC_Base::checkDestinationAddress(PlcFrame *frame) { /* // If not set to promiscuous = on, then checks if received frame contains destination MAC address // matching port's MAC address, also checks if broadcast bit is set if (!promiscuous && !frame->getDest().isBroadcast() && !frame->getDest().equals(address)) { if (COMMENTS_ON) EV << "Frame `" << frame->getName() <<"' not destined to us, discarding\n"; numDroppedNotForUs++; numDroppedNotForUsVector.record(numDroppedNotForUs); delete frame; return false; } */ if (COMMENTS_ON) EV << "Since I'm an adapter, I will pass through every frame!" 
<< endl; return true; } void Adapter_PLC_Base::calculateParameters() { if (disabled || !connected) { bitTime = slotTime = interFrameGap = jamDuration = shortestFrameDuration = 0; carrierExtension = frameBursting = false; return; } // CHANGE --------------------------------------------------------------------------------------- /* This section has to be disabled, because it restricts the transmission rates to a few permitted values, which is simply not applicable to powerline. if (txrate != PLC_TXRATE && txrate != FAST_PLC_TXRATE && txrate != GIGABIT_PLC_TXRATE && txrate != FAST_GIGABIT_PLC_TXRATE) { error("nonstandard transmission rate %g, must be %g, %g, %g or %g bit/sec", txrate, PLC_TXRATE, FAST_PLC_TXRATE, GIGABIT_PLC_TXRATE, FAST_GIGABIT_PLC_TXRATE); } */ double temp = ROBO_DATARATE * 1000000; if (txrate <= temp) { if (COMMENTS_ON) EV << "Measured datarate is estimated below ROBO datarate." << endl; if (COMMENTS_ON) EV << "ROBO datarate is the lowest datarate possible." << endl; if (COMMENTS_ON) EV << "TX rate will be set to ROBO datarate." << endl; txrate = ROBO_DATARATE * 1000000; } if (txrate <= ROBO_DATARATE || txrate >= 1000000000) txrate = ROBO_DATARATE; bitTime = 1/(double)txrate; if (COMMENTS_ON) EV << endl << "Bit time is calculated to " << bitTime << "s." << endl; /* This section has to be disabled, because the parameters for powerline transmissions have to be calculated somewhat differently. // set slot time if (txrate==PLC_TXRATE || txrate==FAST_PLC_TXRATE) slotTime = SLOT_TIME; else slotTime = GIGABIT_SLOT_TIME; // only if Gigabit PLC frameBursting = (txrate==GIGABIT_PLC_TXRATE || txrate==FAST_GIGABIT_PLC_TXRATE); carrierExtension = (slotTime == GIGABIT_SLOT_TIME && !duplexMode); interFrameGap = INTERFRAME_GAP_BITS/(double)txrate; jamDuration = 8*JAM_SIGNAL_BYTES*bitTime; shortestFrameDuration = carrierExtension ? GIGABIT_MIN_FRAME_WITH_EXT : MIN_PLC_FRAME; */ // set slot time slotTime = 512/txrate; if (COMMENTS_ON) EV << "Slot time is set to " << slotTime << "s." << endl; // only if fast PLC // A burst may not be longer than 5000 usec. 4 x max frame duration < 5000 usec => frameBursting = true! simtime_t k = 0.005; /* s */ simtime_t x = (simtime_t)(bitTime * (MAX_PLC_FRAME * 8 /* bytes */) * 4); if (COMMENTS_ON) EV << "Calculation for frame bursting resulted in " << x << " sec." << endl; if (COMMENTS_ON) EV << "Threshold for frame bursting is " << k << " sec." << endl; if (x < k) { frameBursting = true; } else { frameBursting = false; } if (COMMENTS_ON) EV << "Frame bursting is at " << frameBursting << "." << endl; carrierExtension = false; // not available for PLC if (frameBursting) { interFrameGap = /* INTERFRAME_GAP_BITS/(double)txrate; */ CIFS / 1000000; } else { interFrameGap = /* INTERFRAME_GAP_BITS/(double)txrate; */ RIFS / 1000000; } if (COMMENTS_ON) EV << "Inter frame gap is at " << interFrameGap << "s." << endl; jamDuration = 8*JAM_SIGNAL_BYTES*bitTime; if (COMMENTS_ON) EV << "Jam duration is at " << jamDuration << "s." << endl; shortestFrameDuration = MIN_PLC_FRAME; // ---------------------------------------------------------------------------------------------- } void Adapter_PLC_Base::printParameters() { // Dump parameters if (COMMENTS_ON) EV << "MAC address: " << address << (promiscuous ? ", promiscuous mode" : "") << endl; if (COMMENTS_ON) EV << "txrate: " << txrate << ", " << (duplexMode ?
"duplex" : "half-duplex") << endl; #if 0 EV << "bitTime: " << bitTime << endl; EV << "carrierExtension: " << carrierExtension << endl; EV << "frameBursting: " << frameBursting << endl; EV << "slotTime: " << slotTime << endl; EV << "interFrameGap: " << interFrameGap << endl; EV << endl; #endif } void Adapter_PLC_Base::processFrameFromUpperLayer(PlcFrame *frame) { if (COMMENTS_ON) EV << "Received frame from upper layer: " << frame << endl; if (frame->getDest().equals(address)) { error("logic error: frame %s from higher layer has local MAC address as dest (%s)", frame->getFullName(), frame->getDest().str().c_str()); } if (frame->getByteLength() > MAX_PLC_FRAME) error("packet from higher layer (%d bytes) exceeds maximum PLC frame size (%d)", frame->getByteLength(), MAX_PLC_FRAME); // must be PlcFrame (or PlcPauseFrame) from upper layer bool isPauseFrame = (dynamic_cast<PlcPauseFrame*>(frame)!=NULL); if (!isPauseFrame) { numFramesFromHL++; if (txQueueLimit && txQueue.length()>txQueueLimit) error("txQueue length exceeds %d -- this is probably due to " "a bogus app model generating excessive traffic " "(or if this is normal, increase txQueueLimit!)", txQueueLimit); // fill in src address if not set if (frame->getSrc().isUnspecified()) frame->setSrc(address); // store frame and possibly begin transmitting if (COMMENTS_ON) EV << "Packet " << frame << " arrived from higher layers, enqueueing\n"; txQueue.insert(frame); } else { if (COMMENTS_ON) EV << "PAUSE received from higher layer\n"; // PAUSE frames enjoy priority -- they're transmitted before all other frames queued up if (!txQueue.empty()) txQueue.insertBefore(txQueue.front(), frame); // front() frame is probably being transmitted else txQueue.insert(frame); } } void Adapter_PLC_Base::processMsgFromNetwork(cPacket *frame) { if (COMMENTS_ON) EV << "Received frame from network: " << frame << endl; // frame must be PlcFrame or PlcJam if (dynamic_cast<PlcFrame*>(frame)==NULL && dynamic_cast<PlcJam*>(frame)==NULL) error("message with unexpected message class '%s' arrived from network (name='%s')", frame->getClassName(), frame->getFullName()); // detect cable length violation in half-duplex mode if (!duplexMode && simTime()-frame->getSendingTime()>=shortestFrameDuration) error("very long frame propagation time detected, maybe cable exceeds maximum allowed length? " "(%lgs corresponds to an approx. 
%lgm cable)", SIMTIME_STR(simTime() - frame->getSendingTime()), SIMTIME_STR((simTime() - frame->getSendingTime())*SPEED_OF_LIGHT)); } void Adapter_PLC_Base::frameReceptionComplete(PlcFrame *frame) { int pauseUnits; PlcPauseFrame *pauseFrame; if ((pauseFrame=dynamic_cast<PlcPauseFrame*>(frame))!=NULL) { pauseUnits = pauseFrame->getPauseTime(); delete frame; numPauseFramesRcvd++; // numPauseFramesRcvdVector.record(numPauseFramesRcvd); processPauseCommand(pauseUnits); } else { processReceivedDataFrame((PlcFrame *)frame); } } void Adapter_PLC_Base::processReceivedDataFrame(PlcFrame *frame) { // bit errors if (frame->hasBitError()) { numDroppedBitError++; // numDroppedBitErrorVector.record(numDroppedBitError); delete frame; return; } // strip preamble and SFD frame->addByteLength(-PREAMBLE_BYTES-SFD_BYTES); // statistics numFramesReceivedOK++; numBytesReceivedOK += frame->getByteLength(); // numFramesReceivedOKVector.record(numFramesReceivedOK); // numBytesReceivedOKVector.record(numBytesReceivedOK); if (!checkDestinationAddress(frame)) return; numFramesPassedToHL++; // numFramesPassedToHLVector.record(numFramesPassedToHL); // pass up to upper layer send(frame, "upperLayerOut"); } void Adapter_PLC_Base::processPauseCommand(int pauseUnits) { if (transmitState==TX_IDLE_STATE) { if (COMMENTS_ON) EV << "PAUSE frame received, pausing for " << pauseUnitsRequested << " time units\n"; if (pauseUnits>0) scheduleEndPausePeriod(pauseUnits); } else if (transmitState==PAUSE_STATE) { if (COMMENTS_ON) EV << "PAUSE frame received, pausing for " << pauseUnitsRequested << " more time units from now\n"; cancelEvent(endPauseMsg); if (pauseUnits>0) scheduleEndPausePeriod(pauseUnits); } else { // transmitter busy -- wait until it finishes with current frame (endTx) // and then it'll go to PAUSE state if (COMMENTS_ON) EV << "PAUSE frame received, storing pause request\n"; pauseUnitsRequested = pauseUnits; } } void Adapter_PLC_Base::handleEndIFGPeriod() { if (transmitState!=WAIT_IFG_STATE) error("Not in WAIT_IFG_STATE at the end of IFG period"); if (txQueue.empty()) error("End of IFG and no frame to transmit"); // End of IFG period, okay to transmit, if Rx idle OR duplexMode cPacket *frame = (cPacket *)txQueue.front(); if (COMMENTS_ON) EV << "IFG elapsed, now begin transmission of frame " << frame << endl; // CHANGE ---------------------------------------------------------------------------- // We skip carrier extension, because there is no for plc communications /* // Perform carrier extension if in Gigabit PLC if (carrierExtension && frame->getByteLength() < GIGABIT_MIN_FRAME_WITH_EXT) { EV << "Performing carrier extension of small frame\n"; frame->setByteLength(GIGABIT_MIN_FRAME_WITH_EXT); } */ // ----------------------------------------------------------------------------------- // start frame burst, if enabled if (frameBursting) { if (COMMENTS_ON) EV << "Starting frame burst\n"; framesSentInBurst = 0; bytesSentInBurst = 0; } } void Adapter_PLC_Base::handleEndTxPeriod() {<|fim▁hole|> if (transmitState!=TRANSMITTING_STATE || (!duplexMode && receiveState!=RX_IDLE_STATE)) error("End of transmission, and incorrect state detected"); if (txQueue.empty()) error("Frame under transmission cannot be found"); // get frame from buffer cPacket *frame = (cPacket *)txQueue.pop(); numFramesSent++; numBytesSent += frame->getByteLength(); // numFramesSentVector.record(numFramesSent); // numBytesSentVector.record(numBytesSent); if (dynamic_cast<PlcPauseFrame*>(frame)!=NULL) { numPauseFramesSent++; // 
numPauseFramesSentVector.record(numPauseFramesSent); } if (COMMENTS_ON) EV << "Transmission of " << frame << " successfully completed\n"; delete frame; } void Adapter_PLC_Base::handleEndPausePeriod() { if (transmitState != PAUSE_STATE) error("At end of PAUSE not in PAUSE_STATE!"); if (COMMENTS_ON) EV << "Pause finished, resuming transmissions\n"; beginSendFrames(); } void Adapter_PLC_Base::processMessageWhenNotConnected(cMessage *msg) { if (COMMENTS_ON) EV << "Interface is not connected -- dropping packet " << msg << endl; delete msg; numDroppedIfaceDown++; } void Adapter_PLC_Base::processMessageWhenDisabled(cMessage *msg) { if (COMMENTS_ON) EV << "MAC is disabled -- dropping message " << msg << endl; delete msg; } void Adapter_PLC_Base::scheduleEndIFGPeriod() { // CHANGE ------------------------------------------------------------------------------ /* Unlike CSMA/CD (Ethernet), PLC uses CSMA/CA. Here, a randomly chosen additional wait time - a multiple of 1.28 us (this value was obtained from the modem manufacturer devolo) - is added on top of the "normal" wait time after the medium has been found idle. This makes collisions even less likely, because after the medium is released not all modems with a pending frame try to seize it at the same time. */ // For CSMA/CA, we have to add a randomly selected additional wait time int gap = CSMA_CA_MAX_ADDITIONAL_WAIT_TIME; int x = rand()%10; x = x * gap; simtime_t additional_wait_time = (simtime_t) x/1000; if (COMMENTS_ON) EV << "End of IFG period is scheduled at t=" << simTime()+interFrameGap +additional_wait_time << "." << endl; // ------------------------------------------------------------------------------------- scheduleAt(simTime()+interFrameGap +additional_wait_time, endIFGMsg); transmitState = WAIT_IFG_STATE; } // CHANGE ------------------------------------------------------------------------------ void Adapter_PLC_Base::scheduleEndIFGPeriod(int priority) /* To guarantee that, despite the somewhat more random medium access caused by CSMA/CA, important information for maintaining the connections is still preferred over plain data traffic, every release of the medium is followed by the "Priority resolution period". In four stages, the modems with important frames are given precedence first. For this, the period is divided into 4 sub-windows, and the CSMA/CA-extended wait time is resolved within these windows. [----------------------------- Priority resolution period ------------------------------------] t-> [Window 1 - Priority 4] [Window 2 - Priority 3] [Window 3 - Priority 2] [Window 4 - Priority 1] */ { if (COMMENTS_ON) EV << "Scheduling end of IFG period ..." << endl; if (COMMENTS_ON) EV << "Priority based traffic detected. Priority is " << priority << "."
<< endl; // the higher the priority, the faster the channel access double whole_period = PRIORITY_RESOLUTION_PERIOD; if (COMMENTS_ON) EV << "Whole priority resolution period: " << whole_period << " micro sec" << endl; double quarter_period = whole_period/4; if (COMMENTS_ON) EV << "Quarter of the priority resolution period: " << quarter_period << " micro sec" << endl; double basic_priority_period = whole_period - (priority * quarter_period); if (COMMENTS_ON) EV << "Basic priority period for a priority of " << priority << " is: " << basic_priority_period << " micro sec" << endl; double fluctuation = quarter_period * 0.9; // 16,12 us if (COMMENTS_ON) EV << "The fluctuations are at maximum: " << fluctuation << " micro sec" << endl; // For CSMA/CA, we have to add a random selected additional wait time int gap = CSMA_CA_MAX_ADDITIONAL_WAIT_TIME; int int_k = rand()%11; // 0 us to 14,08 us if (COMMENTS_ON) EV << "Diceroll resulted in " << int_k << "." << endl; double m = fluctuation - (int_k * gap); double x = (m+basic_priority_period)/1000000; simtime_t priority_wait_time = (simtime_t) x; // us if (COMMENTS_ON) EV << "This time, the priority based additional wait time is calculated to: " << priority_wait_time * 1000000 << " micro sec" << endl; simtime_t complete_wait_time = interFrameGap+priority_wait_time; if (COMMENTS_ON) EV << "End of IFG period is scheduled at t=" << simTime()+complete_wait_time << " micro sec." << endl << endl; scheduleAt(simTime()+complete_wait_time, endIFGMsg); transmitState = WAIT_IFG_STATE; } // ------------------------------------------------------------------------------------- void Adapter_PLC_Base::scheduleEndTxPeriod(cPacket *frame) { scheduleAt(simTime()+frame->getBitLength()*bitTime, endTxMsg); transmitState = TRANSMITTING_STATE; } void Adapter_PLC_Base::scheduleEndPausePeriod(int pauseUnits) { // length is interpreted as 512-bit-time units simtime_t pausePeriod = pauseUnits*PAUSE_BITTIME*bitTime; scheduleAt(simTime()+pausePeriod, endPauseMsg); transmitState = PAUSE_STATE; } bool Adapter_PLC_Base::checkAndScheduleEndPausePeriod() { if (pauseUnitsRequested>0) { // if we received a PAUSE frame recently, go into PAUSE state if (COMMENTS_ON) EV << "Going to PAUSE mode for " << pauseUnitsRequested << " time units\n"; scheduleEndPausePeriod(pauseUnitsRequested); pauseUnitsRequested = 0; return true; } return false; } void Adapter_PLC_Base::beginSendFrames() { if (!txQueue.empty()) { // Other frames are queued, therefore wait IFG period and transmit next frame if (COMMENTS_ON) EV << "Transmit next frame in output queue, after IFG period\n"; scheduleEndIFGPeriod(); } else { transmitState = TX_IDLE_STATE; if (queueModule) { // tell queue module that we've become idle if (COMMENTS_ON) EV << "Requesting another frame from queue module\n"; queueModule->requestPacket(); } else { // No more frames set transmitter to idle if (COMMENTS_ON) EV << "No more frames to send, transmitter set to idle\n"; } } } void Adapter_PLC_Base::fireChangeNotification(int type, cPacket *msg) { if (nb) { notifDetails.setPacket(msg); nb->fireChangeNotification(type, &notifDetails); } } void Adapter_PLC_Base::finish() { /* if (!disabled) { simtime_t t = simTime(); recordScalar("simulated time", t); recordScalar("txrate (Mb)", txrate/1000000); recordScalar("full duplex", duplexMode); recordScalar("frames sent", numFramesSent); recordScalar("frames rcvd", numFramesReceivedOK); recordScalar("bytes sent", numBytesSent); recordScalar("bytes rcvd", numBytesReceivedOK); recordScalar("frames from higher 
layer", numFramesFromHL); recordScalar("frames from higher layer dropped (iface down)", numDroppedIfaceDown); recordScalar("frames dropped (bit error)", numDroppedBitError); recordScalar("frames dropped (not for us)", numDroppedNotForUs); recordScalar("frames passed up to HL", numFramesPassedToHL); recordScalar("PAUSE frames sent", numPauseFramesSent); recordScalar("PAUSE frames rcvd", numPauseFramesRcvd); if (t>0) { recordScalar("frames/sec sent", numFramesSent/t); recordScalar("frames/sec rcvd", numFramesReceivedOK/t); recordScalar("bits/sec sent", 8*numBytesSent/t); recordScalar("bits/sec rcvd", 8*numBytesReceivedOK/t); } } */ } void Adapter_PLC_Base::updateDisplayString() { // icon coloring const char *color; if (receiveState==RX_COLLISION_STATE) color = "red"; else if (transmitState==TRANSMITTING_STATE) color = "yellow"; else if (transmitState==JAMMING_STATE) color = "red"; else if (receiveState==RECEIVING_STATE) color = "#4040ff"; else if (transmitState==BACKOFF_STATE) color = "white"; else if (transmitState==PAUSE_STATE) color = "gray"; else color = ""; getDisplayString().setTagArg("i",1,color); if (!strcmp(getParentModule()->getClassName(),"PLCInterface")) getParentModule()->getDisplayString().setTagArg("i",1,color); // connection coloring updateConnectionColor(transmitState); #if 0 // this code works but didn't turn out to be very useful const char *txStateName; switch (transmitState) { case TX_IDLE_STATE: txStateName="IDLE"; break; case WAIT_IFG_STATE: txStateName="WAIT_IFG"; break; case TRANSMITTING_STATE: txStateName="TX"; break; case JAMMING_STATE: txStateName="JAM"; break; case BACKOFF_STATE: txStateName="BACKOFF"; break; case PAUSE_STATE: txStateName="PAUSE"; break; default: error("wrong tx state"); } const char *rxStateName; switch (receiveState) { case RX_IDLE_STATE: rxStateName="IDLE"; break; case RECEIVING_STATE: rxStateName="RX"; break; case RX_COLLISION_STATE: rxStateName="COLL"; break; default: error("wrong rx state"); } char buf[80]; sprintf(buf, "tx:%s rx: %s\n#boff:%d #cTx:%d", txStateName, rxStateName, backoffs, numConcurrentTransmissions); getDisplayString().setTagArg("t",0,buf); #endif } void Adapter_PLC_Base::updateConnectionColor(int txState) { const char *color; if (txState==TRANSMITTING_STATE) color = "yellow"; else if (txState==JAMMING_STATE || txState==BACKOFF_STATE) color = "red"; else color = ""; cGate *g = physOutGate; while (g && g->getType()==cGate::OUTPUT) { g->getDisplayString().setTagArg("o",0,color); g->getDisplayString().setTagArg("o",1, color[0] ? "3" : "1"); g = g->getNextGate(); } } void Adapter_PLC_Base::receiveChangeNotification(int category, const cPolymorphic *) { if (category==NF_SUBSCRIBERLIST_CHANGED) updateHasSubcribers(); }<|fim▁end|>
// we only get here if transmission has finished successfully, without collision
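As a worked example of the timing logic in scheduleEndIFGPeriod(int priority) above, the sketch below recomputes the priority-dependent extra wait in Python. The real constants PRIORITY_RESOLUTION_PERIOD and CSMA_CA_MAX_ADDITIONAL_WAIT_TIME live in a header that is not part of this excerpt, so the defaults here (71.65 us for the whole period, the 1.28 us step from the devolo remark) are assumptions for illustration only.

import random

def priority_ifg_extra_us(priority, whole_period_us=71.65, gap_us=1.28, k=None):
    # one of four priority windows; higher priority -> earlier window
    quarter = whole_period_us / 4.0
    basic = whole_period_us - priority * quarter
    # jitter inside the window, mirroring "fluctuation = quarter_period * 0.9"
    fluctuation = quarter * 0.9
    if k is None:
        k = random.randrange(11)  # mirrors rand()%11
    # this value is added on top of interFrameGap before scheduleAt()
    return basic + (fluctuation - k * gap_us)

# priority 4 frames resolve in the first window, priority 1 in the last:
print(priority_ifg_extra_us(4, k=0))   # ~16.1 us
print(priority_ifg_extra_us(1, k=10))  # ~57.1 us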
<|file_name|>eureka_test.go<|end_file_name|><|fim▁begin|>// Copyright 2020 The Prometheus Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package eureka import ( "context" "io" "net/http" "net/http/httptest" "testing" "github.com/prometheus/common/model" "github.com/stretchr/testify/require" "github.com/prometheus/prometheus/discovery/targetgroup" ) func testUpdateServices(respHandler http.HandlerFunc) ([]*targetgroup.Group, error) { // Create a test server with mock HTTP handler. ts := httptest.NewServer(respHandler) defer ts.Close() conf := SDConfig{ Server: ts.URL, } md, err := NewDiscovery(&conf, nil) if err != nil { return nil, err } return md.refresh(context.Background()) } func TestEurekaSDHandleError(t *testing.T) { var ( errTesting = "non 2xx status '500' response during eureka service discovery" respHandler = func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) w.Header().Set("Content-Type", "application/xml") io.WriteString(w, ``) } ) tgs, err := testUpdateServices(respHandler) require.EqualError(t, err, errTesting) require.Equal(t, len(tgs), 0) } func TestEurekaSDEmptyList(t *testing.T) { var ( appsXML = `<applications> <versions__delta>1</versions__delta> <apps__hashcode/> </applications>` respHandler = func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) w.Header().Set("Content-Type", "application/xml") io.WriteString(w, appsXML) } ) tgs, err := testUpdateServices(respHandler) require.NoError(t, err) require.Equal(t, len(tgs), 1) } func TestEurekaSDSendGroup(t *testing.T) { var ( appsXML = `<applications> <versions__delta>1</versions__delta> <apps__hashcode>UP_4_</apps__hashcode> <application> <name>CONFIG-SERVICE</name> <instance> <instanceId>config-service001.test.com:config-service:8080</instanceId> <hostName>config-service001.test.com</hostName> <app>CONFIG-SERVICE</app> <ipAddr>192.133.83.31</ipAddr> <status>UP</status> <overriddenstatus>UNKNOWN</overriddenstatus> <port enabled="true">8080</port> <securePort enabled="false">8080</securePort> <countryId>1</countryId> <dataCenterInfo class="com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo"> <name>MyOwn</name> </dataCenterInfo> <leaseInfo> <renewalIntervalInSecs>30</renewalIntervalInSecs> <durationInSecs>90</durationInSecs> <registrationTimestamp>1596003469304</registrationTimestamp> <lastRenewalTimestamp>1596110179310</lastRenewalTimestamp> <evictionTimestamp>0</evictionTimestamp> <serviceUpTimestamp>1547190033103</serviceUpTimestamp> </leaseInfo> <metadata> <instanceId>config-service001.test.com:config-service:8080</instanceId> </metadata> <homePageUrl>http://config-service001.test.com:8080/</homePageUrl> <statusPageUrl>http://config-service001.test.com:8080/info</statusPageUrl> <healthCheckUrl>http://config-service001.test.com 8080/health</healthCheckUrl> <vipAddress>config-service</vipAddress> <isCoordinatingDiscoveryServer>false</isCoordinatingDiscoveryServer> <lastUpdatedTimestamp>1596003469304</lastUpdatedTimestamp> 
<lastDirtyTimestamp>1596003469304</lastDirtyTimestamp> <actionType>ADDED</actionType> </instance> <instance> <instanceId>config-service002.test.com:config-service:8080</instanceId> <hostName>config-service002.test.com</hostName> <app>CONFIG-SERVICE</app> <ipAddr>192.133.83.31</ipAddr> <status>UP</status> <overriddenstatus>UNKNOWN</overriddenstatus> <port enabled="true">8080</port> <securePort enabled="false">8080</securePort> <countryId>1</countryId> <dataCenterInfo class="com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo"> <name>MyOwn</name> </dataCenterInfo> <leaseInfo> <renewalIntervalInSecs>30</renewalIntervalInSecs> <durationInSecs>90</durationInSecs> <registrationTimestamp>1596003469304</registrationTimestamp> <lastRenewalTimestamp>1596110179310</lastRenewalTimestamp> <evictionTimestamp>0</evictionTimestamp> <serviceUpTimestamp>1547190033103</serviceUpTimestamp> </leaseInfo> <metadata> <instanceId>config-service002.test.com:config-service:8080</instanceId> </metadata> <homePageUrl>http://config-service002.test.com:8080/</homePageUrl> <statusPageUrl>http://config-service002.test.com:8080/info</statusPageUrl> <healthCheckUrl>http://config-service002.test.com:8080/health</healthCheckUrl> <vipAddress>config-service</vipAddress> <isCoordinatingDiscoveryServer>false</isCoordinatingDiscoveryServer> <lastUpdatedTimestamp>1596003469304</lastUpdatedTimestamp> <lastDirtyTimestamp>1596003469304</lastDirtyTimestamp> <actionType>ADDED</actionType> </instance> </application> <application> <name>META-SERVICE</name> <instance> <instanceId>meta-service002.test.com:meta-service:8080</instanceId> <hostName>meta-service002.test.com</hostName> <app>META-SERVICE</app> <ipAddr>192.133.87.237</ipAddr> <status>UP</status> <overriddenstatus>UNKNOWN</overriddenstatus> <port enabled="true">8080</port> <securePort enabled="false">443</securePort> <countryId>1</countryId> <dataCenterInfo class="com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo"> <name>MyOwn</name> </dataCenterInfo> <leaseInfo> <renewalIntervalInSecs>30</renewalIntervalInSecs> <durationInSecs>90</durationInSecs> <registrationTimestamp>1535444352472</registrationTimestamp> <lastRenewalTimestamp>1596110168846</lastRenewalTimestamp> <evictionTimestamp>0</evictionTimestamp> <serviceUpTimestamp>1535444352472</serviceUpTimestamp> </leaseInfo> <metadata> <project>meta-service</project> <management.port>8090</management.port> </metadata> <homePageUrl>http://meta-service002.test.com:8080/</homePageUrl> <statusPageUrl>http://meta-service002.test.com:8080/info</statusPageUrl> <healthCheckUrl>http://meta-service002.test.com:8080/health</healthCheckUrl> <vipAddress>meta-service</vipAddress> <secureVipAddress>meta-service</secureVipAddress> <isCoordinatingDiscoveryServer>false</isCoordinatingDiscoveryServer> <lastUpdatedTimestamp>1535444352472</lastUpdatedTimestamp> <lastDirtyTimestamp>1535444352398</lastDirtyTimestamp> <actionType>ADDED</actionType> </instance> <instance> <instanceId>meta-service001.test.com:meta-service:8080</instanceId> <hostName>meta-service001.test.com</hostName> <app>META-SERVICE</app> <ipAddr>192.133.87.236</ipAddr> <status>UP</status> <overriddenstatus>UNKNOWN</overriddenstatus> <port enabled="true">8080</port> <securePort enabled="false">443</securePort> <countryId>1</countryId> <dataCenterInfo class="com.netflix.appinfo.InstanceInfo$DefaultDataCenterInfo"> <name>MyOwn</name> </dataCenterInfo> <leaseInfo> <renewalIntervalInSecs>30</renewalIntervalInSecs> <durationInSecs>90</durationInSecs> 
<registrationTimestamp>1535444352472</registrationTimestamp> <lastRenewalTimestamp>1596110168846</lastRenewalTimestamp> <evictionTimestamp>0</evictionTimestamp><|fim▁hole|> </leaseInfo> <metadata> <project>meta-service</project> <management.port>8090</management.port> </metadata> <homePageUrl>http://meta-service001.test.com:8080/</homePageUrl> <statusPageUrl>http://meta-service001.test.com:8080/info</statusPageUrl> <healthCheckUrl>http://meta-service001.test.com:8080/health</healthCheckUrl> <vipAddress>meta-service</vipAddress> <secureVipAddress>meta-service</secureVipAddress> <isCoordinatingDiscoveryServer>false</isCoordinatingDiscoveryServer> <lastUpdatedTimestamp>1535444352472</lastUpdatedTimestamp> <lastDirtyTimestamp>1535444352398</lastDirtyTimestamp> <actionType>ADDED</actionType> </instance> </application> </applications>` respHandler = func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) w.Header().Set("Content-Type", "application/xml") io.WriteString(w, appsXML) } ) tgs, err := testUpdateServices(respHandler) require.NoError(t, err) require.Equal(t, len(tgs), 1) tg := tgs[0] require.Equal(t, tg.Source, "eureka") require.Equal(t, len(tg.Targets), 4) tgt := tg.Targets[0] require.Equal(t, tgt[model.AddressLabel], model.LabelValue("config-service001.test.com:8080")) tgt = tg.Targets[2] require.Equal(t, tgt[model.AddressLabel], model.LabelValue("meta-service002.test.com:8080")) }<|fim▁end|>
<serviceUpTimestamp>1535444352472</serviceUpTimestamp>
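The assertions in the test above encode how targets are derived from the Eureka payload: one target per <instance>, addressed as <hostName>:<port>. The Python sketch below is not the Go implementation under test, just an illustration of that derivation on a trimmed-down payload.

import xml.etree.ElementTree as ET

apps_xml = """<applications><application><name>CONFIG-SERVICE</name>
<instance><hostName>config-service001.test.com</hostName>
<port enabled="true">8080</port></instance></application></applications>"""

def targets(xml_text):
    # one "host:port" address per instance, matching model.AddressLabel above
    root = ET.fromstring(xml_text)
    return ["%s:%s" % (i.findtext("hostName"), i.findtext("port"))
            for i in root.iter("instance")]

print(targets(apps_xml))  # ['config-service001.test.com:8080']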
<|file_name|>fileRotate.js<|end_file_name|><|fim▁begin|>/* * file.js: Transport for outputting to a local log file * * (C) 2010 Charlie Robbins * MIT LICENCE * */ var events = require('events'), fs = require('fs'), path = require('path'), util = require('util'), colors = require('colors'), common = require('../common'), Transport = require('./transport').Transport; // // ### function FileRotate (options) // #### @options {Object} Options for this instance. // Constructor function for the FileRotate transport object responsible // for persisting log messages and metadata to one or more files. // var FileRotate = exports.FileRotate = function (options) { Transport.call(this, options); // // Helper function which throws an `Error` in the event // that any of the rest of the arguments is present in `options`. // function throwIf (target /*, illegal... */) { Array.prototype.slice.call(arguments, 1).forEach(function (name) { if (options[name]) { throw new Error('Cannot set ' + name + ' and ' + target + 'together'); } }); } if (options.filename || options.dirname) { throwIf('filename or dirname', 'stream'); this._basename = this.filename = path.basename(options.filename) || 'winston.log'; this.dirname = options.dirname || path.dirname(options.filename); this.options = options.options || { flags: 'a' }; } else if (options.stream) { throwIf('stream', 'filename', 'maxsize'); this.stream = options.stream; } else { throw new Error('Cannot log to file without filename or stream.'); } this.json = options.json !== false; this.colorize = options.colorize || false; this.maxsize = options.maxsize || null; this.maxFiles = options.maxFiles || null; this.timestamp = typeof options.timestamp !== 'undefined' ? options.timestamp : false; // // Internal state variables representing the number // of files this instance has created and the current // size (in bytes) of the current logfile. // this._size = 0; this._created = 0; this._buffer = []; this._draining = false; var today = new Date(); this._month = today.getMonth(); this._date = today.getDate(); }; // // Inherit from `winston.Transport`. // util.inherits(FileRotate, Transport); /* */ function getFormatToday() { var d = new Date(); return [ d.getFullYear(), common.pad(d.getMonth() + 1), common.pad(d.getDate()), '-' ].join(''); } // // Expose the name of this Transport on the prototype // FileRotate.prototype.name = 'fileRotate'; // // ### function log (level, msg, [meta], callback) // #### @level {string} Level at which to log the message. // #### @msg {string} Message to log // #### @meta {Object} **Optional** Additional metadata to attach // #### @callback {function} Continuation to respond to when complete. // Core logging method exposed to Winston. Metadata is optional. // FileRotate.prototype.log = function (level, msg, meta, callback) { if (this.silent) { return callback(null, true); } var self = this, output = common.log({ level: level, message: msg, meta: meta, json: this.json, colorize: this.colorize, timestamp: this.timestamp }) + '\n'; this._size += output.length; if (!this.filename) { console.log('no filename'); // // If there is no `filename` on this instance then it was configured // with a raw `WriteableStream` instance and we should not perform any // size restrictions. 
// this.stream.write(output); self._lazyDrain(); } else { this.open(function (err) { console.log('opening callbak'); if (err) { console.log('opening err'); // // If there was an error enqueue the message // return self._buffer.push(output); } self.stream.write(output); self._lazyDrain(); }); } callback(null, true); }; // // ### function open (callback) // #### @callback {function} Continuation to respond to when complete // Checks to see if a new file needs to be created based on the `maxsize` // (if any) and the current size of the file used. // FileRotate.prototype.open = function (callback) { var now = new Date(); if (this.opening) { console.log('opening'); // // If we are already attempting to open the next // available file then respond with a value indicating // that the message should be buffered. // return callback(true); } else if (!this.stream || (this.maxsize && this._size >= this.maxsize) || (this._month <= now.getMonth() && this._date < now.getDate()) ) { console.log('no stream, over max, new date'); // // If we dont have a stream or have exceeded our size, then create // the next stream and respond with a value indicating that // the message should be buffered. // callback(true); return this._createStream(); } // // Otherwise we have a valid (and ready) stream. // callback(); }; // // ### function close () // Closes the stream associated with this instance. // FileRotate.prototype.close = function () { var self = this; console.log('close'); if (this.stream) { this.stream.end(); this.stream.destroySoon(); this.stream.once('drain', function () { self.emit('flush'); self.emit('closed'); }); } }; // // ### function flush () // Flushes any buffered messages to the current `stream` // used by this instance. // FileRotate.prototype.flush = function () { var self = this; console.log('flush'); // // Iterate over the `_buffer` of enqueued messaged // and then write them to the newly created stream. // this._buffer.forEach(function (str) { process.nextTick(function () { self.stream.write(str); self._size += str.length; }); }); // // Quickly truncate the `_buffer` once the write operations // have been started // self._buffer.length = 0; // // When the stream has drained we have flushed // our buffer. // self.stream.once('drain', function () { self.emit('flush');<|fim▁hole|> // // ### @private function _createStream () // Attempts to open the next appropriate file for this instance // based on the common state (such as `maxsize` and `_basename`). // FileRotate.prototype._createStream = function () { var self = this; this.opening = true; console.log('create Stream'); (function checkFile (target) { console.log('check file'); var fullname = path.join(self.dirname, target); // // Creates the `WriteStream` and then flushes any // buffered messages. // function createAndFlush (size) { console.log('create and flush'); if (self.stream) { console.log('end stream'); self.stream.end(); self.stream.destroySoon(); } self._size = size; self.filename = target; self.stream = fs.createWriteStream(fullname, self.options); // // When the current stream has finished flushing // then we can be sure we have finished opening // and thus can emit the `open` event. // self.once('flush', function () { self.opening = false; self.emit('open', fullname); }); // // Remark: It is possible that in the time it has taken to find the // next logfile to be written more data than `maxsize` has been buffered, // but for sensible limits (10s - 100s of MB) this seems unlikely in less // than one second. 
// self.flush(); } fs.stat(fullname, function (err, stats) { console.log('stats > ' + stats); if (err) { if (err.code !== 'ENOENT') { return self.emit('error', err); } return createAndFlush(0); } if (stats) { console.log('mtime > ' + stats.mtime); console.log('type of > ' + typeof stats.mtime); self._month = stats.mtime.getMonth(); self._date = stats.mtime.getDate(); } console.log(self._month); console.log(self._date); var now = new Date(); if (!stats || (self.maxsize && stats.size >= self.maxsize) || (self._date < now.getDate() && self._month <= now.getMonth()) ) { console.log('no stats, max size, new date'); // // If `stats.size` is greater than the `maxsize` for // this instance then try again // return checkFile(self._getFile(true)); } createAndFlush(stats.size); }); })(this._getFile()); }; // // ### @private function _getFile () // Gets the next filename to use for this instance // in the case that log filesizes are being capped. // FileRotate.prototype._getFile = function (inc) { console.log('get file'); var self = this, ext = path.extname(this._basename), basename = getFormatToday() + path.basename(this._basename, ext), remaining; if (inc) { // // Increment the number of files created or // checked by this instance. // // Check for maxFiles option and delete file if (this.maxFiles && (this._created >= (this.maxFiles - 1))) { remaining = this._created - (this.maxFiles - 1); if (remaining === 0) { fs.unlinkSync(path.join(this.dirname, basename + ext)); } else { fs.unlinkSync(path.join(this.dirname, basename + remaining + ext)); } } this._created += 1; } return this._created ? basename + this._created + ext : basename + ext; }; // // ### @private function _lazyDrain () // Lazily attempts to emit the `logged` event when `this.stream` has // drained. This is really just a simple mutex that only works because // Node.js is single-threaded. // FileRotate.prototype._lazyDrain = function () { console.log('lazy drain'); var self = this; if (!this._draining && this.stream) { this._draining = true; this.stream.once('drain', function () { this._draining = false; self.emit('logged'); }); } };<|fim▁end|>
self.emit('logged'); }); };
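The rotation test that FileRotate's open()/_createStream() apply above boils down to three triggers: no stream yet, the size cap reached, or the calendar day advanced. A small sketch of that predicate, assuming plain 1-based months rather than JavaScript's 0-based getMonth(), and deliberately mirroring the source's month-and-day comparison:

from datetime import date

def needs_new_file(have_stream, size, maxsize, last_month, last_day, today):
    # mirrors: !stream || (maxsize && size >= maxsize)
    #          || (last day < today's day && last month <= today's month)
    return (not have_stream
            or (maxsize is not None and size >= maxsize)
            or (last_day < and last_month <= today.month))

print(needs_new_file(True, 10, 100, 2, 2, date(2024, 2, 3)))   # True: new day
print(needs_new_file(True, 150, 100, 2, 3, date(2024, 2, 3)))  # True: size cap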
<|file_name|>Modal.js<|end_file_name|><|fim▁begin|>import React from 'react'; import { Gateway } from 'react-gateway'; import ReactModal2 from './ReactModal2'; import Icon from '../Icon'; import './Modal.scss'; /** * This component is only created to facilitate the process of using react-modal2 with react gateway * * You can import this Modal and use onClose props with internal state to control its appearance: * *export default class MyComponent extends React.Component { * state = { * isModalOpen: false * }; * * handleOpen = () => { * this.setState({ isModalOpen: true }); * }; * * handleClose = () => { * this.setState({ isModalOpen: false }); * }; * * render() { * return ( * <div> * <button onClick={this.handleOpen}>Open</button> * {this.state.isModalOpen && ( * <MyCustomModal onClose={this.handleClose}> * <h1>Hello from Modal</h1> * <button onClick={this.handleClose}>Close</button> * </MyCustomModal> * )} * </div> * ); * } *} * * more info: https://www.npmjs.com/package/react-modal2 */ const freezeTheScroll = () => { // eslint-disable-next-line const body = window.document.querySelector('body'); body.style.overflow = 'hidden';<|fim▁hole|>}; const unfreezeTheScroll = () => { // eslint-disable-next-line const body = window.document.querySelector('body'); body.style.overflow = 'initial'; }; export default class Modal extends React.Component { static propTypes = { onClose: React.PropTypes.func.isRequired, }; static defaultProps = { closeOnEsc: true, closeOnBackdropClick: true, }; /* eslint-disable */ componentDidMount() { if (window) { freezeTheScroll(); } } componentWillUnmount() { if (window) { unfreezeTheScroll(); } } /* eslint-enable */ render() { return ( <Gateway into="modal"> <ReactModal2 onClose={this.props.onClose} closeOnEsc={this.props.closeOnEsc} closeOnBackdropClick={this.props.closeOnEsc} backdropClassName="BusyModalBackdrop" modalClassName="BusyModal" > <a className="BusyModal--close" onClick={this.props.onClose}> <Icon name="close" /> </a> {this.props.children} </ReactModal2> </Gateway> ); } }<|fim▁end|>
<|file_name|>flatten_test.rs<|end_file_name|><|fim▁begin|>// @generated SignedSource<<f151ffe80d9e519322a823c1bbc24bae>> mod flatten; use flatten::transform_fixture; use fixture_tests::test_fixture; #[test] fn flatten_multiple_conditions() { let input = include_str!("flatten/fixtures/flatten-multiple-conditions.graphql"); let expected = include_str!("flatten/fixtures/flatten-multiple-conditions.expected"); test_fixture(transform_fixture, "flatten-multiple-conditions.graphql", "flatten/fixtures/flatten-multiple-conditions.expected", input, expected); } #[test] fn flatten_same_conditions() { let input = include_str!("flatten/fixtures/flatten-same-conditions.graphql"); let expected = include_str!("flatten/fixtures/flatten-same-conditions.expected"); test_fixture(transform_fixture, "flatten-same-conditions.graphql", "flatten/fixtures/flatten-same-conditions.expected", input, expected); } #[test] fn flatten_transform() { let input = include_str!("flatten/fixtures/flatten-transform.graphql"); let expected = include_str!("flatten/fixtures/flatten-transform.expected"); test_fixture(transform_fixture, "flatten-transform.graphql", "flatten/fixtures/flatten-transform.expected", input, expected); } #[test] fn flattens_inline_inside_condition() { let input = include_str!("flatten/fixtures/flattens-inline-inside-condition.graphql"); let expected = include_str!("flatten/fixtures/flattens-inline-inside-condition.expected"); test_fixture(transform_fixture, "flattens-inline-inside-condition.graphql", "flatten/fixtures/flattens-inline-inside-condition.expected", input, expected); } #[test] fn flattens_inside_plural() { let input = include_str!("flatten/fixtures/flattens-inside-plural.graphql"); let expected = include_str!("flatten/fixtures/flattens-inside-plural.expected"); test_fixture(transform_fixture, "flattens-inside-plural.graphql", "flatten/fixtures/flattens-inside-plural.expected", input, expected); } #[test] fn flattens_matching_fragment_types() { let input = include_str!("flatten/fixtures/flattens-matching-fragment-types.graphql");<|fim▁hole|>#[test] fn linked_handle_field() { let input = include_str!("flatten/fixtures/linked-handle-field.graphql"); let expected = include_str!("flatten/fixtures/linked-handle-field.expected"); test_fixture(transform_fixture, "linked-handle-field.graphql", "flatten/fixtures/linked-handle-field.expected", input, expected); } #[test] fn match_field() { let input = include_str!("flatten/fixtures/match-field.graphql"); let expected = include_str!("flatten/fixtures/match-field.expected"); test_fixture(transform_fixture, "match-field.graphql", "flatten/fixtures/match-field.expected", input, expected); } #[test] fn match_field_overlap() { let input = include_str!("flatten/fixtures/match-field-overlap.graphql"); let expected = include_str!("flatten/fixtures/match-field-overlap.expected"); test_fixture(transform_fixture, "match-field-overlap.graphql", "flatten/fixtures/match-field-overlap.expected", input, expected); } #[test] fn scalar_handle_field() { let input = include_str!("flatten/fixtures/scalar-handle-field.graphql"); let expected = include_str!("flatten/fixtures/scalar-handle-field.expected"); test_fixture(transform_fixture, "scalar-handle-field.graphql", "flatten/fixtures/scalar-handle-field.expected", input, expected); }<|fim▁end|>
let expected = include_str!("flatten/fixtures/flattens-matching-fragment-types.expected"); test_fixture(transform_fixture, "flattens-matching-fragment-types.graphql", "flatten/fixtures/flattens-matching-fragment-types.expected", input, expected); }
<|file_name|>jquery.tablesorter.js<|end_file_name|><|fim▁begin|>/**! * TableSorter 2.17.7 - Client-side table sorting with ease! * @requires jQuery v1.2.6+ * * Copyright (c) 2007 Christian Bach * Examples and docs at: http://tablesorter.com * Dual licensed under the MIT and GPL licenses: * http://www.opensource.org/licenses/mit-license.php * http://www.gnu.org/licenses/gpl.html * * @type jQuery * @name tablesorter * @cat Plugins/Tablesorter * @author Christian Bach/[email protected] * @contributor Rob Garrison/https://github.com/Mottie/tablesorter */ /*jshint browser:true, jquery:true, unused:false, expr: true */ /*global console:false, alert:false */ !(function($) { "use strict"; $.extend({ /*jshint supernew:true */ tablesorter: new function() { var ts = this; ts.version = "2.17.7"; ts.parsers = []; ts.widgets = []; ts.defaults = { // *** appearance theme : 'default', // adds tablesorter-{theme} to the table for styling widthFixed : false, // adds colgroup to fix widths of columns showProcessing : false, // show an indeterminate timer icon in the header when the table is sorted or filtered. headerTemplate : '{content}',// header layout template (HTML ok); {content} = innerHTML, {icon} = <i/> (class from cssIcon) onRenderTemplate : null, // function(index, template){ return template; }, (template is a string) onRenderHeader : null, // function(index){}, (nothing to return) // *** functionality cancelSelection : true, // prevent text selection in the header tabIndex : true, // add tabindex to header for keyboard accessibility dateFormat : 'mmddyyyy', // other options: "ddmmyyy" or "yyyymmdd" sortMultiSortKey : 'shiftKey', // key used to select additional columns sortResetKey : 'ctrlKey', // key used to remove sorting on a column usNumberFormat : true, // false for German "1.234.567,89" or French "1 234 567,89" delayInit : false, // if false, the parsed table contents will not update until the first sort serverSideSorting: false, // if true, server-side sorting should be performed because client-side sorting will be disabled, but the ui and events will still be used. // *** sort options headers : {}, // set sorter, string, empty, locked order, sortInitialOrder, filter, etc. 
ignoreCase : true, // ignore case while sorting sortForce : null, // column(s) first sorted; always applied sortList : [], // Initial sort order; applied initially; updated when manually sorted sortAppend : null, // column(s) sorted last; always applied sortStable : false, // when sorting two rows with exactly the same content, the original sort order is maintained sortInitialOrder : 'asc', // sort direction on first click sortLocaleCompare: false, // replace equivalent character (accented characters) sortReset : false, // third click on the header will reset column to default - unsorted sortRestart : false, // restart sort to "sortInitialOrder" when clicking on previously unsorted columns emptyTo : 'bottom', // sort empty cell to bottom, top, none, zero stringTo : 'max', // sort strings in numerical column as max, min, top, bottom, zero textExtraction : 'basic', // text extraction method/function - function(node, table, cellIndex){} textAttribute : 'data-text',// data-attribute that contains alternate cell text (used in textExtraction function) textSorter : null, // choose overall or specific column sorter function(a, b, direction, table, columnIndex) [alt: ts.sortText] numberSorter : null, // choose overall numeric sorter function(a, b, direction, maxColumnValue) // *** widget options widgets: [], // method to add widgets, e.g. widgets: ['zebra'] widgetOptions : { zebra : [ 'even', 'odd' ] // zebra widget alternating row class names }, initWidgets : true, // apply widgets on tablesorter initialization // *** callbacks initialized : null, // function(table){}, // *** extra css class names tableClass : '', cssAsc : '', cssDesc : '', cssNone : '', cssHeader : '', cssHeaderRow : '', cssProcessing : '', // processing icon applied to header during sort/filter cssChildRow : 'tablesorter-childRow', // class name indiciating that a row is to be attached to the its parent cssIcon : 'tablesorter-icon', // if this class exists, a <i> will be added to the header automatically cssInfoBlock : 'tablesorter-infoOnly', // don't sort tbody with this class name (only one class name allowed here!) // *** selectors selectorHeaders : '> thead th, > thead td', selectorSort : 'th, td', // jQuery selector of content within selectorHeaders that is clickable to trigger a sort selectorRemove : '.remove-me', // *** advanced debug : false, // *** Internal variables headerList: [], empties: {}, strings: {}, parsers: [] // deprecated; but retained for backwards compatibility // widgetZebra: { css: ["even", "odd"] } }; // internal css classes - these will ALWAYS be added to // the table and MUST only contain one class name - fixes #381 ts.css = { table : 'tablesorter', cssHasChild: 'tablesorter-hasChildRow', childRow : 'tablesorter-childRow', header : 'tablesorter-header', headerRow : 'tablesorter-headerRow', headerIn : 'tablesorter-header-inner', icon : 'tablesorter-icon', info : 'tablesorter-infoOnly', processing : 'tablesorter-processing', sortAsc : 'tablesorter-headerAsc', sortDesc : 'tablesorter-headerDesc', sortNone : 'tablesorter-headerUnSorted' }; // labels applied to sortable headers for accessibility (aria) support ts.language = { sortAsc : 'Ascending sort applied, ', sortDesc : 'Descending sort applied, ', sortNone : 'No sort applied, ', nextAsc : 'activate to apply an ascending sort', nextDesc : 'activate to apply a descending sort', nextNone : 'activate to remove the sort' }; /* debuging utils */ function log() { var a = arguments[0], s = arguments.length > 1 ? 
Array.prototype.slice.call(arguments) : a; if (typeof console !== "undefined" && typeof console.log !== "undefined") { console[ /error/i.test(a) ? 'error' : /warn/i.test(a) ? 'warn' : 'log' ](s); } else { alert(s); } } function benchmark(s, d) { log(s + " (" + (new Date().getTime() - d.getTime()) + "ms)"); } ts.log = log; ts.benchmark = benchmark; // $.isEmptyObject from jQuery v1.4 function isEmptyObject(obj) { /*jshint forin: false */ for (var name in obj) { return false; } return true; } function getElementText(table, node, cellIndex) { if (!node) { return ""; } var te, c = table.config, t = c.textExtraction || '', text = ""; if (t === "basic") { // check data-attribute first text = $(node).attr(c.textAttribute) || node.textContent || node.innerText || $(node).text() || ""; } else { if (typeof(t) === "function") { text = t(node, table, cellIndex); } else if (typeof (te = ts.getColumnData( table, t, cellIndex )) === 'function') { text = te(node, table, cellIndex); } else { // previous "simple" method text = node.textContent || node.innerText || $(node).text() || ""; } } return $.trim(text); } function detectParserForColumn(table, rows, rowIndex, cellIndex) { var cur, i = ts.parsers.length, node = false, nodeValue = '', keepLooking = true; while (nodeValue === '' && keepLooking) { rowIndex++; if (rows[rowIndex]) { node = rows[rowIndex].cells[cellIndex]; nodeValue = getElementText(table, node, cellIndex); if (table.config.debug) { log('Checking if value was empty on row ' + rowIndex + ', column: ' + cellIndex + ': "' + nodeValue + '"'); } } else { keepLooking = false; } } while (--i >= 0) { cur = ts.parsers[i]; // ignore the default text parser because it will always be true if (cur && cur.id !== 'text' && cur.is && cur.is(nodeValue, table, node)) { return cur; } } // nothing found, return the generic parser (text) return ts.getParserById('text'); } function buildParserCache(table) { var c = table.config, // update table bodies in case we start with an empty table tb = c.$tbodies = c.$table.children('tbody:not(.' + c.cssInfoBlock + ')'), rows, list, l, i, h, ch, np, p, e, time, j = 0, parsersDebug = "", len = tb.length; if ( len === 0) { return c.debug ? log('Warning: *Empty table!* Not building a parser cache') : ''; } else if (c.debug) { time = new Date(); log('Detecting parsers for each column'); } list = { extractors: [], parsers: [] }; while (j < len) { rows = tb[j].rows; if (rows[j]) { l = c.columns; // rows[j].cells.length; for (i = 0; i < l; i++) { h = c.$headers.filter('[data-column="' + i + '"]:last'); // get column indexed table cell ch = ts.getColumnData( table, c.headers, i ); // get column parser/extractor e = ts.getParserById( ts.getData(h, ch, 'extractor') ); p = ts.getParserById( ts.getData(h, ch, 'sorter') ); np = ts.getData(h, ch, 'parser') === 'false'; // empty cells behaviour - keeping emptyToBottom for backwards compatibility c.empties[i] = ts.getData(h, ch, 'empty') || c.emptyTo || (c.emptyToBottom ? 'bottom' : 'top' ); // text strings behaviour in numerical sorts c.strings[i] = ts.getData(h, ch, 'string') || c.stringTo || 'max'; if (np) { p = ts.getParserById('no-parser'); } if (!e) { // For now, maybe detect someday e = false; } if (!p) { p = detectParserForColumn(table, rows, -1, i); } if (c.debug) { parsersDebug += "column:" + i + "; extractor:" + e.id + "; parser:" + p.id + "; string:" + c.strings[i] + '; empty: ' + c.empties[i] + "\n"; } list.parsers[i] = p; list.extractors[i] = e; } } j += (list.parsers.length) ? len : 1; } if (c.debug) { log(parsersDebug ? 
parsersDebug : "No parsers detected"); benchmark("Completed detecting parsers", time); } c.parsers = list.parsers; c.extractors = list.extractors; } /* utils */ function buildCache(table) { var cc, t, tx, v, i, j, k, $row, rows, cols, cacheTime, totalRows, rowData, colMax, c = table.config, $tb = c.$table.children('tbody'), extractors = c.extractors, parsers = c.parsers; c.cache = {}; c.totalRows = 0; // if no parsers found, return - it's an empty table. if (!parsers) { return c.debug ? log('Warning: *Empty table!* Not building a cache') : ''; } if (c.debug) { cacheTime = new Date(); } // processing icon if (c.showProcessing) { ts.isProcessing(table, true); } for (k = 0; k < $tb.length; k++) { colMax = []; // column max value per tbody cc = c.cache[k] = { normalized: [] // array of normalized row data; last entry contains "rowData" above // colMax: # // added at the end }; // ignore tbodies with class name from c.cssInfoBlock if (!$tb.eq(k).hasClass(c.cssInfoBlock)) { totalRows = ($tb[k] && $tb[k].rows.length) || 0; for (i = 0; i < totalRows; ++i) { rowData = { // order: original row order # // $row : jQuery Object[] child: [] // child row text (filter widget) }; /** Add the table data to main data array */ $row = $($tb[k].rows[i]); rows = [ new Array(c.columns) ]; cols = []; // if this is a child row, add it to the last row's children and continue to the next row // ignore child row class, if it is the first row if ($row.hasClass(c.cssChildRow) && i !== 0) { t = cc.normalized.length - 1; cc.normalized[t][c.columns].$row = cc.normalized[t][c.columns].$row.add($row); // add "hasChild" class name to parent row if (!$row.prev().hasClass(c.cssChildRow)) { $row.prev().addClass(ts.css.cssHasChild); } // save child row content (un-parsed!) rowData.child[t] = $.trim( $row[0].textContent || $row[0].innerText || $row.text() || "" ); // go to the next for loop continue; } rowData.$row = $row; rowData.order = i; // add original row position to rowCache for (j = 0; j < c.columns; ++j) { if (typeof parsers[j] === 'undefined') { if (c.debug) { log('No parser found for cell:', $row[0].cells[j], 'does it have a header?'); } continue; } t = getElementText(table, $row[0].cells[j], j); // do extract before parsing if there is one if (typeof extractors[j].id === 'undefined') { tx = t; } else { tx = extractors[j].format(t, table, $row[0].cells[j], j); } // allow parsing if the string is empty, previously parsing would change it to zero, // in case the parser needs to extract data from the table cell attributes v = parsers[j].id === 'no-parser' ? '' : parsers[j].format(tx, table, $row[0].cells[j], j); cols.push( c.ignoreCase && typeof v === 'string' ? 
v.toLowerCase() : v ); if ((parsers[j].type || '').toLowerCase() === "numeric") { // determine column max value (ignore sign) colMax[j] = Math.max(Math.abs(v) || 0, colMax[j] || 0); } } // ensure rowData is always in the same location (after the last column) cols[c.columns] = rowData; cc.normalized.push(cols); } cc.colMax = colMax; // total up rows, not including child rows c.totalRows += cc.normalized.length; } } if (c.showProcessing) { ts.isProcessing(table); // remove processing icon } if (c.debug) { benchmark("Building cache for " + totalRows + " rows", cacheTime); } } // init flag (true) used by pager plugin to prevent widget application function appendToTable(table, init) { var c = table.config, wo = c.widgetOptions, b = table.tBodies, rows = [], cc = c.cache, n, totalRows, $bk, $tb, i, k, appendTime; // empty table - fixes #206/#346 if (isEmptyObject(cc)) { // run pager appender in case the table was just emptied return c.appender ? c.appender(table, rows) : table.isUpdating ? c.$table.trigger("updateComplete", table) : ''; // Fixes #532 } if (c.debug) { appendTime = new Date(); } for (k = 0; k < b.length; k++) { $bk = $(b[k]); if ($bk.length && !$bk.hasClass(c.cssInfoBlock)) { // get tbody $tb = ts.processTbody(table, $bk, true); n = cc[k].normalized; totalRows = n.length; for (i = 0; i < totalRows; i++) { rows.push(n[i][c.columns].$row); // removeRows used by the pager plugin; don't render if using ajax - fixes #411 if (!c.appender || (c.pager && (!c.pager.removeRows || !wo.pager_removeRows) && !c.pager.ajax)) { $tb.append(n[i][c.columns].$row); } } // restore tbody ts.processTbody(table, $tb, false); } } if (c.appender) { c.appender(table, rows); } if (c.debug) { benchmark("Rebuilt table", appendTime); } // apply table widgets; but not before ajax completes if (!init && !c.appender) { ts.applyWidget(table); } if (table.isUpdating) { c.$table.trigger("updateComplete", table); } } function formatSortingOrder(v) { // look for "d" in "desc" order; return true return (/^d/i.test(v) || v === 1); } function buildHeaders(table) { var ch, $t, h, i, t, lock, time, c = table.config; c.headerList = []; c.headerContent = []; if (c.debug) { time = new Date(); } // children tr in tfoot - see issue #196 & #547 c.columns = ts.computeColumnIndex( c.$table.children('thead, tfoot').children('tr') ); // add icon if cssIcon option exists i = c.cssIcon ? '<i class="' + ( c.cssIcon === ts.css.icon ? ts.css.icon : c.cssIcon + ' ' + ts.css.icon ) + '"></i>' : ''; // redefine c.$headers here in case of an updateAll that replaces or adds an entire header cell - see #683 c.$headers = $(table).find(c.selectorHeaders).each(function(index) { $t = $(this); // make sure to get header cell & not column indexed cell ch = ts.getColumnData( table, c.headers, index, true ); // save original header content c.headerContent[index] = $(this).html(); // set up header template t = c.headerTemplate.replace(/\{content\}/g, $(this).html()).replace(/\{icon\}/g, i); if (c.onRenderTemplate) { h = c.onRenderTemplate.apply($t, [index, t]); if (h && typeof h === 'string') { t = h; } // only change t if something is returned } $(this).html('<div class="' + ts.css.headerIn + '">' + t + '</div>'); // faster than wrapInner if (c.onRenderHeader) { c.onRenderHeader.apply($t, [index]); } this.column = parseInt( $(this).attr('data-column'), 10); this.order = formatSortingOrder( ts.getData($t, ch, 'sortInitialOrder') || c.sortInitialOrder ) ? 
[1,0,2] : [0,1,2]; this.count = -1; // set to -1 because clicking on the header automatically adds one this.lockedOrder = false; lock = ts.getData($t, ch, 'lockedOrder') || false; if (typeof lock !== 'undefined' && lock !== false) { this.order = this.lockedOrder = formatSortingOrder(lock) ? [1,1,1] : [0,0,0]; } $t.addClass(ts.css.header + ' ' + c.cssHeader); // add cell to headerList c.headerList[index] = this; // add to parent in case there are multiple rows $t.parent().addClass(ts.css.headerRow + ' ' + c.cssHeaderRow).attr('role', 'row'); // allow keyboard cursor to focus on element if (c.tabIndex) { $t.attr("tabindex", 0); } }).attr({ scope: 'col', role : 'columnheader' }); // enable/disable sorting updateHeader(table); if (c.debug) { benchmark("Built headers:", time); log(c.$headers); } } function commonUpdate(table, resort, callback) { var c = table.config; // remove rows/elements before update c.$table.find(c.selectorRemove).remove(); // rebuild parsers buildParserCache(table); // rebuild the cache map buildCache(table); checkResort(c.$table, resort, callback); } function updateHeader(table) { var s, $th, col, c = table.config; c.$headers.each(function(index, th){ $th = $(th); col = ts.getColumnData( table, c.headers, index, true ); // add "sorter-false" class if "parser-false" is set s = ts.getData( th, col, 'sorter' ) === 'false' || ts.getData( th, col, 'parser' ) === 'false'; th.sortDisabled = s; $th[ s ? 'addClass' : 'removeClass' ]('sorter-false').attr('aria-disabled', '' + s); // aria-controls - requires table ID if (table.id) { if (s) { $th.removeAttr('aria-controls'); } else { $th.attr('aria-controls', table.id); } } }); } function setHeadersCss(table) { var f, i, j, c = table.config, list = c.sortList, len = list.length, none = ts.css.sortNone + ' ' + c.cssNone, css = [ts.css.sortAsc + ' ' + c.cssAsc, ts.css.sortDesc + ' ' + c.cssDesc], aria = ['ascending', 'descending'], // find the footer $t = $(table).find('tfoot tr').children().add(c.$extraHeaders).removeClass(css.join(' ')); // remove all header information c.$headers .removeClass(css.join(' ')) .addClass(none).attr('aria-sort', 'none'); for (i = 0; i < len; i++) { // direction = 2 means reset! if (list[i][1] !== 2) { // multicolumn sorting updating - choose the :last in case there are nested columns f = c.$headers.not('.sorter-false').filter('[data-column="' + list[i][0] + '"]' + (len === 1 ? ':last' : '') ); if (f.length) { for (j = 0; j < f.length; j++) { if (!f[j].sortDisabled) { f.eq(j).removeClass(none).addClass(css[list[i][1]]).attr('aria-sort', aria[list[i][1]]); } } // add sorted class to footer & extra headers, if they exist if ($t.length) { $t.filter('[data-column="' + list[i][0] + '"]').removeClass(none).addClass(css[list[i][1]]); } } } } // add verbose aria labels c.$headers.not('.sorter-false').each(function(){ var $this = $(this), nextSort = this.order[(this.count + 1) % (c.sortReset ? 3 : 2)], txt = $this.text() + ': ' + ts.language[ $this.hasClass(ts.css.sortAsc) ? 'sortAsc' : $this.hasClass(ts.css.sortDesc) ? 'sortDesc' : 'sortNone' ] + ts.language[ nextSort === 0 ? 'nextAsc' : nextSort === 1 ? 
'nextDesc' : 'nextNone' ]; $this.attr('aria-label', txt ); }); } // automatically add col group, and column sizes if set function fixColumnWidth(table) { if (table.config.widthFixed && $(table).find('colgroup').length === 0) { var colgroup = $('<colgroup>'), overallWidth = $(table).width(); // only add col for visible columns - fixes #371 $(table.tBodies[0]).find("tr:first").children(":visible").each(function() { colgroup.append($('<col>').css('width', parseInt(($(this).width()/overallWidth)*1000, 10)/10 + '%')); }); $(table).prepend(colgroup); } } function updateHeaderSortCount(table, list) { var s, t, o, col, primary, c = table.config, sl = list || c.sortList; c.sortList = []; $.each(sl, function(i,v){ // ensure all sortList values are numeric - fixes #127 col = parseInt(v[0], 10); // make sure header exists o = c.$headers.filter('[data-column="' + col + '"]:last')[0]; if (o) { // prevents error if sorton array is wrong // o.count = o.count + 1; t = ('' + v[1]).match(/^(1|d|s|o|n)/); t = t ? t[0] : ''; // 0/(a)sc (default), 1/(d)esc, (s)ame, (o)pposite, (n)ext switch(t) { case '1': case 'd': // descending t = 1; break; case 's': // same direction (as primary column) // if primary sort is set to "s", make it ascending t = primary || 0; break; case 'o': s = o.order[(primary || 0) % (c.sortReset ? 3 : 2)]; // opposite of primary column; but resets if primary resets t = s === 0 ? 1 : s === 1 ? 0 : 2; break; case 'n': o.count = o.count + 1; t = o.order[(o.count) % (c.sortReset ? 3 : 2)]; break; default: // ascending t = 0; break; } primary = i === 0 ? t : primary; s = [ col, parseInt(t, 10) || 0 ]; c.sortList.push(s); t = $.inArray(s[1], o.order); // fixes issue #167 o.count = t >= 0 ? t : s[1] % (c.sortReset ? 3 : 2); } }); } function getCachedSortType(parsers, i) { return (parsers && parsers[i]) ? parsers[i].type || '' : ''; } function initSort(table, cell, event){ if (table.isUpdating) { // let any updates complete before initializing a sort return setTimeout(function(){ initSort(table, cell, event); }, 50); } var arry, indx, col, order, s, c = table.config, key = !event[c.sortMultiSortKey], $table = c.$table; // Only call sortStart if sorting is enabled $table.trigger("sortStart", table); // get current column sort order cell.count = event[c.sortResetKey] ? 2 : (cell.count + 1) % (c.sortReset ? 3 : 2); // reset all sorts on non-current column - issue #30 if (c.sortRestart) { indx = cell; c.$headers.each(function() { // only reset counts on columns that weren't just clicked on and if not included in a multisort if (this !== indx && (key || !$(this).is('.' + ts.css.sortDesc + ',.' 
+ ts.css.sortAsc))) { this.count = -1; } }); } // get current column index indx = cell.column; // user only wants to sort on one column if (key) { // flush the sort list c.sortList = []; if (c.sortForce !== null) { arry = c.sortForce; for (col = 0; col < arry.length; col++) { if (arry[col][0] !== indx) { c.sortList.push(arry[col]); } } } // add column to sort list order = cell.order[cell.count]; if (order < 2) { c.sortList.push([indx, order]); // add other columns if header spans across multiple if (cell.colSpan > 1) { for (col = 1; col < cell.colSpan; col++) { c.sortList.push([indx + col, order]); } } } // multi column sorting } else { // get rid of the sortAppend before adding more - fixes issue #115 & #523 if (c.sortAppend && c.sortList.length > 1) { for (col = 0; col < c.sortAppend.length; col++) { s = ts.isValueInArray(c.sortAppend[col][0], c.sortList); if (s >= 0) { c.sortList.splice(s,1); } } } // the user has clicked on an already sorted column if (ts.isValueInArray(indx, c.sortList) >= 0) { // reverse the sorting direction for (col = 0; col < c.sortList.length; col++) { s = c.sortList[col]; order = c.$headers.filter('[data-column="' + s[0] + '"]:last')[0]; if (s[0] === indx) { // order.count seems to be incorrect when compared to cell.count s[1] = order.order[cell.count]; if (s[1] === 2) { c.sortList.splice(col,1); order.count = -1; } } } } else { // add column to sort list array order = cell.order[cell.count]; if (order < 2) { c.sortList.push([indx, order]); // add other columns if header spans across multiple if (cell.colSpan > 1) { for (col = 1; col < cell.colSpan; col++) { c.sortList.push([indx + col, order]); } } } } } if (c.sortAppend !== null) { arry = c.sortAppend; for (col = 0; col < arry.length; col++) { if (arry[col][0] !== indx) { c.sortList.push(arry[col]); } } } // sortBegin event triggered immediately before the sort $table.trigger("sortBegin", table); // setTimeout needed so the processing icon shows up setTimeout(function(){ // set css for headers setHeadersCss(table); multisort(table); appendToTable(table); $table.trigger("sortEnd", table); }, 1); } // sort multiple columns function multisort(table) { /*jshint loopfunc:true */ var i, k, num, col, sortTime, colMax, cache, order, sort, x, y, dir = 0, c = table.config, cts = c.textSorter || '', sortList = c.sortList, l = sortList.length, bl = table.tBodies.length; if (c.serverSideSorting || isEmptyObject(c.cache)) { // empty table - fixes #206/#346 return; } if (c.debug) { sortTime = new Date(); } for (k = 0; k < bl; k++) { colMax = c.cache[k].colMax; cache = c.cache[k].normalized; cache.sort(function(a, b) { // cache is undefined here in IE, so don't use it! for (i = 0; i < l; i++) { col = sortList[i][0]; order = sortList[i][1]; // sort direction, true = asc, false = desc dir = order === 0; if (c.sortStable && a[col] === b[col] && l === 1) { return a[c.columns].order - b[c.columns].order; } // fallback to natural sort since it is more robust num = /n/i.test(getCachedSortType(c.parsers, col)); if (num && c.strings[col]) { // sort strings in numerical columns if (typeof (c.string[c.strings[col]]) === 'boolean') { num = (dir ? 1 : -1) * (c.string[c.strings[col]] ? -1 : 1); } else { num = (c.strings[col]) ? c.string[c.strings[col]] || 0 : 0; } // fall back to built-in numeric sort // var sort = $.tablesorter["sort" + s](table, a[c], b[c], c, colMax[c], dir); sort = c.numberSorter ? c.numberSorter(a[col], b[col], dir, colMax[col], table) : ts[ 'sortNumeric' + (dir ? 
'Asc' : 'Desc') ](a[col], b[col], num, colMax[col], col, table); } else { // set a & b depending on sort direction<|fim▁hole|> if (typeof(cts) === 'function') { // custom OVERALL text sorter sort = cts(x[col], y[col], dir, col, table); } else if (typeof(cts) === 'object' && cts.hasOwnProperty(col)) { // custom text sorter for a SPECIFIC COLUMN sort = cts[col](x[col], y[col], dir, col, table); } else { // fall back to natural sort sort = ts[ 'sortNatural' + (dir ? 'Asc' : 'Desc') ](a[col], b[col], col, table, c); } } if (sort) { return sort; } } return a[c.columns].order - b[c.columns].order; }); } if (c.debug) { benchmark("Sorting on " + sortList.toString() + " and dir " + order + " time", sortTime); } } function resortComplete($table, callback){ var table = $table[0]; if (table.isUpdating) { $table.trigger('updateComplete'); } if ($.isFunction(callback)) { callback($table[0]); } } function checkResort($table, flag, callback) { var sl = $table[0].config.sortList; // don't try to resort if the table is still processing // this will catch spamming of the updateCell method if (flag !== false && !$table[0].isProcessing && sl.length) { $table.trigger("sorton", [sl, function(){ resortComplete($table, callback); }, true]); } else { resortComplete($table, callback); ts.applyWidget($table[0], false); } } function bindMethods(table){ var c = table.config, $table = c.$table; // apply easy methods that trigger bound events $table .unbind('sortReset update updateRows updateCell updateAll addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy mouseup mouseleave '.split(' ').join(c.namespace + ' ')) .bind("sortReset" + c.namespace, function(e, callback){ e.stopPropagation(); c.sortList = []; setHeadersCss(table); multisort(table); appendToTable(table); if ($.isFunction(callback)) { callback(table); } }) .bind("updateAll" + c.namespace, function(e, resort, callback){ e.stopPropagation(); table.isUpdating = true; ts.refreshWidgets(table, true, true); ts.restoreHeaders(table); buildHeaders(table); ts.bindEvents(table, c.$headers, true); bindMethods(table); commonUpdate(table, resort, callback); }) .bind("update" + c.namespace + " updateRows" + c.namespace, function(e, resort, callback) { e.stopPropagation(); table.isUpdating = true; // update sorting (if enabled/disabled) updateHeader(table); commonUpdate(table, resort, callback); }) .bind("updateCell" + c.namespace, function(e, cell, resort, callback) { e.stopPropagation(); table.isUpdating = true; $table.find(c.selectorRemove).remove(); // get position from the dom var v, t, row, icell, $tb = $table.find('tbody'), $cell = $(cell), // update cache - format: function(s, table, cell, cellIndex) // no closest in jQuery v1.2.6 - tbdy = $tb.index( $(cell).closest('tbody') ),$row = $(cell).closest('tr'); tbdy = $tb.index( $.fn.closest ? $cell.closest('tbody') : $cell.parents('tbody').filter(':first') ), $row = $.fn.closest ? $cell.closest('tr') : $cell.parents('tr').filter(':first'); cell = $cell[0]; // in case cell is a jQuery object // tbody may not exist if update is initialized while tbody is removed for processing if ($tb.length && tbdy >= 0) { row = $tb.eq(tbdy).find('tr').index( $row ); icell = $cell.index(); c.cache[tbdy].normalized[row][c.columns].$row = $row; if (typeof c.extractors[icell].id === 'undefined') { t = getElementText(table, cell, icell); } else { t = c.extractors[icell].format( getElementText(table, cell, icell), table, cell, icell ); } v = c.parsers[icell].id === 'no-parser' ? 
'' : c.parsers[icell].format( t, table, cell, icell ); c.cache[tbdy].normalized[row][icell] = c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v; if ((c.parsers[icell].type || '').toLowerCase() === "numeric") { // update column max value (ignore sign) c.cache[tbdy].colMax[icell] = Math.max(Math.abs(v) || 0, c.cache[tbdy].colMax[icell] || 0); } checkResort($table, resort, callback); } }) .bind("addRows" + c.namespace, function(e, $row, resort, callback) { e.stopPropagation(); table.isUpdating = true; if (isEmptyObject(c.cache)) { // empty table, do an update instead - fixes #450 updateHeader(table); commonUpdate(table, resort, callback); } else { $row = $($row).attr('role', 'row'); // make sure we're using a jQuery object var i, j, l, t, v, rowData, cells, rows = $row.filter('tr').length, tbdy = $table.find('tbody').index( $row.parents('tbody').filter(':first') ); // fixes adding rows to an empty table - see issue #179 if (!(c.parsers && c.parsers.length)) { buildParserCache(table); } // add each row for (i = 0; i < rows; i++) { l = $row[i].cells.length; cells = []; rowData = { child: [], $row : $row.eq(i), order: c.cache[tbdy].normalized.length }; // add each cell for (j = 0; j < l; j++) { if (typeof c.extractors[j].id === 'undefined') { t = getElementText(table, $row[i].cells[j], j); } else { t = c.extractors[j].format( getElementText(table, $row[i].cells[j], j), table, $row[i].cells[j], j ); } v = c.parsers[j].id === 'no-parser' ? '' : c.parsers[j].format( t, table, $row[i].cells[j], j ); cells[j] = c.ignoreCase && typeof v === 'string' ? v.toLowerCase() : v; if ((c.parsers[j].type || '').toLowerCase() === "numeric") { // update column max value (ignore sign) c.cache[tbdy].colMax[j] = Math.max(Math.abs(cells[j]) || 0, c.cache[tbdy].colMax[j] || 0); } } // add the row data to the end cells.push(rowData); // update cache c.cache[tbdy].normalized.push(cells); } // resort using current settings checkResort($table, resort, callback); } }) .bind("updateComplete" + c.namespace, function(){ table.isUpdating = false; }) .bind("sorton" + c.namespace, function(e, list, callback, init) { var c = table.config; e.stopPropagation(); $table.trigger("sortStart", this); // update header count index updateHeaderSortCount(table, list); // set css for headers setHeadersCss(table); // fixes #346 if (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); } $table.trigger("sortBegin", this); // sort the table and append it to the dom multisort(table); appendToTable(table, init); $table.trigger("sortEnd", this); ts.applyWidget(table); if ($.isFunction(callback)) { callback(table); } }) .bind("appendCache" + c.namespace, function(e, callback, init) { e.stopPropagation(); appendToTable(table, init); if ($.isFunction(callback)) { callback(table); } }) .bind("updateCache" + c.namespace, function(e, callback){ // rebuild parsers if (!(c.parsers && c.parsers.length)) { buildParserCache(table); } // rebuild the cache map buildCache(table); if ($.isFunction(callback)) { callback(table); } }) .bind("applyWidgetId" + c.namespace, function(e, id) { e.stopPropagation(); ts.getWidgetById(id).format(table, c, c.widgetOptions); }) .bind("applyWidgets" + c.namespace, function(e, init) { e.stopPropagation(); // apply widgets ts.applyWidget(table, init); }) .bind("refreshWidgets" + c.namespace, function(e, all, dontapply){ e.stopPropagation(); ts.refreshWidgets(table, all, dontapply); }) .bind("destroy" + c.namespace, function(e, c, cb){ e.stopPropagation(); ts.destroy(table, c, cb); }) .bind("resetToLoadState" + 
c.namespace, function(){ // remove all widgets ts.refreshWidgets(table, true, true); // restore original settings; this clears out current settings, but does not clear // values saved to storage. c = $.extend(true, ts.defaults, c.originalSettings); table.hasInitialized = false; // setup the entire table again ts.setup( table, c ); }); } /* public methods */ ts.construct = function(settings) { return this.each(function() { var table = this, // merge & extend config options c = $.extend(true, {}, ts.defaults, settings); // save initial settings c.originalSettings = settings; // create a table from data (build table widget) if (!table.hasInitialized && ts.buildTable && this.tagName !== 'TABLE') { // return the table (in case the original target is the table's container) ts.buildTable(table, c); } else { ts.setup(table, c); } }); }; ts.setup = function(table, c) { // if no thead or tbody, or tablesorter is already present, quit if (!table || !table.tHead || table.tBodies.length === 0 || table.hasInitialized === true) { return c.debug ? log('ERROR: stopping initialization! No table, thead, tbody or tablesorter has already been initialized') : ''; } var k = '', $table = $(table), m = $.metadata; // initialization flag table.hasInitialized = false; // table is being processed flag table.isProcessing = true; // make sure to store the config object table.config = c; // save the settings where they read $.data(table, "tablesorter", c); if (c.debug) { $.data( table, 'startoveralltimer', new Date()); } // removing this in version 3 (only supports jQuery 1.7+) c.supportsDataObject = (function(version) { version[0] = parseInt(version[0], 10); return (version[0] > 1) || (version[0] === 1 && parseInt(version[1], 10) >= 4); })($.fn.jquery.split(".")); // digit sort text location; keeping max+/- for backwards compatibility c.string = { 'max': 1, 'min': -1, 'emptyMin': 1, 'emptyMax': -1, 'zero': 0, 'none': 0, 'null': 0, 'top': true, 'bottom': false }; // add table theme class only if there isn't already one there if (!/tablesorter\-/.test($table.attr('class'))) { k = (c.theme !== '' ? ' tablesorter-' + c.theme : ''); } c.table = table; c.$table = $table .addClass(ts.css.table + ' ' + c.tableClass + k) .attr('role', 'grid'); c.$headers = $table.find(c.selectorHeaders); // give the table a unique id, which will be used in namespace binding if (!c.namespace) { c.namespace = '.tablesorter' + Math.random().toString(16).slice(2); } else { // make sure namespace starts with a period & doesn't have weird characters c.namespace = '.' + c.namespace.replace(/\W/g,''); } c.$table.children().children('tr').attr('role', 'row'); c.$tbodies = $table.children('tbody:not(.' 
+ c.cssInfoBlock + ')').attr({ 'aria-live' : 'polite', 'aria-relevant' : 'all' }); if (c.$table.find('caption').length) { c.$table.attr('aria-labelledby', 'theCaption'); } c.widgetInit = {}; // keep a list of initialized widgets // change textExtraction via data-attribute c.textExtraction = c.$table.attr('data-text-extraction') || c.textExtraction || 'basic'; // build headers buildHeaders(table); // fixate columns if the user supplies the fixedWidth option // do this after theme has been applied fixColumnWidth(table); // try to auto-detect column type, and store it in the table's config buildParserCache(table); // start total row count at zero c.totalRows = 0; // build the cache for the tbody cells // delayInit will delay building the cache until the user starts a sort if (!c.delayInit) { buildCache(table); } // bind all header events and methods ts.bindEvents(table, c.$headers, true); bindMethods(table); // get sort list from jQuery data or metadata // in jQuery < 1.4, an error occurs when calling $table.data() if (c.supportsDataObject && typeof $table.data().sortlist !== 'undefined') { c.sortList = $table.data().sortlist; } else if (m && ($table.metadata() && $table.metadata().sortlist)) { c.sortList = $table.metadata().sortlist; } // apply widget init code ts.applyWidget(table, true); // if user has supplied a sort list to constructor if (c.sortList.length > 0) { $table.trigger("sorton", [c.sortList, {}, !c.initWidgets, true]); } else { setHeadersCss(table); if (c.initWidgets) { // apply widget format ts.applyWidget(table, false); } } // show processing icon if (c.showProcessing) { $table .unbind('sortBegin' + c.namespace + ' sortEnd' + c.namespace) .bind('sortBegin' + c.namespace + ' sortEnd' + c.namespace, function(e) { clearTimeout(c.processTimer); ts.isProcessing(table); if (e.type === 'sortBegin') { c.processTimer = setTimeout(function(){ ts.isProcessing(table, true); }, 500); } }); } // initialized table.hasInitialized = true; table.isProcessing = false; if (c.debug) { ts.benchmark("Overall initialization time", $.data( table, 'startoveralltimer')); } $table.trigger('tablesorter-initialized', table); if (typeof c.initialized === 'function') { c.initialized(table); } }; ts.getColumnData = function(table, obj, indx, getCell){ if (typeof obj === 'undefined' || obj === null) { return; } table = $(table)[0]; var result, $h, k, c = table.config; if (obj[indx]) { return getCell ? 
obj[indx] : obj[c.$headers.index( c.$headers.filter('[data-column="' + indx + '"]:last') )]; } for (k in obj) { if (typeof k === 'string') { if (getCell) { // get header cell $h = c.$headers.eq(indx).filter(k); } else { // get column indexed cell $h = c.$headers.filter('[data-column="' + indx + '"]:last').filter(k); } if ($h.length) { return obj[k]; } } } return result; }; // computeTableHeaderCellIndexes from: // http://www.javascripttoolbox.com/lib/table/examples.php // http://www.javascripttoolbox.com/temp/table_cellindex.html ts.computeColumnIndex = function(trs) { var matrix = [], lookup = {}, cols = 0, // determine the number of columns i, j, k, l, $cell, cell, cells, rowIndex, cellId, rowSpan, colSpan, firstAvailCol, matrixrow; for (i = 0; i < trs.length; i++) { cells = trs[i].cells; for (j = 0; j < cells.length; j++) { cell = cells[j]; $cell = $(cell); rowIndex = cell.parentNode.rowIndex; cellId = rowIndex + "-" + $cell.index(); rowSpan = cell.rowSpan || 1; colSpan = cell.colSpan || 1; if (typeof(matrix[rowIndex]) === "undefined") { matrix[rowIndex] = []; } // Find first available column in the first row for (k = 0; k < matrix[rowIndex].length + 1; k++) { if (typeof(matrix[rowIndex][k]) === "undefined") { firstAvailCol = k; break; } } lookup[cellId] = firstAvailCol; cols = Math.max(firstAvailCol, cols); // add data-column $cell.attr({ 'data-column' : firstAvailCol }); // 'data-row' : rowIndex for (k = rowIndex; k < rowIndex + rowSpan; k++) { if (typeof(matrix[k]) === "undefined") { matrix[k] = []; } matrixrow = matrix[k]; for (l = firstAvailCol; l < firstAvailCol + colSpan; l++) { matrixrow[l] = "x"; } } } } // may not be accurate if # header columns !== # tbody columns return cols + 1; // add one because it's a zero-based index }; // *** Process table *** // add processing indicator ts.isProcessing = function(table, toggle, $ths) { table = $(table); var c = table[0].config, // default to all headers $h = $ths || table.find('.' + ts.css.header); if (toggle) { // don't use sortList if custom $ths used if (typeof $ths !== 'undefined' && c.sortList.length > 0) { // get headers from the sortList $h = $h.filter(function(){ // get data-column from attr to keep compatibility with jQuery 1.2.6 return this.sortDisabled ? false : ts.isValueInArray( parseFloat($(this).attr('data-column')), c.sortList) >= 0; }); } table.add($h).addClass(ts.css.processing + ' ' + c.cssProcessing); } else { table.add($h).removeClass(ts.css.processing + ' ' + c.cssProcessing); } }; // detach tbody but save the position // don't use tbody because there are portions that look for a tbody index (updateCell) ts.processTbody = function(table, $tb, getIt){ table = $(table)[0]; var holdr; if (getIt) { table.isProcessing = true; $tb.before('<span class="tablesorter-savemyplace"/>'); holdr = ($.fn.detach) ? $tb.detach() : $tb.remove(); return holdr; } holdr = $(table).find('span.tablesorter-savemyplace'); $tb.insertAfter( holdr ); holdr.remove(); table.isProcessing = false; }; ts.clearTableBody = function(table) { $(table)[0].config.$tbodies.children().detach(); }; ts.bindEvents = function(table, $headers, core){ table = $(table)[0]; var downTime, c = table.config; if (core !== true) { c.$extraHeaders = c.$extraHeaders ? 
c.$extraHeaders.add($headers) : $headers; } // apply event handling to headers and/or additional headers (stickyheaders, scroller, etc) $headers // http://stackoverflow.com/questions/5312849/jquery-find-self; .find(c.selectorSort).add( $headers.filter(c.selectorSort) ) .unbind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' ')) .bind('mousedown mouseup sort keyup '.split(' ').join(c.namespace + ' '), function(e, external) { var cell, type = e.type; // only recognize left clicks or enter if ( ((e.which || e.button) !== 1 && !/sort|keyup/.test(type)) || (type === 'keyup' && e.which !== 13) ) { return; } // ignore long clicks (prevents resizable widget from initializing a sort) if (type === 'mouseup' && external !== true && (new Date().getTime() - downTime > 250)) { return; } // set timer on mousedown if (type === 'mousedown') { downTime = new Date().getTime(); return /(input|select|button|textarea)/i.test(e.target.tagName) ? '' : !c.cancelSelection; } if (c.delayInit && isEmptyObject(c.cache)) { buildCache(table); } // jQuery v1.2.6 doesn't have closest() cell = $.fn.closest ? $(this).closest('th, td')[0] : /TH|TD/.test(this.tagName) ? this : $(this).parents('th, td')[0]; // reference original table headers and find the same cell cell = c.$headers[ $headers.index( cell ) ]; if (!cell.sortDisabled) { initSort(table, cell, e); } }); if (c.cancelSelection) { // cancel selection $headers .attr('unselectable', 'on') .bind('selectstart', false) .css({ 'user-select': 'none', 'MozUserSelect': 'none' // not needed for jQuery 1.8+ }); } }; // restore headers ts.restoreHeaders = function(table){ var c = $(table)[0].config; // don't use c.$headers here in case header cells were swapped c.$table.find(c.selectorHeaders).each(function(i){ // only restore header cells if it is wrapped // because this is also used by the updateAll method if ($(this).find('.' + ts.css.headerIn).length){ $(this).html( c.headerContent[i] ); } }); }; ts.destroy = function(table, removeClasses, callback){ table = $(table)[0]; if (!table.hasInitialized) { return; } // remove all widgets ts.refreshWidgets(table, true, true); var $t = $(table), c = table.config, $h = $t.find('thead:first'), $r = $h.find('tr.' 
+ ts.css.headerRow).removeClass(ts.css.headerRow + ' ' + c.cssHeaderRow), $f = $t.find('tfoot:first > tr').children('th, td'); if (removeClasses === false && $.inArray('uitheme', c.widgets) >= 0) { // reapply uitheme classes, in case we want to maintain appearance $t.trigger('applyWidgetId', ['uitheme']); $t.trigger('applyWidgetId', ['zebra']); } // remove widget added rows, just in case $h.find('tr').not($r).remove(); // disable tablesorter $t .removeData('tablesorter') .unbind('sortReset update updateAll updateRows updateCell addRows updateComplete sorton appendCache updateCache applyWidgetId applyWidgets refreshWidgets destroy mouseup mouseleave keypress sortBegin sortEnd resetToLoadState '.split(' ').join(c.namespace + ' ')); c.$headers.add($f) .removeClass( [ts.css.header, c.cssHeader, c.cssAsc, c.cssDesc, ts.css.sortAsc, ts.css.sortDesc, ts.css.sortNone].join(' ') ) .removeAttr('data-column') .removeAttr('aria-label') .attr('aria-disabled', 'true'); $r.find(c.selectorSort).unbind('mousedown mouseup keypress '.split(' ').join(c.namespace + ' ')); ts.restoreHeaders(table); $t.toggleClass(ts.css.table + ' ' + c.tableClass + ' tablesorter-' + c.theme, removeClasses === false); // clear flag in case the plugin is initialized again table.hasInitialized = false; delete table.config.cache; if (typeof callback === 'function') { callback(table); } }; // *** sort functions *** // regex used in natural sort ts.regex = { chunk : /(^([+\-]?(?:0|[1-9]\d*)(?:\.\d*)?(?:[eE][+\-]?\d+)?)?$|^0x[0-9a-f]+$|\d+)/gi, // chunk/tokenize numbers & letters chunks: /(^\\0|\\0$)/, // replace chunks @ ends hex: /^0x[0-9a-f]+$/i // hex }; // Natural sort - https://github.com/overset/javascript-natural-sort (date sorting removed) // this function will only accept strings, or you'll see "TypeError: undefined is not a function" // I could add a = a.toString(); b = b.toString(); but it'll slow down the sort overall ts.sortNatural = function(a, b) { if (a === b) { return 0; } var xN, xD, yN, yD, xF, yF, i, mx, r = ts.regex; // first try and sort Hex codes if (r.hex.test(b)) { xD = parseInt(a.match(r.hex), 16); yD = parseInt(b.match(r.hex), 16); if ( xD < yD ) { return -1; } if ( xD > yD ) { return 1; } } // chunk/tokenize xN = a.replace(r.chunk, '\\0$1\\0').replace(r.chunks, '').split('\\0'); yN = b.replace(r.chunk, '\\0$1\\0').replace(r.chunks, '').split('\\0'); mx = Math.max(xN.length, yN.length); // natural sorting through split numeric strings and default strings for (i = 0; i < mx; i++) { // find floats not starting with '0', string or 0 if not defined xF = isNaN(xN[i]) ? xN[i] || 0 : parseFloat(xN[i]) || 0; yF = isNaN(yN[i]) ? yN[i] || 0 : parseFloat(yN[i]) || 0; // handle numeric vs string comparison - number < string - (Kyle Adams) if (isNaN(xF) !== isNaN(yF)) { return (isNaN(xF)) ? 1 : -1; } // rely on string comparison if different types - i.e. '02' < 2 != '02' < '2' if (typeof xF !== typeof yF) { xF += ''; yF += ''; } if (xF < yF) { return -1; } if (xF > yF) { return 1; } } return 0; }; ts.sortNaturalAsc = function(a, b, col, table, c) { if (a === b) { return 0; } var e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : -e || -1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : e || 1; } return ts.sortNatural(a, b); }; ts.sortNaturalDesc = function(a, b, col, table, c) { if (a === b) { return 0; } var e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 
-1 : 1) : e || 1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : -e || -1; } return ts.sortNatural(b, a); }; // basic alphabetical sort ts.sortText = function(a, b) { return a > b ? 1 : (a < b ? -1 : 0); }; // return text string value by adding up ascii value // so the text is somewhat sorted when using a digital sort // this is NOT an alphanumeric sort ts.getTextValue = function(a, num, mx) { if (mx) { // make sure the text value is greater than the max numerical value (mx) var i, l = a ? a.length : 0, n = mx + num; for (i = 0; i < l; i++) { n += a.charCodeAt(i); } return num * n; } return 0; }; ts.sortNumericAsc = function(a, b, num, mx, col, table) { if (a === b) { return 0; } var c = table.config, e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : -e || -1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : e || 1; } if (isNaN(a)) { a = ts.getTextValue(a, num, mx); } if (isNaN(b)) { b = ts.getTextValue(b, num, mx); } return a - b; }; ts.sortNumericDesc = function(a, b, num, mx, col, table) { if (a === b) { return 0; } var c = table.config, e = c.string[ (c.empties[col] || c.emptyTo ) ]; if (a === '' && e !== 0) { return typeof e === 'boolean' ? (e ? -1 : 1) : e || 1; } if (b === '' && e !== 0) { return typeof e === 'boolean' ? (e ? 1 : -1) : -e || -1; } if (isNaN(a)) { a = ts.getTextValue(a, num, mx); } if (isNaN(b)) { b = ts.getTextValue(b, num, mx); } return b - a; }; ts.sortNumeric = function(a, b) { return a - b; }; // used when replacing accented characters during sorting ts.characterEquivalents = { "a" : "\u00e1\u00e0\u00e2\u00e3\u00e4\u0105\u00e5", // áàâãäąå "A" : "\u00c1\u00c0\u00c2\u00c3\u00c4\u0104\u00c5", // ÁÀÂÃÄĄÅ "c" : "\u00e7\u0107\u010d", // çćč "C" : "\u00c7\u0106\u010c", // ÇĆČ "e" : "\u00e9\u00e8\u00ea\u00eb\u011b\u0119", // éèêëěę "E" : "\u00c9\u00c8\u00ca\u00cb\u011a\u0118", // ÉÈÊËĚĘ "i" : "\u00ed\u00ec\u0130\u00ee\u00ef\u0131", // íìİîïı "I" : "\u00cd\u00cc\u0130\u00ce\u00cf", // ÍÌİÎÏ "o" : "\u00f3\u00f2\u00f4\u00f5\u00f6", // óòôõö "O" : "\u00d3\u00d2\u00d4\u00d5\u00d6", // ÓÒÔÕÖ "ss": "\u00df", // ß (s sharp) "SS": "\u1e9e", // ẞ (Capital sharp s) "u" : "\u00fa\u00f9\u00fb\u00fc\u016f", // úùûüů "U" : "\u00da\u00d9\u00db\u00dc\u016e" // ÚÙÛÜŮ }; ts.replaceAccents = function(s) { var a, acc = '[', eq = ts.characterEquivalents; if (!ts.characterRegex) { ts.characterRegexArray = {}; for (a in eq) { if (typeof a === 'string') { acc += eq[a]; ts.characterRegexArray[a] = new RegExp('[' + eq[a] + ']', 'g'); } } ts.characterRegex = new RegExp(acc + ']'); } if (ts.characterRegex.test(s)) { for (a in eq) { if (typeof a === 'string') { s = s.replace( ts.characterRegexArray[a], a ); } } } return s; }; // *** utilities *** ts.isValueInArray = function(column, arry) { var indx, len = arry.length; for (indx = 0; indx < len; indx++) { if (arry[indx][0] === column) { return indx; } } return -1; }; ts.addParser = function(parser) { var i, l = ts.parsers.length, a = true; for (i = 0; i < l; i++) { if (ts.parsers[i].id.toLowerCase() === parser.id.toLowerCase()) { a = false; } } if (a) { ts.parsers.push(parser); } }; ts.getParserById = function(name) { /*jshint eqeqeq:false */ if (name == 'false') { return false; } var i, l = ts.parsers.length; for (i = 0; i < l; i++) { if (ts.parsers[i].id.toLowerCase() === (name.toString()).toLowerCase()) { return ts.parsers[i]; } } return false; }; ts.addWidget = function(widget) { ts.widgets.push(widget); }; 
ts.hasWidget = function(table, name){ table = $(table); return table.length && table[0].config && table[0].config.widgetInit[name] || false; }; ts.getWidgetById = function(name) { var i, w, l = ts.widgets.length; for (i = 0; i < l; i++) { w = ts.widgets[i]; if (w && w.hasOwnProperty('id') && w.id.toLowerCase() === name.toLowerCase()) { return w; } } }; ts.applyWidget = function(table, init) { table = $(table)[0]; // in case this is called externally var c = table.config, wo = c.widgetOptions, widgets = [], time, w, wd; // prevent numerous consecutive widget applications if (init !== false && table.hasInitialized && (table.isApplyingWidgets || table.isUpdating)) { return; } if (c.debug) { time = new Date(); } if (c.widgets.length) { table.isApplyingWidgets = true; // ensure unique widget ids c.widgets = $.grep(c.widgets, function(v, k){ return $.inArray(v, c.widgets) === k; }); // build widget array & add priority as needed $.each(c.widgets || [], function(i,n){ wd = ts.getWidgetById(n); if (wd && wd.id) { // set priority to 10 if not defined if (!wd.priority) { wd.priority = 10; } widgets[i] = wd; } }); // sort widgets by priority widgets.sort(function(a, b){ return a.priority < b.priority ? -1 : a.priority === b.priority ? 0 : 1; }); // add/update selected widgets $.each(widgets, function(i,w){ if (w) { if (init || !(c.widgetInit[w.id])) { // set init flag first to prevent calling init more than once (e.g. pager) c.widgetInit[w.id] = true; if (w.hasOwnProperty('options')) { wo = table.config.widgetOptions = $.extend( true, {}, w.options, wo ); } if (w.hasOwnProperty('init')) { w.init(table, w, c, wo); } } if (!init && w.hasOwnProperty('format')) { w.format(table, c, wo, false); } } }); } setTimeout(function(){ table.isApplyingWidgets = false; }, 0); if (c.debug) { w = c.widgets.length; benchmark("Completed " + (init === true ? "initializing " : "applying ") + w + " widget" + (w !== 1 ? "s" : ""), time); } }; ts.refreshWidgets = function(table, doAll, dontapply) { table = $(table)[0]; // see issue #243 var i, c = table.config, cw = c.widgets, w = ts.widgets, l = w.length; // remove previous widgets for (i = 0; i < l; i++){ if ( w[i] && w[i].id && (doAll || $.inArray( w[i].id, cw ) < 0) ) { if (c.debug) { log( 'Refreshing widgets: Removing "' + w[i].id + '"' ); } // only remove widgets that have been initialized - fixes #442 if (w[i].hasOwnProperty('remove') && c.widgetInit[w[i].id]) { w[i].remove(table, c, c.widgetOptions); c.widgetInit[w[i].id] = false; } } } if (dontapply !== true) { ts.applyWidget(table, doAll); } }; // get sorter, string, empty, etc options for each column from // jQuery data, metadata, header option or header class name ("sorter-false") // priority = jQuery data > meta > headers option > header class name ts.getData = function(h, ch, key) { var val = '', $h = $(h), m, cl; if (!$h.length) { return ''; } m = $.metadata ? 
$h.metadata() : false; cl = ' ' + ($h.attr('class') || ''); if (typeof $h.data(key) !== 'undefined' || typeof $h.data(key.toLowerCase()) !== 'undefined'){ // "data-lockedOrder" is assigned to "lockedorder"; but "data-locked-order" is assigned to "lockedOrder" // "data-sort-initial-order" is assigned to "sortInitialOrder" val += $h.data(key) || $h.data(key.toLowerCase()); } else if (m && typeof m[key] !== 'undefined') { val += m[key]; } else if (ch && typeof ch[key] !== 'undefined') { val += ch[key]; } else if (cl !== ' ' && cl.match(' ' + key + '-')) { // include sorter class name "sorter-text", etc; now works with "sorter-my-custom-parser" val = cl.match( new RegExp('\\s' + key + '-([\\w-]+)') )[1] || ''; } return $.trim(val); }; ts.formatFloat = function(s, table) { if (typeof s !== 'string' || s === '') { return s; } // allow using formatFloat without a table; defaults to US number format var i, t = table && table.config ? table.config.usNumberFormat !== false : typeof table !== "undefined" ? table : true; if (t) { // US Format - 1,234,567.89 -> 1234567.89 s = s.replace(/,/g,''); } else { // German Format = 1.234.567,89 -> 1234567.89 // French Format = 1 234 567,89 -> 1234567.89 s = s.replace(/[\s|\.]/g,'').replace(/,/g,'.'); } if(/^\s*\([.\d]+\)/.test(s)) { // make (#) into a negative number -> (10) = -10 s = s.replace(/^\s*\(([.\d]+)\)/, '-$1'); } i = parseFloat(s); // return the text instead of zero return isNaN(i) ? $.trim(s) : i; }; ts.isDigit = function(s) { // replace all unwanted chars and match return isNaN(s) ? (/^[\-+(]?\d+[)]?$/).test(s.toString().replace(/[,.'"\s]/g, '')) : true; }; }() }); // make shortcut var ts = $.tablesorter; // extend plugin scope $.fn.extend({ tablesorter: ts.construct }); // add default parsers ts.addParser({ id: 'no-parser', is: function() { return false; }, format: function() { return ''; }, type: 'text' }); ts.addParser({ id: "text", is: function() { return true; }, format: function(s, table) { var c = table.config; if (s) { s = $.trim( c.ignoreCase ? s.toLocaleLowerCase() : s ); s = c.sortLocaleCompare ? ts.replaceAccents(s) : s; } return s; }, type: "text" }); ts.addParser({ id: "digit", is: function(s) { return ts.isDigit(s); }, format: function(s, table) { var n = ts.formatFloat((s || '').replace(/[^\w,. \-()]/g, ""), table); return s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s; }, type: "numeric" }); ts.addParser({ id: "currency", is: function(s) { return (/^\(?\d+[\u00a3$\u20ac\u00a4\u00a5\u00a2?.]|[\u00a3$\u20ac\u00a4\u00a5\u00a2?.]\d+\)?$/).test((s || '').replace(/[+\-,. ]/g,'')); // £$€¤¥¢ }, format: function(s, table) { var n = ts.formatFloat((s || '').replace(/[^\w,. \-()]/g, ""), table); return s && typeof n === 'number' ? n : s ? $.trim( s && table.config.ignoreCase ? s.toLocaleLowerCase() : s ) : s; }, type: "numeric" }); ts.addParser({ id: "ipAddress", is: function(s) { return (/^\d{1,3}[\.]\d{1,3}[\.]\d{1,3}[\.]\d{1,3}$/).test(s); }, format: function(s, table) { var i, a = s ? s.split(".") : '', r = "", l = a.length; for (i = 0; i < l; i++) { r += ("00" + a[i]).slice(-3); } return s ? ts.formatFloat(r, table) : s; }, type: "numeric" }); ts.addParser({ id: "url", is: function(s) { return (/^(https?|ftp|file):\/\//).test(s); }, format: function(s) { return s ? 
$.trim(s.replace(/(https?|ftp|file):\/\//, '')) : s; }, type: "text" }); ts.addParser({ id: "isoDate", is: function(s) { return (/^\d{4}[\/\-]\d{1,2}[\/\-]\d{1,2}/).test(s); }, format: function(s, table) { return s ? ts.formatFloat((s !== "") ? (new Date(s.replace(/-/g, "/")).getTime() || s) : "", table) : s; }, type: "numeric" }); ts.addParser({ id: "percent", is: function(s) { return (/(\d\s*?%|%\s*?\d)/).test(s) && s.length < 15; }, format: function(s, table) { return s ? ts.formatFloat(s.replace(/%/g, ""), table) : s; }, type: "numeric" }); ts.addParser({ id: "usLongDate", is: function(s) { // two digit years are not allowed cross-browser // Jan 01, 2013 12:34:56 PM or 01 Jan 2013 return (/^[A-Z]{3,10}\.?\s+\d{1,2},?\s+(\d{4})(\s+\d{1,2}:\d{2}(:\d{2})?(\s+[AP]M)?)?$/i).test(s) || (/^\d{1,2}\s+[A-Z]{3,10}\s+\d{4}/i).test(s); }, format: function(s, table) { return s ? ts.formatFloat( (new Date(s.replace(/(\S)([AP]M)$/i, "$1 $2")).getTime() || s), table) : s; }, type: "numeric" }); ts.addParser({ id: "shortDate", // "mmddyyyy", "ddmmyyyy" or "yyyymmdd" is: function(s) { // testing for ##-##-#### or ####-##-##, so it's not perfect; time can be included return (/(^\d{1,2}[\/\s]\d{1,2}[\/\s]\d{4})|(^\d{4}[\/\s]\d{1,2}[\/\s]\d{1,2})/).test((s || '').replace(/\s+/g," ").replace(/[\-.,]/g, "/")); }, format: function(s, table, cell, cellIndex) { if (s) { var c = table.config, ci = c.$headers.filter('[data-column=' + cellIndex + ']:last'), format = ci.length && ci[0].dateFormat || ts.getData( ci, ts.getColumnData( table, c.headers, cellIndex ), 'dateFormat') || c.dateFormat; s = s.replace(/\s+/g," ").replace(/[\-.,]/g, "/"); // escaped - because JSHint in Firefox was showing it as an error if (format === "mmddyyyy") { s = s.replace(/(\d{1,2})[\/\s](\d{1,2})[\/\s](\d{4})/, "$3/$1/$2"); } else if (format === "ddmmyyyy") { s = s.replace(/(\d{1,2})[\/\s](\d{1,2})[\/\s](\d{4})/, "$3/$2/$1"); } else if (format === "yyyymmdd") { s = s.replace(/(\d{4})[\/\s](\d{1,2})[\/\s](\d{1,2})/, "$1/$2/$3"); } } return s ? ts.formatFloat( (new Date(s).getTime() || s), table) : s; }, type: "numeric" }); ts.addParser({ id: "time", is: function(s) { return (/^(([0-2]?\d:[0-5]\d)|([0-1]?\d:[0-5]\d\s?([AP]M)))$/i).test(s); }, format: function(s, table) { return s ? ts.formatFloat( (new Date("2000/01/01 " + s.replace(/(\S)([AP]M)$/i, "$1 $2")).getTime() || s), table) : s; }, type: "numeric" }); ts.addParser({ id: "metadata", is: function() { return false; }, format: function(s, table, cell) { var c = table.config, p = (!c.parserMetadataName) ? 'sortValue' : c.parserMetadataName; return $(cell).metadata()[p]; }, type: "numeric" }); // add default widgets ts.addWidget({ id: "zebra", priority: 90, format: function(table, c, wo) { var $tb, $tv, $tr, row, even, time, k, l, child = new RegExp(c.cssChildRow, 'i'), b = c.$tbodies; if (c.debug) { time = new Date(); } for (k = 0; k < b.length; k++ ) { // loop through the visible rows $tb = b.eq(k); l = $tb.children('tr').length; if (l > 1) { row = 0; $tv = $tb.children('tr:visible').not(c.selectorRemove); // reverted back to using jQuery each - strangely it's the fastest method /*jshint loopfunc:true */ $tv.each(function(){ $tr = $(this); // style child rows the same way the parent row was styled if (!child.test(this.className)) { row++; } even = (row % 2 === 0); $tr.removeClass(wo.zebra[even ? 1 : 0]).addClass(wo.zebra[even ? 
0 : 1]); }); } } if (c.debug) { ts.benchmark("Applying Zebra widget", time); } }, remove: function(table, c, wo){ var k, $tb, b = c.$tbodies, rmv = (wo.zebra || [ "even", "odd" ]).join(' '); for (k = 0; k < b.length; k++ ){ $tb = $.tablesorter.processTbody(table, b.eq(k), true); // remove tbody $tb.children().removeClass(rmv); $.tablesorter.processTbody(table, $tb, false); // restore tbody } } }); })(jQuery);<|fim▁end|>
x = dir ? a : b; y = dir ? b : a; // text sort function
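// For an ascending sort (dir === true) x and y keep the a/b order; for a
// descending sort they are swapped, so the single text-sorter call that
// follows can serve both directions without a second code path.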
<|file_name|>flag.ts<|end_file_name|><|fim▁begin|>export class Flag {<|fim▁hole|> public sDescription: string, public type: string, public guiGroup: string ){} }<|fim▁end|>
constructor( public sFlag: string, public lFlag: string,
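// Hypothetical usage sketch (argument values are illustrative; the remaining
// constructor parameters sDescription, type and guiGroup come from the class
// definition above):
//   const verbose = new Flag('-v', '--verbose', 'Enable verbose output', 'boolean', 'general');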
<|file_name|>test_remote_runners.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # XXX: FabricRunner import depends on config being setup. import st2tests.config as tests_config tests_config.parse_args() import mock from unittest2 import TestCase from st2actions.runners.fabric_runner import BaseFabricRunner from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED, LIVEACTION_STATUS_FAILED from st2common.models.system.action import RemoteScriptAction from st2common.models.system.action import FabricRemoteScriptAction class FabricRunner(BaseFabricRunner): def run(self): pass class FabricRunnerTestCase(TestCase): def test_get_env_vars(self): runner = FabricRunner('id') env_vars = {'key1': 'val1', 'key2': 'val2'} env_vars.update(runner._get_common_action_env_variables()) runner.runner_parameters = {'hosts': 'localhost', 'env': env_vars} # This is awful, context is just set at some point, no idea when and # where MOVE IT TO CONSTRUCTOR!11 runner.context = {} runner.pre_run() actual_env_vars = runner._get_env_vars() self.assertEqual(actual_env_vars, env_vars) class TestFabricRunnerResultStatus(TestCase): def test_pf_ok_all_success(self): result = { '1': {'succeeded': True}, '2': {'succeeded': True}, '3': {'succeeded': True}, } self.assertEquals(LIVEACTION_STATUS_SUCCEEDED, FabricRunner._get_result_status(result, True)) def test_pf_ok_some_success(self): result = { '1': {'succeeded': False}, '2': {'succeeded': True}, '3': {'succeeded': False}, } self.assertEquals(LIVEACTION_STATUS_SUCCEEDED, FabricRunner._get_result_status(result, True)) result = { '1': {'succeeded': True}, '2': {'succeeded': False}, '3': {'succeeded': False}, } self.assertEquals(LIVEACTION_STATUS_SUCCEEDED, FabricRunner._get_result_status(result, True)) result = { '1': {'succeeded': False}, '2': {'succeeded': False}, '3': {'succeeded': True}, } self.assertEquals(LIVEACTION_STATUS_SUCCEEDED, FabricRunner._get_result_status(result, True)) def test_pf_ok_all_fail(self): result = { '1': {'succeeded': False}, '2': {'succeeded': False}, '3': {'succeeded': False}, }<|fim▁hole|> def test_pf_not_ok_all_success(self): result = { '1': {'succeeded': True}, '2': {'succeeded': True}, '3': {'succeeded': True}, } self.assertEquals(LIVEACTION_STATUS_SUCCEEDED, FabricRunner._get_result_status(result, False)) def test_pf_not_ok_some_success(self): result = { '1': {'succeeded': False}, '2': {'succeeded': True}, '3': {'succeeded': False}, } self.assertEquals(LIVEACTION_STATUS_FAILED, FabricRunner._get_result_status(result, False)) result = { '1': {'succeeded': True}, '2': {'succeeded': False}, '3': {'succeeded': False}, } self.assertEquals(LIVEACTION_STATUS_FAILED, FabricRunner._get_result_status(result, False)) result = { '1': {'succeeded': False}, '2': {'succeeded': False}, '3': {'succeeded': 
True}, } self.assertEquals(LIVEACTION_STATUS_FAILED, FabricRunner._get_result_status(result, False)) def test_pf_not_ok_all_fail(self): result = { '1': {'succeeded': False}, '2': {'succeeded': False}, '3': {'succeeded': False}, } self.assertEquals(LIVEACTION_STATUS_FAILED, FabricRunner._get_result_status(result, False)) class RemoteScriptActionTestCase(TestCase): def test_parameter_formatting(self): # Only named args named_args = {'--foo1': 'bar1', '--foo2': 'bar2', '--foo3': True, '--foo4': False} action = RemoteScriptAction(name='foo', action_exec_id='dummy', script_local_path_abs='test.py', script_local_libs_path_abs='/', remote_dir='/tmp', named_args=named_args, positional_args=None) self.assertEqual(action.command, '/tmp/test.py --foo1=bar1 --foo2=bar2 --foo3') class FabricRemoteScriptActionTestCase(TestCase): @mock.patch('st2common.models.system.action.run') @mock.patch('st2common.models.system.action.put') @mock.patch('st2common.models.system.action.shell_env') @mock.patch('st2common.models.system.action.settings') def test_settings_are_used(self, mock_settings, mock_shell_env, mock_put, mock_run): # Test that the remote script action uses fabric environment and authentication settings named_args = {} action = FabricRemoteScriptAction(name='foo', action_exec_id='dummy', script_local_path_abs='test.py', script_local_libs_path_abs='/', remote_dir='/tmp', named_args=named_args, positional_args=None) task = action.get_fabric_task() self.assertEqual(mock_settings.call_count, 0) self.assertEqual(mock_shell_env.call_count, 0) task.run() self.assertEqual(mock_settings.call_count, 1) self.assertEqual(mock_shell_env.call_count, 1)<|fim▁end|>
self.assertEquals(LIVEACTION_STATUS_FAILED, FabricRunner._get_result_status(result, True))
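# The completed assertion closes test_pf_ok_all_fail: with partial failures
# tolerated (second argument True), the all-hosts-failed result is the only one
# mapped to LIVEACTION_STATUS_FAILED, while the test_pf_not_ok_* cases require
# every host to succeed before reporting LIVEACTION_STATUS_SUCCEEDED.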
<|file_name|>updateScale.ts<|end_file_name|><|fim▁begin|>import { PickScaleConfigWithoutType, ScaleConfigWithoutType } from './types/ScaleConfig'; import { DefaultThresholdInput, D3Scale, PickD3Scale } from './types/Scale'; import { StringLike, DefaultOutput } from './types/Base'; import scaleOperator, { ALL_OPERATORS } from './operators/scaleOperator'; const applyAllOperators = scaleOperator(...ALL_OPERATORS); // Overload function signature for more strict typing, e.g., // If the scale is a ScaleLinear, the config is a linear config. function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'linear', Output>, config: PickScaleConfigWithoutType<'linear', Output>, ): PickD3Scale<'linear', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike,<|fim▁hole|>): PickD3Scale<'log', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'pow', Output>, config: PickScaleConfigWithoutType<'pow', Output>, ): PickD3Scale<'pow', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'sqrt', Output>, config: PickScaleConfigWithoutType<'sqrt', Output>, ): PickD3Scale<'sqrt', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'symlog', Output>, config: PickScaleConfigWithoutType<'symlog', Output>, ): PickD3Scale<'symlog', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'time', Output>, config: PickScaleConfigWithoutType<'time', Output>, ): PickD3Scale<'time', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'utc', Output>, config: PickScaleConfigWithoutType<'utc', Output>, ): PickD3Scale<'utc', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'quantile', Output>, config: PickScaleConfigWithoutType<'quantile', Output>, ): PickD3Scale<'quantile', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'quantize', Output>, config: PickScaleConfigWithoutType<'quantize', Output>, ): PickD3Scale<'quantize', Output>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'threshold', Output, StringLike, ThresholdInput>, config: PickScaleConfigWithoutType<'threshold', Output, StringLike, ThresholdInput>, ): PickD3Scale<'threshold', Output, StringLike, ThresholdInput>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: 
PickD3Scale<'ordinal', Output, DiscreteInput>, config: PickScaleConfigWithoutType<'ordinal', Output, DiscreteInput>, ): PickD3Scale<'ordinal', Output, DiscreteInput>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'point', Output, DiscreteInput>, config: PickScaleConfigWithoutType<'point', Output, DiscreteInput>, ): PickD3Scale<'point', Output, DiscreteInput>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'band', Output, DiscreteInput>, config: PickScaleConfigWithoutType<'band', Output, DiscreteInput>, ): PickD3Scale<'band', Output, DiscreteInput>; function updateScale< Output = DefaultOutput, DiscreteInput extends StringLike = StringLike, ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput, Scale extends D3Scale<Output, DiscreteInput, ThresholdInput> = D3Scale< Output, DiscreteInput, ThresholdInput > >(scale: Scale, config?: undefined): Scale; // Actual implementation function updateScale< Output, DiscreteInput extends StringLike, ThresholdInput extends DefaultThresholdInput >( scale: D3Scale<Output, DiscreteInput, ThresholdInput>, config?: ScaleConfigWithoutType<Output, DiscreteInput, ThresholdInput>, ) { return applyAllOperators(scale.copy(), config); } export default updateScale;<|fim▁end|>
ThresholdInput extends DefaultThresholdInput = DefaultThresholdInput >( scale: PickD3Scale<'log', Output>, config: PickScaleConfigWithoutType<'log', Output>,
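The updateScale.ts row above leans on a ladder of overload signatures so that each scale kind returns its own narrowed type from one shared implementation. A minimal sketch of the same narrow-by-overload idea, transposed to Python's typing.overload — the scale classes here are illustrative stand-ins, not part of the original file:

from typing import Union, overload


class LinearScale:
    """Illustrative stand-in for a d3 linear scale."""


class LogScale:
    """Illustrative stand-in for a d3 log scale."""


@overload
def update_scale(scale: LinearScale, config: dict) -> LinearScale: ...
@overload
def update_scale(scale: LogScale, config: dict) -> LogScale: ...


def update_scale(scale, config):
    # one implementation serves every overload, as in updateScale.ts
    for key, value in config.items():
        setattr(scale, key, value)
    return scale


scale = update_scale(LinearScale(), {"domain": (0, 1)})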
<|file_name|>greenlets.py<|end_file_name|><|fim▁begin|>import distutils.version try: import greenlet getcurrent = greenlet.greenlet.getcurrent<|fim▁hole|> preserves_excinfo = (distutils.version.LooseVersion(greenlet.__version__) >= distutils.version.LooseVersion('0.3.2')) greenlet = greenlet.greenlet except ImportError, e: raise try: from py.magic import greenlet getcurrent = greenlet.getcurrent GreenletExit = greenlet.GreenletExit preserves_excinfo = False except ImportError: try: from stackless import greenlet getcurrent = greenlet.getcurrent GreenletExit = greenlet.GreenletExit preserves_excinfo = False except ImportError: try: from support.stacklesss import greenlet, getcurrent, GreenletExit preserves_excinfo = False (greenlet, getcurrent, GreenletExit) # silence pyflakes except ImportError, e: raise ImportError("Unable to find an implementation of greenlet.")<|fim▁end|>
GreenletExit = greenlet.greenlet.GreenletExit
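greenlets.py above walks a chain of progressively older greenlet providers before giving up with a single clear error. The shape of that fallback-import pattern, reduced to a runnable sketch (the module names here are placeholders for the real candidates):

# Fallback-import sketch: try the preferred provider first, then a legacy
# one with the same API, and raise one summary error only if all fail.
try:
    import json as impl  # preferred provider
except ImportError:
    try:
        import simplejson as impl  # older fallback exposing the same API
    except ImportError:
        raise ImportError("Unable to find an implementation of the API.")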
<|file_name|>services.py<|end_file_name|><|fim▁begin|># Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain<|fim▁hole|># a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation LOG = logging.getLogger(__name__) class NovaServices(utils.NovaScenario): """Benchmark scenarios for Nova agents.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure() def list_services(self, host=None, binary=None): """List all nova services. Measure the "nova service-list" command performance. :param host: List nova services on host :param binary: List nova services matching given binary """ self._list_services(host, binary)<|fim▁end|>
<|file_name|>helloworld.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.3.3 (function() { var CMinion, Minion, port, restify, server; restify = require("restify"); CMinion = require("../../minion"); Minion = new CMinion(); server = restify.createServer(); server.use(restify.queryParser()); server.get("/", function(req, res) { res.send("Hello World."); return Minion.logRequest(req.query); }); port = process.env.PORT || 3000; Minion.started();<|fim▁hole|> return console.log("Listening on port " + port); }); }).call(this);<|fim▁end|>
server.listen(port, function() {
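The helloworld.js row is a one-endpoint restify server whose hole hides the listen call. For comparison, an equivalent hello server in Python's standard library; port 3000 mirrors the original's default and is otherwise arbitrary:

from http.server import BaseHTTPRequestHandler, HTTPServer


class Hello(BaseHTTPRequestHandler):
    def do_GET(self):
        body = b"Hello World."
        self.send_response(200)
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)


if __name__ == "__main__":
    # serve_forever blocks, like server.listen in the restify version
    HTTPServer(("127.0.0.1", 3000), Hello).serve_forever()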
<|file_name|>WagonManager.java<|end_file_name|><|fim▁begin|>package org.apache.maven.artifact.manager; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ <|fim▁hole|> import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.TransferFailedException; import org.apache.maven.wagon.authentication.AuthenticationInfo; import org.apache.maven.wagon.proxy.ProxyInfo; /** * Manages <a href="https://maven.apache.org/wagon">Wagon</a> related operations in Maven. * * @author <a href="[email protected]">Michal Maczka </a> */ @Deprecated public interface WagonManager extends org.apache.maven.repository.legacy.WagonManager { /** * this method is only here for backward compat (project-info-reports:dependencies) * the default implementation will return an empty AuthenticationInfo */ AuthenticationInfo getAuthenticationInfo( String id ); ProxyInfo getProxy( String protocol ); void getArtifact( Artifact artifact, ArtifactRepository repository ) throws TransferFailedException, ResourceDoesNotExistException; void getArtifact( Artifact artifact, List<ArtifactRepository> remoteRepositories ) throws TransferFailedException, ResourceDoesNotExistException; ArtifactRepository getMirrorRepository( ArtifactRepository repository ); }<|fim▁end|>
import java.util.List;
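WagonManager above is an interface-only contract for artifact retrieval. Python expresses the same kind of contract with the abc module; the fetcher names below are illustrative, not Maven API:

from abc import ABC, abstractmethod


class ArtifactFetcher(ABC):
    @abstractmethod
    def get_artifact(self, artifact: str, repository: str) -> bytes:
        """Fetch one artifact from one repository."""


class NullFetcher(ArtifactFetcher):
    def get_artifact(self, artifact: str, repository: str) -> bytes:
        return b""  # a real implementation would download here


fetcher = NullFetcher()  # ABC refuses to instantiate incomplete subclasses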
<|file_name|>db_test.go<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Kagucho <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package db import "testing" func TestDB(t *testing.T) { var db DB if !t.Run(`Prepare`, func(t *testing.T) { var err error db, err = Prepare() if err != nil { t.Fatal(err) } }) { t.FailNow() } <|fim▁hole|> t.Run(`QueryMember`, db.testQueryMember) t.Run(`QueryMemberGraph`, db.testQueryMemberGraph) t.Run(`QueryMembers`, db.testQueryMembers) t.Run(`QueryMembersCount`, db.testQueryMembersCount) t.Run(`QueryOfficer`, db.testQueryOfficer) t.Run(`QueryOfficerName`, db.testQueryOfficerName) t.Run(`QueryOfficers`, db.testQueryOfficers) t.Run(`GetScope`, db.testGetScope) t.Run(`Close`, func(t *testing.T) { if err := db.Close(); err != nil { t.Error(err) } }) }<|fim▁end|>
t.Run(`QueryClub`, db.testQueryClub) t.Run(`QueryClubName`, db.testQueryClubName) t.Run(`QueryClubNames`, db.testQueryClubNames) t.Run(`QueryClubs`, db.testQueryClubs)
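db_test.go runs one Prepare step, fails fast if it breaks, and then drives a list of named subtests against the shared handle. unittest's subTest is the closest Python analogue of t.Run; the in-memory dict is a stand-in for the real database:

import unittest


class DBTest(unittest.TestCase):
    def test_queries(self):
        db = {"club": "go", "member": "kagucho"}  # stand-in for the prepared DB
        for name in ("club", "member"):
            with self.subTest(query=name):  # each named check reports separately
                self.assertIn(name, db)


if __name__ == "__main__":
    unittest.main()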
<|file_name|>multiple_cu_test.go<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////// // Copyright 2016 Siva Chandra // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////// package garf import ( "testing" ) func TestDebugInfoMultipleCU(t *testing.T) { dwData, err := LoadDwData("test_data/multiple_cu_linux_x86_64.exe") if err != nil { t.Errorf("Error loading DWARF from file.\n%s", err.Error()) return } compUnits, err := dwData.CompUnits() if err != nil { t.Errorf("Error reading comp units.\n%s", err.Error()) return } if len(compUnits) != 3 { t.Errorf("Wrong number of comp units: %d", len(compUnits)) return } die, err := compUnits[0].DIETree() if err != nil { t.Errorf("Error reading DIE tree of comp unit 0.\n%s", err.Error()) return } if die.Tag != DW_TAG_compile_unit { t.Errorf("Wrong DIE tag for comp unit 0.") } die, err = compUnits[1].DIETree() if err != nil { t.Errorf("Error reading DIE tree of comp unit 1.\n%s", err.Error()) return } if die.Tag != DW_TAG_compile_unit { t.Errorf("Wrong DIE tag for comp unit 1.") } if len(die.Children) != 3 { t.Errorf("Wrong number of children for the root of the DIE tree of comp unit 1.") } childDie := die.Children[2].Children[0] if childDie.Tag != DW_TAG_formal_parameter { t.Errorf("Wrong tag for a DIE in comp unit 1.") } typeDie := childDie.Attributes[DW_AT_type].Value.(*DIE) if typeDie.Attributes[DW_AT_name].Value.(string) != "int" { t.Errorf("Wrong type name for type DIE in comp unit 1.") } die, err = compUnits[2].DIETree() if err != nil {<|fim▁hole|> t.Errorf("Error reading DIE tree of comp unit 2.\n%s", err.Error()) return } if die.Tag != DW_TAG_compile_unit { t.Errorf("Wrong DIE tag for comp unit 2.") } }<|fim▁end|>
<|file_name|>utils.ts<|end_file_name|><|fim▁begin|>/** * Assumes file is an Angular component if type is javascript/typescript */ export function isPathAngularComponent(path: string): boolean { return /\.ts|js$/i.test(path); } /** * Extract inline template from a component */ export function extractComponentInlineTemplate(contents: string): string { const regExp: RegExp = /template\s*:\s*(["'`])([^\1]*?)\1/; const match = regExp.exec(contents); if (match !== null) { return match[2]; } return ''; }<|fim▁hole|>export function stripBOM(contents: string): string { return contents.trim(); }<|fim▁end|>
<|file_name|>ServiceInstanceBindingRepository.java<|end_file_name|><|fim▁begin|>package com.emc.ecs.servicebroker.repository; import com.emc.ecs.servicebroker.exception.EcsManagementClientException; import com.emc.ecs.servicebroker.service.s3.S3Service; import com.emc.ecs.servicebroker.model.Constants; import com.emc.object.s3.bean.GetObjectResult; import com.emc.object.s3.bean.ListObjectsResult; import com.emc.object.s3.bean.S3Object; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.fasterxml.jackson.databind.module.SimpleModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.servicebroker.model.binding.SharedVolumeDevice; import org.springframework.cloud.servicebroker.model.binding.VolumeDevice; import org.springframework.cloud.servicebroker.model.binding.VolumeMount; import javax.annotation.PostConstruct; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; import static java.lang.String.format; @SuppressWarnings("unused") public class ServiceInstanceBindingRepository { static final Logger logger = LoggerFactory.getLogger(ServiceInstanceBindingRepository.class); public static final String FILENAME_PREFIX = "service-instance-binding"; private final ObjectMapper objectMapper = new ObjectMapper(); { // NOTE -- ideally we would not need this code, but for now, the VolumeMount class has // custom serialization that is not matched with corresponding deserialization, so // deserializing serialized volume mounts doesn't work OOTB. 
SimpleModule module = new SimpleModule(); module.addDeserializer(VolumeMount.DeviceType.class, new DeviceTypeDeserializer()); module.addDeserializer(VolumeMount.Mode.class, new ModeDeserializer()); module.addDeserializer(VolumeDevice.class, new VolumeDeviceDeserializer()); objectMapper.registerModule(module); } @Autowired private S3Service s3; private static String getFilename(String id) { return FILENAME_PREFIX + "/" + id + ".json"; } private static boolean isCorrectFilename (String filename) { return filename.matches(FILENAME_PREFIX + "/.*\\.json"); } private ServiceInstanceBinding findByFilename(String filename) throws IOException { if (!isCorrectFilename(filename)) { String errorMessage = format("Invalid filename of service instance binding provided: %s", filename); throw new IOException(errorMessage); } logger.debug("Loading service instance binding from repository file {}", filename); GetObjectResult<InputStream> input = s3.getObject(filename); return objectMapper.readValue(input.getObject(), ServiceInstanceBinding.class); }<|fim▁hole|> credentials.remove(Constants.S3_URL); credentials.remove(Constants.CREDENTIALS_SECRET_KEY); binding.setCredentials(credentials); return binding; } @PostConstruct public void initialize() throws EcsManagementClientException { logger.info("Service binding file prefix: {}", FILENAME_PREFIX); } public void save(ServiceInstanceBinding binding) throws IOException { String filename = getFilename(binding.getBindingId()); String serialized = objectMapper.writeValueAsString(binding); s3.putObject(filename, serialized); } public ServiceInstanceBinding find(String id) throws IOException { String filename = getFilename(id); return findByFilename(filename); } public ListServiceInstanceBindingsResponse listServiceInstanceBindings(String marker, int pageSize) throws IOException { if (pageSize < 0) { throw new IOException("Page size could not be negative number"); } List<ServiceInstanceBinding> bindings = new ArrayList<>(); ListObjectsResult list = marker != null ? 
s3.listObjects(FILENAME_PREFIX + "/", getFilename(marker), pageSize) : s3.listObjects(FILENAME_PREFIX + "/", null, pageSize); for (S3Object s3Object: list.getObjects()) { String filename = s3Object.getKey(); if (isCorrectFilename(filename)) { ServiceInstanceBinding binding = findByFilename(filename); bindings.add(removeSecretCredentials(binding)); } } ListServiceInstanceBindingsResponse response = new ListServiceInstanceBindingsResponse(bindings); response.setMarker(list.getMarker()); response.setPageSize(list.getMaxKeys()); response.setNextMarker(list.getNextMarker()); return response; } public void delete(String id) { String filename = getFilename(id); s3.deleteObject(filename); } public static class ModeDeserializer extends StdDeserializer<VolumeMount.Mode> { ModeDeserializer() { this(null); } ModeDeserializer(Class<?> vc) { super(vc); } @Override public VolumeMount.Mode deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { JsonNode node = jp.getCodec().readTree(jp); String s = node.asText(); if (s.equals("rw")) { return VolumeMount.Mode.READ_WRITE; } else { return VolumeMount.Mode.READ_ONLY; } } } public static class DeviceTypeDeserializer extends StdDeserializer<VolumeMount.DeviceType> { DeviceTypeDeserializer() { this(null); } DeviceTypeDeserializer(Class<?> vc) { super(vc); } @Override public VolumeMount.DeviceType deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { return VolumeMount.DeviceType.SHARED; } } public static class VolumeDeviceDeserializer extends StdDeserializer<VolumeDevice> { VolumeDeviceDeserializer() { this(null); } VolumeDeviceDeserializer(Class<?> vc) { super(vc); } @Override public VolumeDevice deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { return jp.getCodec().readValue(jp, SharedVolumeDevice.class); } } }<|fim▁end|>
ServiceInstanceBinding removeSecretCredentials(ServiceInstanceBinding binding) { Map<String, Object> credentials = binding.getCredentials();
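The repository above registers custom Jackson deserializers because the broker's volume-mount types do not round-trip through JSON by default. json.loads with an object_hook is the Python counterpart of that module registration; the Binding shape here is illustrative:

import json


class Binding:
    def __init__(self, binding_id, credentials):
        self.binding_id = binding_id
        self.credentials = credentials


def binding_hook(obj):
    # promote dicts that look like bindings; leave everything else untouched
    if "binding_id" in obj:
        return Binding(obj["binding_id"], obj.get("credentials", {}))
    return obj


raw = '{"binding_id": "b-1", "credentials": {"accessKey": "k"}}'
binding = json.loads(raw, object_hook=binding_hook)
assert binding.credentials == {"accessKey": "k"}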
<|file_name|>revision.py<|end_file_name|><|fim▁begin|>from mylibs.transform import *
from mylibs.online import *
from mylibs.index import *
import mylibs.myio
import mylibs.models
import paths
import pickle
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
import math

train_final = '../data/features/extend/train.csv'

class Train:
    def select_dataset(self, path):
        self.ddir = path

    def select_model(self):
        print 'select model'
        #return LogisticRegression()#penalty = 'l2', C=0.3)
        #return GradientBoostingClassifier(n_estimators=21,max_depth=5,loss='deviance')
        self.clf = RandomForestClassifier(n_estimators=35,max_depth = 13, max_features='auto',n_jobs=2)

    def train(self, clf_path):
        X, y, ids = mylibs.myio.load_train(self.ddir)
        print 'train'
        self.clf.fit(X, y)
        self.evaluation(X, y)
        mylibs.models.save(clf_path, self.clf)

    def evaluation(self, X, y):
        print 'evaluation'
        y_prd = self.clf.predict(X)
        m = [[0,0],[0,0]]
        for i in range(len(y)):
            m[y_prd[i]][y[i]] += 1
        print '          \t','real 0\t','real 1'
        print ' predict 0\t',m[0][0],'\t', m[0][1]
        print ' predict 1\t',m[1][0],'\t', m[1][1]
        p = float(m[1][1])/(m[1][1]+m[1][0])
        r = float(m[1][1])/(m[1][1]+m[0][1])
        f1 = 2*r*p/(p+r)
        print ' p\t',p<|fim▁hole|>
print ' r\t',r
        print ' f1\t',f1

#############################################
#######################################################################
dev_final = '../data/features/extend/dev.csv'
dev_result = '../data/results/dev.csv'

class Dev:
    def predict(self, ddir, clf_path):
        clf = mylibs.models.load(clf_path)
        ptor = Predictor()
        self.res = ptor.predict(clf, ddir)

    def save_proba(self, fdir):
        title = ['user_id','item_id','item_catgory','time','label','proba']
        mylibs.myio.write_file(fdir, title, self.res)

    def load_proba(self, fdir):
        dic, title, self.res = mylibs.myio.read_file(fdir)

    def evaluation(self, limit, dev_pos):
        for l in limit:
            pos = []
            pos.extend(self.get_pos(l))
            print 'limit =', l, '\toutput size =', len(pos), '\tpos size =', len(dev_pos)
            self.__evaluation__(pos, dev_pos)

    def get_pos(self, l):
        pos = set()
        for sample in self.res:
            [user_id, item_id, item_category, time, label, proba] = sample
            if float(proba) > l:
                pos.add(user_id+','+item_id)
        return pos

    def __evaluation__(self, pos, dev_pos):
        print ' dev evaluation:'
        c1 = len(pos)
        c2 = len(dev_pos)
        c = 0.0
        for i in pos:
            if dev_pos.has_key(i):
                c += 1
        if c1 == 0:
            print ' precision = error'
        else:
            print ' precision =', c/c1
        if c2 == 0:
            print ' recall = error'
        else:
            print ' recall =', c/c2
        if c1 + c2 == 0:
            print ' f1 score = error'
        else:
            print ' f1 score =', 2*c/(c1+c2)
        print ''

################################
def train():
    t = Train()
    t.select_model()
    t.select_dataset(train_final)
    t.train(paths.clf)

def dev():
    d = Dev()
    d.predict(dev_final, paths.clf)
    d.save_proba(dev_result)
    #d.load_proba(dev_result)
    d.evaluation([-0.1,0.3,0.5,0.53,0.56,0.6,0.63,0.67,0.7],
                 DicBuilder('../data/divid/dev_pos.csv').get_dic(['user_id','item_id'],[]))

if __name__ == '__main__':
    train()
    dev()<|fim▁end|>
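revision.py above tallies a 2x2 confusion matrix by hand and derives precision, recall and F1 from it. The same arithmetic as one self-checking function, written in Python 3 unlike the Python 2 file above:

def prf1(y_true, y_pred):
    tp = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 1)
    fp = sum(1 for t, p in zip(y_true, y_pred) if t == 0 and p == 1)
    fn = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 0)
    precision = tp / (tp + fp) if tp + fp else 0.0
    recall = tp / (tp + fn) if tp + fn else 0.0
    f1 = 2 * precision * recall / (precision + recall) if precision + recall else 0.0
    return precision, recall, f1


p, r, f = prf1([1, 0, 1, 1], [1, 0, 0, 1])
assert (p, round(r, 3), round(f, 3)) == (1.0, 0.667, 0.8)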
<|file_name|>Bar.java<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.example; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement;<|fim▁hole|>@XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class Bar { @XmlAttribute private String name; @XmlAttribute private String value; public Bar() { } public void setName(String name) { this.name = name; } public void setValue(String value) { this.value = value; } public String getName() { return name; } public String getValue() { return value; } }<|fim▁end|>
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)] use std::{cmp, io}; use bytes::{BufMut, BytesMut}; pub const SIZE: usize = 31; #[derive(Serialize, Deserialize)] pub struct Message { pub message: &'static str, } pub struct Writer<'a>(pub &'a mut BytesMut); impl<'a> io::Write for Writer<'a> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.put_slice(buf); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } pub fn get_query_param(query: &str) -> u16 { let q = if let Some(pos) = query.find("q") { query.split_at(pos + 2).1.parse::<u16>().ok().unwrap_or(1) } else { 1 }; cmp::min(500, cmp::max(1, q)) } fn escapable(b: u8) -> bool { match b { b'<' | b'>' | b'&' | b'"' | b'\'' | b'/' => true, _ => false, } } pub fn escape(writer: &mut Writer, s: String) { let bytes = s.as_bytes(); let mut last_pos = 0; for (idx, b) in s.as_bytes().iter().enumerate() { if escapable(*b) { let _ = writer.0.put_slice(&bytes[last_pos..idx]); last_pos = idx + 1; match *b { b'<' => { let _ = writer.0.put_slice(b"&lt;"); } b'>' => { let _ = writer.0.put_slice(b"&gt;"); } b'&' => {<|fim▁hole|> } b'\'' => { let _ = writer.0.put_slice(b"&#x27;"); } b'/' => { let _ = writer.0.put_slice(b"&#x2f;"); } _ => panic!("incorrect indexing"), } } } if last_pos < bytes.len() - 1 { let _ = writer.0.put_slice(&bytes[last_pos..]); } }<|fim▁end|>
let _ = writer.0.put_slice(b"&amp;"); } b'"' => { let _ = writer.0.put_slice(b"&quot;");
<|file_name|>resize.js<|end_file_name|><|fim▁begin|>'use strict'; describe('Directive: resize', function () { // load the directive's module beforeEach(module('orderDisplayApp')); var element, scope; beforeEach(inject(function ($rootScope) { scope = $rootScope.$new(); })); //TODO: Add unit tests <|fim▁hole|> expect(scope.screenHeight).toBe($window.outerHeight); }));*/ });<|fim▁end|>
/*it('should change height', inject(function ($compile, $window) { element = angular.element('<resize></resize>'); element = $compile(element)(scope);
<|file_name|>test_tooltool.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import contextlib import copy import hashlib import json import logging import mock import os import os.path import shutil import sys import tempfile import threading import tooltool import unittest from io import StringIO, BytesIO from io import open from nose.tools import eq_ PY3 = sys.version_info[0] == 3 CWD_PATH = os.getcwd() from tooltool import to_binary, get_hexdigest, open_attrs, URLError, HTTPError if PY3: urlopen_module_as_str = 'urllib.request.urlopen' import http.server as BaseHTTPServer else: urlopen_module_as_str = 'urllib2.urlopen' import BaseHTTPServer class TestDirMixin(object): def setUpTestDir(self): self.test_dir = os.path.abspath(tempfile.mkdtemp()) self.__old_cwd = os.getcwd() if os.path.exists(self.test_dir): shutil.rmtree(self.test_dir) os.makedirs(self.test_dir) os.chdir(self.test_dir) def tearDownTestDir(self): os.chdir(self.__old_cwd) shutil.rmtree(self.test_dir) class BufferHandler(logging.Handler): def __init__(self, buffer): self.buffer = buffer logging.Handler.__init__(self) def emit(self, record): self.buffer.append((record.levelno, record.getMessage())) @classmethod @contextlib.contextmanager def capture(cls, logger_name): logger = logging.getLogger(logger_name) buffer = [] handler = cls(buffer) logger.addHandler(handler) try: yield buffer finally: logger.removeHandler(handler) class DigestTests(unittest.TestCase): def setUp(self): self.sample_data = open('test_file.ogg', 'rb') self.sample_algo = 'sha1' self.sample_digest = 'de3e3bbffd83c328ad7d9537ad2d03f68fc02e52' def test_digest_file(self): test_digest = tooltool.digest_file(self.sample_data, self.sample_algo) # If this assertion fails, verify that test_file.ogg is an ogg file # of Linus Torvalds explaining how he pronounces 'Linux' self.assertEqual(test_digest, self.sample_digest) class BaseFileRecordTest(unittest.TestCase): def setUp(self): self.sample_file = 'test_file.ogg' self.sample_file_path = os.path.join(CWD_PATH, self.sample_file) self.sample_algo = 'sha512' self.sample_size = os.path.getsize(self.sample_file_path) with open(self.sample_file_path, 'rb') as f: self.sample_hash = tooltool.digest_file(f, self.sample_algo) self.test_record = tooltool.FileRecord( filename=self.sample_file, size=self.sample_size, digest=self.sample_hash, algorithm=self.sample_algo ) self.test_record_json = { 'filename': self.sample_file, 'algorithm': self.sample_algo, 'digest': self.sample_hash, 'size': self.sample_size, } # using mkstemp to ensure that the filename generated # isn't actually on the system. 
(tmpfd, filename) = tempfile.mkstemp() os.close(tmpfd) os.remove(filename) if os.path.exists(filename): self.fail('did not remove %s' % filename) self.absent_file = filename class BaseFileRecordListTest(BaseFileRecordTest): def setUp(self): BaseFileRecordTest.setUp(self) self.record_list = [] for i in range(0, 4): record = copy.deepcopy(self.test_record) record.algorithm = i self.record_list.append(record) class BaseManifestTest(TestDirMixin, BaseFileRecordTest): def setUp(self): BaseFileRecordTest.setUp(self) self.setUpTestDir() self.sample_manifest = tooltool.Manifest([self.test_record]) self.sample_manifest_file = 'manifest.tt' with open(self.sample_manifest_file, **open_attrs) as tmpfile: self.sample_manifest.dump(tmpfile, fmt='json') def tearDown(self): self.tearDownTestDir() class TestFileRecord(BaseFileRecordTest): def test_create_with_posix_path_info(self): self.assertRaises(tooltool.ExceptionWithFilename, lambda: tooltool.FileRecord('abc/def', 10, 'abcd', 'alpha')) def test_create_with_windows_path_info(self): self.assertRaises(tooltool.ExceptionWithFilename, lambda: tooltool.FileRecord(r'abc\def', 10, 'abcd', 'alpha')) def test_present(self): # this test feels silly, but things are built on this # method, so probably best to test it self.assertTrue(self.test_record.present()) def test_absent(self): self.test_record.filename = self.absent_file self.assertFalse(self.test_record.present()) def test_valid_size(self): self.assertTrue(self.test_record.validate_size()) def test_invalid_size(self): self.test_record.size += 1 self.assertFalse(self.test_record.validate_size()) def test_size_of_missing_file(self): self.test_record.filename = self.absent_file self.assertRaises( tooltool.MissingFileException, self.test_record.validate_size) def test_valid_digest(self): self.assertTrue(self.test_record.validate_digest()) def test_invalid_digest(self): self.test_record.digest = 'NotValidDigest' self.assertFalse(self.test_record.validate_digest()) def test_digest_of_missing_file(self): self.test_record.filename = self.absent_file self.assertRaises( tooltool.MissingFileException, self.test_record.validate_digest) def test_overall_valid(self): self.assertTrue(self.test_record.validate()) def test_overall_invalid_size(self): self.test_record.size = 3 self.assertFalse(self.test_record.validate()) def test_overall_invalid_digest(self): self.test_record.digest = 'NotValidDigest' self.assertFalse(self.test_record.validate()) def test_overall_invalid_missing_file(self): self.test_record.filename = self.absent_file self.assertRaises( tooltool.MissingFileException, self.test_record.validate) def test_equality(self): test_record2 = copy.deepcopy(self.test_record) self.assertEqual(self.test_record, test_record2) self.assertEqual(self.test_record, self.test_record) def test_equality_validity(self): test_record2 = copy.deepcopy(self.test_record) self.test_record.visibility = True test_record2.visibility = False self.assertNotEqual(self.test_record, test_record2) def test_inequality(self): for i in ['filename', 'size', 'algorithm', 'digest', 'version']: test_record2 = copy.deepcopy(self.test_record) test_record2.__dict__[i] = 'wrong!' 
self.assertNotEqual(self.test_record, test_record2) def test_repr(self): a = eval(repr(self.test_record)) self.assertEqual(str(a), str(self.test_record)) self.assertEqual(a, self.test_record) def test_create_file_record(self): fr = tooltool.create_file_record(self.sample_file, self.sample_algo) self.assertEqual(self.test_record, fr) def test_describe_absent(self): self.test_record.filename = self.absent_file self.assertEqual("'%s' is absent" % self.absent_file, self.test_record.describe()) def test_describe_present_valid(self): self.assertEqual("'%s' is present and valid" % self.test_record.filename, self.test_record.describe()) def test_describe_present_invalid(self): self.test_record.size = 4 self.test_record.digest = 'NotValidDigest' self.assertEqual("'%s' is present and invalid" % self.test_record.filename, self.test_record.describe()) class TestFileRecordJSONCodecs(BaseFileRecordListTest): def test_default(self): encoder = tooltool.FileRecordJSONEncoder() dict_from_encoder = encoder.default(self.test_record) for i in ['filename', 'size', 'algorithm', 'digest']: self.assertEqual( dict_from_encoder[i], self.test_record.__dict__[i]) def test_default_list(self): encoder = tooltool.FileRecordJSONEncoder() new_list = encoder.default(self.record_list) for record in range(0, len(self.record_list)): self.assertEqual(new_list[record], encoder.default(self.record_list[record])) def test_unrelated_class(self): encoder = tooltool.FileRecordJSONEncoder() class Junk: pass self.assertRaises( tooltool.FileRecordJSONEncoderException, encoder.default, Junk() ) def test_list_with_unrelated_class(self): encoder = tooltool.FileRecordJSONEncoder() class Junk: pass self.assertRaises( tooltool.FileRecordJSONEncoderException, encoder.default, [self.test_record, Junk(), self.test_record], ) def test_decode(self): json_string = json.dumps( self.test_record, cls=tooltool.FileRecordJSONEncoder) decoder = tooltool.FileRecordJSONDecoder() f = decoder.decode(json_string) for i in ['filename', 'size', 'algorithm', 'digest', 'visibility']: self.assertEqual(getattr(f, i), self.test_record.__dict__[i]) def test_decode_visibility(self): self.test_record.visibility = True json_string = json.dumps( self.test_record, cls=tooltool.FileRecordJSONEncoder) decoder = tooltool.FileRecordJSONDecoder() f = decoder.decode(json_string) for i in ['filename', 'size', 'algorithm', 'digest', 'visibility']: self.assertEqual(getattr(f, i), self.test_record.__dict__[i]) def test_decode_dict_not_filerecord(self): decoder = tooltool.FileRecordJSONDecoder() eq_(decoder.decode('{"filename": "foo.txt"}'), {'filename': 'foo.txt'}) def test_json_dumps(self): json_string = json.dumps( self.test_record, cls=tooltool.FileRecordJSONEncoder) dict_from_json = json.loads(json_string) for i in ['filename', 'size', 'algorithm', 'digest']: self.assertEqual(dict_from_json[i], self.test_record.__dict__[i]) def test_json_dumps_with_unpack(self): self.test_record.unpack = True json_string = json.dumps( self.test_record, cls=tooltool.FileRecordJSONEncoder) from_json = json.loads(json_string, cls=tooltool.FileRecordJSONDecoder) for i in ['filename', 'size', 'algorithm', 'digest', 'unpack']: self.assertEqual(getattr(from_json, i), getattr(self.test_record, i), i) def test_json_dumps_with_version(self): self.test_record.version = 'test 3.2.1 contact' json_string = json.dumps( self.test_record, cls=tooltool.FileRecordJSONEncoder) from_json = json.loads(json_string, cls=tooltool.FileRecordJSONDecoder) for i in ['filename', 'size', 'algorithm', 'digest', 'version']: 
self.assertEqual(getattr(from_json, i), getattr(self.test_record, i), i) def test_decode_list(self): json_string = json.dumps( self.record_list, cls=tooltool.FileRecordJSONEncoder) new_list = json.loads(json_string, cls=tooltool.FileRecordJSONDecoder) self.assertEquals(len(new_list), len(self.record_list)) for record in range(0, len(self.record_list)): self.assertEqual(new_list[record], self.record_list[record]) class TestManifest(BaseFileRecordTest): def setUp(self): BaseFileRecordTest.setUp(self) self.other_sample_file = 'other-%s' % self.sample_file if os.path.exists(self.other_sample_file): os.remove(self.other_sample_file) shutil.copyfile(self.sample_file, self.other_sample_file) self.other_test_record = copy.deepcopy(self.test_record) self.other_test_record.filename = self.other_sample_file self.test_manifest = tooltool.Manifest( [self.test_record, self.other_test_record]) def tearDown(self): try: os.remove(self.other_sample_file) except OSError: pass def test_present(self): self.assertTrue(self.test_manifest.present()) def test_absent(self): os.remove(self.other_sample_file) self.assertFalse(self.test_manifest.present()) def test_validate_sizes(self): self.assertTrue(self.test_manifest.validate_sizes()) def test_incorrect_size(self): self.test_manifest.file_records[1].size = 1 self.assertFalse(self.test_manifest.validate_sizes()) def test_validate_digest(self): self.assertTrue(self.test_manifest.validate_digests()) def test_validate(self): self.assertTrue(self.test_manifest.validate()) def test_incorrect_digest(self): self.test_manifest.file_records[1].digest = 'wrong' self.assertFalse(self.test_manifest.validate_digests()) def test_equality_same_object(self): self.assertEqual(self.test_manifest, self.test_manifest) def test_equality_copy(self): a_copy = copy.copy(self.test_manifest) self.assertEqual(self.test_manifest, a_copy) def test_equality_deepcopy(self): a_deepcopy = copy.deepcopy(self.test_manifest) self.assertEqual(self.test_manifest, a_deepcopy) def test_equality_copy_method(self): a_copy = self.test_manifest.copy() self.assertEqual(self.test_manifest, a_copy) def test_equality_unrelated(self): one = tooltool.Manifest([self.test_record, self.other_test_record]) two = tooltool.Manifest([self.test_record, self.other_test_record]) self.assertEqual(one, two) def test_equality_different_order(self): one = tooltool.Manifest([self.test_record, self.other_test_record]) two = tooltool.Manifest([self.other_test_record, self.test_record]) self.assertEqual(one, two) def test_inequality_different_count(self): one = tooltool.Manifest([self.other_test_record]) two = tooltool.Manifest([self.test_record, self.other_test_record]) self.assertNotEqual(one, two) # on Python2.6, __ne__ isn't called automatically, # so just verify manually self.failUnless(one.__ne__(two)) def test_inequality_different_records(self): one = tooltool.Manifest([self.test_record]) two = tooltool.Manifest([self.other_test_record]) self.assertNotEqual(one, two) def test_json_dump(self): if PY3: tmp_manifest = tempfile.TemporaryFile('w+') else: tmp_manifest = tempfile.TemporaryFile('w+b') self.test_manifest.dump(tmp_manifest, fmt='json') tmp_manifest.seek(0) new_manifest = tooltool.Manifest() new_manifest.load(tmp_manifest, fmt='json') self.assertEqual(new_manifest, self.test_manifest) def test_json_dumps(self): s = self.test_manifest.dumps(fmt='json') new_manifest = tooltool.Manifest() new_manifest.loads(s, fmt='json') self.assertEqual(new_manifest, self.test_manifest) def test_load_empty_json_file(self): empty = 
tempfile.TemporaryFile() manifest = tooltool.Manifest() self.assertRaises(tooltool.InvalidManifest, manifest.load, empty, fmt='json') def test_load_empty_json_string(self): empty = '' manifest = tooltool.Manifest() self.assertRaises(tooltool.InvalidManifest, manifest.loads, empty, fmt='json') class TestManifestOperations(BaseManifestTest): def test_open_manifest(self): manifest = tooltool.open_manifest(self.sample_manifest_file) eq_(manifest.file_records[0].filename, 'test_file.ogg') def test_open_manifest_missing(self): self.assertRaises(tooltool.InvalidManifest, lambda: tooltool.open_manifest('no-such-file')) def call_main(*args): try: old_stderr = sys.stderr sys.stderr = sys.stdout try: return tooltool.main(list(args), _skip_logging=True) except SystemExit as e: return "exit %d" % e.code finally: sys.stderr = old_stderr def test_main_help(): eq_(call_main('tooltool', '--help'), "exit 0") def test_main_no_command(): eq_(call_main('tooltool'), "exit 2") def test_main_bad_command(): eq_(call_main('tooltool', 'foo'), 1) def test_main_bad_algorithm(): eq_(call_main('tooltool', '--algorithm', 'sha13', 'fetch'), 'exit 2') def test_command_list(): with mock.patch('tooltool.list_manifest') as list_manifest: eq_(call_main('tooltool', 'list', '--manifest', 'foo.tt'), 0) list_manifest.assert_called_with('foo.tt') def test_command_validate(): with mock.patch('tooltool.validate_manifest') as validate_manifest: eq_(call_main('tooltool', 'validate'), 0) validate_manifest.assert_called_with('manifest.tt') def test_command_add(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', 'a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, None, False) def test_command_add_unpack(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', '--unpack', 'a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, None, True) def test_command_add_version(): with mock.patch('tooltool.add_files') as add_files: version = 'foo 1.7.12-beta.2+test' eq_(call_main('tooltool', 'add', '--version', version, 'foo.tar.gz'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['foo.tar.gz'], version, None, False) def test_command_add_visibility_internal(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', '--visibility', 'internal', 'a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, 'internal', False) def test_command_add_visibility_internal_unpack(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', '--visibility', 'internal', '--unpack', 'a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, 'internal', True) def test_command_add_visibility_public(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', '--visibility', 'public', 'a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, 'public', False) def test_command_add_visibility_public_unpack(): with mock.patch('tooltool.add_files') as add_files: eq_(call_main('tooltool', 'add', '--visibility', 'public', '--unpack','a', 'b'), 0) add_files.assert_called_with('manifest.tt', 'sha512', ['a', 'b'], None, 'public', True) def test_command_purge_no_folder(): with mock.patch('tooltool.purge') as purge: eq_(call_main('tooltool', 'purge'), 1) assert not purge.called def test_command_purge(): with mock.patch('tooltool.purge') as purge: eq_(call_main('tooltool', 'purge', 
'--cache', 'foo'), 1) purge.assert_called_with(folder='foo', gigs=0) def test_command_purge_size(): with mock.patch('tooltool.purge') as purge: eq_(call_main('tooltool', 'purge', '--size', '10', '--cache', 'foo'), 1) purge.assert_called_with(folder='foo', gigs=10) def test_command_fetch_no_url(): with mock.patch('tooltool.fetch_files') as fetch_files: eq_(call_main('tooltool', 'fetch'), 0) fetch_files.assert_called_with('manifest.tt', ['https://tooltool.mozilla-releng.net/'], [], cache_folder=None, auth_file=None, region=None) def test_command_fetch(): with mock.patch('tooltool.fetch_files') as fetch_files: eq_(call_main('tooltool', 'fetch', 'a', 'b', '--url', 'http://foo/bar/'), 0) fetch_files.assert_called_with('manifest.tt', ['http://foo/bar/'], ['a', 'b'], cache_folder=None, auth_file=None, region=None) def test_command_fetch_no_trailing_slash(): with mock.patch('tooltool.fetch_files') as fetch_files: eq_(call_main('tooltool', 'fetch', 'a', 'b', '--url', 'http://foo/bar'), 0) fetch_files.assert_called_with('manifest.tt', ['http://foo/bar/'], ['a', 'b'], cache_folder=None, auth_file=None, region=None) def test_command_fetch_region(): with mock.patch('tooltool.fetch_files') as fetch_files: eq_(call_main('tooltool', 'fetch', 'a', 'b', '--url', 'http://foo/bar/', '--region', 'us-east-1'), 0) fetch_files.assert_called_with('manifest.tt', ['http://foo/bar/'], ['a', 'b'], cache_folder=None, auth_file=None, region='us-east-1') def test_command_fetch_auth_file(): # mock can't patch os.path.expanduser, because it looks for path.expanduser # in the os module old_expanduser = os.path.expanduser os.path.expanduser = mock.Mock() try: with mock.patch('tooltool.fetch_files') as fetch_files: os.path.expanduser.side_effect = lambda path: path.replace("~", "HOME") eq_(call_main('tooltool', 'fetch', 'a', 'b', '--url', 'http://foo/bar/', '--authentication-file', '~/.tooltool-token'), 0) fetch_files.assert_called_with('manifest.tt', ['http://foo/bar/'], ['a', 'b'], cache_folder=None, auth_file="HOME/.tooltool-token", region=None) finally: os.path.expanduser = old_expanduser def test_command_upload(): with mock.patch('tooltool.upload') as upload: eq_(call_main('tooltool', 'upload', '--url', 'http://foo/', '--message', 'msg'), 0) upload.assert_called_with('manifest.tt', 'msg', ['http://foo/'], None, None) def test_command_upload_region(): with mock.patch('tooltool.upload') as upload: eq_(call_main('tooltool', 'upload', '--url', 'http://foo/', '--message', 'msg', '--region=us-west-3'), 0) upload.assert_called_with('manifest.tt', 'msg', ['http://foo/'], None, 'us-west-3') def test_command_upload_no_message(): with mock.patch('tooltool.upload') as upload: eq_(call_main('tooltool', 'upload', '--url', 'http://foo/'), 1) assert not upload.called def test_command_upload_no_url(): with mock.patch('tooltool.upload') as upload: eq_(call_main('tooltool', 'upload', '--message', 'msg'), 0) upload.assert_called_with('manifest.tt', 'msg', ['https://tooltool.mozilla-releng.net/'], None, None) class UploadTests(TestDirMixin, unittest.TestCase): class Handler(BaseHTTPServer.BaseHTTPRequestHandler): """A mini webserver for uploading. 
This implements both the RelengAPI bits (POST and GET) and the S3 bits (PUT).""" test_case = None def log_request(self, code=None, size=None): logging.getLogger('fake_web').info("%s %s" % (self.path, code)) def verify_auth(self): token = self.test_case.server_config.get('exp_auth_token') if token: if self.headers.get('Authorization') != 'Bearer %s' % token: self.send_response(403, b"Forbidden") self.send_header('content-type', 'text/plain') self.end_headers() self.wfile.write(to_binary("go away")) self.wfile.close() return False return True def do_POST(self): cfg = self.test_case.server_config if '?region=' in self.path: self.path, self.test_case.server_got_region = self.path.split('?') eq_(self.path, '/tooltool/upload') eq_(self.headers['content-type'], 'application/json') if not self.verify_auth(): return body = json.loads(self.rfile.read(int(self.headers['content-length']))) self.test_case.server_requests.setdefault('POST', []).append(copy.deepcopy(body)) eq_(body['message'], 'hi mom') files_on_server = cfg.get('files_on_server', []) for filename, file in body['files'].items(): if filename not in files_on_server: file['put_url'] = self.test_case.s3url('/sha512/' + file['digest']) if cfg.get('post_fails'): self.send_response(409, b'Exploded') self.send_header('Content-Type', 'application/json') self.end_headers() self.wfile.write(to_binary(json.dumps({'error': {'name': 'uhoh', 'description': 'failed'}}))) else: self.send_response(200, b'OK') self.send_header('Content-Type', 'application/json') self.end_headers() self.wfile.write(to_binary(json.dumps({'result': body}))) if not PY3: self.wfile.close() def do_PUT(self): # S3 upload cfg = self.test_case.server_config assert self.path.startswith('/sha512/'), self.path eq_(self.headers['content-type'], 'application/octet-stream') content_length = int(self.headers.get('content-length', -1)) data = self.rfile.read(content_length) digest = get_hexdigest(data) self.test_case.server_requests.setdefault('PUT', []).append(digest) assert self.path.endswith(digest) if digest in cfg.get('upload_failures', []): self.send_response(500, b'NOPE') else: self.send_response(200, b'OK') self.send_header('Content-Type', 'text/plain') self.end_headers() if not PY3: self.wfile.close() def do_GET(self): # notify cfg = self.test_case.server_config assert self.path.startswith('/tooltool/upload/complete/sha512/') if not self.verify_auth(): return digest = self.path[-128:] self.test_case.server_requests.setdefault('GET', []).append(digest) if cfg.get('get_fails'): self.send_response(500, b'NOPE') elif cfg.get('get_409s'): self.send_response(409, b'Conflict') self.send_header('X-Retry-After', '10') del cfg['get_409s'] # succeed on retry else: self.send_response(200, b'OK') self.send_header('Content-Type', 'text/plain') self.end_headers() if not PY3: self.wfile.close() def setUp(self): self.setUpTestDir() def start_server(self): self.server_config = {} self.server_requests = {} UploadTests.Handler.test_case = self self.httpd = BaseHTTPServer.HTTPServer(("127.0.0.1", 0), UploadTests.Handler) self.http_port = self.httpd.server_port self.server_thread = threading.Thread(target=self.httpd.serve_forever) self.server_thread.daemon = 1 self.server_thread.start() self.content = "FILE DATA" with open("testfile.txt", **open_attrs) as f: f.write(self.content) self.digest = get_hexdigest(self.content) def tearDown(self): if hasattr(self, 'httpd'): self.httpd.shutdown() self.server_thread.join() self.tearDownTestDir() def add_file(self, filename, on_server=False, upload_fails=False, 
version=None, visibility='internal', unpack=False): data = os.urandom(1024) open(filename, 'wb').write(data) digest = get_hexdigest(data) tooltool.add_files('manifest.tt', 'sha512', [filename], version, visibility, unpack) if on_server: self.server_config.setdefault('files_on_server', []).append(filename) if upload_fails: self.server_config.setdefault('upload_failures', []).append(digest) return digest def mkurl(self, path): return 'http://127.0.0.1:%d/tooltool/%s' % (self.http_port, path) def s3url(self, path): return 'http://127.0.0.1:%d%s' % (self.http_port, path) def test_upload_success(self): """An upload with two files, one of which is on the server already, succeeds""" self.start_server() foo_digest = self.add_file("foo.txt", on_server=True) bar_digest = self.add_file("bar.txt", on_server=False) assert tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], None, None) self.server_requests['POST'].sort() eq_(self.server_requests, { 'POST': [{ 'files': { 'foo.txt': { 'digest': foo_digest, 'algorithm': 'sha512', 'visibility': 'internal', 'size': 1024, }, 'bar.txt': { 'digest': bar_digest, 'algorithm': 'sha512', 'visibility': 'internal', 'size': 1024, }, }, 'message': 'hi mom', }], 'PUT': [bar_digest], 'GET': [bar_digest], }) def test_upload_success_auth(self): """An upload with authentication information succeeds when the server expects authentication.""" self.start_server() foo_digest = self.add_file("foo.txt", on_server=True) self.server_config['exp_auth_token'] = token = 'abcABC' open("auth", **open_attrs).write(token) assert tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], 'auth', None) eq_(self.server_requests, { 'POST': [{ 'files': { 'foo.txt': { 'digest': foo_digest, 'algorithm': 'sha512', 'visibility': 'internal', 'size': 1024, }, }, 'message': 'hi mom', }], }) def test_upload_success_region(self): """An upload with a region specified results in a POST with that region in the URL.""" self.start_server() self.add_file("foo.txt", on_server=True) assert tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], None, 'us-west-1') eq_(self.server_got_region, 'region=us-west-1') def test_upload_failure_auth(self): """An upload with incorrect authentication information fails""" self.start_server() self.add_file("foo.txt", on_server=True) self.server_config['exp_auth_token'] = 'abcABC' open("auth", **open_attrs).write('not-the-token') assert not tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], 'auth', None) def test_upload_s3_fails(self): """When an S3 upload fails, the upload fails and no notification takes place.""" self.start_server() foo_digest = self.add_file("foo.txt", upload_fails=True) assert not tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], None, None) eq_(self.server_requests, { 'POST': [{ 'files': { 'foo.txt': { 'digest': foo_digest, 'algorithm': 'sha512', 'visibility': 'internal', 'size': 1024, }, }, 'message': 'hi mom', }], 'PUT': [foo_digest], }) def test_upload_send_batch_fails(self): """When the upload request to RelengAPI fails, upload fails.""" self.start_server() self.server_config['post_fails'] = True foo_digest = self.add_file("foo.txt", upload_fails=True) assert not tooltool.upload('manifest.tt', 'hi mom', [self.mkurl('')], None, None) eq_(self.server_requests, { 'POST': [{ 'files': { 'foo.txt': { 'digest': foo_digest, 'algorithm': 'sha512', 'visibility': 'internal', 'size': 1024, }, }, 'message': 'hi mom', }], }) def test_no_manifest(self): """When given a manifest that doesn't exist, upload fails.""" assert not 
tooltool.upload('nosuch.tt', 'hi mom', ['http://'], None, None) def test_manifest_without_visibility(self): """When given a manifest with a file record without visibility, upload fails.""" self.add_file("foo.txt", visibility=None) assert not tooltool.upload('manifest.tt', 'hi mom', ['http://'], None, None) def test_invalid_manifest(self): """When given a manifest that doesn't validate, upload fails""" self.add_file("foo.txt") open("foo.txt", **open_attrs).write('bogus') assert not tooltool.upload('manifest.tt', 'hi mom', ['http://'], None, None) def test_send_batch_success(self): self.start_server() batch = {'message': 'hi mom', 'files': {}} eq_(tooltool._send_batch(self.mkurl(''), None, batch, None), batch) eq_(self.server_requests, {'POST': [batch]}) def test_send_batch_region(self): self.start_server() batch = {'message': 'hi mom', 'files': {}} eq_(tooltool._send_batch(self.mkurl(''), None, batch, 'us-south-1'), batch) eq_(self.server_requests, {'POST': [batch]}) eq_(self.server_got_region, 'region=us-south-1') def test_send_batch_failure(self): self.start_server() self.server_config['post_fails'] = True batch = {'message': 'hi mom', 'files': {}} eq_(tooltool._send_batch(self.mkurl(''), None, batch, None), None) eq_(self.server_requests, {'POST': [batch]}) def test_s3_upload(self): self.start_server() file = {'put_url': self.s3url('/sha512/' + self.digest)} tooltool._s3_upload('testfile.txt', file) eq_(self.server_requests, {'PUT': [self.digest]}) assert file['upload_ok'] def test_s3_upload_fails(self): self.start_server() self.server_config['upload_failures'] = [self.digest] file = {'put_url': self.s3url('/sha512/' + self.digest)} tooltool._s3_upload('testfile.txt', file) eq_(self.server_requests, {'PUT': [self.digest]}) assert not file['upload_ok'], file assert 'upload_exception' in file, file def test_notify_upload(self): self.start_server() file = {'algorithm': 'sha512', 'digest': self.digest} tooltool._notify_upload_complete(self.mkurl(''), None, file) eq_(self.server_requests, {'GET': [self.digest]}) def test_notify_upload_wait(self): self.start_server() self.server_config['get_409s'] = True file = {'algorithm': 'sha512', 'digest': self.digest} with mock.patch('time.sleep') as fake_sleep: tooltool._notify_upload_complete(self.mkurl(''), None, file) fake_sleep.assert_called_with(10) eq_(self.server_requests, {'GET': [self.digest, self.digest]}) # two reqs def test_notify_upload_fails(self): self.start_server() self.server_config['get_fails'] = True file = {'algorithm': 'sha512', 'digest': self.digest} with BufferHandler.capture('tooltool') as logged: tooltool._notify_upload_complete(self.mkurl(''), None, file) eq_(self.server_requests, {'GET': [self.digest]}) eq_(logged, [(logging.ERROR, 'Error making RelengAPI request:')]) def test_notify_upload_exception(self): self.start_server() self.server_config['get_fails'] = True file = {'algorithm': 'sha512', 'digest': self.digest} with BufferHandler.capture('tooltool') as logged: with mock.patch(urlopen_module_as_str) as urlopen: urlopen.side_effect = RuntimeError('oh noes') tooltool._notify_upload_complete(self.mkurl(''), None, file) eq_(self.server_requests, {}) eq_(logged[0], (logging.ERROR, 'While notifying server of upload completion:')) def test_log_api_error_generic(): with BufferHandler.capture('tooltool') as logged: tooltool._log_api_error(RuntimeError('uhoh')) eq_(logged, [(logging.ERROR, 'Error making RelengAPI request:')]) def test_log_api_error_api_error(): with BufferHandler.capture('tooltool') as logged: error = 
json.dumps({'error': {'name': 'Bad Request', 'description': 'Nice try'}}) if PY3: fp = StringIO(error) else: fp = BytesIO(error) exc = HTTPError("http://a", 400, "Bad Request", {'content-type': 'application/json'}, fp) tooltool._log_api_error(exc) eq_(logged, [(logging.ERROR, 'Bad Request: Nice try')]) class FetchTests(TestDirMixin, unittest.TestCase): _server_files = ['one', 'two', 'three'] server_files_by_hash = dict((get_hexdigest(v), v) for v in _server_files) server_corrupt = False urls = ['http://a', 'http://2'] def setUp(self): self.setUpTestDir() self.cache_dir = os.path.abspath('cache') def tearDown(self): self.tearDownTestDir() def fake_fetch_file(self, urls, file_record, auth_file=None, region=None): eq_(urls, self.urls) if file_record.digest in self.server_files_by_hash: if self.server_corrupt: content = 'XXX' else: content = self.server_files_by_hash[file_record.digest] fd, temp_path = tempfile.mkstemp(dir=self.test_dir) os.write(fd, to_binary(content)) os.close(fd) return os.path.split(temp_path)[1] else: return None def add_file_to_dir(self, file, corrupt=False): content = 'X' * len(file) if corrupt else file open(os.path.join(self.test_dir, "file-" + file), **open_attrs).write(content) def add_file_to_cache(self, file, corrupt=False): if not os.path.exists(self.cache_dir): os.mkdir(self.cache_dir) digest = get_hexdigest(file) content = 'X' * len(file) if corrupt else file open(os.path.join(self.cache_dir, digest), **open_attrs).write(content) def make_manifest(self, filename, *files, **kwargs): unpack = kwargs.pop('unpack', False) manifest = [] for file in files: manifest.append({ 'filename': 'file-' + file, 'size': len(file), 'algorithm': 'sha512', 'digest': get_hexdigest(file), 'unpack': unpack, }) with open(filename, **open_attrs) as f: json.dump(manifest, f) def assert_files(self, *files): eq_(sorted([f for f in os.listdir(self.test_dir) if f != 'cache' and not f.endswith('.tt')]), sorted(['file-' + f for f in files])) for f in files: eq_(open('file-' + f, encoding='utf-8').read(), f) def assert_cached_files(self, *files): if not files and not os.path.exists(self.cache_dir): return hashes = [get_hexdigest(f) for f in files] eq_(sorted(os.listdir(self.cache_dir)), sorted(hashes)) for f, h in zip(files, hashes): eq_(open(os.path.join(self.cache_dir, h), encoding='utf-8').read(), f) # tests def test_no_manifest(self): """If the given manifest isn't present, fetch_files fails""" eq_(tooltool.fetch_files('not-present.tt', self.urls), False) def test_all_present(self): """When all expected files are present, fetch_files does not fetch anything""" self.add_file_to_dir('one') self.add_file_to_dir('two') self.make_manifest('manifest.tt', 'one', 'two') with mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = RuntimeError eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True) self.assert_files('one', 'two') self.assert_cached_files() def test_all_cached(self): """When all expected files are in the cache, fetch_files copies but does not fetch""" self.add_file_to_cache('one') self.add_file_to_cache('two') self.make_manifest('manifest.tt', 'one', 'two') with mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = RuntimeError eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True) self.assert_files('one', 'two') self.assert_cached_files('one', 'two') def test_all_missing(self): """When all expected files are not found, they are fetched."""<|fim▁hole|> True) self.assert_files('one', 'two') 
self.assert_cached_files('one', 'two')

    def test_missing_not_on_server(self):
        """When the file is missing everywhere including the server, fetch fails"""
        self.make_manifest('manifest.tt', 'ninetynine')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            fetch_file.side_effect = self.fake_fetch_file
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), False)
        self.assert_files()
        self.assert_cached_files()

    def test_missing_corrupt_on_server(self):
        """When the file is missing everywhere and corrupt on the server, fetch fails"""
        self.make_manifest('manifest.tt', 'one')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            self.server_corrupt = True
            fetch_file.side_effect = self.fake_fetch_file
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), False)
        self.assert_files()
        self.assert_cached_files()

    def test_local_corrupt_but_cached(self):
        """When the local files are corrupt but the cache is OK, the cache is used"""
        self.add_file_to_dir('one', corrupt=True)
        self.add_file_to_cache('one')
        self.make_manifest('manifest.tt', 'one')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            fetch_file.side_effect = RuntimeError
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True)
        self.assert_files('one')
        self.assert_cached_files('one')

    def test_local_missing_cache_corrupt(self):
        """When the local files are missing and the cache is corrupt, fetch from the server"""
        self.add_file_to_cache('one', corrupt=True)
        self.make_manifest('manifest.tt', 'one')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            fetch_file.side_effect = self.fake_fetch_file
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True)
        self.assert_files('one')
        self.assert_cached_files('one')

    def test_missing_unwritable_cache(self):
        """If fetch downloads files but can't write to the cache, it still succeeds"""
        self.make_manifest('manifest.tt', 'one')
        os.mkdir(self.cache_dir, 0o500)
        try:
            with mock.patch('tooltool.fetch_file') as fetch_file:
                fetch_file.side_effect = self.fake_fetch_file
                eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True)
            self.assert_files('one')
            self.assert_cached_files()
        finally:
            os.chmod(self.cache_dir, 0o700)

    def test_mixed(self):
        """fetch creates a dir containing the right files given a mix of file states"""
        self.add_file_to_dir('one', corrupt=True)
        self.add_file_to_cache('two', corrupt=True)
        self.add_file_to_dir('four')
        self.add_file_to_cache('five')
        self.make_manifest('manifest.tt', 'one', 'two', 'three', 'four', 'five')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            fetch_file.side_effect = self.fake_fetch_file
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True)
        self.assert_files('one', 'two', 'three', 'four', 'five')
        self.assert_cached_files('one', 'two', 'three', 'five')

    def test_region_arg(self):
        """A region argument passed to fetch_files gets passed on to fetch_file"""
        self.make_manifest('manifest.tt', 'one')
        with mock.patch('tooltool.fetch_file') as fetch_file:
            fetch_file.side_effect = self.fake_fetch_file
            eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache', region='ca-north-2'), True)
            fetch_file.assert_called_with(self.urls, mock.ANY, auth_file=None, region='ca-north-2')
        self.assert_files('one')
        self.assert_cached_files('one')

    def test_file_list(self):
        """fetch only fetches the files requested in the file list"""
        self.add_file_to_dir('one')
        self.add_file_to_cache('five')
        self.make_manifest('manifest.tt', 'one', 'five', 'nine')
        with 
mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = self.fake_fetch_file eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache', filenames=['five']), True) self.assert_files('one', 'five') self.assert_cached_files('five') def test_unpack_file(self): """When asked to unpack files, fetch calls unpack_file.""" self.add_file_to_dir('four') self.add_file_to_cache('five') self.make_manifest('manifest.tt', 'three', 'four', 'five', unpack=True) with mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = self.fake_fetch_file with mock.patch('tooltool.unpack_file') as unpack_file: eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), True) unpack_file.assert_has_calls([ mock.call('file-three'), mock.call('file-four'), mock.call('file-five'), ], any_order=True) self.assert_files('three', 'four', 'five') self.assert_cached_files('three', 'five') def test_unpack_file_fails(self): """When asked to unpack files, and the unpack fails, fetch fails.""" self.make_manifest('manifest.tt', 'one', unpack=True) with mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = self.fake_fetch_file with mock.patch('tooltool.unpack_file') as unpack_file: unpack_file.side_effect = lambda f: False eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'), False) unpack_file.assert_called_with('file-one') self.assert_files('one') def try_unpack_file(self, filename): os.mkdir('basename') open("basename/LEFTOVER.txt", **open_attrs).write("rm me") self.failUnless(tooltool.unpack_file(filename)) self.failUnless(os.path.exists('basename')) self.failUnless(os.path.exists('basename/README.txt')) self.failIf(os.path.exists('basename/LEFTOVER.txt')) def setup_archive(self, cmd): os.mkdir('basename') open("basename/README.txt", **open_attrs).write("in tarball") os.system(cmd) shutil.rmtree('basename') def test_unpack_file_tar(self): self.setup_archive('tar -cf basename.tar basename') self.try_unpack_file('basename.tar') def test_unpack_file_tar_gz(self): self.setup_archive('tar -czf basename.tar.gz basename') self.try_unpack_file('basename.tar.gz') def test_unpack_file_tar_xz(self): self.setup_archive('tar -cJf basename.tar.xz basename') self.try_unpack_file('basename.tar.xz') def test_unpack_file_tar_bz2(self): self.setup_archive('tar -cjf basename.tar.bz2 basename') self.try_unpack_file('basename.tar.bz2') def test_unpack_file_zip(self): self.setup_archive('zip -qr basename.zip basename') self.try_unpack_file('basename.zip') def test_unpack_file_invalid_xz(self): self.setup_archive('echo BOGUS > basename.tar.xz') self.assertFalse(tooltool.unpack_file('basename.tar.xz')) def test_unpack_file_invalid_zip(self): self.setup_archive('echo BOGUS > basename.zip') self.assertFalse(tooltool.unpack_file('basename.zip')) def test_unpack_file_not_tarfile(self): open('basename.tar.shrink', **open_attrs).write('not a tarfile') self.assertFalse(tooltool.unpack_file('basename.tar.shrink')) class FetchFileTests(BaseFileRecordTest, TestDirMixin): def setUp(self): BaseFileRecordTest.setUp(self) self.setUpTestDir() def tearDown(self): self.tearDownTestDir() BaseFileRecordTest.tearDown(self) @contextlib.contextmanager def mocked_urllib2(self, data, exp_size=4096, exp_token=None): with mock.patch(urlopen_module_as_str) as urlopen: def fake_read(url, size): eq_(size, exp_size) remaining = data[url] rv, remaining = remaining[:size], remaining[size:] data[url] = remaining return rv def replacement(req): auth = req.get_header('Authorization') if 
auth: if exp_token.strip()[0] == '{': exp_token_ = json.loads(exp_token) assert 'id="{}"'.format(exp_token_['clientId']) in auth assert 'id="{}"'.format(exp_token_['clientId']) in \ tooltool.make_taskcluster_header(exp_token_, req) else: eq_(auth, 'Bearer %s' % exp_token) else: assert not exp_token, "got token auth when not expecting it" url = req.get_full_url() if url not in data: raise URLError("bogus url") m = mock.Mock(name='Response') m.read = lambda size: fake_read(url, size) return m urlopen.side_effect = replacement yield def test_fetch_file(self): # note: the first URL doesn't match, so this loops twice with self.mocked_urllib2({'http://b/sha512/' + self.sample_hash: 'abcd'}): filename = tooltool.fetch_file(['http://a', 'http://b'], self.test_record) assert filename eq_(open(filename, encoding='utf-8').read(), 'abcd') os.unlink(filename) def test_fetch_file_region(self): with self.mocked_urllib2({'http://a/sha512/%s?region=us-west-1' % self.sample_hash: 'abcd'}): filename = tooltool.fetch_file(['http://a'], self.test_record, region='us-west-1') assert filename eq_(open(filename, encoding='utf-8').read(), 'abcd') os.unlink(filename) def test_fetch_file_size(self): with self.mocked_urllib2({'http://b/sha512/' + self.sample_hash: 'abcd'}, exp_size=1024): filename = tooltool.fetch_file( ['http://a', 'http://b'], self.test_record, grabchunk=1024) assert filename eq_(open(filename, encoding='utf-8').read(), 'abcd') os.unlink(filename) def test_fetch_file_auth_file(self): with self.mocked_urllib2({'http://b/sha512/' + self.sample_hash: 'abcd'}, exp_token='TOKTOK'): with open("auth", **open_attrs) as f: f.write('TOKTOK') filename = tooltool.fetch_file( ['http://a', 'http://b'], self.test_record, auth_file='auth') assert filename eq_(open(filename, encoding='utf-8').read(), 'abcd') os.unlink(filename) def test_fetch_file_auth_file_taskcluster(self): credentials = json.dumps({'clientId': '123', 'accessToken': '456'}) with self.mocked_urllib2({'http://b/sha512/' + self.sample_hash: 'abcd'}, exp_token=credentials): with open("auth", **open_attrs) as f: f.write(credentials) filename = tooltool.fetch_file( ['http://a', 'http://b'], self.test_record, auth_file='auth') assert filename eq_(open(filename, encoding='utf-8').read(), 'abcd') os.unlink(filename) def test_fetch_file_fails(self): with self.mocked_urllib2({}): filename = tooltool.fetch_file(['http://a'], self.test_record) assert filename is None def test_touch(): open("testfile", 'wb') os.utime("testfile", (0, 0)) tooltool.touch("testfile") assert os.stat("testfile").st_mtime > 0 os.unlink("testfile") def test_touch_doesnt_exit(): assert not os.path.exists("testfile") tooltool.touch("testfile") assert not os.path.exists("testfile") class PurgeTests(TestDirMixin, unittest.TestCase): def setUp(self): self.setUpTestDir() def tearDown(self): self.tearDownTestDir() def fake_freespace(self, p): # A fake 10G drive, with each file = 1G eq_(p, self.test_dir) return 1024 ** 3 * (10 - len(os.listdir(self.test_dir))) def add_files(self, *files): now = 1426127031 # add files, with ordered mtime for f in files: path = os.path.join(self.test_dir, f) open(path, 'wb') os.utime(path, (now, now)) now += 10 def test_purge_fails(self): path = os.path.join(self.test_dir, 'sticky') open(path, 'wb') os.chmod(self.test_dir, 0o500) # prevent delete try: tooltool.purge(self.test_dir, 0) eq_(os.listdir(self.test_dir), ['sticky']) finally: os.chmod(self.test_dir, 0o700) def test_purge_nonfile_not_deleted(self): path = os.path.join(self.test_dir, 'somedir') 
os.mkdir(path) tooltool.purge(self.test_dir, 0) eq_(os.listdir(self.test_dir), ['somedir']) def test_purge_nonzero(self): # six files means six gigs consumed, so we'll delete two self.add_files("one", "two", "three", "four", "five", "six") with mock.patch('tooltool.freespace') as freespace: freespace.side_effect = self.fake_freespace tooltool.purge(self.test_dir, 6) eq_(sorted(os.listdir(self.test_dir)), sorted(['three', 'four', 'five', 'six'])) def test_purge_no_need(self): self.add_files("one", "two") with mock.patch('tooltool.freespace') as freespace: freespace.side_effect = self.fake_freespace tooltool.purge(self.test_dir, 4) eq_(sorted(os.listdir(self.test_dir)), sorted(['one', 'two'])) def test_purge_zero(self): self.add_files("one", "two", "three") tooltool.purge(self.test_dir, 0) eq_(os.listdir(self.test_dir), []) def test_freespace(self): # we can't set up a dedicated partition for this test, so just assume # the disk isn't full (other tests assume this too, really) assert tooltool.freespace(self.test_dir) > 0 class AddFiles(BaseManifestTest): def assert_manifest(self, exp_manifest, manifest=None): got_manifest = json.load(open(manifest or self.sample_manifest_file, encoding='utf-8')) got_manifest.sort(key=lambda f: f['digest']) exp_manifest.sort(key=lambda f: f['digest']) eq_(got_manifest, exp_manifest) def make_file(self, filename="a_file"): data = os.urandom(100) open(filename, 'wb').write(data) return { 'filename': filename, 'algorithm': 'sha512', 'digest': get_hexdigest(data), 'size': len(data) } def test_append(self): """Adding a new file to an existing manifest results in a manifest with two files""" file_json = self.make_file() assert tooltool.add_files('manifest.tt', 'sha512', [file_json['filename']], None, None, False) self.assert_manifest([self.test_record_json, file_json]) def test_append_internal(self): """Adding a new file to an existing manifest results in a manifest with two files, with the visibility set on the new one""" file_json = self.make_file() file_json['visibility'] = 'internal' assert tooltool.add_files('manifest.tt', 'sha512', [file_json['filename']], None, 'internal', False) self.assert_manifest([self.test_record_json, file_json]) def test_append_public(self): """Adding a new file to an existing manifest results in a manifest with two files, with the visibility set on the new one""" file_json = self.make_file() file_json['visibility'] = 'public' assert tooltool.add_files('manifest.tt', 'sha512', [file_json['filename']], None, 'public', False) self.assert_manifest([self.test_record_json, file_json]) def test_append_unpack(self): """Adding a new file to an existing manifest results in a manifest with two files, with the visibility and unpack attributes set""" file_json = self.make_file() file_json['visibility'] = 'public' file_json['unpack'] = True assert tooltool.add_files('manifest.tt', 'sha512', [file_json['filename']], None, 'public', True) self.assert_manifest([self.test_record_json, file_json]) def test_new_manifest(self): """Adding a new file to a new manifest results in a manifest with one file""" file_json = self.make_file() assert tooltool.add_files('new_manifest.tt', 'sha512', [file_json['filename']], None, None, False) self.assert_manifest([file_json], manifest='new_manifest.tt') def test_file_already_exists(self): """Adding a file to a manifest that is already in that manifest fails""" assert not tooltool.add_files('manifest.tt', 'sha512', [os.path.join(os.path.dirname(__file__), self.sample_file)], None, None, False) 
self.assert_manifest([self.test_record_json]) def test_filename_already_exists(self): """Adding a file to a manifest that has the same name as an existing file fails""" self.make_file(self.sample_file) assert not tooltool.add_files('manifest.tt', 'sha512', [self.sample_file], None, None, False) self.assert_manifest([self.test_record_json]) class ValidateManifest(BaseManifestTest): def test_validate_exists(self): sample_file_src = os.path.join(os.path.dirname(__file__), self.sample_file) shutil.copyfile(sample_file_src, self.sample_file) assert tooltool.validate_manifest('manifest.tt') def test_validate_missing_files(self): assert not tooltool.validate_manifest('manifest.tt') def test_validate_invalid_files(self): open(self.sample_file, **open_attrs).write("BOGUS") assert not tooltool.validate_manifest('manifest.tt') def test_validate_invalid_manifest(self): open('manifest.tt', **open_attrs).write("BOGUS") assert not tooltool.validate_manifest('manifest.tt') class ListManifest(BaseManifestTest): def test_list(self): # add two files open("foo.txt", **open_attrs).write("FOO!") open("bar.txt", **open_attrs).write("BAR!") tooltool.add_files('manifest.tt', 'sha512', ['foo.txt', 'bar.txt'], None, None, False) open("bar.txt", **open_attrs).write("bar is invalid") old_stdout = sys.stdout if PY3: sys.stdout = StringIO() else: sys.stdout = BytesIO() try: assert tooltool.list_manifest('manifest.tt') finally: output = sys.stdout.getvalue() sys.stdout = old_stdout eq_(sorted(output.strip().split('\n')), sorted([ '-\t-\ttest_file.ogg', 'P\t-\tbar.txt', 'P\tV\tfoo.txt', ])) def test_list_invalid_manifest(self): open("manifest.tt", **open_attrs).write("BOGUS") assert not tooltool.list_manifest("manifest.tt")<|fim▁end|>
self.make_manifest('manifest.tt', 'one', 'two') with mock.patch('tooltool.fetch_file') as fetch_file: fetch_file.side_effect = self.fake_fetch_file eq_(tooltool.fetch_files('manifest.tt', self.urls, cache_folder='cache'),
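The suite above pins down tooltool's lookup order: a local file with the right digest is kept as-is, a digest-keyed cache copy is used next, and the server is hit last, with corrupt copies treated as missing and an unwritable cache tolerated. A minimal Python sketch of that decision, not tooltool's actual implementation (the names `materialize` and `download` are invented for illustration; `download` stands in for a callback that returns a temp path or None):

import hashlib
import os
import shutil

def sha512_of(path):
    with open(path, 'rb') as f:
        return hashlib.sha512(f.read()).hexdigest()

def materialize(filename, digest, cache_dir, download):
    """Return True once `filename` exists with the expected digest."""
    # 1. A valid local copy wins; nothing to do.
    if os.path.exists(filename) and sha512_of(filename) == digest:
        return True
    # 2. Otherwise try the cache, keyed by digest.
    cached = os.path.join(cache_dir, digest)
    if os.path.exists(cached) and sha512_of(cached) == digest:
        shutil.copyfile(cached, filename)
        return True
    # 3. Fall back to the network; validate, then backfill the cache.
    temp = download(digest)
    if temp is None or sha512_of(temp) != digest:
        return False
    shutil.move(temp, filename)
    try:
        # Best effort, mirroring the unwritable-cache test above.
        shutil.copyfile(filename, cached)
    except OSError:
        pass
    return True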
<|file_name|>column-manager-view.js<|end_file_name|><|fim▁begin|>define(function(require) { 'use strict'; var ColumnManagerView; var _ = require('underscore'); var BaseView = require('oroui/js/app/views/base/view'); ColumnManagerView = BaseView.extend({ template: require('tpl!orodatagrid/templates/column-manager/column-manager.html'), autoRender: true, className: 'dropdown-menu', events: { 'click [data-role="column-manager-select-all"]': 'onSelectAll' }, listen: { 'change:renderable collection': 'onRenderableChange' }, initialize: function(options) { ColumnManagerView.__super__.initialize.call(this, options); this.filterer = _.bind(options.columnFilterModel.filterer, options.columnFilterModel); // to handle renderable change at once for multiple changes this.onRenderableChange = _.debounce(this.onRenderableChange, 0); this.listenTo(options.columnFilterModel, 'change', this.updateView); }, render: function() { ColumnManagerView.__super__.render.call(this); this.updateView(); return this; }, /** * Update filter view from its state */ updateView: function() { var models = this._getFilteredModels(); var hasUnrenderable = Boolean(_.find(models, function(model) {return !model.get('renderable');})); this.$('[data-role="column-manager-select-all"]').toggleClass('disabled', !hasUnrenderable); }, /** * Handles renderable change of column models * - updates the view */ onRenderableChange: function() { this.updateView(); }, onSelectAll: function(e) { e.preventDefault(); _.each(this._getFilteredModels(), function(model) { model.set('renderable', true); }); }, _getFilteredModels: function() { return _.filter(this.collection.filter(this.filterer), function(model) { return !model.get('disabledVisibilityChange');<|fim▁hole|> } }); return ColumnManagerView; });<|fim▁end|>
});
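The view above coalesces a burst of change:renderable events into a single update by wrapping the handler in _.debounce. A rough Python equivalent of that trailing-edge behavior (illustrative only, not Underscore's exact semantics):

import threading

def debounce(wait):
    """Trailing-edge debounce: a burst of calls collapses into one call after `wait` seconds."""
    def decorator(fn):
        timer = [None]
        def wrapped(*args, **kwargs):
            if timer[0] is not None:
                timer[0].cancel()
            timer[0] = threading.Timer(wait, fn, args, kwargs)
            timer[0].start()
        return wrapped
    return decorator

@debounce(0.05)
def update_view():
    print("one update for the whole burst")

for _ in range(3):  # three rapid changes -> a single update
    update_view()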
<|file_name|>buffer_tests.rs<|end_file_name|><|fim▁begin|>//! Tests for `IBuffer`. use buffer::IBuffer; use std::io::Read; #[test] fn read_from_string() { let mut in_buf = IBuffer::from_str("Hello world"); let mut buf = [0u8; 6]; let res = in_buf.read(&mut buf); assert_eq!(6, res.expect("read must be ok")); assert_eq!([b'H', b'e', b'l', b'l', b'o', b' '], buf); } #[test] fn read_from_string_overflow() { let mut in_buf = IBuffer::from_str("Hello world"); let mut buf = [0u8; 6]; let _ = in_buf.read(&mut buf); let res = in_buf.read(&mut buf); assert_eq!(5, res.expect("read must be ok")); assert_eq!([b'w', b'o', b'r', b'l', b'd'], buf[0..5]);<|fim▁hole|>} #[test] fn read_from_string_eof() { let mut in_buf = IBuffer::from_str("Hello world"); let mut buf = [0u8; 6]; let _ = in_buf.read(&mut buf); let _ = in_buf.read(&mut buf); let res = in_buf.read(&mut buf); assert_eq!(0, res.expect("read must be ok")); }<|fim▁end|>
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "iamhhb.settings") try: from django.core.management import execute_from_command_line except ImportError: # The above import may fail for some other reason. Ensure that the # issue is really that Django is missing to avoid masking other # exceptions on Python 2. try:<|fim▁hole|> import django # noqa except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) raise execute_from_command_line(sys.argv)<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate fringe; extern crate tokio_core; #[macro_use(task_local)] extern crate futures; use futures::Async; use std::cell::{RefCell, UnsafeCell}; /// Convenient macro to suspend and retry async operation /// just as it was blocking operation #[macro_export] macro_rules! poll { ($e:expr) => {{ let res; loop { match $e { Err(ref e) if e.kind() == ::std::io::ErrorKind::WouldBlock => { $crate::yield_now(); } other => { res = Some(other); break }, } } res.unwrap() }} } task_local! { static TL_YIELDER : RefCell<Vec<YielderRefStore>> = RefCell::new(vec!()) } // Unsafe magic to for task-local yidler // // TODO: Need wiser people to verify this not not buggy // // `fringe` gives coroutine `&'a mut Yielder<I, O>` // // to have access to it from `yield_now`, use // an escape hatch and be extremely careful struct YielderRefStore(UnsafeCell<&'static Yield>);<|fim▁hole|> UnsafeCell::new(std::mem::transmute(y)) ) } } unsafe impl Send for YielderRefStore {} // Wrapper over `fringe::Yielder` to make put it in `task_local` trait Yield { fn suspend(&self); } impl<I : Send, O : Send, E : Send> Yield for fringe::generator::Yielder<I, SuspendCommand<O, E>> { fn suspend(&self) { let _ = self.suspend(Ok(Async::NotReady)); } } // We wake up coroutine passing this to it's inside enum ResumeCommand { Unblock } // We suspend coroutine passing this to the outside pub type SuspendCommand<O, E> = futures::Poll<O, E>; unsafe fn yielder_tl_push(y : &Yield) { TL_YIELDER.with(|tl_yielder| { let mut tl_yielder = tl_yielder.borrow_mut(); tl_yielder.push(YielderRefStore::new(y)); }); } unsafe fn yielder_tl_pop() -> &'static Yield { let yielder : YielderRefStore = TL_YIELDER.with(|tl_yielder| { let mut tl_yielder = tl_yielder.borrow_mut(); tl_yielder.pop().unwrap() }); yielder.0.into_inner() } pub struct Fiber<'a, O : Send+'a, E : Send+'a> { co : fringe::Generator<'a, ResumeCommand, SuspendCommand<O, E>, fringe::OsStack>, } impl<'a, O : Send, E : Send> Fiber<'a, O, E> { pub fn new<F>(f : F) -> Self where F: FnOnce() -> Result<O, E> + Send +'a { let stack = fringe::OsStack::new(1 << 20).unwrap(); let gen = fringe::Generator::new(stack, move |yielder, _resume_cmd : ResumeCommand| { unsafe { yielder_tl_push(yielder) }; let res = f(); // pop the `Yield`, but use the "typed" version let _pop_and_discard = unsafe { yielder_tl_pop() }; // TODO: can any optimizations prevent `yielder` to see // changes introduced when task-local version was used? yielder.suspend(match res { Ok(t) => Ok(Async::Ready(t)), Err(e) => Err(e) }); }); Fiber { co : gen } } } impl<'a, O : Send, E : Send> futures::Future for Fiber<'a, O, E> { type Item = O; type Error = E; fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> { self.co.resume(ResumeCommand::Unblock).expect("poll on finished Fiber is illegal") } } /// Yield the current fiber /// /// Block current fiber. It will be resumed and thus this function will return, /// when the event loop decides it might be ready. This includes: previously /// blocked IO becoming unblocked etc. pub fn yield_now() { let y = unsafe { yielder_tl_pop() }; y.suspend(); unsafe { yielder_tl_push(y) } } /// Block current fiber to wait for result of another future pub fn await<F: futures::Future>(mut f: F) -> Result<F::Item, F::Error> { loop { match f.poll() { Ok(Async::NotReady) => yield_now(), Ok(Async::Ready(val)) => return Ok(val), Err(e) => return Err(e), }; } }<|fim▁end|>
impl YielderRefStore { unsafe fn new<'a>(y : &'a Yield) -> YielderRefStore { YielderRefStore (
<|file_name|>app.routes.ts<|end_file_name|><|fim▁begin|>import { ModuleWithProviders } from '@angular/core'; import { Routes, RouterModule } from '@angular/router'; import { AppComponent } from './app.component'; import { LoginComponent } from './login/login.component'; import { MembersComponent } from './members/members.component'; import { AuthGuard } from './auth.service'; import { SignupComponent } from './signup/signup.component'; import { EmailComponent } from './email/email.component'; import { HomeComponent } from './home/home.component'; import { ChatComponent } from './chat/chat.component'; import { EspressoJournalComponent } from './journals/espresso-journal/espresso-journal.component'; import { FilterJournalComponent } from './journals/filter-journal/filter-journal.component'; import { CoffeeBrewingComponent } from './guides/coffee-brewing/coffee-brewing.component'; import { FoodPreparationComponent } from './guides/food-preparation/food-preparation.component'; import { TeaBrewingComponent } from './guides/tea-brewing/tea-brewing.component'; import { OurCoffeesComponent } from './guides/our-coffees/our-coffees.component'; import { OurTeasComponent } from './guides/our-teas/our-teas.component'; import { ContactsComponent } from './guides/contacts/contacts.component'; import { StocklistComponent } from './stocklist/stocklist.component'; export const router: Routes = [ { path: '', redirectTo: 'home', pathMatch: 'full' }, { path: 'home', component: HomeComponent, data: { title: 'Heroes List' } }, { path: 'login', component: LoginComponent }, { path: 'signup', component: SignupComponent }, { path: 'login-email', component: EmailComponent }, { path: 'members', component: MembersComponent, canActivate: [AuthGuard] }, { path: 'espresso-journal', component: EspressoJournalComponent, canActivate: [AuthGuard] }, { path: 'filter-journal', component: FilterJournalComponent, canActivate: [AuthGuard] }, { path: 'coffee-brewing', component: CoffeeBrewingComponent, canActivate: [AuthGuard] }, { path: 'tea-brewing', component: TeaBrewingComponent, canActivate: [AuthGuard] }, { path: 'food-preparation', component: FoodPreparationComponent, canActivate: [AuthGuard] }, { path: 'our-coffees', component: OurCoffeesComponent, canActivate: [AuthGuard] }, { path: 'our-teas', component: OurTeasComponent, canActivate: [AuthGuard] }, { path: 'contacts', component: ContactsComponent, canActivate: [AuthGuard] },<|fim▁hole|>export const routes: ModuleWithProviders = RouterModule.forRoot(router);<|fim▁end|>
{ path: 'stocklist', component: StocklistComponent, canActivate: [AuthGuard] }, { path: 'chat', component: ChatComponent, canActivate: [AuthGuard] } ];
<|file_name|>runge_kutta_2a.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 <|fim▁hole|> xold = x0 told = t0 ret = [] while (told <= tf): ret += [(told, xold)] k1 = dX(xold, told) k2 = dX(xold + h*k1, told+h) xold = xold + h/2 * (k1+k2) told = round(told + h,3) return ret if __name__ == "__main__": dX = lambda x, t: t + x RungeKutta2aEDO(0, 0, 1, 0.1, dX)<|fim▁end|>
from math import log, exp def RungeKutta2aEDO (x0, t0, tf, h, dX):
<|file_name|>view.py<|end_file_name|><|fim▁begin|>import logging from flask import request, flash, abort, Response from flask_admin import expose from flask_admin.babel import gettext, ngettext, lazy_gettext from flask_admin.model import BaseModelView from flask_admin.model.form import wrap_fields_in_fieldlist from flask_admin.model.fields import ListEditableFieldList from flask_admin._compat import iteritems, string_types import mongoengine import gridfs from mongoengine.connection import get_db from bson.objectid import ObjectId from flask_admin.actions import action from .filters import FilterConverter, BaseMongoEngineFilter from .form import get_form, CustomModelConverter from .typefmt import DEFAULT_FORMATTERS from .tools import parse_like_term from .helpers import format_error from .ajax import process_ajax_references, create_ajax_loader from .subdoc import convert_subdocuments # Set up logger log = logging.getLogger("flask-admin.mongo") SORTABLE_FIELDS = set(( mongoengine.StringField, mongoengine.IntField, mongoengine.FloatField, mongoengine.BooleanField, mongoengine.DateTimeField, mongoengine.ComplexDateTimeField, mongoengine.ObjectIdField, mongoengine.DecimalField, mongoengine.ReferenceField, mongoengine.EmailField, mongoengine.UUIDField, mongoengine.URLField )) class ModelView(BaseModelView): """ MongoEngine model scaffolding. """ column_filters = None """ Collection of the column filters. Can contain either field names or instances of :class:`flask_admin.contrib.mongoengine.filters.BaseFilter` classes. <|fim▁hole|> For example:: class MyModelView(BaseModelView): column_filters = ('user', 'email') or:: class MyModelView(BaseModelView): column_filters = (BooleanEqualFilter(User.name, 'Name')) """ model_form_converter = CustomModelConverter """ Model form conversion class. Use this to implement custom field conversion logic. Custom class should be derived from the `flask_admin.contrib.mongoengine.form.CustomModelConverter`. For example:: class MyModelConverter(AdminModelConverter): pass class MyAdminView(ModelView): model_form_converter = MyModelConverter """ object_id_converter = ObjectId """ Mongodb ``_id`` value conversion function. Default is `bson.ObjectId`. Use this if you are using String, Binary and etc. For example:: class MyModelView(BaseModelView): object_id_converter = int or:: class MyModelView(BaseModelView): object_id_converter = str """ filter_converter = FilterConverter() """ Field to filter converter. Override this attribute to use a non-default converter. """ column_type_formatters = DEFAULT_FORMATTERS """ Customized type formatters for MongoEngine backend """ allowed_search_types = (mongoengine.StringField, mongoengine.URLField, mongoengine.EmailField) """ List of allowed search field types. """ form_subdocuments = None """ Subdocument configuration options. This field accepts dictionary, where key is field name and value is either dictionary or instance of the `flask_admin.contrib.EmbeddedForm`. Consider following example:: class Comment(db.EmbeddedDocument): name = db.StringField(max_length=20, required=True) value = db.StringField(max_length=20) class Post(db.Document): text = db.StringField(max_length=30) data = db.EmbeddedDocumentField(Comment) class MyAdmin(ModelView): form_subdocuments = { 'data': { 'form_columns': ('name',) } } In this example, `Post` model has child `Comment` subdocument. When generating form for `Comment` embedded document, Flask-Admin will only create `name` field. 
It is also possible to use class-based embedded document configuration:: class CommentEmbed(EmbeddedForm): form_columns = ('name',) class MyAdmin(ModelView): form_subdocuments = { 'data': CommentEmbed() } Arbitrary depth nesting is supported:: class SomeEmbed(EmbeddedForm): form_excluded_columns = ('test',) class CommentEmbed(EmbeddedForm): form_columns = ('name',) form_subdocuments = { 'inner': SomeEmbed() } class MyAdmin(ModelView): form_subdocuments = { 'data': CommentEmbed() } There's also support for forms embedded into `ListField`. All you have to do is to create nested rule with `None` as a name. Even though it is slightly confusing, but that's how Flask-MongoEngine creates form fields embedded into ListField:: class Comment(db.EmbeddedDocument): name = db.StringField(max_length=20, required=True) value = db.StringField(max_length=20) class Post(db.Document): text = db.StringField(max_length=30) data = db.ListField(db.EmbeddedDocumentField(Comment)) class MyAdmin(ModelView): form_subdocuments = { 'data': { 'form_subdocuments': { None: { 'form_columns': ('name',) } } } } """ def __init__(self, model, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): """ Constructor :param model: Model class :param name: Display name :param category: Display category :param endpoint: Endpoint :param url: Custom URL :param menu_class_name: Optional class name for the menu item. :param menu_icon_type: Optional icon. Possible icon types: - `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon - `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon - `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory - `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL :param menu_icon_value: Icon glyph name or URL, depending on `menu_icon_type` setting """ self._search_fields = [] super(ModelView, self).__init__(model, name, category, endpoint, url, static_folder, menu_class_name=menu_class_name, menu_icon_type=menu_icon_type, menu_icon_value=menu_icon_value) self._primary_key = self.scaffold_pk() def _refresh_cache(self): """ Refresh cache. """ # Process subdocuments if self.form_subdocuments is None: self.form_subdocuments = {} self._form_subdocuments = convert_subdocuments(self.form_subdocuments) # Cache other properties super(ModelView, self)._refresh_cache() def _process_ajax_references(self): """ AJAX endpoint is exposed by top-level admin view class, but subdocuments might have AJAX references too. This method will recursively go over subdocument configuration and will precompute AJAX references for them ensuring that subdocuments can also use AJAX to populate their ReferenceFields. 
""" references = super(ModelView, self)._process_ajax_references() return process_ajax_references(references, self) def _get_model_fields(self, model=None): """ Inspect model and return list of model fields :param model: Model to inspect """ if model is None: model = self.model return sorted(iteritems(model._fields), key=lambda n: n[1].creation_counter) def scaffold_pk(self): # MongoEngine models have predefined 'id' as a key return 'id' def get_pk_value(self, model): """ Return the primary key value from the model instance :param model: Model instance """ return model.pk def scaffold_list_columns(self): """ Scaffold list columns """ columns = [] for n, f in self._get_model_fields(): # Verify type field_class = type(f) if (field_class == mongoengine.ListField and isinstance(f.field, mongoengine.EmbeddedDocumentField)): continue if field_class == mongoengine.EmbeddedDocumentField: continue if self.column_display_pk or field_class != mongoengine.ObjectIdField: columns.append(n) return columns def scaffold_sortable_columns(self): """ Return a dictionary of sortable columns (name, field) """ columns = {} for n, f in self._get_model_fields(): if type(f) in SORTABLE_FIELDS: if self.column_display_pk or type(f) != mongoengine.ObjectIdField: columns[n] = f return columns def init_search(self): """ Init search """ if self.column_searchable_list: for p in self.column_searchable_list: if isinstance(p, string_types): p = self.model._fields.get(p) if p is None: raise Exception('Invalid search field') field_type = type(p) # Check type if (field_type not in self.allowed_search_types): raise Exception('Can only search on text columns. ' + 'Failed to setup search for "%s"' % p) self._search_fields.append(p) return bool(self._search_fields) def scaffold_filters(self, name): """ Return filter object(s) for the field :param name: Either field name or field instance """ if isinstance(name, string_types): attr = self.model._fields.get(name) else: attr = name if attr is None: raise Exception('Failed to find field for filter: %s' % name) # Find name visible_name = None if not isinstance(name, string_types): visible_name = self.get_column_name(attr.name) if not visible_name: visible_name = self.get_column_name(name) # Convert filter type_name = type(attr).__name__ flt = self.filter_converter.convert(type_name, attr, visible_name) return flt def is_valid_filter(self, filter): """ Validate if the provided filter is a valid MongoEngine filter :param filter: Filter object """ return isinstance(filter, BaseMongoEngineFilter) def scaffold_form(self): """ Create form from the model. """ form_class = get_form(self.model, self.model_form_converter(self), base_class=self.form_base_class, only=self.form_columns, exclude=self.form_excluded_columns, field_args=self.form_args, extra_fields=self.form_extra_fields) return form_class def scaffold_list_form(self, custom_fieldlist=ListEditableFieldList, validators=None): """ Create form for the `index_view` using only the columns from `self.column_editable_list`. :param validators: `form_args` dict with only validators {'name': {'validators': [required()]}} :param custom_fieldlist: A WTForm FieldList class. By default, `ListEditableFieldList`. 
""" form_class = get_form(self.model, self.model_form_converter(self), base_class=self.form_base_class, only=self.column_editable_list, field_args=validators) return wrap_fields_in_fieldlist(self.form_base_class, form_class, custom_fieldlist) # AJAX foreignkey support def _create_ajax_loader(self, name, opts): return create_ajax_loader(self.model, name, name, opts) def get_query(self): """ Returns the QuerySet for this view. By default, it returns all the objects for the current model. """ return self.model.objects def _search(self, query, search_term): # TODO: Unfortunately, MongoEngine contains bug which # prevents running complex Q queries and, as a result, # Flask-Admin does not support per-word searching like # in other backends op, term = parse_like_term(search_term) criteria = None for field in self._search_fields: flt = {'%s__%s' % (field.name, op): term} q = mongoengine.Q(**flt) if criteria is None: criteria = q else: criteria |= q return query.filter(criteria) def get_list(self, page, sort_column, sort_desc, search, filters, execute=True): """ Get list of objects from MongoEngine :param page: Page number :param sort_column: Sort column :param sort_desc: Sort descending :param search: Search criteria :param filters: List of applied filters :param execute: Run query immediately or not """ query = self.get_query() # Filters if self._filters: for flt, flt_name, value in filters: f = self._filters[flt] query = f.apply(query, f.clean(value)) # Search if self._search_supported and search: query = self._search(query, search) # Get count count = query.count() if not self.simple_list_pager else None # Sorting if sort_column: query = query.order_by('%s%s' % ('-' if sort_desc else '', sort_column)) else: order = self._get_default_order() if order: query = query.order_by('%s%s' % ('-' if order[1] else '', order[0])) # Pagination if page is not None: query = query.skip(page * self.page_size) query = query.limit(self.page_size) if execute: query = query.all() return count, query def get_one(self, id): """ Return a single model instance by its ID :param id: Model ID """ try: return self.get_query().filter(pk=id).first() except mongoengine.ValidationError as ex: flash(gettext('Failed to get model. %(error)s', error=format_error(ex)), 'error') return None def create_model(self, form): """ Create model helper :param form: Form instance """ try: model = self.model() form.populate_obj(model) self._on_model_change(form, model, True) model.save() except Exception as ex: if not self.handle_view_exception(ex): flash(gettext('Failed to create record. %(error)s', error=format_error(ex)), 'error') log.exception('Failed to create record.') return False else: self.after_model_change(form, model, True) return model def update_model(self, form, model): """ Update model helper :param form: Form instance :param model: Model instance to update """ try: form.populate_obj(model) self._on_model_change(form, model, False) model.save() except Exception as ex: if not self.handle_view_exception(ex): flash(gettext('Failed to update record. %(error)s', error=format_error(ex)), 'error') log.exception('Failed to update record.') return False else: self.after_model_change(form, model, False) return True def delete_model(self, model): """ Delete model helper :param model: Model instance """ try: self.on_model_delete(model) model.delete() except Exception as ex: if not self.handle_view_exception(ex): flash(gettext('Failed to delete record. 
%(error)s', error=format_error(ex)), 'error') log.exception('Failed to delete record.') return False else: self.after_model_delete(model) return True # FileField access API @expose('/api/file/') def api_file_view(self): pk = request.args.get('id') coll = request.args.get('coll') db = request.args.get('db', 'default') if not pk or not coll or not db: abort(404) fs = gridfs.GridFS(get_db(db), coll) data = fs.get(self.object_id_converter(pk)) if not data: abort(404) return Response(data.read(), content_type=data.content_type, headers={ 'Content-Length': data.length }) # Default model actions def is_action_allowed(self, name): # Check delete action permission if name == 'delete' and not self.can_delete: return False return super(ModelView, self).is_action_allowed(name) @action('delete', lazy_gettext('Delete'), lazy_gettext('Are you sure you want to delete selected records?')) def action_delete(self, ids): try: count = 0 all_ids = [self.object_id_converter(pk) for pk in ids] for obj in self.get_query().in_bulk(all_ids).values(): count += self.delete_model(obj) flash(ngettext('Record was successfully deleted.', '%(count)s records were successfully deleted.', count, count=count)) except Exception as ex: if not self.handle_view_exception(ex): flash(gettext('Failed to delete records. %(error)s', error=str(ex)), 'error')<|fim▁end|>
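get_list in the view above composes a fixed pipeline: apply filters, then search, take the count, sort, and finally paginate with skip/limit. An in-memory Python analogue of that ordering (a sketch with lists standing in for the MongoEngine QuerySet; the function name is invented for illustration):

def get_list_pipeline(items, filters=(), search=None, sort_key=None,
                      sort_desc=False, page=None, page_size=20):
    """Filter, search, count, sort, paginate -- in that order."""
    rows = [i for i in items if all(f(i) for f in filters)]
    if search:
        rows = [i for i in rows if search.lower() in str(i).lower()]
    count = len(rows)  # taken before pagination, as in the real view
    if sort_key is not None:
        rows.sort(key=sort_key, reverse=sort_desc)
    if page is not None:
        rows = rows[page * page_size:(page + 1) * page_size]
    return count, rows

count, page_rows = get_list_pipeline(['alice', 'bob', 'carol'], search='a', sort_key=len)
assert count == 2 and page_rows == ['alice', 'carol']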
<|file_name|>SVGUnitTypes.java<|end_file_name|><|fim▁begin|>package org.w3c.dom.svg; public interface SVGUnitTypes { // Unit Types public static final short SVG_UNIT_TYPE_UNKNOWN = 0; public static final short SVG_UNIT_TYPE_USERSPACEONUSE = 1; <|fim▁hole|><|fim▁end|>
public static final short SVG_UNIT_TYPE_OBJECTBOUNDINGBOX = 2; }
<|file_name|>official_holiday_wizard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # -*- encoding: utf-8 -*- #############################################################################<|fim▁hole|># This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields,osv from openerp.addons.at_base import util class official_holiday_template_wizard(osv.osv_memory): def do_create(self, cr, uid, ids, context=None): holiday_obj = self.pool.get("official.holiday") template_obj = self.pool.get("official.holiday.template") user_obj = self.pool.get("res.users") template_ids = template_obj.search(cr,uid,[("id","in",util.active_ids(context))]) company_id = user_obj.browse(cr, uid, uid, context).company_id.id official_holiday_ids = [] for template_id in template_ids: template = template_obj.browse(cr, uid, template_id) for holiday in template.official_holiday_ids: official_holiday_ids.append(holiday.id) for wizard in self.browse(cr, uid, ids, context=context): if wizard.calendar_ids: for calendar in wizard.calendar_ids: holiday_obj.create_calendar_entries(cr, uid, official_holiday_ids, fiscalyear_id=wizard.fiscalyear_id.id, company_id=company_id, calendar_id=calendar.id,context=context) else: holiday_obj.create_calendar_entries(cr, uid, official_holiday_ids, fiscalyear_id=wizard.fiscalyear_id.id, company_id=company_id, context=context) return { "type" : "ir.actions.act_window_close" } _name = "official.holiday.template.wizard" _description = "Official holiday template wizard" _columns = { "fiscalyear_id" : fields.many2one("account.fiscalyear", "Fiscal Year"), "calendar_ids" : fields.many2many("resource.calendar", "holiday_calendar_rel", "holiday_id", "calendar_id", "Working Time"), }<|fim▁end|>
# # Copyright (c) 2007 Martin Reisenhofer <[email protected]> #
<|file_name|>DDLTablePerClassTestSuite.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates, Frank Schwarz. All rights reserved. * This program and the accompanying materials are made available under the <|fim▁hole|> * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 08/20/2008-1.0.1 Nathan Beyer (Cerner) * - 241308: Primary key is incorrectly assigned to embeddable class * field with the same name as the primary key field's name * 01/12/2009-1.1 Daniel Lo, Tom Ware, Guy Pelletier * - 247041: Null element inserted in the ArrayList * 07/17/2009 - tware - added tests for DDL generation of maps * 01/22/2010-2.0.1 Guy Pelletier * - 294361: incorrect generated table for element collection attribute overrides * 06/14/2010-2.2 Guy Pelletier * - 264417: Table generation is incorrect for JoinTables in AssociationOverrides * 09/15/2010-2.2 Chris Delahunt * - 322233 - AttributeOverrides and AssociationOverride dont change field type info * 11/17/2010-2.2.0 Chris Delahunt * - 214519: Allow appending strings to CREATE TABLE statements * 11/23/2010-2.2 Frank Schwarz * - 328774: TABLE_PER_CLASS-mapped key of a java.util.Map does not work for querying * 01/04/2011-2.3 Guy Pelletier * - 330628: @PrimaryKeyJoinColumn(...) is not working equivalently to @JoinColumn(..., insertable = false, updatable = false) * 01/06/2011-2.3 Guy Pelletier * - 312244: can't map optional one-to-one relationship using @PrimaryKeyJoinColumn * 01/11/2011-2.3 Guy Pelletier * - 277079: EmbeddedId's fields are null when using LOB with fetchtype LAZY ******************************************************************************/ package org.eclipse.persistence.testing.tests.jpa.ddlgeneration; import junit.framework.Test; import junit.framework.TestSuite; import org.eclipse.persistence.testing.framework.junit.JUnitTestCase; import javax.persistence.EntityManager; /** * JUnit test case(s) for DDL generation. */ public class DDLTablePerClassTestSuite extends DDLGenerationJUnitTestSuite { // This is the persistence unit name on server as for persistence unit name "ddlTablePerClass" in J2SE private static final String DDL_TPC_PU = "MulitPU-2"; public DDLTablePerClassTestSuite() { super(); } public DDLTablePerClassTestSuite(String name) { super(name); setPuName(DDL_TPC_PU); } public static Test suite() { TestSuite suite = new TestSuite(); suite.setName("DDLTablePerClassTestSuite"); suite.addTest(new DDLTablePerClassTestSuite("testSetup")); suite.addTest(new DDLTablePerClassTestSuite("testDDLTablePerClassModel")); suite.addTest(new DDLTablePerClassTestSuite("testDDLTablePerClassModelQuery")); if (! JUnitTestCase.isJPA10()) { suite.addTest(new DDLTablePerClassTestSuite("testTPCMappedKeyMapQuery")); } return suite; } /** * The setup is done as a test, both to record its failure, and to allow execution in the server. */ public void testSetup() { // Trigger DDL generation EntityManager emDDLTPC = createEntityManager("MulitPU-2"); closeEntityManager(emDDLTPC); clearCache(DDL_TPC_PU); } public static void main(String[] args) { junit.textui.TestRunner.run(suite()); } }<|fim▁end|>
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at
<|file_name|>logger.rs<|end_file_name|><|fim▁begin|>use log::{Level, LevelFilter, Log, Metadata, Record, SetLoggerError}; struct Logger { log_level: Level, } impl Log for Logger { fn enabled(&self, metadata: &Metadata) -> bool { metadata.level() <= self.log_level } fn log(&self, record: &Record) { if self.enabled(record.metadata()) { println!("[{:>5}@{}] {}", record.level(), record.target(), record.args());<|fim▁hole|>} pub fn init(level: &str) -> Result<(), SetLoggerError> { if level != "off" { let log_level = match level { "trace" => Level::Trace, "info" => Level::Info, "debug" => Level::Debug, "warn" => Level::Warn, "error" => Level::Error, _ => Level::Error, }; let logger = Logger { log_level }; log::set_boxed_logger(Box::new(logger)).map(|()| log::set_max_level(LevelFilter::Info)) } else { Ok(()) } }<|fim▁end|>
} } fn flush(&self) {}
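The logger above gates records by comparing each record's level against the configured threshold before formatting anything. A compact Python analogue using numeric severities (the mapping is assumed for illustration, not the log crate's internal values):

LEVELS = {"trace": 10, "debug": 20, "info": 30, "warn": 40, "error": 50}

class Logger:
    """Level-gated logger in the spirit of the Rust Log impl above."""
    def __init__(self, level):
        # Unknown names fall back to "error", like the `_ =>` match arm.
        self.threshold = LEVELS.get(level, LEVELS["error"])
    def log(self, level, target, message):
        if LEVELS[level] >= self.threshold:
            print(f"[{level.upper():>5}@{target}] {message}")

Logger("info").log("warn", "net", "retrying connection")  # printed
Logger("error").log("debug", "net", "handshake bytes")    # filtered out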
<|file_name|>documentOrderUtils.ts<|end_file_name|><|fim▁begin|>import { AttributeNodePointer, ChildNodePointer, isTinyNode, NodePointer, } from '../../domClone/Pointer'; import { ConcreteChildNode, ConcreteNode, ConcreteParentNode } from '../../domFacade/ConcreteNode'; import DomFacade from '../../domFacade/DomFacade'; import arePointersEqual from '../operators/compares/arePointersEqual'; import isSubtypeOf from './isSubtypeOf'; import Value, { ValueType } from './Value'; /** * Compares positions of given nodes in the given state, assuming they share a common parent * * @param domFacade The domFacade in which to consider the nodes * @param node1 The first node * @param node2 The second node * * @return Returns 0 if node1 equals node2, -1 if node1 precedes node2, and 1 otherwise */ function compareSiblingElements( domFacade: DomFacade, node1: NodePointer, node2: NodePointer ): number { const parentNode = domFacade.getParentNodePointer(node1 as ChildNodePointer, null); const childNodes = domFacade.getChildNodePointers(parentNode, null); for (let i = 0, l = childNodes.length; i < l; ++i) { const childNode = childNodes[i]; if (arePointersEqual(childNode, node1)) { return -1; } if (arePointersEqual(childNode, node2)) { return 1; } } } /** * Find all ancestors of the given node * * @param domFacade The domFacade to consider relations in * @param node The node to find all ancestors of * @return All of the ancestors of the given node */ function findAllAncestorPointers(domFacade: DomFacade, node: NodePointer): NodePointer[] { const ancestors: NodePointer[] = []; for ( let ancestor = node; ancestor; ancestor = domFacade.getParentNodePointer( ancestor as ChildNodePointer | AttributeNodePointer, null ) ) { ancestors.unshift(ancestor); } return ancestors; } function findAllAncestors(domFacade: DomFacade, node: ConcreteNode): ConcreteNode[] { const ancestors = []; for ( let ancestor = node; ancestor; ancestor = domFacade.getParentNode(ancestor as ConcreteChildNode, null) ) { ancestors.unshift(ancestor); } return ancestors; } function wrapToPointer(node: ConcreteNode): NodePointer { return { node, graftAncestor: null }; } /** * Compares the given positions w.r.t. document order in this state * * @param tieBreakerArr Results of earlier comparisons, used as a tie breaker for compares between documents * @param domFacade The domFacade in which to consider the nodes * @param nodeA * @param nodeB * * @return Returns 0 if the positions are equal, -1 if the first position precedes the second, and 1 otherwise. */ function compareElements( tieBreakerArr: NodePointer[], domFacade: DomFacade, nodeA: NodePointer, nodeB: NodePointer ): number { if ( !nodeA.graftAncestor && !nodeB.graftAncestor && !isTinyNode(nodeA.node) && !isTinyNode(nodeB.node) ) { // Comparing normal nodes. 
Can be optimized by disregarding pointers for ancestors const actualNodeA = nodeA.node; const actualNodeB = nodeB.node; if (actualNodeA === actualNodeB) { return 0; } const actualAncestorsA = findAllAncestors(domFacade, actualNodeA); const actualAncestorsB = findAllAncestors(domFacade, actualNodeB); if (actualAncestorsA[0] !== actualAncestorsB[0]) { const topAncestorPointerA = wrapToPointer(actualAncestorsA[0]); const topAncestorPointerB = wrapToPointer(actualAncestorsB[0]); // Separate trees, use earlier determined tie breakers let index1 = tieBreakerArr.findIndex((e) => arePointersEqual(e, topAncestorPointerA)); let index2 = tieBreakerArr.findIndex((e) => arePointersEqual(e, topAncestorPointerB)); if (index1 === -1) { index1 = tieBreakerArr.push(topAncestorPointerA); } if (index2 === -1) { index2 = tieBreakerArr.push(topAncestorPointerB); } return index1 - index2; } let y = 1; for (const z = Math.min(actualAncestorsA.length, actualAncestorsB.length); y < z; ++y) { if (actualAncestorsA[y] !== actualAncestorsB[y]) { break; } }<|fim▁hole|> if (!actualAncestorA) { // All nodes under a node are higher in document order than said node return -1; } if (!actualAncestorB) { // All nodes under a node are higher in document order than said node return 1; } // Compare positions under the common ancestor const parentNode = actualAncestorsB[y - 1]; const childNodes = domFacade.getChildNodes(parentNode as ConcreteParentNode, null); for (let m = 0, n = childNodes.length; m < n; ++m) { const childNode = childNodes[m]; if (childNode === actualAncestorA) { return -1; } if (childNode === actualAncestorB) { return 1; } } } else { if (arePointersEqual(nodeA, nodeB)) { return 0; } const ancestors1 = findAllAncestorPointers(domFacade, nodeA); const ancestors2 = findAllAncestorPointers(domFacade, nodeB); const topAncestor1 = ancestors1[0]; const topAncestor2 = ancestors2[0]; if (!arePointersEqual(topAncestor1, topAncestor2)) { // Separate trees, use earlier determined tie breakers let index1 = tieBreakerArr.findIndex((e) => arePointersEqual(e, topAncestor1)); let index2 = tieBreakerArr.findIndex((e) => arePointersEqual(e, topAncestor2)); if (index1 === -1) { index1 = tieBreakerArr.push(topAncestor1); } if (index2 === -1) { index2 = tieBreakerArr.push(topAncestor2); } return index1 - index2; } // Skip common ancestors let i = 1; for (const l = Math.min(ancestors1.length, ancestors2.length); i < l; ++i) { if (!arePointersEqual(ancestors1[i], ancestors2[i])) { break; } } if (!ancestors1[i]) { // All nodes under a node are higher in document order than said node return -1; } if (!ancestors2[i]) { // All nodes under a node are higher in document order than said node return 1; } // Compare positions under the common ancestor return compareSiblingElements(domFacade, ancestors1[i], ancestors2[i]); } } function compareNodePositionsWithTieBreaker( tieBreakerArr: NodePointer[], domFacade: DomFacade, node1: Value, node2: Value ) { const isNode1SubtypeOfAttribute = isSubtypeOf(node1.type, ValueType.ATTRIBUTE); const isNode2SubtypeOfAttribute = isSubtypeOf(node2.type, ValueType.ATTRIBUTE); let value1: NodePointer; let value2: NodePointer; if (isNode1SubtypeOfAttribute && !isNode2SubtypeOfAttribute) { value1 = domFacade.getParentNodePointer(node1.value); value2 = node2.value; if (arePointersEqual(value1, value2)) { // Same element, so A return 1; } } else if (isNode2SubtypeOfAttribute && !isNode1SubtypeOfAttribute) { value1 = node1.value; value2 = domFacade.getParentNodePointer(node2.value); if (arePointersEqual(value1, 
value2)) { // Same element, so B before A return -1; } } else if (isNode1SubtypeOfAttribute && isNode2SubtypeOfAttribute) { if ( arePointersEqual( domFacade.getParentNodePointer(node2.value), domFacade.getParentNodePointer(node1.value) ) ) { // Sort on attributes name return domFacade.getLocalName(node1.value) > domFacade.getLocalName(node2.value) ? 1 : -1; } value1 = domFacade.getParentNodePointer(node1.value); value2 = domFacade.getParentNodePointer(node2.value); } else { value1 = node1.value; value2 = node2.value; } return compareElements(tieBreakerArr, domFacade, value1, value2); } export function compareNodePositions(domFacade: DomFacade, node1: Value, node2: Value) { return compareNodePositionsWithTieBreaker( domFacade.orderOfDetachedNodes, domFacade, node1, node2 ); } /** * Sort (and deduplicate) the nodeValues in DOM order * Attributes are placed after their elements, before childnodes. * Attributes are sorted alphabetically by their names * * @param domFacade * @param nodeValues * * @return The sorted nodes */ export function sortNodeValues(domFacade: DomFacade, nodeValues: Value[]): Value[] { return mergeSort(nodeValues, (node1, node2) => compareNodePositionsWithTieBreaker(domFacade.orderOfDetachedNodes, domFacade, node1, node2) ).filter((nodeValue, i, sortedNodes) => { if (i === 0) { return true; } return !arePointersEqual( nodeValue.value as NodePointer, sortedNodes[i - 1].value as NodePointer ); }); } type Comparer<T> = (value1: T, value2: T) => number; const defaultComparer: Comparer<any> = (value1, value2) => (value1 < value2 ? -1 : 0); /** * Runs a merge sort across the provided array either using the provided comparer or the default. * * @param array The array to sort * @param comparer Function used to determine the order of the elements. It is expected to return * a negative value if first argument is less than second argument. * * @return The array sorted by the comparer */ export function mergeSort<T>(array: T[], comparer: Comparer<T> = defaultComparer): T[] { if (array.length <= 1) return array; const mid = Math.floor(array.length / 2); const left = mergeSort(array.slice(0, mid), comparer); const right = mergeSort(array.slice(mid), comparer); return merge(left, right, comparer); } function merge<T>(leftArray: T[], rightArray: T[], comparer: Comparer<T>): T[] { const sorted = new Array<T>(); while (leftArray.length && rightArray.length) { if (comparer(leftArray[0], rightArray[0]) < 0) { sorted.push(leftArray.shift()); } else { sorted.push(rightArray.shift()); } } return sorted.concat(leftArray.concat(rightArray)); }<|fim▁end|>
const actualAncestorA = actualAncestorsA[y]; const actualAncestorB = actualAncestorsB[y];
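mergeSort above recurses on halves and merges with a comparer that returns a negative value when the first argument should come first; on ties the merge takes from the right-hand run, exactly as the TypeScript does. The same routine in Python, kept deliberately close to the original (a list-based sketch; pop(0) keeps it short rather than fast):

def merge_sort(items, comparer=lambda a, b: -1 if a < b else 0):
    """Merge sort mirroring the TypeScript mergeSort/merge pair above."""
    if len(items) <= 1:
        return items
    mid = len(items) // 2
    left = merge_sort(items[:mid], comparer)
    right = merge_sort(items[mid:], comparer)
    out = []
    while left and right:
        if comparer(left[0], right[0]) < 0:
            out.append(left.pop(0))
        else:
            out.append(right.pop(0))
    return out + left + right

assert merge_sort([3, 1, 2]) == [1, 2, 3]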
<|file_name|>JustcoinWrapper.java<|end_file_name|><|fim▁begin|>package com.shinydev.wrappers; import au.com.bytecode.opencsv.CSVReader; import com.shinydev.justcoin.model.JustcoinTransaction; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.FileReader; import java.io.IOException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkArgument; public class JustcoinWrapper { private static final Logger LOG = LoggerFactory.getLogger(JustcoinWrapper.class); private final String justcoinCvsFileLocation; private final DateTimeZone zone; public JustcoinWrapper(String justcoinCvsFileLocation, DateTimeZone zone) { this.justcoinCvsFileLocation = justcoinCvsFileLocation; this.zone = zone; } public List<JustcoinTransaction> getJustcoinCreditTransactions(DateTime start, DateTime end) throws IOException { checkArgument(start.getZone().equals(zone), "start date should be with the same zone as passed in constructor"); checkArgument(end.getZone().equals(zone), "end date should be with the same zone as passed in constructor"); CSVReader reader = new CSVReader(new FileReader(justcoinCvsFileLocation)); List<String[]> myEntries = new ArrayList<>(); String [] nextLine; while ((nextLine = reader.readNext()) != null) { if ("id".equals(nextLine[0])) { LOG.debug("Parsed Justcoin CVS header"); } else { myEntries.add(nextLine); } } List<JustcoinTransaction> justcoinTransactions = myEntries.stream() .map(entry -> new JustcoinTransaction( entry[0], entry[1], Long.valueOf(entry[2]), //long DateTime.parse(entry[3]).withZone(zone), entry[4], new BigDecimal(entry[5]) //big decimal )) .collect(Collectors.toList()); return justcoinTransactions.stream() .filter(tx -> tx.getDateTime().isAfter(start))<|fim▁hole|> .collect(Collectors.toList()); } }<|fim▁end|>
.filter(tx -> tx.getDateTime().isBefore(end))
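The wrapper above streams an OpenCSV file, skips the header row whose first cell is 'id', parses column 3 as a timestamp, and keeps only rows strictly inside the (start, end) window. A hypothetical Python equivalent under the same column layout, assuming plain ISO-8601 timestamps that datetime.fromisoformat can parse:

import csv
from datetime import datetime

def load_transactions(path, start, end):
    """CSV -> rows filtered to the (start, end) window, like the Java wrapper above."""
    out = []
    with open(path, newline='') as f:
        for row in csv.reader(f):
            if row[0] == 'id':  # skip the header row, as the Java code does
                continue
            stamp = datetime.fromisoformat(row[3])
            if start < stamp < end:
                out.append({'id': row[0], 'time': stamp, 'amount': row[5]})
    return out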
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[derive(Debug, PartialEq)] pub enum CentrifugeError { WrongProtocol, ParsingError, UnknownProtocol, InvalidPacket, } pub mod prelude { pub use crate::structs::raw::Raw::*; pub use crate::structs::ether::Ether::*; } /// `Zero` - This packet is very interesting /// `One` - This packet is somewhat interesting /// `Two` - Stuff you want to see if you're looking really hard /// `AlmostMaximum` - Some binary data /// `Maximum` - We couldn't parse this #[derive(Debug)] pub enum NoiseLevel { Zero = 0, One = 1, Two = 2, AlmostMaximum = 3, Maximum = 4, } impl NoiseLevel { pub fn into_u8(self) -> u8 { self as u8 } }<|fim▁hole|>pub mod raw; pub mod ether; pub mod arp; pub mod cjdns; pub mod icmp; pub mod ipv4; pub mod ipv6; pub mod ip; pub mod tcp; pub mod udp; pub mod tls; pub mod http; pub mod dhcp; pub mod dns; pub mod ssdp; pub mod dropbox;<|fim▁end|>
<|file_name|>unused-attr.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![deny(unused_attributes)] #![allow(dead_code, unused_imports)] #![feature(core)] #![foo] //~ ERROR unused attribute #[foo] //~ ERROR unused attribute extern crate core; #[foo] //~ ERROR unused attribute use std::collections; #[foo] //~ ERROR unused attribute extern "C" { #[foo] //~ ERROR unused attribute fn foo(); } #[foo] //~ ERROR unused attribute mod foo { #[foo] //~ ERROR unused attribute pub enum Foo { #[foo] //~ ERROR unused attribute Bar, } } #[foo] //~ ERROR unused attribute fn bar(f: foo::Foo) { match f { #[foo] //~ ERROR unused attribute foo::Foo::Bar => {} } }<|fim▁hole|> a: isize } #[foo] //~ ERROR unused attribute trait Baz { #[foo] //~ ERROR unused attribute fn blah(); #[foo] //~ ERROR unused attribute fn blah2() {} } fn main() {}<|fim▁end|>
#[foo] //~ ERROR unused attribute struct Foo { #[foo] //~ ERROR unused attribute