<|file_name|>closest_colors.py<|end_file_name|>

"""Find the closest DMC colors for a hex color.

Usage: python closest_colors.py <hexcolor>
"""

import sys

from .. import color
from .. import dmc_colors


def main():
    if len(sys.argv) < 2:
        sys.exit(__doc__)
    hex_color = sys.argv[1]
    rgb_color = color.RGBColorFromHexString(hex_color)
    print 'Given RGB color', rgb_color
    print
    print 'Closest DMC colors by distance:'
    for pair in dmc_colors.GetClosestDMCColorsPairs(rgb_color):
        print 'Distance:', pair[1], dmc_colors.GetStringForDMCColor(pair[0])


if __name__ == '__main__':
    main()
<|file_name|>0004_postimage.py<|end_file_name|>

# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-04 19:14
from __future__ import unicode_literals

import archives.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('archives', '0003_attachment'),
    ]

    operations = [
        migrations.CreateModel(
            name='PostImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.ImageField(blank=True, upload_to='images/%Y/%m/%d', validators=[archives.models.check_image_extension], verbose_name='图片')),
                ('created_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('last_modify_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')),
                ('uploaded_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='上传者')),
            ],
        ),
    ]
<|file_name|>microarray.py<|end_file_name|>

# vi: sw=4 ts=4 et:
"""microarray.py - cMonkey microarray related processing
This module captures the microarray-specific scoring component
of cMonkey.

This file is part of cMonkey Python. Please see README and LICENSE for
more information and licensing details.
"""
import numpy as np
import logging
import datamatrix as dm
import util
import scoring
import multiprocessing as mp


def seed_column_members(data_matrix, row_membership, num_clusters,
                        num_clusters_per_column):
    """Default column membership seeder ('best')
    In case of multiple input ratio matrices, we assume that these
    matrices have been combined into data_matrix"""
    num_rows = data_matrix.num_rows()
    num_cols = data_matrix.num_columns()
    # create a submatrix for each cluster
    column_scores = []
    for cluster_num in xrange(1, num_clusters + 1):
        current_cluster_rows = []
        for row_index in xrange(num_rows):
            if row_membership[row_index][0] == cluster_num:
                current_cluster_rows.append(data_matrix.row_names[row_index])
        submatrix = data_matrix.submatrix_by_name(
            row_names=current_cluster_rows)
        scores = (-scoring.compute_column_scores_submatrix(submatrix)).values[0]
        column_scores.append(scores)

    column_members = []
    start_time = util.current_millis()
    for column_index in xrange(num_cols):
        scores_to_order = []
        for row_index in xrange(num_clusters):
            scores_to_order.append(column_scores[row_index][column_index])
        column_members.append(order(scores_to_order)[:num_clusters_per_column])
    elapsed = util.current_millis() - start_time
    logging.info("seed column members in %f s.", elapsed / 1000.0)
    return column_members


def order(alist):
    """a weird R function that gives each item's position in the original
    list if you enumerate each item in a sorted list"""
    return map(lambda x: alist.index(x) + 1,
               sorted(alist, reverse=True))


def compute_row_scores(membership, matrix, num_clusters,
                       use_multiprocessing):
    """for each cluster 1, 2, .. num_clusters compute the row scores
    for each row name in the input name matrix"""
    start_time = util.current_millis()
    cluster_row_scores = __compute_row_scores_for_clusters(
        membership, matrix, num_clusters, use_multiprocessing)
    # TODO: replace the nan/inf-Values with the quantile-thingy in the
    # R-version
    logging.info("__compute_row_scores_for_clusters() in %f s.",
                 (util.current_millis() - start_time) / 1000.0)

    # rearrange result into a DataMatrix, where rows are indexed by gene
    # and columns represent clusters
    start_time = util.current_millis()
    values = np.zeros((matrix.num_rows(), num_clusters))

    # note that cluster is 0 based on a matrix
    for cluster in xrange(num_clusters):
        row_scores = cluster_row_scores[cluster]
        values[:, cluster] = row_scores
    result = dm.DataMatrix(matrix.num_rows(), num_clusters,
                           row_names=matrix.row_names,
                           values=values)
    logging.info("made result matrix in %f s.",
                 (util.current_millis() - start_time) / 1000.0)
    result = result.sorted_by_row_name()
    result.fix_extreme_values()
    return result


ROW_SCORE_MATRIX = None
ROW_SCORE_MEMBERSHIP = None


def __compute_row_scores_for_clusters(membership, matrix, num_clusters,
                                      use_multiprocessing):
    """compute the pure row scores for the specified clusters
    without normalization"""
    # note that we set the data into globals before we fork it off
    # to save memory and pickling time
    global ROW_SCORE_MATRIX, ROW_SCORE_MEMBERSHIP
    ROW_SCORE_MATRIX = matrix
    ROW_SCORE_MEMBERSHIP = membership

    if use_multiprocessing:
        pool = mp.Pool()
        result = pool.map(compute_row_scores_for_cluster,
                          xrange(1, num_clusters + 1))
        pool.close()
        pool.join()
    else:
        result = []
        for cluster in range(1, num_clusters + 1):
            result.append(compute_row_scores_for_cluster(cluster))
    # cleanup
    ROW_SCORE_MATRIX = None
    ROW_SCORE_MEMBERSHIP = None
    return result


def compute_row_scores_for_cluster(cluster):
    """This function computes the row score for a cluster"""
    global ROW_SCORE_MATRIX, ROW_SCORE_MEMBERSHIP
    membership = ROW_SCORE_MEMBERSHIP
    matrix = ROW_SCORE_MATRIX

    rnames = membership.rows_for_cluster(cluster)
    cnames = membership.columns_for_cluster(cluster)
    sm1 = matrix.submatrix_by_name(row_names=rnames, column_names=cnames)

    if sm1.num_columns() > 1:
        matrix_filtered = matrix.submatrix_by_name(column_names=cnames)
        row_scores_for_cluster = __compute_row_scores_for_submatrix(
            matrix_filtered, sm1)
        return row_scores_for_cluster
    else:
        return None


def __compute_row_scores_for_submatrix(matrix, submatrix):
    """For a given matrix, compute the row scores. The second submatrix is
    used to calculate the column means on and should be derived from
    datamatrix filtered by the row names and column names of a specific
    cluster.
    matrix should be filtered by the columns of a specific cluster in
    order for the column means to be applied properly.
    The result is a DataMatrix with one row containing all the row scores"""
    return np.log(
        util.row_means(np.square(matrix.values - submatrix.column_means())) + 1e-99)


"""
def __quantile_normalize_scores(cluster_row_scores, row_names, membership, num_clusters):
    #quantile normalize the row scores in cluster_row_scores
    #that are not NaN or +/-Inf and are in a row cluster membership
    values_for_quantile = []
    for cluster in xrange(1, num_clusters + 1):
        row_scores_for_cluster = cluster_row_scores[cluster - 1]
        cluster_rows = membership.rows_for_cluster(cluster)
        if row_scores_for_cluster != None:
            for row in xrange(len(row_scores_for_cluster)):
                score = row_scores_for_cluster[row]
                gene_name = row_names[row]
                if np.isfinite(score) and (gene_name in cluster_rows):
                    values_for_quantile.append(score)
    return util.quantile(values_for_quantile, 0.95)
"""


class RowScoringFunction(scoring.ScoringFunctionBase):
    """Scoring algorithm for microarray data based on genes"""

    def __init__(self, membership, matrix, scaling_func=None,
                 run_in_iteration=scoring.schedule(1, 2),
                 config_params=None):
        """Create scoring function instance"""
        scoring.ScoringFunctionBase.__init__(self, membership,
                                             matrix, scaling_func,
                                             run_in_iteration,
                                             config_params)
        self.run_log = scoring.RunLog("row_scoring")

    def name(self):
        """returns the name of this scoring function"""
        return "Row"

    def do_compute(self, iteration_result, ref_matrix=None):
        """the row scoring function"""
        return compute_row_scores(self.membership(),
                                  self.matrix(),
                                  self.num_clusters(),
                                  self.config_params[scoring.KEY_MULTIPROCESSING])

    def run_logs(self):
        """return the run logs"""
        return [self.run_log]


__all__ = ['compute_row_scores', 'seed_column_members']
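The `order` helper above mirrors R's `order()` applied to a descending sort: for each element of the sorted list it reports the element's 1-based position in the original list. A quick standalone sanity check of that semantics (made-up scores, not part of the module):

scores = [0.2, 0.9, 0.5]
# sorted descending -> [0.9, 0.5, 0.2]; their 1-based positions in `scores`:
assert [scores.index(x) + 1 for x in sorted(scores, reverse=True)] == [2, 3, 1]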
<|file_name|>diff.rs<|end_file_name|>

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This software may be used and distributed according to the terms of the
 * GNU General Public License version 2.
 */

//! A simple binary that computes the diff between two files.

use std::fs;
#[cfg(target_family = "unix")]
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use std::path::PathBuf;

use structopt::StructOpt;
use xdiff::diff_unified;
use xdiff::CopyInfo;
use xdiff::DiffFile;
use xdiff::DiffOpts;
use xdiff::FileType;

const EXEC_BIT: u32 = 0o0000100;

#[derive(Debug, StructOpt)]
#[structopt(name = "diff", about = "A showcase binary for xdiff diff library.")]
struct Opt {
    /// Input file
    #[structopt(parse(from_os_str))]
    file_a: PathBuf,

    /// Output file, stdout if not present
    #[structopt(parse(from_os_str))]
    file_b: PathBuf,

    /// Treat the <file-b> as a copy of the <file-a>
    #[structopt(short, long)]
    copy: bool,

    /// Treat the <file-b> as a move of the <file-a>
    #[structopt(short, long)]
    move_: bool,

    /// Do not follow symlinks - compare them instead (POSIX-only)
    #[structopt(short, long)]
    symlink: bool,

    /// Number of lines of unified context (default: 3)
    #[structopt(short = "U", long, default_value = "3")]
    unified: usize,
}

fn main() -> Result<(), std::io::Error> {
    let opt = Opt::from_args();

    #[cfg(target_family = "unix")]
    fn file_mode_and_contents(
        opt: &Opt,
        path: &Path,
    ) -> Result<(FileType, Vec<u8>), std::io::Error> {
        use std::ffi::OsStr;
        use std::os::unix::ffi::OsStrExt;

        if opt.symlink && path.symlink_metadata()?.file_type().is_symlink() {
            let dest = path.read_link()?;
            let dest: &OsStr = dest.as_ref();
            Ok((FileType::Symlink, dest.as_bytes().to_owned()))
        } else if (path.metadata()?.permissions().mode() & EXEC_BIT) > 0 {
            Ok((FileType::Executable, fs::read(path)?))
        } else {
            Ok((FileType::Regular, fs::read(path)?))
        }
    }

    #[cfg(target_family = "windows")]
    fn file_mode_and_contents(
        _opt: &Opt,
        path: &Path,
    ) -> Result<(FileType, Vec<u8>), std::io::Error> {
        Ok((FileType::Regular, fs::read(path)?))
    }

    let copy_info = match (opt.copy, opt.move_) {
        (true, false) => CopyInfo::Copy,
        (false, true) => CopyInfo::Move,
        (false, false) => CopyInfo::None,
        (true, true) => panic!("file can't be marked as both copy and move"),
    };

    let a_path_str = opt.file_a.to_string_lossy();
    let a = if opt.file_a.is_file() {
        let (mode, contents) = file_mode_and_contents(&opt, &opt.file_a)?;
        Some(DiffFile::new(a_path_str.as_bytes(), contents, mode))
    } else {
        None
    };
    let b_path_str = opt.file_b.to_string_lossy();
    let b = if opt.file_b.is_file() {
        let (mode, contents) = file_mode_and_contents(&opt, &opt.file_b)?;
        Some(DiffFile::new(b_path_str.as_bytes(), contents, mode))
    } else {
        None
    };
    let diff = diff_unified(
        a,
        b,
        DiffOpts {
            context: opt.unified,
            copy_info,
        },
    );
    print!("{}", String::from_utf8_lossy(&diff));
    Ok(())
}
<|file_name|>test_np_model_gradients.py<|end_file_name|>

from nltk.tree import Tree
from np_shallow_neural_classifier import ShallowNeuralClassifier
from np_rnn_classifier import RNNClassifier
from np_autoencoder import Autoencoder
from np_tree_nn import TreeNN
import numpy as np
import pytest
import utils

__author__ = "Christopher Potts"
__version__ = "CS224u, Stanford, Spring 2021"

utils.fix_random_seeds()


class GradientCheckError(Exception):
    """Raised if a gradient check fails."""


@pytest.mark.parametrize("hidden_activation, d_hidden_activation", [
    [np.tanh, utils.d_tanh],
    [utils.relu, utils.d_relu]
])
def test_np_shallow_neural_classifier_gradients(hidden_activation, d_hidden_activation):
    model = ShallowNeuralClassifier(
        max_iter=10,
        hidden_activation=hidden_activation,
        d_hidden_activation=d_hidden_activation)
    # A tiny dataset so that we can run `fit` and set all the model
    # parameters:
    X = utils.randmatrix(5, 2)
    y = np.random.choice((0, 1), 5)
    model.fit(X, y)
    # Use the first example for the check:
    ex = X[0]
    label = model._onehot_encode([y[0]])[0]
    # Forward and backward to get the gradients:
    hidden, pred = model.forward_propagation(ex)
    d_W_hy, d_b_hy, d_W_xh, d_b_xh = model.backward_propagation(
        hidden, pred, ex, label)
    # Model parameters to check:
    param_pairs = (
        ('W_hy', d_W_hy),
        ('b_hy', d_b_hy),
        ('W_xh', d_W_xh),
        ('b_xh', d_b_xh)
    )
    gradient_check(param_pairs, model, ex, label)


@pytest.mark.parametrize("hidden_activation, d_hidden_activation", [
    [np.tanh, utils.d_tanh],
    [utils.relu, utils.d_relu]
])
def test_np_rnn_classifier(hidden_activation, d_hidden_activation):
    # A tiny dataset so that we can run `fit` and set all the model
    # parameters:
    vocab = ['a', 'b', '$UNK']
    data = [
        [list('ab'), 'good'],
        [list('aab'), 'good'],
        [list('abb'), 'good']]
    model = RNNClassifier(
        vocab,
        max_iter=10,
        hidden_dim=2,
        hidden_activation=hidden_activation,
        d_hidden_activation=d_hidden_activation)
    X, y = zip(*data)
    model.fit(X, y)
    # Use the first example for the check:
    ex = X[0]
    label = model._onehot_encode([y[0]])[0]
    # Forward and backward to get the gradients:
    hidden, pred = model.forward_propagation(ex)
    d_W_hy, d_b, d_W_hh, d_W_xh = model.backward_propagation(
        hidden, pred, ex, label)
    # Model parameters to check:
    param_pairs = (
        ('W_xh', d_W_xh),
        ('W_hh', d_W_hh),
        ('W_hy', d_W_hy),
        ('b', d_b)
    )
    gradient_check(param_pairs, model, ex, label)


@pytest.mark.parametrize("hidden_activation, d_hidden_activation", [
    [np.tanh, utils.d_tanh],
    [utils.relu, utils.d_relu]
])
def test_np_autoencoder(hidden_activation, d_hidden_activation):
    model = Autoencoder(
        max_iter=10,
        hidden_dim=2,
        hidden_activation=hidden_activation,
        d_hidden_activation=d_hidden_activation)
    # A tiny dataset so that we can run `fit` and set all the model
    # parameters:
    X = utils.randmatrix(5, 5)
    model.fit(X)
    # Use the first example for the check:
    ex = X[0]
    label = X[0]
    # Forward and backward to get the gradients:
    hidden, pred = model.forward_propagation(ex)
    d_W_hy, d_b_hy, d_W_xh, d_b_xh = model.backward_propagation(
        hidden, pred, ex, label)
    # Model parameters to check:
    param_pairs = (
        ('W_hy', d_W_hy),
        ('b_hy', d_b_hy),
        ('W_xh', d_W_xh),
        ('b_xh', d_b_xh)
    )
    gradient_check(param_pairs, model, ex, label)


@pytest.mark.parametrize("hidden_activation, d_hidden_activation", [
    [np.tanh, utils.d_tanh],
    [utils.relu, utils.d_relu]
])
def test_np_tree_nn(hidden_activation, d_hidden_activation):
    # A tiny dataset so that we can run `fit` and set all the model
    # parameters:
    vocab = ["1", "+", "2"]
    X = [
        "(even (odd 1) (neutral (neutral +) (odd 1)))",
        "(odd (odd 1) (neutral (neutral +) (even 2)))"]
    X = [Tree.fromstring(ex) for ex in X]
    y = [tree.label() for tree in X]
    model = TreeNN(
        vocab,
        max_iter=10,
        hidden_dim=5,
        hidden_activation=hidden_activation,
        d_hidden_activation=d_hidden_activation)
    model.fit(X, y)
    # Use the first example for the check:
    ex = X[0]
    label = model._onehot_encode([ex.label()])[0]
    # Forward and backward to get the gradients:
    hidden, pred = model.forward_propagation(ex)
    d_W_hy, d_b_y, d_W, d_b = model.backward_propagation(
        hidden, pred, ex, label)
    # Model parameters to check:
    param_pairs = (
        ('W_hy', d_W_hy),
        ('b_y', d_b_y),
        ('W', d_W),
        ('b', d_b)
    )
    gradient_check(param_pairs, model, ex, label)


def gradient_check(param_pairs, model, ex, label, epsilon=0.0001, threshold=0.001):
    """
    Numerical gradient check following the method described here:

    http://ufldl.stanford.edu/wiki/index.php/Gradient_checking_and_advanced_optimization

    Parameters
    ----------
    param_pairs : list of str, np.array pairs
        In each pair, the first element is the name of the parameter to
        check, and the second is its purported derivatives. We include
        the name so that we can raise an informative error message in
        the case of a failure.

    model : trained model instance
        This should have attributes for all of the parameters named in
        `param_pairs`, and it must have methods `forward_propagation`
        and `get_error`.

    ex : an example that `model` can process

    label : a label vector that `model` can learn from directly

    epsilon : float
        The small constant by which the parameter values are changed.

    threshold : float
        Tolerance for raising an error.

    Raises
    ------
    GradientCheckError

    """
    for param_name, d_params in param_pairs:
        params = getattr(model, param_name)
        # This iterator will allow us to cycle over all the values for
        # arrays of any dimension:
        iterator = np.nditer(params, flags=['multi_index'], op_flags=['readwrite'])
        while not iterator.finished:
            idx = iterator.multi_index
            actual = params[idx]
            params[idx] = actual + epsilon
            _, pred = model.forward_propagation(ex)
            grad_pos = model.get_error(pred, label)
            params[idx] = actual - epsilon
            _, pred = model.forward_propagation(ex)
            grad_neg = model.get_error(pred, label)
            grad_est = (grad_pos - grad_neg) / (epsilon * 2.0)
            params[idx] = actual
            grad_bp = d_params[idx]
            # Relative error to control for differences in proportion
            # across parameter values:
            err = np.abs(grad_bp - grad_est) / (np.abs(grad_bp) + np.abs(grad_est))
            if err >= threshold:
                raise GradientCheckError(
                    "Gradient check error for {} at {}: error is {}".format(
                        param_name, idx, err))
            iterator.iternext()
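The central-difference estimate that `gradient_check` builds with `np.nditer` can be illustrated on a toy objective whose gradient is known in closed form. A minimal standalone sketch, independent of the models above (toy values assumed):

import numpy as np

# f(w) = sum(w**2), whose true gradient is 2*w
w = np.array([0.3, -1.2])
epsilon = 1e-4
grad_analytic = 2 * w
grad_est = np.zeros_like(w)
for i in range(w.size):
    w[i] += epsilon
    pos = (w ** 2).sum()
    w[i] -= 2 * epsilon
    neg = (w ** 2).sum()
    w[i] += epsilon  # restore the original value
    grad_est[i] = (pos - neg) / (2 * epsilon)

rel_err = np.abs(grad_analytic - grad_est) / (np.abs(grad_analytic) + np.abs(grad_est))
assert (rel_err < 1e-3).all()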
<|file_name|>11933-2.cpp<|end_file_name|>

#include <bits/stdc++.h>

using namespace std;

typedef long long ll;
typedef pair<int,int> ii;
typedef vector<int> vi;
typedef vector<ii> vii;

int main () {
    int n;
    while(scanf("%d", &n) && n) {
        bitset<32> bs, a, b;
        bs = n;
        int cont = 0;
        for(int i = 0; i < 32; i++) {
            if(bs.test(i)) {
                if(cont % 2) b.set(i);
                else a.set(i);
                cont++;
            }
        }
        //int x = a;
        printf("%u %u\n", a.to_ulong(), b.to_ulong());
    }
    return 0;
}
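The loop above distributes the set bits of `n` alternately between two outputs `a` and `b`, so that `a + b == n` and neither number shares a bit with the other. The same idea in a short Python sketch (illustrative only):

def split_bits(n):
    a = b = cont = 0
    for i in range(32):
        if n >> i & 1:
            if cont % 2:
                b |= 1 << i      # odd-numbered set bits go to b
            else:
                a |= 1 << i      # even-numbered set bits go to a
            cont += 1
    return a, b

assert split_bits(9) == (1, 8)   # 9 = 0b1001 -> a takes bit 0, b takes bit 3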
<|file_name|>parsed.rs<|end_file_name|>

//! Parsed domain names.

use std::borrow::Cow;
use std::cmp;
use std::fmt;
use std::hash;
use super::super::{Parser, ParseError, ParseResult};
use super::{DName, DNameBuf, DNameSlice, Label, NameLabels, NameLabelettes};
use super::plain::slice_from_bytes_unsafe;


//------------ ParsedDName ---------------------------------------------------

/// A domain name parsed from a DNS message.
///
/// In an attempt to keep messages small, DNS uses a procedure called name
/// compression. It tries to minimize the space used for repeated domain
/// names by simply referring to the first occurrence of the name. This works
/// not only for complete names but also for suffixes. In this case, the
/// first unique labels of the name are included and then a pointer is
/// included for the rest of the name.
///
/// A consequence of this is that when parsing a domain name, its labels can
/// be scattered all over the message and we would need to allocate some
/// space to re-assemble the original name. However, in many cases we don’t
/// need the complete message. Many operations can be completed by just
/// iterating over the labels which we can do in place.
///
/// This is what the `ParsedDName` type does: It takes a reference to a
/// message and an indicator where inside the message the name starts and
/// then walks over the message as necessity dictates. When created while
/// parsing a message, the parser quickly walks over the labels to make sure
/// that the name indeed is valid. While this takes up a bit of time, it
/// avoids late surprises and provides for a nicer interface with less
/// `Result`s.
///
/// Obviously, `ParsedDName` implements the [`DName`] trait and provides all
/// operations required by this trait. It also implements `PartialEq` and
/// `Eq`, as well as `PartialOrd` and `Ord` against all other domain name
/// types, plus `Hash` behaving the same way as for the other types.
///
/// [`DName`]: trait.DName.html
#[derive(Clone)]
pub struct ParsedDName<'a> {
    message: &'a [u8],
    start: usize
}

/// # Creation and Conversion
///
impl<'a> ParsedDName<'a> {
    /// Creates a new parsed domain name.
    ///
    /// This parses out the leading uncompressed labels from the parser and
    /// then quickly jumps over any possible remaining compressing to check
    /// that the name is valid.
    pub fn parse(parser: &mut Parser<'a>) -> ParseResult<Self> {
        let res = ParsedDName{message: parser.bytes(), start: parser.pos()};

        // Step 1: Walk over uncompressed labels to advance the parser.
        let pos;
        loop {
            match try!(Self::parse_label(parser)) {
                Ok(true) => return Ok(res),
                Ok(false) => { }
                Err(x) => {
                    pos = x;
                    break
                }
            }
        }

        // Step 2: Walk over the rest to see if the name is valid.
        let mut parser = parser.clone();
        parser.remove_limit();
        try!(parser.seek(pos));
        loop {
            let step = try!(Self::parse_label(&mut parser));
            match step {
                Ok(true) => return Ok(res),
                Ok(false) => { }
                Err(pos) => try!(parser.seek(pos))
            }
        }
    }

    /// Unpacks the name.
    ///
    /// This will return the cow’s borrowed variant for any parsed name that
    /// isn’t in fact compressed. Otherwise it will assemble all the labels
    /// into an owned domain name.
    pub fn unpack(&self) -> Cow<'a, DNameSlice> {
        match self.split_uncompressed() {
            (Some(slice), None) => Cow::Borrowed(slice),
            (None, Some(packed)) => packed.unpack(),
            (None, None) => Cow::Borrowed(DNameSlice::empty()),
            (Some(slice), Some(packed)) => {
                let mut res = slice.to_owned();
                for label in packed.labels() {
                    res.push(label).unwrap()
                }
                Cow::Owned(res)
            }
        }
    }

    /// Returns a slice if the name is uncompressed.
    pub fn as_slice(&self) -> Option<&'a DNameSlice> {
        if let (Some(slice), None) = self.split_uncompressed() {
            Some(slice)
        }
        else {
            None
        }
    }
}

/// # Working with Labels
///
impl<'a> ParsedDName<'a> {
    /// Returns an iterator over the labels of the name.
    pub fn labels(&self) -> NameLabels<'a> {
        NameLabels::from_parsed(self.clone())
    }

    /// Returns an iterator over the labelettes of the name.
    pub fn labelettes(&self) -> NameLabelettes<'a> {
        NameLabelettes::new(self.labels())
    }

    /// Splits off the first label from the name.
    ///
    /// For correctly encoded names, this function will always return
    /// `Some(_)`. The first element will be the parsed out label. The
    /// second element will be a parsed name of the remainder of the name
    /// if the label wasn’t the root label or `None` otherwise.
    pub fn split_first(&self) -> Option<(&'a Label, Option<Self>)> {
        let mut name = self.clone();
        loop {
            let new_name = match name.split_label() {
                Ok(x) => return Some(x),
                Err(Some(x)) => x,
                Err(None) => return None
            };
            name = new_name;
        }
    }

    /// Splits a label or goes to where a pointer points.
    ///
    /// Ok((label, tail)) -> a label and what is left.
    /// Err(Some(tail)) -> re-positioned tail.
    /// Err(None) -> broken
    fn split_label(&self) -> Result<(&'a Label, Option<Self>), Option<Self>> {
        if self.message[self.start] & 0xC0 == 0xC0 {
            // Pointer label.
            let start = ((self.message[self.start] & 0x3f) as usize) << 8
                      | match self.message.get(self.start + 1) {
                          Some(two) => *two as usize,
                          None => return Err(None)
                      };
            if start >= self.message.len() {
                Err(None)
            }
            else {
                Err(Some(ParsedDName{message: self.message, start: start}))
            }
        }
        else {
            // "Real" label.
            let (label, _) = match Label::split_from(
                                                &self.message[self.start..]) {
                Some(x) => x,
                None => return Err(None)
            };
            let start = self.start + label.len();
            if label.is_root() {
                Ok((label, None))
            }
            else {
                Ok((label, Some(ParsedDName{message: self.message,
                                            start: start})))
            }
        }
    }

    /// Splits off the part that is uncompressed.
    fn split_uncompressed(&self) -> (Option<&'a DNameSlice>, Option<Self>) {
        let mut name = self.clone();
        loop {
            name = match name.split_label() {
                Ok((_, Some(new_name))) => new_name,
                Ok((label, None)) => {
                    let end = name.start + label.len();
                    let bytes = &self.message[self.start..end];
                    return (Some(unsafe { slice_from_bytes_unsafe(bytes) }),
                            None)
                }
                Err(Some(new_name)) => {
                    let bytes = &self.message[self.start..name.start];
                    return (Some(unsafe { slice_from_bytes_unsafe(bytes) }),
                            Some(new_name))
                }
                Err(None) => unreachable!()
            };
        }
    }

    /// Parses a label.
    ///
    /// Returns `Ok(is_root)` if the label is a normal label. Returns
    /// `Err(pos)` with the position of the next label.
    fn parse_label(parser: &mut Parser<'a>)
                   -> ParseResult<Result<bool, usize>> {
        let head = try!(parser.parse_u8());
        match head {
            0 => Ok(Ok(true)),
            1 ... 0x3F => parser.skip(head as usize).map(|_| Ok(false)),
            0x41 => {
                let count = try!(parser.parse_u8());
                let len = if count == 0 { 32 }
                          else { ((count - 1) / 8 + 1) as usize };
                parser.skip(len).map(|_| Ok(false))
            }
            0xC0 ... 0xFF => {
                Ok(Err(try!(parser.parse_u8()) as usize
                       + (((head & 0x3F) as usize) << 8)))
            }
            _ => Err(ParseError::UnknownLabel)
        }
    }
}


//--- DName

impl<'a> DName for ParsedDName<'a> {
    fn to_cow(&self) -> Cow<DNameSlice> {
        self.unpack()
    }

    fn labels(&self) -> NameLabels {
        NameLabels::from_parsed(self.clone())
    }
}


//--- PartialEq and Eq

impl<'a, N: DName> PartialEq<N> for ParsedDName<'a> {
    fn eq(&self, other: &N) -> bool {
        let self_iter = self.labelettes();
        let other_iter = other.labelettes();
        self_iter.eq(other_iter)
    }
}

impl<'a> PartialEq<str> for ParsedDName<'a> {
    fn eq(&self, other: &str) -> bool {
        use std::str::FromStr;

        let other = match DNameBuf::from_str(other) {
            Ok(other) => other,
            Err(_) => return false
        };
        self.eq(&other)
    }
}

impl<'a> Eq for ParsedDName<'a> { }


//--- PartialOrd and Ord

impl<'a, N: DName> PartialOrd<N> for ParsedDName<'a> {
    fn partial_cmp(&self, other: &N) -> Option<cmp::Ordering> {
        let self_iter = self.labelettes().rev();
        let other_iter = other.labelettes().rev();
        self_iter.partial_cmp(other_iter)
    }
}

impl<'a> Ord for ParsedDName<'a> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        let self_iter = self.labelettes().rev();
        let other_iter = other.labelettes().rev();
        self_iter.cmp(other_iter)
    }
}


//--- Hash

impl<'a> hash::Hash for ParsedDName<'a> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        for item in self.labelettes() {
            item.hash(state)
        }
    }
}


//--- std::fmt traits

impl<'a> fmt::Display for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut labels = self.labels();
        if let Some(label) = labels.next() {
            try!(write!(f, "{}", label));
        }
        for label in labels {
            try!(write!(f, ".{}", label))
        }
        Ok(())
    }
}

impl<'a> fmt::Octal for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut labels = self.labels();
        if let Some(label) = labels.next() {
            try!(write!(f, "{:o}", label));
        }
        for label in labels {
            try!(write!(f, ".{:o}", label))
        }
        Ok(())
    }
}

impl<'a> fmt::LowerHex for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut labels = self.labels();
        if let Some(label) = labels.next() {
            try!(write!(f, "{:x}", label));
        }
        for label in labels {
            try!(write!(f, ".{:x}", label))
        }
        Ok(())
    }
}

impl<'a> fmt::UpperHex for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut labels = self.labels();
        if let Some(label) = labels.next() {
            try!(write!(f, "{:X}", label));
        }
        for label in labels {
            try!(write!(f, ".{:X}", label))
        }
        Ok(())
    }
}

impl<'a> fmt::Binary for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut labels = self.labels();
        if let Some(label) = labels.next() {
            try!(write!(f, "{:b}", label));
        }
        for label in labels {
            try!(write!(f, ".{:b}", label))
        }
        Ok(())
    }
}

impl<'a> fmt::Debug for ParsedDName<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        try!(f.write_str("ParsedDName("));
        try!(fmt::Display::fmt(self, f));
        f.write_str(")")
    }
}
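The pointer branch of `parse_label` above implements RFC 1035 name compression: a length byte with the top two bits set carries a 14-bit offset back into the message. A minimal Python sketch of the same decompression walk (toy message bytes; no loop protection, unlike the validated Rust version):

def decode_name(message: bytes, start: int) -> str:
    labels, pos = [], start
    while True:
        head = message[pos]
        if head == 0:                    # root label: done
            break
        if head & 0xC0 == 0xC0:          # compression pointer: 14-bit offset
            pos = ((head & 0x3F) << 8) | message[pos + 1]
            continue
        labels.append(message[pos + 1:pos + 1 + head].decode())
        pos += 1 + head
    return ".".join(labels)

# "example.com" at offset 0, then "www" plus a pointer back to offset 0:
msg = b"\x07example\x03com\x00" + b"\x03www\xc0\x00"
assert decode_name(msg, 13) == "www.example.com"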
<|file_name|>gnu.py<|end_file_name|>

from __future__ import division, absolute_import, print_function

import re
import os
import sys
import warnings
import platform
import tempfile
from subprocess import Popen, PIPE, STDOUT

from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import msvc_runtime_library
from numpy.distutils.compat import get_exception

compilers = ['GnuFCompiler', 'Gnu95FCompiler']

TARGET_R = re.compile("Target: ([a-zA-Z0-9_\-]*)")

# XXX: handle cross compilation
def is_win64():
    return sys.platform == "win32" and platform.architecture()[0] == "64bit"

if is_win64():
    #_EXTRAFLAGS = ["-fno-leading-underscore"]
    _EXTRAFLAGS = []
else:
    _EXTRAFLAGS = []


class GnuFCompiler(FCompiler):
    compiler_type = 'gnu'
    compiler_aliases = ('g77',)
    description = 'GNU Fortran 77 compiler'

    def gnu_version_match(self, version_string):
        """Handle the different versions of GNU fortran compilers"""
        # Strip warning(s) that may be emitted by gfortran
        while version_string.startswith('gfortran: warning'):
            version_string = version_string[version_string.find('\n')+1:]

        # Gfortran versions from after 2010 will output a simple string
        # (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
        # gfortrans may still return long version strings (``-dumpversion``
        # was an alias for ``--version``)
        if len(version_string) <= 20:
            # Try to find a valid version string
            m = re.search(r'([0-9.]+)', version_string)
            if m:
                # g77 provides a longer version string that starts with GNU
                # Fortran
                if version_string.startswith('GNU Fortran'):
                    return ('g77', m.group(1))

                # gfortran only outputs a version string such as #.#.#, so
                # check if the match is at the start of the string
                elif m.start() == 0:
                    return ('gfortran', m.group(1))
        else:
            # Output probably from --version, try harder:
            m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
            if m:
                return ('gfortran', m.group(1))
            m = re.search(r'GNU Fortran.*?\-?([0-9-.]+)', version_string)
            if m:
                v = m.group(1)
                if v.startswith('0') or v.startswith('2') or v.startswith('3'):
                    # the '0' is for early g77's
                    return ('g77', v)
                else:
                    # at some point in the 4.x series, the ' 95' was dropped
                    # from the version string
                    return ('gfortran', v)

        # If still nothing, raise an error to make the problem easy to find.
        err = 'A valid Fortran version was not found in this string:\n'
        raise ValueError(err + version_string)

    def version_match(self, version_string):
        v = self.gnu_version_match(version_string)
        if not v or v[0] != 'g77':
            return None
        return v[1]

    possible_executables = ['g77', 'f77']
    executables = {
        'version_cmd'  : [None, "-dumpversion"],
        'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
        'compiler_f90' : None,  # Use --fcompiler=gnu95 for f90 codes
        'compiler_fix' : None,
        'linker_so'    : [None, "-g", "-Wall"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"],
        'linker_exe'   : [None, "-g", "-Wall"]
    }
    module_dir_switch = None
    module_include_switch = None

    # Cygwin: f771: warning: -fPIC ignored for target (all code is
    # position independent)
    if os.name != 'nt' and sys.platform != 'cygwin':
        pic_flags = ['-fPIC']

    # use -mno-cygwin for g77 when Python is not Cygwin-Python
    if sys.platform == 'win32':
        for key in ['version_cmd', 'compiler_f77', 'linker_so', 'linker_exe']:
            executables[key].append('-mno-cygwin')

    g2c = 'g2c'
    suggested_f90_compiler = 'gnu95'

    def get_flags_linker_so(self):
        opt = self.linker_so[1:]
        if sys.platform == 'darwin':
            target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
            # If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
            # and leave it alone.  But, distutils will complain if the
            # environment's value is different from the one in the Python
            # Makefile used to build Python.  We let distutils handle this
            # error checking.
            if not target:
                # If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
                # we try to get it first from the Python Makefile and then we
                # fall back to setting it to 10.3 to maximize the set of
                # versions we can work with.  This is a reasonable default
                # even when using the official Python dist and those derived
                # from it.
                import distutils.sysconfig as sc
                g = {}
                try:
                    get_makefile_filename = sc.get_makefile_filename
                except AttributeError:
                    pass  # i.e. PyPy
                else:
                    filename = get_makefile_filename()
                    sc.parse_makefile(filename, g)
                target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
                os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
                if target == '10.3':
                    s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
                    warnings.warn(s, stacklevel=2)

            opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
        else:
            opt.append("-shared")
        if sys.platform.startswith('sunos'):
            # SunOS often has dynamically loaded symbols defined in the
            # static library libg2c.a  The linker doesn't like this.  To
            # ignore the problem, use the -mimpure-text flag.  It isn't
            # the safest thing, but seems to work. 'man gcc' says:
            # ".. Instead of using -mimpure-text, you should compile all
            # source code with -fpic or -fPIC."
            opt.append('-mimpure-text')
        return opt

    def get_libgcc_dir(self):
        status, output = exec_command(self.compiler_f77 +
                                      ['-print-libgcc-file-name'], use_tee=0)
        if not status:
            return os.path.dirname(output)
        return None

    def get_library_dirs(self):
        opt = []
        if sys.platform[:5] != 'linux':
            d = self.get_libgcc_dir()
            if d:
                # if windows and not cygwin, libg2c lies in a different folder
                if sys.platform == 'win32' and not d.startswith('/usr/lib'):
                    d = os.path.normpath(d)
                    path = os.path.join(d, "lib%s.a" % self.g2c)
                    if not os.path.exists(path):
                        root = os.path.join(d, *((os.pardir,)*4))
                        d2 = os.path.abspath(os.path.join(root, 'lib'))
                        path = os.path.join(d2, "lib%s.a" % self.g2c)
                        if os.path.exists(path):
                            opt.append(d2)
                opt.append(d)
        return opt

    def get_libraries(self):
        opt = []
        d = self.get_libgcc_dir()
        if d is not None:
            g2c = self.g2c + '-pic'
            f = self.static_lib_format % (g2c, self.static_lib_extension)
            if not os.path.isfile(os.path.join(d, f)):
                g2c = self.g2c
        else:
            g2c = self.g2c

        if g2c is not None:
            opt.append(g2c)
        c_compiler = self.c_compiler
        if sys.platform == 'win32' and c_compiler and \
               c_compiler.compiler_type == 'msvc':
            # the following code is not needed (read: breaks) when using MinGW
            # in case want to link F77 compiled code with MSVC
            opt.append('gcc')
            runtime_lib = msvc_runtime_library()
            if runtime_lib:
                opt.append(runtime_lib)
        if sys.platform == 'darwin':
            opt.append('cc_dynamic')
        return opt

    def get_flags_debug(self):
        return ['-g']

    def get_flags_opt(self):
        v = self.get_version()
        if v and v <= '3.3.3':
            # With this compiler version building Fortran BLAS/LAPACK
            # with -O3 caused failures in lib.lapack heevr,syevr tests.
            opt = ['-O2']
        else:
            opt = ['-O3']
        opt.append('-funroll-loops')
        return opt

    def _c_arch_flags(self):
        """ Return detected arch flags from CFLAGS """
        from distutils import sysconfig
        try:
            cflags = sysconfig.get_config_vars()['CFLAGS']
        except KeyError:
            return []
        arch_re = re.compile(r"-arch\s+(\w+)")
        arch_flags = []
        for arch in arch_re.findall(cflags):
            arch_flags += ['-arch', arch]
        return arch_flags

    def get_flags_arch(self):
        return []

    def runtime_library_dir_option(self, dir):
        sep = ',' if sys.platform == 'darwin' else '='
        return '-Wl,-rpath%s"%s"' % (sep, dir)


class Gnu95FCompiler(GnuFCompiler):
    compiler_type = 'gnu95'
    compiler_aliases = ('gfortran',)
    description = 'GNU Fortran 95 compiler'

    def version_match(self, version_string):
        v = self.gnu_version_match(version_string)
        if not v or v[0] != 'gfortran':
            return None
        v = v[1]
        if v >= '4.':
            # gcc-4 series releases do not support -mno-cygwin option
            pass
        else:
            # use -mno-cygwin flag for gfortran when Python is not
            # Cygwin-Python
            if sys.platform == 'win32':
                for key in ['version_cmd', 'compiler_f77', 'compiler_f90',
                            'compiler_fix', 'linker_so', 'linker_exe']:
                    self.executables[key].append('-mno-cygwin')
        return v

    possible_executables = ['gfortran', 'f95']
    executables = {
        'version_cmd'  : ["<F90>", "-dumpversion"],
        'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form",
                          "-fno-second-underscore"] + _EXTRAFLAGS,
        'compiler_f90' : [None, "-Wall", "-g",
                          "-fno-second-underscore"] + _EXTRAFLAGS,
        'compiler_fix' : [None, "-Wall", "-g", "-ffixed-form",
                          "-fno-second-underscore"] + _EXTRAFLAGS,
        'linker_so'    : ["<F90>", "-Wall", "-g"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"],
        'linker_exe'   : [None, "-Wall"]
    }

    module_dir_switch = '-J'
    module_include_switch = '-I'

    g2c = 'gfortran'

    def _universal_flags(self, cmd):
        """Return a list of -arch flags for every supported architecture."""
        if not sys.platform == 'darwin':
            return []
        arch_flags = []
        # get arches the C compiler gets.
        c_archs = self._c_arch_flags()
        if "i386" in c_archs:
            c_archs[c_archs.index("i386")] = "i686"
        # check the arches the Fortran compiler supports, and compare with
        # arch flags from C compiler
        for arch in ["ppc", "i686", "x86_64", "ppc64"]:
            if _can_target(cmd, arch) and arch in c_archs:
                arch_flags.extend(["-arch", arch])
        return arch_flags

    def get_flags(self):
        flags = GnuFCompiler.get_flags(self)
        arch_flags = self._universal_flags(self.compiler_f90)
        if arch_flags:
            flags[:0] = arch_flags
        return flags

    def get_flags_linker_so(self):
        flags = GnuFCompiler.get_flags_linker_so(self)
        arch_flags = self._universal_flags(self.linker_so)
        if arch_flags:
            flags[:0] = arch_flags
        return flags

    def get_library_dirs(self):
        opt = GnuFCompiler.get_library_dirs(self)
        if sys.platform == 'win32':
            c_compiler = self.c_compiler
            if c_compiler and c_compiler.compiler_type == "msvc":
                target = self.get_target()
                if target:
                    d = os.path.normpath(self.get_libgcc_dir())
                    root = os.path.join(d, *((os.pardir,)*4))
                    path = os.path.join(root, "lib")
                    mingwdir = os.path.normpath(path)
                    if os.path.exists(os.path.join(mingwdir, "libmingwex.a")):
                        opt.append(mingwdir)
        return opt

    def get_libraries(self):
        opt = GnuFCompiler.get_libraries(self)
        if sys.platform == 'darwin':
            opt.remove('cc_dynamic')
        if sys.platform == 'win32':
            c_compiler = self.c_compiler
            if c_compiler and c_compiler.compiler_type == "msvc":
                if "gcc" in opt:
                    i = opt.index("gcc")
                    opt.insert(i+1, "mingwex")
                    opt.insert(i+1, "mingw32")
        # XXX: fix this mess, does not work for mingw
        if is_win64():
            c_compiler = self.c_compiler
            if c_compiler and c_compiler.compiler_type == "msvc":
                return []
            else:
                pass
        return opt

    def get_target(self):
        status, output = exec_command(self.compiler_f77 + ['-v'], use_tee=0)
        if not status:
            m = TARGET_R.search(output)
            if m:
                return m.group(1)
        return ""

    def get_flags_opt(self):
        if is_win64():
            return ['-O0']
        else:
            return GnuFCompiler.get_flags_opt(self)


def _can_target(cmd, arch):
    """Return true if the architecture supports the -arch flag"""
    newcmd = cmd[:]
    fid, filename = tempfile.mkstemp(suffix=".f")
    os.close(fid)
    try:
        d = os.path.dirname(filename)
        output = os.path.splitext(filename)[0] + ".o"
        try:
            newcmd.extend(["-arch", arch, "-c", filename])
            p = Popen(newcmd, stderr=STDOUT, stdout=PIPE, cwd=d)
            p.communicate()
            return p.returncode == 0
        finally:
            if os.path.exists(output):
                os.remove(output)
    finally:
        os.remove(filename)
    return False


if __name__ == '__main__':
    from distutils import log
    log.set_verbosity(2)

    compiler = GnuFCompiler()
    compiler.customize()
    print(compiler.get_version())

    try:
        compiler = Gnu95FCompiler()
        compiler.customize()
        print(compiler.get_version())
    except Exception:
        msg = get_exception()
        print(msg)
<|file_name|>bridge.py<|end_file_name|>

"""Code to handle a Hue bridge."""
import asyncio
from functools import partial

from aiohttp import client_exceptions
import aiohue
import async_timeout
import slugify as unicode_slug
import voluptuous as vol

from homeassistant import core
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv

from .const import DOMAIN, LOGGER
from .errors import AuthenticationRequired, CannotConnect
from .helpers import create_config_flow
from .sensor_base import SensorManager

SERVICE_HUE_SCENE = "hue_activate_scene"
ATTR_GROUP_NAME = "group_name"
ATTR_SCENE_NAME = "scene_name"
SCENE_SCHEMA = vol.Schema(
    {vol.Required(ATTR_GROUP_NAME): cv.string, vol.Required(ATTR_SCENE_NAME): cv.string}
)
# How long should we sleep if the hub is busy
HUB_BUSY_SLEEP = 0.01


class HueBridge:
    """Manages a single Hue bridge."""

    def __init__(self, hass, config_entry, allow_unreachable, allow_groups):
        """Initialize the system."""
        self.config_entry = config_entry
        self.hass = hass
        self.allow_unreachable = allow_unreachable
        self.allow_groups = allow_groups
        self.available = True
        self.authorized = False
        self.api = None
        self.parallel_updates_semaphore = None
        # Jobs to be executed when API is reset.
        self.reset_jobs = []
        self.sensor_manager = None

    @property
    def host(self):
        """Return the host of this bridge."""
        return self.config_entry.data["host"]

    async def async_setup(self, tries=0):
        """Set up a phue bridge based on host parameter."""
        host = self.host
        hass = self.hass

        bridge = aiohue.Bridge(
            host,
            username=self.config_entry.data["username"],
            websession=aiohttp_client.async_get_clientsession(hass),
        )

        try:
            await authenticate_bridge(hass, bridge)
        except AuthenticationRequired:
            # Usernames can become invalid if hub is reset or user removed.
            # We are going to fail the config entry setup and initiate a new
            # linking procedure. When linking succeeds, it will remove the
            # old config entry.
            create_config_flow(hass, host)
            return False
        except CannotConnect:
            LOGGER.error("Error connecting to the Hue bridge at %s", host)
            raise ConfigEntryNotReady
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Unknown error connecting with Hue bridge at %s", host)
            return False

        self.api = bridge
        self.sensor_manager = SensorManager(self)

        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(self.config_entry, "light")
        )
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(
                self.config_entry, "binary_sensor"
            )
        )
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(self.config_entry, "sensor")
        )

        hass.services.async_register(
            DOMAIN, SERVICE_HUE_SCENE, self.hue_activate_scene, schema=SCENE_SCHEMA
        )

        self.parallel_updates_semaphore = asyncio.Semaphore(
            3 if self.api.config.modelid == "BSB001" else 10
        )

        self.authorized = True
        return True

    async def async_request_call(self, task):
        """Limit parallel requests to Hue hub.

        The Hue hub can only handle a certain amount of parallel requests, total.
        Although we limit our parallel requests, we still will run into issues because
        other products are hitting up Hue.

        ClientOSError means hub closed the socket on us.
        ClientResponseError means hub raised an error.
        Since we don't make bad requests, this is on them.
        """
        async with self.parallel_updates_semaphore:
            for tries in range(4):
                try:
                    return await task()
                except (
                    client_exceptions.ClientOSError,
                    client_exceptions.ClientResponseError,
                ) as err:
                    if tries == 3 or (
                        # We only retry if it's a server error. So raise on all 4XX errors.
                        isinstance(err, client_exceptions.ClientResponseError)
                        and err.status < 500
                    ):
                        raise

                    await asyncio.sleep(HUB_BUSY_SLEEP * tries)

    async def async_reset(self):
        """Reset this bridge to default state.

        Will cancel any scheduled setup retry and will unload
        the config entry.
        """
        # The bridge can be in 3 states:
        #  - Setup was successful, self.api is not None
        #  - Authentication was wrong, self.api is None, not retrying setup.

        # If the authentication was wrong.
        if self.api is None:
            return True

        self.hass.services.async_remove(DOMAIN, SERVICE_HUE_SCENE)

        while self.reset_jobs:
            self.reset_jobs.pop()()

        # If setup was successful, we set api variable, forwarded entry and
        # register service
        results = await asyncio.gather(
            self.hass.config_entries.async_forward_entry_unload(
                self.config_entry, "light"
            ),
            self.hass.config_entries.async_forward_entry_unload(
                self.config_entry, "binary_sensor"
            ),
            self.hass.config_entries.async_forward_entry_unload(
                self.config_entry, "sensor"
            ),
        )
        # None and True are OK
        return False not in results

    async def hue_activate_scene(self, call, updated=False):
        """Service to call directly into bridge to set scenes."""
        group_name = call.data[ATTR_GROUP_NAME]
        scene_name = call.data[ATTR_SCENE_NAME]

        group = next(
            (group for group in self.api.groups.values() if group.name == group_name),
            None,
        )

        # Additional scene logic to handle duplicate scene names across groups
        scene = next(
            (
                scene
                for scene in self.api.scenes.values()
                if scene.name == scene_name
                and group is not None
                and sorted(scene.lights) == sorted(group.lights)
            ),
            None,
        )

        # If we can't find it, fetch latest info.
        if not updated and (group is None or scene is None):
            await self.async_request_call(self.api.groups.update)
            await self.async_request_call(self.api.scenes.update)
            await self.hue_activate_scene(call, updated=True)
            return

        if group is None:
            LOGGER.warning("Unable to find group %s", group_name)
            return

        if scene is None:
            LOGGER.warning("Unable to find scene %s", scene_name)
            return

        await self.async_request_call(partial(group.set_action, scene=scene.id))

    async def handle_unauthorized_error(self):
        """Create a new config flow when the authorization is no longer valid."""
        if not self.authorized:
            # we already created a new config flow, no need to do it again
            return
        LOGGER.error(
            "Unable to authorize to bridge %s, set up the linking again.", self.host
        )
        self.authorized = False
        create_config_flow(self.hass, self.host)


async def authenticate_bridge(hass: core.HomeAssistant, bridge: aiohue.Bridge):
    """Create a bridge object and verify authentication."""
    try:
        with async_timeout.timeout(10):
            # Create username if we don't have one
            if not bridge.username:
                device_name = unicode_slug.slugify(
                    hass.config.location_name, max_length=19
                )
                await bridge.create_user(f"home-assistant#{device_name}")

            # Initialize bridge (and validate our username)
            await bridge.initialize()

    except (aiohue.LinkButtonNotPressed, aiohue.Unauthorized):
        raise AuthenticationRequired
    except (asyncio.TimeoutError, client_exceptions.ClientOSError):
        raise CannotConnect
    except aiohue.AiohueException:
        LOGGER.exception("Unknown Hue linking error occurred")
        raise AuthenticationRequired
<|file_name|>__init__.py<|end_file_name|>

from .Commerce import Commerce
from .Transaction import Transaction
<|file_name|>0018_auto_20150310_1533.py<|end_file_name|>

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('booking', '0017_auto_20150309_1910'),
    ]

    operations = [
        migrations.AlterField(
            model_name='booking',
            name='date',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='booking',
            name='status',
            field=models.CharField(default=b'pending', max_length=30, db_index=True, choices=[(b'pending', '\u041e\u0436\u0438\u0434\u0430\u0435\u0442 \u0438\u0441\u043f\u043e\u043b\u043d\u0438\u0442\u0435\u043b\u044f'), (b'waiting_for_approval', '\u041e\u0436\u0438\u0434\u0430\u0435\u0442 \u043f\u043e\u0434\u0442\u0432\u0435\u0440\u0436\u0434\u0435\u043d\u0438\u044f \u0437\u0430\u043a\u0430\u0437\u0447\u0438\u043a\u043e\u043c'), (b'running', '\u0412\u0437\u044f\u0442 \u043d\u0430 \u0438\u0441\u043f\u043e\u043b\u043d\u0435\u043d\u0438\u0435'), (b'completed', '\u0417\u0430\u0432\u0435\u0440\u0448\u0435\u043d')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='comment',
            name='date',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
            preserve_default=True,
        ),
    ]
<|file_name|>engine.ts<|end_file_name|>

// Uniform random number generators
// ================================

export interface IRandomEngine {

    /**
     * Sets the seed of the RNG engine.
     * @param {number} x
     * @returns
     */
    setSeed(x: number): IRandomEngine;

    /**
     * Retrieves the seed of the RNG engine.
     */
    getSeed(): number;

    /**
     * Retrieves an unsigned 32-bit integer.
     * @returns
     */
    nextUint32(): number;

    /**
     * Retrieves a double within (0,1) with 53-bit precision.
     * @returns
     */
    nextDouble(): number;

}

const mulUint32: (x: number, y: number) => number =
    (<any>Math).imul instanceof Function
        ? (x, y) => (<any>Math).imul(x, y) >>> 0
        : (x, y) => {
            let ah = (x >>> 16) & 0xffff,
                al = x & 0xffff,
                bh = (y >>> 16) & 0xffff,
                bl = y & 0xffff;
            let high = (ah * bl + al * bh) & 0xffff;
            return (((high << 16) >>> 0) + (al * bl)) >>> 0;
        };

/**
 * MT19937 random number generator.
 * Adapted from http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/MT2002/CODES/mt19937ar.c
 * Original license attached immediately after this class.
 */
export class MT19937Engine implements IRandomEngine {

    private _mt = new Array<number>(624);
    private _mti = 625;
    private _seed;

    public setSeed(x: number): IRandomEngine {
        this._seed = x >>> 0;
        this._mt[0] = this._seed;
        for (this._mti = 1; this._mti < 624; this._mti++) {
            let d = this._mt[this._mti - 1] ^ (this._mt[this._mti - 1] >>> 30);
            this._mt[this._mti] = (mulUint32(d, 1812433253) + this._mti) >>> 0;
        }
        return this;
    }

    public getSeed(): number {
        return this._seed;
    }

    public nextUint32(): number {
        let mag01 = [0, 0x9908b0df];
        let y: number;
        if (this._mti >= 624) {
            if (this._mti === 625) {
                // not initialized
                this.setSeed(5489);
            }
            let kk = 0;
            for (; kk < 624 - 397; kk++) {
                y = (this._mt[kk] & 0x80000000) | (this._mt[kk + 1] & 0x7fffffff);
                this._mt[kk] = this._mt[kk + 397] ^ (y >>> 1) ^ mag01[y & 0x01];
            }
            for (; kk < 623; kk++) {
                y = (this._mt[kk] & 0x80000000) | (this._mt[kk + 1] & 0x7fffffff);
                this._mt[kk] = this._mt[kk + 397 - 624] ^ (y >>> 1) ^ mag01[y & 0x01];
            }
            y = (this._mt[623] & 0x80000000) | (this._mt[0] & 0x7fffffff);
            this._mt[623] = this._mt[396] ^ (y >>> 1) ^ mag01[y & 0x01];
            this._mti = 0;
        }
        y = this._mt[this._mti++];
        // tempering
        y ^= (y >>> 11);
        y ^= (y << 7) & 0x9d2c5680;
        y ^= (y << 15) & 0xefc60000;
        y ^= (y >>> 18);
        return y >>> 0;
    }

    public nextDouble(): number {
        let a: number, b: number;
        do {
            a = this.nextUint32() >>> 5;
            b = this.nextUint32() >>> 6;
        } while (a === 0 && b === 0);
        // 2^26 = 67108864, 2^53 = 9007199254740992
        return (a * 67108864.0 + b) * (1.0 / 9007199254740992);
    }

}
/*
   Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,
   All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

     1. Redistributions of source code must retain the above copyright
        notice, this list of conditions and the following disclaimer.

     2. Redistributions in binary form must reproduce the above copyright
        notice, this list of conditions and the following disclaimer in the
        documentation and/or other materials provided with the distribution.

     3. The names of its contributors may not be used to endorse or promote
        products derived from this software without specific prior written
        permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
   A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   Any feedback is very welcome.
   http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html
   email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)
*/

/**
 * Wrapper for Math.random().
 */
export class NativeEngine implements IRandomEngine {

    public setSeed(_x: number): IRandomEngine {
        // we cannot specify the seed for Math.random().
        throw new Error('Seeding is not supported with the native random engine.');
    }

    public getSeed(): number {
        throw new Error('Seeding is not supported with the native random engine.');
    }

    public nextDouble(): number {
        let x: number;
        do {
            x = Math.random();
        } while (x === 0);
        return x;
    }

    public nextUint32(): number {
        return Math.floor(this.nextDouble() * 4294967296);
    }

}
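`nextDouble` above stitches a 53-bit mantissa out of two 32-bit draws: the top 27 bits of one word and the top 26 bits of another, combined as (a * 2^26 + b) / 2^53. The same arithmetic checked in a small Python sketch with arbitrary example words:

a = 0x9E3779B9 >> 5          # keep the top 27 bits of the first draw
b = 0x7F4A7C15 >> 6          # keep the top 26 bits of the second draw
x = (a * 67108864.0 + b) * (1.0 / 9007199254740992.0)   # 2**26 and 2**53
assert 0.0 <= x < 1.0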
<|file_name|>selectors.spec.ts<|end_file_name|>

import { TestBed } from '@angular/core/testing';
import { Store, StoreModule } from '@ngrx/store';
import { TRACKS_PER_PAGE } from 'app/app-config';
import { testUtils } from 'app/utils/test';
import { TracklistRecord } from '../models';
import { TracklistActions } from '../tracklist-actions';
import { getCurrentTracklist, getTracklists, getTracks, getTracksForCurrentTracklist } from './selectors';
import { initialState, tracklistsReducer } from './tracklists-reducer';
import { tracksReducer } from './tracks-reducer';


describe('tracklists', () => {
  describe('selectors', () => {
    let actions: TracklistActions;
    let store: Store<any>;

    beforeEach(() => {
      let injector = TestBed.configureTestingModule({
        imports: [
          StoreModule.provideStore(
            {
              tracklists: tracklistsReducer,
              tracks: tracksReducer
            },
            {
              tracklists: initialState
                .set('tracklist/1', new TracklistRecord({id: 'tracklist/1'}))
                .set('tracklist/2', new TracklistRecord({id: 'tracklist/2'}))
            }
          )
        ]
      });

      actions = new TracklistActions();
      store = injector.get(Store);
    });


    describe('getCurrentTracklist()', () => {
      it('should return observable that emits current tracklist', () => {
        let count = 0;
        let track = testUtils.createTrack();
        let tracklist = null;

        store
          .let(getCurrentTracklist())
          .subscribe(value => {
            count++;
            tracklist = value;
          });

        // mount tracklist, making it current tracklist
        store.dispatch(actions.mountTracklist('tracklist/1'));
        expect(count).toBe(1);
        expect(tracklist.id).toBe('tracklist/1');

        // load track into tracklist
        store.dispatch(actions.fetchTracksFulfilled({collection: [track]}, 'tracklist/1'));
        expect(count).toBe(2);

        // loading track that already exists in tracklist should not emit
        store.dispatch(actions.fetchTracksFulfilled({collection: [track]}, 'tracklist/1'));
        expect(count).toBe(2);

        // dispatching unrelated action should not emit
        store.dispatch({type: 'UNDEFINED'});
        expect(count).toBe(2);
      });
    });


    describe('getTracks()', () => {
      it('should return observable that emits TracksState', () => {
        let count = 0;
        let tracks = null;

        store
          .let(getTracks())
          .subscribe(value => {
            count++;
            tracks = value;
          });

        // auto-emitting initial value
        expect(count).toBe(1);
        expect(tracks.size).toBe(0);

        // fetch tracks returning empty collection should not emit
        store.dispatch(actions.fetchTracksFulfilled({collection: []}, 'tracklist/1'));
        expect(count).toBe(1);

        // load track
        store.dispatch(actions.fetchTracksFulfilled({collection: [testUtils.createTrack()]}, 'tracklist/1'));
        expect(count).toBe(2);
        expect(tracks.size).toBe(1);

        // dispatching unrelated action should not emit
        store.dispatch({type: 'UNDEFINED'});
        expect(count).toBe(2);
      });
    });


    describe('getTracksForCurrentTracklist()', () => {
      it('should return observable that emits paginated tracks for current tracklist', () => {
        let count = 0;
        let trackData = testUtils.createTracks(TRACKS_PER_PAGE * 2);
        let tracks = null;

        store
          .let(getTracksForCurrentTracklist())
          .subscribe(value => {
            count++;
            tracks = value;
          });

        // mount tracklist, making it current tracklist
        store.dispatch(actions.mountTracklist('tracklist/1'));
        expect(count).toBe(1);
        expect(tracks.size).toBe(0);

        // load two pages of tracks into tracklist; should emit first page of tracks
        store.dispatch(actions.fetchTracksFulfilled({collection: trackData}, 'tracklist/1'));
        expect(count).toBe(2);
        expect(tracks.size).toBe(TRACKS_PER_PAGE);

        // go to page two; should emit first and second page of tracks
        store.dispatch(actions.loadNextTracks());
        expect(count).toBe(3);
        expect(tracks.size).toBe(TRACKS_PER_PAGE * 2);

        // dispatching unrelated action should not emit
        store.dispatch({type: 'UNDEFINED'});
        expect(count).toBe(3);
      });
    });


    describe('getTracklists()', () => {
      it('should return observable that emits TracklistsState', () => {
        let count = 0;
        let track = testUtils.createTrack();
        let tracklists = null;

        store
          .let(getTracklists())
          .subscribe(value => {
            count++;
            tracklists = value;
          });

        // auto-emitting initial value
        expect(count).toBe(1);
        expect(tracklists.get('currentTracklistId')).toBe(null);

        // mount tracklist, making it current tracklist
        store.dispatch(actions.mountTracklist('tracklist/1'));
        expect(count).toBe(2);
        expect(tracklists.get('currentTracklistId')).toBe('tracklist/1');

        // load track
        store.dispatch(actions.fetchTracksFulfilled({collection: [track]}, 'tracklist/1'));
        expect(count).toBe(3);

        // loading same track should not emit
        store.dispatch(actions.fetchTracksFulfilled({collection: [track]}, 'tracklist/1'));
        expect(count).toBe(3);

        // dispatching unrelated action should not emit
        store.dispatch({type: 'UNDEFINED'});
        expect(count).toBe(3);
      });
    });
  });
});
'sector_consumption_residential', 'sector_consumption_retail', 'sector_consumption_industrial', 'sector_consumption_agricultural'] load_areas = get_load_areas_table(schema, table, la_index_col, conn, columns=columns) write_ego_scenario_log(conn=conn, version='v0.4.5', io='input', schema='model_draft', table=table, script='ego_dp_loadarea_peakload.py', entries=len(load_areas)) names_dc = {'sector_consumption_residential': 'h0', 'sector_consumption_retail': 'g0', 'sector_consumption_agricultural': 'l0', 'sector_consumption_industrial': 'i0',} names_dc2 = {'h0': 'residential', 'g0': 'retail', 'l0': 'agricultural', 'i0': 'industrial'} # rename columns to demandlib compatible names load_areas.rename(columns=names_dc, inplace=True) # # delete old content from table # del_str = "DROP TABLE IF EXISTS {0}.{1} CASCADE;".format( # schema, target_table) # conn.execute(del_str) # empty table or create try: orm_peak_load.__table__.create(conn) except: session.query(orm_peak_load).delete() session.commit() # Use above function `add_sectoral_peak_load` via apply # elec_demand = load_areas.fillna(0).apply( # add_sectoral_peak_load, axis=1, args=()) # read standard load profiles e_slp = bdew.ElecSlp(year, holidays=holidays) # Add the slp for the industrial group ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index, holidays=holidays) # counter ctr = 0 # iterate over substation retrieving sectoral demand at each of it for it, row in load_areas.iterrows(): row = row.fillna(0) # multiply given annual demand with timeseries elec_demand = e_slp.get_profile(row.to_dict()) # Beginning and end of workday, weekdays and weekend days, and scaling factors # by default elec_demand['i0'] = ilp.simple_profile( row['i0'], am=settime(6, 0, 0), pm=settime(22, 0, 0), profile_factors= {'week': {'day': 0.8, 'night': 0.6}, 'weekend': {'day': 0.6, 'night': 0.6}}) # Resample 15-minute values to hourly values and sum across sectors elec_demand = elec_demand.resample('H').mean().fillna(0).max().to_frame().T#.max(axis=0)#.to_frame().unstack()#.\ # to_frame(name='peak_load') elec_demand['id'] = it elec_demand.set_index('id', inplace=True) # rename columns elec_demand.rename(columns=names_dc2, inplace=True) # Add data to orm object peak_load = orm_peak_load( id=it, retail=float(elec_demand['retail']), residential=float(elec_demand['residential']), industrial=float(elec_demand['industrial']), agricultural=float(elec_demand['agricultural'])) session.add(peak_load) # # write results to new database table # elec_demand.to_sql(target_table, # conn, # schema=schema, # index=True, # if_exists='fail') ctr += 1 # commit data to database every 1000 datasets: This is done since pushing every # single dataset slows down entire script, single commiting in the end sometimes # leads to conn. timeout. 
if (ctr % 1000) == 0: session.commit() # commit remaining datasets that were not committed in loop above session.commit() # grant access to db_group tools.grant_db_access(conn, schema, target_table, db_group) # change owner of table to db_group tools.change_owner_to(conn, schema, target_table, db_group) # # add primary key constraint on id column # tools.add_primary_key(conn, schema, target_table, la_index_col) # create metadata json str json_str = metadata.create_metadata_json( 'Peak load per load area', '', '2011', time.strftime("%d.%m.%Y"), 'Open Energy Database, schema: {0}, table: {1}'.format(schema, target_table), 'Germany', 'Sectoral peak of single load areas based on synthetic standard load ' + 'profiles.', [{'Name': 'id', 'Description': 'Unique identifier', 'Unit': '-'}, {'Name': 'g0', 'Description': 'Peak demand of retail sector', 'Unit': 'GW'}, {'Name': 'h0', 'Description': 'Peak demand of household sector', 'Unit': 'GW'}, {'Name': 'l0', 'Description': 'Peak demand of agricultural sector', 'Unit': 'GW'}, {'Name': 'i0', 'Description': 'Peak demand of industrial sector', 'Unit': 'GW'} ], {'Name': 'Guido Pleßmann', 'Mail': '[email protected]', 'Date': time.strftime("%d.%m.%Y"), 'Comment': 'Initial creation of dataset'}, 'Be aware of applicability. Data bases on synthetic load profiles', '', '' ) metadata.submit_comment(conn, json_str, schema, target_table) write_ego_scenario_log(conn=conn, version='v0.4.5', io='output', schema='model_draft', table=target_table, script='ego_dp_loadarea_peakload.py', entries=len(load_areas)) conn.close()<|fim▁end|>
#     tmp_peak_load = dm.electrical_demand(method='calculate_profile',
#     year=year,
#     ann_el_demand_per_sector= {
#     'h0':
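Stripped of the database plumbing, the per-load-area peak computation above is: scale a normalized standard load profile by annual consumption, resample the quarter-hour series to hourly means, and take the maximum. A self-contained sketch of just that reduction; the synthetic sine profile stands in for demandlib's BDEW profiles, and all numbers are illustrative assumptions:

import numpy as np
import pandas as pd

# Normalized stand-in for a 15-minute standard load profile (sums to 1).
index = pd.date_range('2011-01-01', periods=4 * 24 * 365, freq='15min')
profile = pd.Series(1.0 + 0.5 * np.sin(np.arange(len(index)) * 2 * np.pi / 96),
                    index=index)
profile /= profile.sum()

annual_consumption = 5000.0           # kWh per year, illustrative
demand = profile * annual_consumption # energy per 15-minute slot

# Same reduction as in the script: hourly means, then the maximum.
peak_load = demand.resample('H').mean().max()
print(peak_load)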
<|file_name|>context_rcnn_lib.py<|end_file_name|><|fim▁begin|># Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library functions for ContextRCNN."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow.compat.v1 as tf
import tf_slim as slim

# The negative value used in padding the invalid weights.
_NEGATIVE_PADDING_VALUE = -100000


def filter_weight_value(weights, values, valid_mask):
  """Filters weights and values based on valid_mask.

  _NEGATIVE_PADDING_VALUE will be added to invalid elements in the weights to
  avoid their contribution in softmax. 0 will be set for the invalid elements
  in the values.

  Args:
    weights: A float Tensor of shape [batch_size, input_size, context_size].
    values: A float Tensor of shape [batch_size, context_size,
      projected_dimension].
    valid_mask: A boolean Tensor of shape [batch_size, context_size]. True
      means valid and False means invalid.

  Returns:
    weights: A float Tensor of shape [batch_size, input_size, context_size].
    values: A float Tensor of shape [batch_size, context_size,
      projected_dimension].

  Raises:
    ValueError: If the shapes don't match.
  """
  w_batch_size, _, w_context_size = weights.shape
  v_batch_size, v_context_size, _ = values.shape
  m_batch_size, m_context_size = valid_mask.shape
  if w_batch_size != v_batch_size or v_batch_size != m_batch_size:
    raise ValueError("Please make sure the first dimension of the input"
                     " tensors are the same.")

  if w_context_size != v_context_size:
    raise ValueError("Please make sure the third dimension of weights matches"
                     " the second dimension of values.")

  if w_context_size != m_context_size:
    raise ValueError("Please make sure the third dimension of the weights"
                     " matches the second dimension of the valid_mask.")

  valid_mask = valid_mask[..., tf.newaxis]

  # Force the invalid weights to be very negative so it won't contribute to
  # the softmax.
  weights += tf.transpose(
      tf.cast(tf.math.logical_not(valid_mask), weights.dtype) *
      _NEGATIVE_PADDING_VALUE,
      perm=[0, 2, 1])

  # Force the invalid values to be 0.
  values *= tf.cast(valid_mask, values.dtype)

  return weights, values


def compute_valid_mask(num_valid_elements, num_elements):
  """Computes mask of valid entries within padded context feature.

<|fim▁hole|>
    num_elements: An int32 Tensor.

  Returns:
    A boolean Tensor of the shape [batch_size, num_elements]. True means
      valid and False means invalid.
  """
  batch_size = num_valid_elements.shape[0]
  element_idxs = tf.range(num_elements, dtype=tf.int32)
  batch_element_idxs = tf.tile(element_idxs[tf.newaxis, ...], [batch_size, 1])
  num_valid_elements = num_valid_elements[..., tf.newaxis]
  valid_mask = tf.less(batch_element_idxs, num_valid_elements)
  return valid_mask


def project_features(features, projection_dimension, is_training, normalize):
  """Projects features to another feature space.

  Args:
    features: A float Tensor of shape [batch_size, features_size,
      num_features].
    projection_dimension: An int32 Tensor.
    is_training: A boolean Tensor (affecting batch normalization).
    normalize: A boolean Tensor. If true, the output features will be l2
      normalized on the last dimension.

  Returns:
    A float Tensor of shape [batch, features_size, projection_dimension].
  """
  # TODO(guanhangwu) Figure out a better way of specifying the batch norm
  # params.
  batch_norm_params = {
      "is_training": is_training,
      "decay": 0.97,
      "epsilon": 0.001,
      "center": True,
      "scale": True
  }

  batch_size, _, num_features = features.shape
  features = tf.reshape(features, [-1, num_features])
  projected_features = slim.fully_connected(
      features,
      num_outputs=projection_dimension,
      activation_fn=tf.nn.relu6,
      normalizer_fn=slim.batch_norm,
      normalizer_params=batch_norm_params)

  projected_features = tf.reshape(projected_features,
                                  [batch_size, -1, projection_dimension])

  if normalize:
    projected_features = tf.math.l2_normalize(projected_features, axis=-1)

  return projected_features


def attention_block(input_features, context_features, bottleneck_dimension,
                    output_dimension, attention_temperature, valid_mask,
                    is_training):
  """Generic attention block.

  Args:
    input_features: A float Tensor of shape [batch_size, input_size,
      num_input_features].
    context_features: A float Tensor of shape [batch_size, context_size,
      num_context_features].
    bottleneck_dimension: An int32 Tensor representing the bottleneck dimension
      for intermediate projections.
    output_dimension: An int32 Tensor representing the last dimension of the
      output feature.
    attention_temperature: A float Tensor. It controls the temperature of the
      softmax for weights calculation. The formula for the calculation is as
      follows:
        weights = exp(weights / temperature) / sum(exp(weights / temperature))
    valid_mask: A boolean Tensor of shape [batch_size, context_size].
    is_training: A boolean Tensor (affecting batch normalization).

  Returns:
    A float Tensor of shape [batch_size, input_size, output_dimension].
  """
  with tf.variable_scope("AttentionBlock"):
    queries = project_features(
        input_features, bottleneck_dimension, is_training, normalize=True)
    keys = project_features(
        context_features, bottleneck_dimension, is_training, normalize=True)
    values = project_features(
        context_features, bottleneck_dimension, is_training, normalize=True)

    weights = tf.matmul(queries, keys, transpose_b=True)

    weights, values = filter_weight_value(weights, values, valid_mask)

    weights = tf.nn.softmax(weights / attention_temperature)

    features = tf.matmul(weights, values)
    output_features = project_features(
        features, output_dimension, is_training, normalize=False)
  return output_features


def compute_box_context_attention(box_features, context_features,
                                  valid_context_size, bottleneck_dimension,
                                  attention_temperature, is_training):
  """Computes the attention feature from the context given a batch of box.

  Args:
    box_features: A float Tensor of shape [batch_size, max_num_proposals,
      height, width, channels]. It is pooled features from first stage
      proposals.
    context_features: A float Tensor of shape [batch_size, context_size,
      num_context_features].
    valid_context_size: An int32 Tensor of shape [batch_size].
    bottleneck_dimension: An int32 Tensor representing the bottleneck dimension
      for intermediate projections.
    attention_temperature: A float Tensor. It controls the temperature of the
      softmax for weights calculation. The formula for the calculation is as
      follows:
        weights = exp(weights / temperature) / sum(exp(weights / temperature))
    is_training: A boolean Tensor (affecting batch normalization).

  Returns:
    A float Tensor of shape [batch_size, max_num_proposals, 1, 1, channels].
  """
  _, context_size, _ = context_features.shape
  valid_mask = compute_valid_mask(valid_context_size, context_size)

  channels = box_features.shape[-1]
  # Average pools over height and width dimension so that the shape of
  # box_features becomes [batch_size, max_num_proposals, channels].
  box_features = tf.reduce_mean(box_features, [2, 3])

  output_features = attention_block(box_features, context_features,
                                    bottleneck_dimension, channels.value,
                                    attention_temperature, valid_mask,
                                    is_training)

  # Expands the dimension back to match with the original feature map.
  output_features = output_features[:, :, tf.newaxis, tf.newaxis, :]

  return output_features<|fim▁end|>
  Args:
    num_valid_elements: An int32 Tensor of shape [batch_size].
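The mask-and-pad trick used by compute_valid_mask and filter_weight_value is easiest to verify with concrete shapes. A NumPy re-implementation of the same idea (NumPy stands in for TensorFlow here; the padding constant mirrors the module's _NEGATIVE_PADDING_VALUE, and the toy shapes are made up):

import numpy as np

_NEGATIVE_PADDING_VALUE = -100000.0

def compute_valid_mask(num_valid_elements, num_elements):
    # True where the element index is below the per-example valid count.
    idxs = np.arange(num_elements)[None, :]
    return idxs < np.asarray(num_valid_elements)[:, None]

mask = compute_valid_mask([2, 3], 4)       # shape [batch=2, context=4]
weights = np.zeros((2, 1, 4))              # [batch, input_size, context_size]
values = np.ones((2, 4, 5))                # [batch, context_size, proj_dim]

# Invalid weights get a huge negative bias so softmax ignores them;
# invalid values are zeroed out, exactly as in filter_weight_value.
weights = weights + np.where(mask, 0.0, _NEGATIVE_PADDING_VALUE)[:, None, :]
values = values * mask[:, :, None]
print(weights[0, 0])    # [0. 0. -100000. -100000.]
print(values[0, :, 0])  # [1. 1. 0. 0.]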
<|file_name|>DummyJdbcFactory.java<|end_file_name|><|fim▁begin|>package org.geomajas.layer.geotools;

import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.Map;

import org.geotools.data.AbstractDataStoreFactory;
import org.geotools.data.DataStore;
import org.geotools.data.store.ContentDataStore;
import org.geotools.data.store.ContentEntry;
import org.geotools.data.store.ContentFeatureSource;
import org.opengis.feature.type.Name;

public class DummyJdbcFactory extends AbstractDataStoreFactory implements org.geotools.data.DataStoreFactorySpi {

	public class DummyJdbcDataStore extends ContentDataStore {

		protected List<Name> createTypeNames() throws IOException {
			return null;
		}

		protected ContentFeatureSource createFeatureSource(ContentEntry entry) throws IOException {
			return null;<|fim▁hole|>
	}

	public String getDescription() {
		return "DummyJdbcFactory";
	}

	public Param[] getParametersInfo() {
		return new Param[] { new Param("testScope", Boolean.class, "Set to true for unit testing", true) };
	}

	public boolean canProcess(Map params) {
		if (!super.canProcess(params)) {
			return false; // was not in agreement with getParametersInfo
		}
		if (!(((String) params.get("testScope")).equalsIgnoreCase("true"))) {
			return (false);
		} else {
			return (true);
		}
	}

	public DataStore createDataStore(Map<String, Serializable> params) throws IOException {
		return new DummyJdbcDataStore();
	}

	public DataStore createNewDataStore(Map<String, Serializable> params) throws IOException {
		return new DummyJdbcDataStore();
	}
}<|fim▁end|>
}
<|file_name|>ImageBehaviour.java<|end_file_name|><|fim▁begin|>package io.picopalette.apps.event_me.Utils;

import android.annotation.SuppressLint;
import android.content.Context;
import android.support.design.widget.CoordinatorLayout;
import android.support.v7.widget.Toolbar;
import android.util.AttributeSet;
import android.view.View;

import com.facebook.drawee.view.SimpleDraweeView;

import io.picopalette.apps.event_me.R;

/**
 * Created by Aswin Sundar on 14-06-2017.
 */

public class ImageBehaviour extends CoordinatorLayout.Behavior<SimpleDraweeView> {
    private final static float MIN_AVATAR_PERCENTAGE_SIZE = 0.3f;
    private final static int EXTRA_FINAL_AVATAR_PADDING = 80;
    private final static String TAG = "behavior";
    private final Context mContext;
    private float mAvatarMaxSize;
    private float mFinalLeftAvatarPadding;
    private float mStartPosition;
    private int mStartXPosition;
    private float mStartToolbarPosition;

    public ImageBehaviour(Context context, AttributeSet attrs) {
        mContext = context;
        init();
        mFinalLeftAvatarPadding = context.getResources().getDimension(R.dimen.activity_horizontal_margin);
    }

    private void init() {
        bindDimensions();
    }

    private void bindDimensions() {
        mAvatarMaxSize = mContext.getResources().getDimension(R.dimen.image_width);
    }

    private int mStartYPosition;
    private int mFinalYPosition;
    private int finalHeight;
    private int mStartHeight;
    private int mFinalXPosition;

    @Override
    public boolean layoutDependsOn(CoordinatorLayout parent, SimpleDraweeView child, View dependency) {
        return dependency instanceof Toolbar;
    }

    @Override
    public boolean onDependentViewChanged(CoordinatorLayout parent, SimpleDraweeView child, View dependency) {
        maybeInitProperties(child, dependency);

        final int maxScrollDistance = (int) (mStartToolbarPosition - getStatusBarHeight());
        float expandedPercentageFactor = dependency.getY() / maxScrollDistance;<|fim▁hole|>
        float distanceXToSubtract = ((mStartXPosition - mFinalXPosition)
                * (1f - expandedPercentageFactor)) + (child.getWidth() / 2);
        float heightToSubtract = ((mStartHeight - finalHeight) * (1f - expandedPercentageFactor));

        child.setY(mStartYPosition - distanceYToSubtract);
        child.setX(mStartXPosition - distanceXToSubtract);

        int proportionalAvatarSize = (int) (mAvatarMaxSize * (expandedPercentageFactor));

        CoordinatorLayout.LayoutParams lp = (CoordinatorLayout.LayoutParams) child.getLayoutParams();
        lp.width = (int) (mStartHeight - heightToSubtract);
        lp.height = (int) (mStartHeight - heightToSubtract);
        child.setLayoutParams(lp);
        return true;
    }

    @SuppressLint("PrivateResource")
    private void maybeInitProperties(SimpleDraweeView child, View dependency) {
        if (mStartYPosition == 0)
            mStartYPosition = (int) (dependency.getY());

        if (mFinalYPosition == 0)
            mFinalYPosition = (dependency.getHeight() / 2);

        if (mStartHeight == 0)
            mStartHeight = child.getHeight();

        if (finalHeight == 0)
            finalHeight = mContext.getResources().getDimensionPixelOffset(R.dimen.image_small_width);

        if (mStartXPosition == 0)
            mStartXPosition = (int) (child.getX() + (child.getWidth() / 2));

        if (mFinalXPosition == 0)
            mFinalXPosition = mContext.getResources().getDimensionPixelOffset(R.dimen.abc_action_bar_content_inset_material) + (finalHeight / 2);

        if (mStartToolbarPosition == 0)
            mStartToolbarPosition = dependency.getY() + (dependency.getHeight() / 2);
    }

    public int getStatusBarHeight() {
        int result = 0;
        int resourceId = mContext.getResources().getIdentifier("status_bar_height", "dimen", "android");
        if (resourceId > 0) {
            result = mContext.getResources().getDimensionPixelSize(resourceId);
        }
        return result;
    }
}<|fim▁end|>
float distanceYToSubtract = ((mStartYPosition - mFinalYPosition) * (1f - expandedPercentageFactor)) + (child.getHeight()/2);
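The distance*ToSubtract expressions above are a linear interpolation in disguise: with factor 1.0 fully expanded and 0.0 fully collapsed, start - (start - final) * (1 - factor) equals final + (start - final) * factor (the extra child.getHeight()/2 term in the Java merely re-centers the pivot). A quick numeric check of that equivalence; the pixel values are made up:

def behaviour_form(start, final, factor):
    # Form used in onDependentViewChanged: subtract a scaled distance.
    distance_to_subtract = (start - final) * (1.0 - factor)
    return start - distance_to_subtract

def lerp(start, final, factor):
    # Closed form: plain linear interpolation between final and start.
    return final + (start - final) * factor

for factor in (1.0, 0.5, 0.0):
    assert behaviour_form(400, 40, factor) == lerp(400, 40, factor)
    print(factor, behaviour_form(400, 40, factor))  # 400.0, 220.0, 40.0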
<|file_name|>crateresolve_calories-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
pub fn f() -> isize { 100 }<|fim▁end|>
#![crate_name="crateresolve_calories#0.1"]
#![crate_type = "lib"]
<|file_name|>decodeWebPage.py<|end_file_name|><|fim▁begin|>import requests, os
from bs4 import BeautifulSoup

url = 'http://www.nytimes.com'

def extractArticles (url):
    data = requests.get(url)
    soup = BeautifulSoup(data.text, 'html.parser')

    articles = []
    for article in soup.find_all('article'):
        if article.find('h2') != None and article.find('h2').find('a') != None:
            heading = article.find('h2').find('a').get_text().strip()
            if heading != "":
                articles.append(heading)

    articles = sorted(list(set(articles)))

    f = open('./articles/headlines2.txt', 'w')
    for heading in articles:
        f.write(heading)
        f.write('\n')
    f.close()<|fim▁hole|>extractArticles(url)<|fim▁end|>
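The scraping loop above guards against articles without an h2 or link before collecting headlines, then deduplicates and sorts them. The same logic can be exercised offline against an inline HTML snippet (hypothetical markup, no network access needed):

from bs4 import BeautifulSoup

html = """
<article><h2><a> First headline </a></h2></article>
<article><h2></h2></article>
<article><h2><a>First headline</a></h2></article>
"""

soup = BeautifulSoup(html, 'html.parser')
articles = []
for article in soup.find_all('article'):
    if article.find('h2') is not None and article.find('h2').find('a') is not None:
        heading = article.find('h2').find('a').get_text().strip()
        if heading != "":
            articles.append(heading)

print(sorted(set(articles)))  # ['First headline'] -- deduplicated and sorted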
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

/*!

typeck.rs, an introduction

The type checker is responsible for:

1. Determining the type of each expression
2. Resolving methods and traits
3. Guaranteeing that most type rules are met ("most?", you say, "why most?"
   Well, dear reader, read on)

The main entry point is `check_crate()`. Type checking operates in
several major phases:

1. The collect phase first passes over all items and determines their
   type, without examining their "innards".

2. Variance inference then runs to compute the variance of each parameter

3. Coherence checks for overlapping or orphaned impls

4. Finally, the check phase then checks function bodies and so forth.
   Within the check phase, we check each function body one at a time
   (bodies of function expressions are checked as part of the
   containing function). Inference is used to supply types wherever
   they are unknown. The actual checking of a function itself has
   several phases (check, regionck, writeback), as discussed in the
   documentation for the `check` module.

The type checker is defined into various submodules which are documented
independently:

- astconv: converts the AST representation of types
  into the `ty` representation

- collect: computes the types of each top-level item and enters them into
  the `cx.tcache` table for later use

- coherence: enforces coherence rules, builds some tables

- variance: variance inference

- check: walks over function bodies and type checks them, inferring types for
  local variables, type parameters, etc as necessary.

- infer: finds the types to use for each type variable such that
  all subtyping and assignment constraints are met. In essence, the check
  module specifies the constraints, and the infer module solves them.

# Note

This API is completely unstable and subject to change.

*/

// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustc_typeck"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/")]
#![allow(non_camel_case_types)]

#![feature(append)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(drain)]
#![feature(iter_cmp)]
#![feature(iter_arith)]
#![feature(quote)]
#![feature(ref_slice)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(slice_splits)]
#![feature(staged_api)]
#![feature(vec_push_all)]
#![feature(cell_extras)]

#[macro_use] extern crate log;
#[macro_use] extern crate syntax;

extern crate arena;
extern crate fmt_macros;
extern crate rustc;
extern crate rustc_platform_intrinsics as intrinsics;

pub use rustc::lint;
pub use rustc::metadata;
pub use rustc::middle;
pub use rustc::session;
pub use rustc::util;

use middle::def;
use middle::infer;
use middle::subst;
use middle::ty::{self, Ty, HasTypeFlags};
use rustc::ast_map;
use session::config;
use util::common::time;

use syntax::codemap::Span;
use syntax::print::pprust::*;
use syntax::{ast, abi};
use syntax::ast_util::local_def;

use std::cell::RefCell;

// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;

pub mod check;
mod rscope;
mod astconv;
pub mod collect;
mod constrained_type_params;
pub mod coherence;
pub mod variance;

pub struct TypeAndSubsts<'tcx> {
    pub substs: subst::Substs<'tcx>,
    pub ty: Ty<'tcx>,
}

pub struct CrateCtxt<'a, 'tcx: 'a> {
    // A mapping from method call sites to traits that have that method.
    pub trait_map: ty::TraitMap,
    /// A vector of every trait accessible in the whole crate
    /// (i.e. including those from subcrates). This is used only for
    /// error reporting, and so is lazily initialised and generally
    /// shouldn't taint the common path (hence the RefCell).
    pub all_traits: RefCell<Option<check::method::AllTraitsVec>>,
    pub tcx: &'a ty::ctxt<'tcx>,
}

// Functions that write types into the node type table
fn write_ty_to_tcx<'tcx>(tcx: &ty::ctxt<'tcx>, node_id: ast::NodeId, ty: Ty<'tcx>) {
    debug!("write_ty_to_tcx({}, {:?})", node_id, ty);
    assert!(!ty.needs_infer());
    tcx.node_type_insert(node_id, ty);
}

fn write_substs_to_tcx<'tcx>(tcx: &ty::ctxt<'tcx>,
                             node_id: ast::NodeId,
                             item_substs: ty::ItemSubsts<'tcx>) {
    if !item_substs.is_noop() {
        debug!("write_substs_to_tcx({}, {:?})",
               node_id,
               item_substs);

        assert!(!item_substs.substs.types.needs_infer());

        tcx.tables.borrow_mut().item_substs.insert(node_id, item_substs);
    }
}

fn lookup_full_def(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) -> def::Def {
    match tcx.def_map.borrow().get(&id) {
        Some(x) => x.full_def(),
        None => {
            span_fatal!(tcx.sess, sp, E0242, "internal error looking up a definition")
        }
    }
}

fn require_c_abi_if_variadic(tcx: &ty::ctxt,
                             decl: &ast::FnDecl,
                             abi: abi::Abi,
                             span: Span) {
    if decl.variadic && abi != abi::C {
        span_err!(tcx.sess, span, E0045,
                  "variadic function must have C calling convention");
    }
}

fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>,
                                   maybe_infcx: Option<&infer::InferCtxt<'a, 'tcx>>,
                                   t1_is_expected: bool,
                                   span: Span,
                                   t1: Ty<'tcx>,
                                   t2: Ty<'tcx>,
                                   msg: M)
                                   -> bool where
    M: FnOnce() -> String,
{
    let result = match maybe_infcx {
        None => {
            let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
            infer::mk_eqty(&infcx, t1_is_expected, infer::Misc(span), t1, t2)
        }
        Some(infcx) => {
            infer::mk_eqty(infcx, t1_is_expected, infer::Misc(span), t1, t2)
        }
    };

    match result {
        Ok(_) => true,
        Err(ref terr) => {
            span_err!(tcx.sess, span, E0211,
                      "{}: {}", msg(), terr);
            tcx.note_and_explain_type_err(terr, span);
            false
        }
    }
}

fn check_main_fn_ty(ccx: &CrateCtxt,
                    main_id: ast::NodeId,
                    main_span: Span) {
    let tcx = ccx.tcx;
    let main_t = tcx.node_id_to_type(main_id);
    match main_t.sty {
        ty::TyBareFn(..) => {
            match tcx.map.find(main_id) {
                Some(ast_map::NodeItem(it)) => {
                    match it.node {
                        ast::ItemFn(_, _, _, _, ref ps, _)
                        if ps.is_parameterized() => {
                            span_err!(ccx.tcx.sess, main_span, E0131,
                                      "main function is not allowed to have type parameters");
                            return;
                        }
                        _ => ()
                    }
                }
                _ => ()
<|fim▁hole|>
                sig: ty::Binder(ty::FnSig {
                    inputs: Vec::new(),
                    output: ty::FnConverging(tcx.mk_nil()),
                    variadic: false
                })
            }));

            require_same_types(tcx, None, false, main_span, main_t, se_ty,
                               || {
                                   format!("main function expects type: `{}`",
                                           se_ty)
                               });
        }
        _ => {
            tcx.sess.span_bug(main_span,
                              &format!("main has a non-function type: found `{}`",
                                       main_t));
        }
    }
}

fn check_start_fn_ty(ccx: &CrateCtxt,
                     start_id: ast::NodeId,
                     start_span: Span) {
    let tcx = ccx.tcx;
    let start_t = tcx.node_id_to_type(start_id);
    match start_t.sty {
        ty::TyBareFn(..) => {
            match tcx.map.find(start_id) {
                Some(ast_map::NodeItem(it)) => {
                    match it.node {
                        ast::ItemFn(_,_,_,_,ref ps,_)
                        if ps.is_parameterized() => {
                            span_err!(tcx.sess, start_span, E0132,
                                      "start function is not allowed to have type parameters");
                            return;
                        }
                        _ => ()
                    }
                }
                _ => ()
            }

            let se_ty = tcx.mk_fn(Some(local_def(start_id)), tcx.mk_bare_fn(ty::BareFnTy {
                unsafety: ast::Unsafety::Normal,
                abi: abi::Rust,
                sig: ty::Binder(ty::FnSig {
                    inputs: vec!(
                        tcx.types.isize,
                        tcx.mk_imm_ptr(tcx.mk_imm_ptr(tcx.types.u8))
                    ),
                    output: ty::FnConverging(tcx.types.isize),
                    variadic: false,
                }),
            }));

            require_same_types(tcx, None, false, start_span, start_t, se_ty,
                               || {
                                   format!("start function expects type: `{}`",
                                           se_ty)
                               });
        }
        _ => {
            tcx.sess.span_bug(start_span,
                              &format!("start has a non-function type: found `{}`",
                                       start_t));
        }
    }
}

fn check_for_entry_fn(ccx: &CrateCtxt) {
    let tcx = ccx.tcx;
    match *tcx.sess.entry_fn.borrow() {
        Some((id, sp)) => match tcx.sess.entry_type.get() {
            Some(config::EntryMain) => check_main_fn_ty(ccx, id, sp),
            Some(config::EntryStart) => check_start_fn_ty(ccx, id, sp),
            Some(config::EntryNone) => {}
            None => tcx.sess.bug("entry function without a type")
        },
        None => {}
    }
}

pub fn check_crate(tcx: &ty::ctxt, trait_map: ty::TraitMap) {
    let time_passes = tcx.sess.time_passes();
    let ccx = CrateCtxt {
        trait_map: trait_map,
        all_traits: RefCell::new(None),
        tcx: tcx
    };

    time(time_passes, "type collecting", ||
         collect::collect_item_types(tcx));

    // this ensures that later parts of type checking can assume that items
    // have valid types and not error
    tcx.sess.abort_if_errors();

    time(time_passes, "variance inference", ||
         variance::infer_variance(tcx));

    time(time_passes, "coherence checking", ||
         coherence::check_coherence(&ccx));

    time(time_passes, "wf checking (old)", ||
         check::check_wf_old(&ccx));

    time(time_passes, "item-types checking", ||
         check::check_item_types(&ccx));

    time(time_passes, "item-bodies checking", ||
         check::check_item_bodies(&ccx));

    time(time_passes, "drop-impl checking", ||
         check::check_drop_impls(&ccx));

    // Do this last so that if there are errors in the old code, they
    // get reported, and we don't get extra warnings.
    time(time_passes, "wf checking (new)", ||
         check::check_wf_new(&ccx));

    check_for_entry_fn(&ccx);

    tcx.sess.abort_if_errors();
}

__build_diagnostic_array! { librustc_typeck, DIAGNOSTICS }<|fim▁end|>
            }

            let se_ty = tcx.mk_fn(Some(local_def(main_id)), tcx.mk_bare_fn(ty::BareFnTy {
                unsafety: ast::Unsafety::Normal,
                abi: abi::Rust,
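check_main_fn_ty above boils down to: build the expected signature fn() -> () and ask require_same_types to equate it with main's actual type, rejecting type parameters up front. A toy version of just that shape check, with Python tuples standing in for ty::FnSig; this is purely illustrative and not how rustc represents types:

def fn_sig(inputs, output):
    return ('fn', tuple(inputs), output)

EXPECTED_MAIN = fn_sig([], 'unit')   # analogue of fn() -> ()

def check_main_fn_ty(actual, type_params=0):
    if type_params:
        raise TypeError('main function is not allowed to have type parameters')
    if actual != EXPECTED_MAIN:
        raise TypeError('main function expects type: `fn()`')

check_main_fn_ty(fn_sig([], 'unit'))             # ok
try:
    check_main_fn_ty(fn_sig(['isize'], 'unit'))  # wrong arity: rejected
except TypeError as err:
    print(err)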
<|file_name|>Solution.java<|end_file_name|><|fim▁begin|>import java.util.Scanner;

/**
 * @author Oleg Cherednik
 * @since 27.10.2017
 */
public class Solution {

    static int[] leftRotation(int[] a, int d) {
        for (int i = 0, j = a.length - 1; i < j; i++, j--)
            swap(a, i, j);

        d %= a.length;

        if (d > 0) {
            d = a.length - d;

            for (int i = 0, j = d - 1; i < j; i++, j--)
                swap(a, i, j);
            for (int i = d, j = a.length - 1; i < j; i++, j--)
                swap(a, i, j);
        }

        return a;
    }

    private static void swap(int[] arr, int i, int j) {
        int tmp = arr[i];
        arr[i] = arr[j];
        arr[j] = tmp;
    }

    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        int n = in.nextInt();
        int d = in.nextInt();
        int[] a = new int[n];

        for (int a_i = 0; a_i < n; a_i++) {
            a[a_i] = in.nextInt();
        }

        int[] result = leftRotation(a, d);

        for (int i = 0; i < result.length; i++) {
            System.out.print(result[i] + (i != result.length - 1 ? " " : ""));
        }

        System.out.println("");<|fim▁hole|>
    }
}<|fim▁end|>
in.close();
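leftRotation uses the classic three-reversal trick: reverse the whole array, then reverse the first n - d elements and the last d. A direct Python transcription, checked against slicing; this form also handles d % n == 0, which the Java guards with if (d > 0):

def left_rotate(a, d):
    n = len(a)
    d %= n
    a.reverse()                  # step 1: reverse everything
    k = n - d
    a[:k] = reversed(a[:k])      # step 2: reverse the first n - d elements
    a[k:] = reversed(a[k:])      # step 3: reverse the last d elements
    return a

a = [1, 2, 3, 4, 5]
assert left_rotate(a[:], 2) == a[2:] + a[:2]   # [3, 4, 5, 1, 2]
assert left_rotate(a[:], 5) == a               # full rotation is a no-op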
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use libc::{self, c_int};

#[macro_use]
pub mod dlsym;

#[cfg(any(target_os = "linux", target_os = "android"))]
mod epoll;

#[cfg(any(target_os = "linux", target_os = "android"))]
pub use self::epoll::{Events, Selector};

#[cfg(any(target_os = "bitrig", target_os = "dragonfly",
    target_os = "freebsd", target_os = "ios", target_os = "macos",
    target_os = "netbsd", target_os = "openbsd"))]
mod kqueue;

#[cfg(any(target_os = "bitrig", target_os = "dragonfly",
    target_os = "freebsd", target_os = "ios", target_os = "macos",
    target_os = "netbsd", target_os = "openbsd"))]
pub use self::kqueue::{Events, Selector};

mod awakener;
mod eventedfd;
mod io;
mod ready;
mod tcp;
mod udp;

#[cfg(feature = "with-deprecated")]
mod uds;

pub use self::awakener::Awakener;
pub use self::eventedfd::EventedFd;
pub use self::io::{Io, set_nonblock};
pub use self::ready::UnixReady;
pub use self::tcp::{TcpStream, TcpListener};
pub use self::udp::UdpSocket;

#[cfg(feature = "with-deprecated")]
pub use self::uds::UnixSocket;

pub use iovec::IoVec;

use std::os::unix::io::FromRawFd;

pub fn pipe() -> ::io::Result<(Io, Io)> {
    // Use pipe2 for atomically setting O_CLOEXEC if we can, but otherwise
    // just fall back to using `pipe`.
    dlsym!(fn pipe2(*mut c_int, c_int) -> c_int);

    let mut pipes = [0; 2];
    let flags = libc::O_NONBLOCK | libc::O_CLOEXEC;
    unsafe {
        match pipe2.get() {
            Some(pipe2_fn) => {
                cvt(pipe2_fn(pipes.as_mut_ptr(), flags))?;
            }
            None => {
                cvt(libc::pipe(pipes.as_mut_ptr()))?;
                libc::fcntl(pipes[0], libc::F_SETFL, flags);
                libc::fcntl(pipes[1], libc::F_SETFL, flags);
            }
        }
    }

    unsafe {
        Ok((Io::from_raw_fd(pipes[0]), Io::from_raw_fd(pipes[1])))
    }
}

trait IsMinusOne {
    fn is_minus_one(&self) -> bool;
}

impl IsMinusOne for i32 {
    fn is_minus_one(&self) -> bool { *self == -1 }
}

impl IsMinusOne for isize {
    fn is_minus_one(&self) -> bool { *self == -1 }<|fim▁hole|>
    use std::io;

    if t.is_minus_one() {
        Err(io::Error::last_os_error())
    } else {
        Ok(t)
    }
}<|fim▁end|>
}

fn cvt<T: IsMinusOne>(t: T) -> ::io::Result<T> {
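The pipe() above prefers pipe2 so that O_NONBLOCK and O_CLOEXEC are applied atomically at creation time, and falls back to pipe plus fcntl when pipe2 is unavailable. The same dance sketched in Python; os.pipe2 is Unix-only (Python 3.3+), and the fallback path is inherently racy across fork/exec, just like the Rust None branch:

import os

def nonblocking_pipe():
    try:
        # Atomic: the kernel applies both flags at creation time.
        return os.pipe2(os.O_NONBLOCK | os.O_CLOEXEC)
    except AttributeError:
        # Fallback: create first, then set the flags after the fact.
        import fcntl
        r, w = os.pipe()
        for fd in (r, w):
            fcntl.fcntl(fd, fcntl.F_SETFL, os.O_NONBLOCK)
            fcntl.fcntl(fd, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
        return r, w

r, w = nonblocking_pipe()
os.write(w, b'ping')
print(os.read(r, 4))  # b'ping'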
<|file_name|>ColossusHero.tsx<|end_file_name|><|fim▁begin|>/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */

import React, { useState } from 'react';
import { styled } from '@csegames/linaria/react';

const Hero = styled.div`
  position: fixed;
  top: 0;
  left: 0;
  bottom: 0;
  right: 0;
  flex: 1 1 auto;
  display: flex;
  flex-direction: column;
  align-content: stretch;
  align-items: stretch;
  justify-content: flex-start;
  flex-wrap: nowrap;
  user-select: none !important;
  -webkit-user-select: none !important;
  transition: opacity 2s ease;
`;

const Content = styled.div`
  width: 100%;
  height: 100%;
  flex: 1 1 auto;
`;

const Video = styled.video`
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;<|fim▁hole|>
`;

export function ColossusHero(props: {}) {
  const [isInitialVideo, setIsInitialVideo] = useState(true);

  function onVideoEnded() {
    setIsInitialVideo(false);
  }

  return (
    <Hero>
      <Content>
        <Video src='videos/fsr-logo-4k-10q-loop.webm' poster='' onEnded={onVideoEnded} autoPlay={isInitialVideo} loop></Video>
        {isInitialVideo &&
          <Video src='videos/fsr-intro-4k-10q.webm' poster='images/cse/login-cse.jpg' onEnded={onVideoEnded} autoPlay></Video>}
      </Content>
    </Hero>
  )
}<|fim▁end|>
  height: 100%;
  object-fit: cover;
<|file_name|>test_zpm.py<|end_file_name|><|fim▁begin|>#  Copyright 2014 Rackspace, Inc.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import copy
import gzip
import jinja2
import json
import mock
import os
import pytest
import shutil
import swiftclient.exceptions
import tarfile
import tempfile
import zpmlib

try:
    from cStringIO import StringIO as BytesIO
except ImportError:
    from io import BytesIO

from zpmlib import zpm, commands


class TestFindUIUploads:
    """
    Tests for :func:`zpmlib.zpm._find_ui_uploads`.
    """

    def test_with_files(self):
        zapp = {'ui': ['x']}
        tar = mock.Mock(getnames=lambda: ['x', 'y'])
        matches = zpm._find_ui_uploads(zapp, tar)
        assert sorted(matches) == ['x']

    def test_with_glob(self):
        zapp = {'ui': ['x', 'ui/*']}
        tar = mock.Mock(getnames=lambda: ['x', 'y', 'ui/x', 'ui/y'])
        matches = zpm._find_ui_uploads(zapp, tar)
        assert sorted(matches) == ['ui/x', 'ui/y', 'x']


def test__prepare_job():
    # Test for `zpmlib.zpm._prepare_job`.

    # Contents of `boot/system.map`, which is expected to be in the
    # `myapp.zapp` archive.
    myapp_json = [
        {'exec': {'args': 'myapp.py', 'path': 'file://python2.7:python'},
         'devices': [{'name': 'python2.7'}, {'name': 'stdout'}],
         'name': 'myapp'}
    ]
    zapp = {'meta': {'name': 'myapp'}}
    zapp_swift_url = ('swift://AUTH_469a9cd20b5a4fc5be9438f66bb5ee04/'
                      'test_container/hello.zapp')
    # Expected result
    exp_job_json = copy.deepcopy(myapp_json)
    exp_job_json[0]['devices'].append(
        {'name': 'image', 'path': zapp_swift_url}
    )

    tempdir = tempfile.mkdtemp()
    try:
        tempzapp = os.path.join(tempdir, 'myapp.zapp')
        tf = tarfile.open(tempzapp, 'w:gz')
        # prepare a sample job description
        system_map = os.path.join(tempdir, 'system.map')
        with open(system_map, 'w') as fp:
            json.dump(myapp_json, fp)
        tf.add(system_map, arcname='boot/system.map')
        tf.close()

        tf = tarfile.open(tempzapp, 'r:gz')
        job = zpm._prepare_job(tf, zapp, zapp_swift_url)
        tf.close()
        assert exp_job_json == job
    finally:
        shutil.rmtree(tempdir)


class TestFindProjectRoot:
    """
    Tests for :func:`zpmlib.zpm.find_project_root`.
    """

    def setup_method(self, _method):
        self.tempdir = tempfile.mkdtemp()
        self.subdir = os.path.join(self.tempdir, 'foo', 'bar')
        os.makedirs(self.subdir)

    def test_zapp_yaml_exists(self):
        try:
            zapp_path = os.path.join(self.tempdir, 'zapp.yaml')
            # "touch" the file
            open(zapp_path, 'w').close()

            with mock.patch('os.getcwd') as cwd:
                cwd.return_value = self.subdir
                root = zpm.find_project_root()
                assert root == self.tempdir
        finally:
            shutil.rmtree(self.tempdir)

    def test_zapp_yaml_not_exists(self):
        try:
            with mock.patch('os.getcwd') as cwd:
                cwd.return_value = self.subdir
                with pytest.raises(RuntimeError):
                    zpm.find_project_root()
        finally:
            shutil.rmtree(self.tempdir)


def test__generate_job_desc():
    # Test :func:`zpmlib.zpm._generate_job_desc`.
    zapp_yaml_contents = {
        'bundling': ['mapper.py', 'reducer.py'],
        'execution': {
            'groups': [
                {'args': r'mapper.py "foo\\, \nbar"',
                 'devices': [
                     {'name': 'python2.7'},
                     {'name': 'stdout'},
                     {'name': 'input_swift_file',
                      'path': 'swift://AUTH_abc123/foo/bar.txt'},
                 ],
                 'name': 'mapper',
                 'connect': ['reducer'],
                 'env': {'FOO': 'bar', 'BAZ': 5},
                 'path': 'swift://./container/python'},
                {'args': r'mapper.py "foo\\, \nbar"',
                 'devices': [
                     {'name': 'python2.7'},
                     {'name': 'stdout'},
                     {'name': 'input_swift_file',
                      'path': 'swift://AUTH_abc123/foo/bar.txt'},
                 ],
                 'name': 'mapper',
                 'connect': ['reducer'],
                 'env': {'FOO': 'bar', 'BAZ': 5},
                 'path': 'swift://~/container/path/to/python'},
                {'args': 'reducer.py',
                 'devices': [
                     {'name': 'python2.7'},
                     {'name': 'stdout'},
                 ],
                 'name': 'reducer',
                 'path': 'file://python2.7:python'},
            ]
        },
        'help': {'args': [['loglevel', 'Log Level']],
                 'description': 'sample map/reduce app'},
        'meta': {'Author-email': 'John Doe <[email protected]',
                 'Summary': 'Sample map/reduce app',
                 'Version': '0.1',
                 'name': 'mapreduce'}
    }
    expected_job = [
        {'devices': [
            {'name': 'python2.7'},
            {'name': 'stdout'},
            {'name': 'input_swift_file',
             'path': 'swift://AUTH_abc123/foo/bar.txt'}],
         'connect': ['reducer'],
         'name': 'mapper',
         'exec': {'path': 'swift://./container/python',
                  'name': 'python',
                  'args': 'mapper.py foo\\x5c\\x2c\\x20\\x5cnbar',
                  'env': {'FOO': 'bar', 'BAZ': 5}}},
        {'devices': [
            {'name': 'python2.7'},
            {'name': 'stdout'},
            {'name': 'input_swift_file',
             'path': 'swift://AUTH_abc123/foo/bar.txt'}],
         'connect': ['reducer'],
         'name': 'mapper',
         'exec': {'path': 'swift://~/container/path/to/python',
                  'name': 'path/to/python',
                  'args': 'mapper.py foo\\x5c\\x2c\\x20\\x5cnbar',
                  'env': {'FOO': 'bar', 'BAZ': 5}}},
        {'devices': [
            {'name': 'python2.7'},
            {'name': 'stdout'}],
         'name': 'reducer',
         'exec': {'path': 'file://python2.7:python',
                  'name': 'python',
                  'args': 'reducer.py'}},
    ]

    actual_job = zpm._generate_job_desc(zapp_yaml_contents)
    assert actual_job == expected_job


class TestGetZeroCloudConn:
    """
    Tests for :func:`zpmlib.zpm._get_zerocloud_conn`.
    """

    def setup_method(self, _method):
        self.v1_args = mock.Mock()
        self.v1_args.auth_version = '1.0'
        self.v1_args.auth = 'http://example.com/auth/v1.0'
        self.v1_args.user = 'tenant1:user1'
        self.v1_args.key = 'secret'

        self.v2_args = mock.Mock()
        self.v2_args.auth_version = '2.0'
        self.v2_args.os_auth_url = 'http://example.com/v2.0'
        self.v2_args.os_username = 'user1'
        self.v2_args.os_password = 'secret'
        self.v2_args.os_tenant_name = 'tenant1'

    def test_v1(self):
        conn = zpm._get_zerocloud_conn(self.v1_args)
        assert conn.authurl == self.v1_args.auth
        assert conn.user == self.v1_args.user
        assert conn.key == self.v1_args.key

    def test_v1_fail(self):
        self.v1_args.user = None
        with pytest.raises(zpmlib.ZPMException):
            zpm._get_zerocloud_conn(self.v1_args)

    def test_v2(self):
        conn = zpm._get_zerocloud_conn(self.v2_args)
        assert conn.authurl == self.v2_args.os_auth_url
        assert conn.user == self.v2_args.os_username
        assert conn.key == self.v2_args.os_password
        assert conn.os_options['tenant_name'] == self.v2_args.os_tenant_name

    def test_v2_fail(self):
        self.v2_args.os_tenant_name = None
        with pytest.raises(zpmlib.ZPMException):
            zpm._get_zerocloud_conn(self.v2_args)

    def test_no_auth_details_given(self):
        args = mock.Mock()
        args.auth_version = None
        args.auth = None
        args.user = None
        args.key = None
        args.os_auth_url = None
        args.os_username = None
        args.os_password = None
        args.os_tenant_name = None
        env = dict.fromkeys([
            'ST_AUTH', 'ST_USER', 'ST_KEY',
            'OS_AUTH_URL', 'OS_USERNAME', 'OS_PASSWORD', 'OS_TENANT_NAME',
        ], '')
        with mock.patch.dict('os.environ', env):
            with pytest.raises(zpmlib.ZPMException):
                zpm._get_zerocloud_conn(args)


class TestDeploy:
    """
    Tests :function:`zpmlib.zpm.deploy` and its helper functions.
    """

    @classmethod
    def setup_class(cls):
        cls.zapp_yaml_contents = """\
execution:
  groups:
    - name: "hello"
      path: file://python2.7:python
      args: "hello.py"
      devices:
      - name: python
      - name: stdout
meta:
  Version: ""
  name: "hello"
  Author-email: ""
  Summary: ""
help:
  description: ""
  args:
  - ["", ""]
bundling:
  - "hello.py"
ui:
  - "index.html"
  - "foo.js.tmpl"
""".encode('utf-8')

        cls.job_json_contents = json.dumps([
            {'exec': {'args': 'hello.py', 'path': 'file://python2.7:python'},
             'devices': [{'name': 'python'}, {'name': 'stdout'}],
             'name': 'hello'}
        ]).encode('utf-8')

        cls.job_json_prepped = json.dumps([
            {"exec": {"path": "file://python2.7:python", "args": "hello.py"},
             "devices": [{"name": "python"}, {"name": "stdout"},
                         {"name": "image",
                          "path": "swift:///container1/foo/bar/zapp.yaml"}],
             "name": "hello"}
        ]).encode('utf-8')

        cls.hellopy_contents = b"""\
print("Hello from ZeroVM!")
"""

        cls.indexhtml_contents = bytearray("""\
<html>
<head><title>Hello!</title></head>
<body>Hello from ZeroVM!</body>
</html>""", 'utf-8')

        cls.foojstmpl_contents = b"var opts = {{ auth_opts }};"

        cls.temp_dir = tempfile.mkdtemp()
        cls.temp_zapp_file = '%s/zapp.yaml' % cls.temp_dir

        tar = tarfile.open(cls.temp_zapp_file, 'w:gz')

        info = tarfile.TarInfo(name='foo.js.tmpl')
        info.size = len(cls.foojstmpl_contents)
        tar.addfile(info, BytesIO(cls.foojstmpl_contents))

        info = tarfile.TarInfo(name='boot/system.map')
        info.size = len(cls.job_json_contents)
        tar.addfile(info, BytesIO(cls.job_json_contents))

        info = tarfile.TarInfo(name='zapp.yaml')
        info.size = len(cls.zapp_yaml_contents)
        tar.addfile(info, BytesIO(cls.zapp_yaml_contents))

        info = tarfile.TarInfo(name='hello.py')
        info.size = len(cls.hellopy_contents)
        tar.addfile(info, BytesIO(cls.hellopy_contents))

        info = tarfile.TarInfo(name='index.html')
        info.size = len(cls.indexhtml_contents)
        tar.addfile(info, BytesIO(cls.indexhtml_contents))

        tar.close()

    @classmethod
    def teardown_class(cls):
        shutil.rmtree(cls.temp_dir)

    def setup_method(self, _method):
        self.conn = mock.Mock()
        self.conn.get_container.return_value = (
            {},  # response headers
            [],  # object list
        )
        self.target = 'container1/foo/bar'
        self.zapp_path = self.temp_zapp_file

        self.conn.url = 'http://example.com'
        args = mock.Mock()
        args.auth = 'http://example.com/auth/v1.0'
        args.user = 'user1'
        args.key = 'secret'
        self.auth_opts = jinja2.Markup(
            json.dumps(zpm._prepare_auth('1.0', args, self.conn))
        )

    def test__generate_uploads(self):
        uploads = zpm._generate_uploads(self.conn, self.target,
                                        self.zapp_path, self.auth_opts)
        uploads = list(uploads)

        foojs_tmpl = jinja2.Template(self.foojstmpl_contents.decode())
        foojs = foojs_tmpl.render(auth_opts=self.auth_opts)

        expected_uploads = [
            ('%s/zapp.yaml' % self.target, gzip.open(self.zapp_path).read(),
             'application/x-tar'),
            ('%s/boot/system.map' % self.target,
             self.job_json_prepped.decode('utf-8'),
             'application/json'),
            ('%s/foo.js' % self.target, foojs, None),
            ('%s/index.html' % self.target, self.indexhtml_contents, None),
        ]

        assert uploads[0] == expected_uploads[0]
        assert uploads[1][0] == expected_uploads[1][0]
        assert json.loads(uploads[1][1]) == json.loads(expected_uploads[1][1])
        assert uploads[2] == expected_uploads[2]
        assert uploads[3] == expected_uploads[3]

    def test__deploy_zapp(self):
        with mock.patch('zpmlib.zpm._generate_uploads') as gu:
            gu.return_value = iter([('x/a', 'b', None), ('x/c', 'd', None)])
            zpm._deploy_zapp(self.conn, self.target, self.zapp_path,
                             self.auth_opts)

            put_object = self.conn.put_object
            assert put_object.call_count == 2
            assert put_object.call_args_list == [
                mock.call('x', 'a', 'b', content_type=None),
                mock.call('x', 'c', 'd', content_type=None)]

    def test__deploy_zapp_with_index_html(self):
        with mock.patch('zpmlib.zpm._generate_uploads') as gu:
            gu.return_value = iter([('cont/dir/index.html', 'data',
                                     'text/html')])
            index = zpm._deploy_zapp(self.conn, 'cont', None, None)
            assert index == 'cont/dir/index.html'

            put_object = self.conn.put_object
            assert put_object.call_count == 1
            assert put_object.call_args_list == [
                mock.call('cont', 'dir/index.html', 'data',
                          content_type='text/html')
            ]

    def test__deploy_zapp_without_index_html(self):
        with mock.patch('zpmlib.zpm._generate_uploads') as gu:
            gu.return_value = iter([('cont/foo.html', 'data', 'text/html')])
            index = zpm._deploy_zapp(self.conn, 'cont', None, None)
            assert index == 'cont/'

            put_object = self.conn.put_object
            assert put_object.call_count == 1
            assert put_object.call_args_list == [
                mock.call('cont', 'foo.html', 'data',
                          content_type='text/html')
            ]

    def test__deploy_zapp_container_not_empty(self):
        self.conn.get_container.return_value = (
            {},  # response headers
            # The actual files list response from Swift is a list of
            # dictionaries. For these tests, we don't actually check the
            # content; just length of the file list.
            ['file1'],
        )

        with pytest.raises(zpmlib.ZPMException) as exc:
            zpm._deploy_zapp(self.conn, 'target/dir1/dir2', None, None)
        assert str(exc.value) == (
            "Target container ('target') is not empty.\n"
            "Deploying to a non-empty container can cause consistency "
            "problems with overwritten objects.\n"
            "Specify the flag `--force/-f` to overwrite anyway."
        )
        assert self.conn.get_container.call_args_list == [mock.call('target')]

    def test__deploy_zapp_container_not_empty_force(self):
        self.conn.get_container.return_value = ({}, ['file1'])

        with mock.patch('zpmlib.zpm._generate_uploads') as gu:
            gu.return_value = iter([('x/a', 'b', None), ('x/c', 'd', None)])
            zpm._deploy_zapp(self.conn, self.target, self.zapp_path,
                             self.auth_opts, force=True)

            put_object = self.conn.put_object
            assert put_object.call_count == 2
            assert put_object.call_args_list == [
                mock.call('x', 'a', 'b', content_type=None),
                mock.call('x', 'c', 'd', content_type=None)]

    def test__deploy_zapp_container_doesnt_exist(self):
        self.conn.get_container.side_effect = (
            swiftclient.exceptions.ClientException(None)
        )

        with mock.patch('zpmlib.zpm._generate_uploads') as gu:
            gu.return_value = iter([('target/dir/foo.py', 'data', None)])
            zpm._deploy_zapp(self.conn, 'target/dir', None, None)

            # check that the container is created
            assert self.conn.put_container.call_count == 1
            assert self.conn.put_container.call_args_list == [
                mock.call('target')
            ]

            # check that files are uploaded correctly
            assert self.conn.put_object.call_count == 1
            assert self.conn.put_object.call_args_list == [
                mock.call('target', 'dir/foo.py', 'data', content_type=None)
            ]

    def test_deploy_project_execute(self):
        job_path = 'boot/system.map'
        job_json = self.job_json_contents.decode('utf-8')
        job_dict = json.loads(job_json)

        class FakeZeroCloudConnection(mock.Mock):
            url = 'http://127.0.0.1'
            token = 'abc123'

            def post_job(self, job, response_dict=None,
                         response_body_buffer=None):
                response_dict['status'] = 200
                response_dict['reason'] = 'OK'
                response_dict['headers'] = {
                    'x-nexe-system': 'node-1',
                    'x-nexe-cdr-line': (
                        '5.121, 4.993, 0.13 3.84 1025 75943662 23 735 8 399 0 '
                        '0'
                    ),
                    'x-nexe-status': 'ok',
                    'x-nexe-retcode': '0',
                }
                # Check the job is passed properly here
                assert job == job_dict

            def get_container(self, *args, **kwargs):
                return {}, []

        self.conn = FakeZeroCloudConnection()
        self.conn.auth_version = '1.0'

        parser = commands.set_up_arg_parser()
        args = parser.parse_args(['deploy', 'foo', self.zapp_path, '--exec'])

        with mock.patch('zpmlib.zpm._get_zerocloud_conn') as gzc:
            gzc.return_value = self.conn
            self.conn.get_object = mock.Mock()
            get_object = self.conn.get_object
            get_object.return_value = ([], job_json)
            zpm.deploy_project(args)

        assert get_object.call_args_list == [mock.call('foo', job_path)]


def test__prepare_auth_v0():
    # Test for :func:`zpmlib.zpm._prepare_auth`, with version 0.0
    version = '0.0'
    args = None
    conn = mock.Mock()
    conn.url = 'http://example.com'

    expected = {
        'version': '0.0',
        'swiftUrl': 'http://example.com',
    }
    assert zpm._prepare_auth(version, args, conn) == expected


def test__prepare_auth_v1():
    # Test for :func:`zpmlib.zpm._prepare_auth`, with version 1.0
    version = '1.0'
    args = mock.Mock()
    args.auth = 'http://example.com/auth/v1.0'
    args.user = 'user1'
    args.key = 'secret'
    conn = None

    expected = {
        'version': '1.0',
        'authUrl': 'http://example.com/auth/v1.0',
        'username': 'user1',
        'password': 'secret',
    }
    assert zpm._prepare_auth(version, args, conn) == expected

    # Make sure that we're robust enough to handle slightly varied version
    # inputs.
    version = '1'
    assert zpm._prepare_auth(version, args, conn) == expected


def test__prepare_auth_v2():
    # Test for :func:`zpmlib.zpm._prepare_auth`, with version 2.0
    version = '2.0'
    args = mock.Mock()
    args.os_auth_url = 'http://example.com:5000/v2.0'
    args.os_username = 'user1'
    args.os_tenant_name = 'tenant1'
    args.os_password = 'secret'
    conn = None

    expected = {
        'version': '2.0',
        'authUrl': 'http://example.com:5000/v2.0',
        'tenant': 'tenant1',
        'username': 'user1',
        'password': 'secret',
    }
    assert zpm._prepare_auth(version, args, conn) == expected

    # Make sure that we're robust enough to handle slightly varied version
    # inputs.
    version = '2'
    assert zpm._prepare_auth(version, args, conn) == expected


class TestGuessAuthVersion:

    def setup_method(self, _method):
        self.args = mock.Mock()
        self.args.auth = None
        self.args.user = None
        self.args.key = None
        self.args.os_auth_url = None
        self.args.os_username = None
        self.args.os_password = None
        self.args.os_tenant_name = None

    def test_args_v1(self):
        args = self.args
        args.auth = 'auth'
        args.user = 'user'
        args.key = 'key'
        args.os_auth_url = 'authurl'
        assert zpm._guess_auth_version(args) == '1.0'

    def test_args_v2(self):
        args = self.args
        args.os_auth_url = 'authurl'
        args.os_username = 'username'
        args.os_password = 'password'
        args.os_tenant_name = 'tenant'
        args.auth = 'auth'
        assert zpm._guess_auth_version(args) == '2.0'

    def test_args_default(self):
        args = self.args
        args.auth = 'auth'
        args.user = 'user'
        args.key = 'key'
        args.os_auth_url = 'authurl'
        args.os_username = 'username'
        args.os_password = 'password'
        args.os_tenant_name = 'tenant'
        assert zpm._guess_auth_version(args) == '1.0'

    def test_env_v1(self):
        env = dict(
            ST_AUTH='auth',
            ST_USER='user',
            ST_KEY='key',
            OS_AUTH_URL='',
            OS_USERNAME='username',
            OS_PASSWORD='',
            OS_TENANT_NAME='',
        )
        with mock.patch.dict('os.environ', env):
            assert zpm._guess_auth_version(self.args) == '1.0'

    def test_env_v2(self):
        env = dict(
            ST_AUTH='',
            ST_USER='user',
            ST_KEY='',
            OS_AUTH_URL='authurl',
            OS_USERNAME='username',
            OS_PASSWORD='password',
            OS_TENANT_NAME='tenant',
        )
        with mock.patch.dict('os.environ', env):
            assert zpm._guess_auth_version(self.args) == '2.0'

    def test_env_default(self):
        env = dict(
            ST_AUTH='auth',
            ST_USER='user',
            ST_KEY='key',
            OS_AUTH_URL='authurl',
            OS_USERNAME='username',
            OS_PASSWORD='password',
            OS_TENANT_NAME='tenant',
        )
        with mock.patch.dict('os.environ', env):
            assert zpm._guess_auth_version(self.args) == '1.0'

    def test_none(self):
        env = dict.fromkeys([
            'ST_AUTH', 'ST_USER', 'ST_KEY',
            'OS_AUTH_URL', 'OS_USERNAME', 'OS_PASSWORD', 'OS_TENANT_NAME',
        ], '')
        with mock.patch.dict('os.environ', env):
            assert zpm._guess_auth_version(self.args) is None


class TestExecSummaryTable:

    def test__get_exec_table_data_1_row(self):
        headers = {
            'content-length': '20',
            'content-type': 'text/html',
            'date': 'Tue, 26 Aug 2014 09:27:08 GMT',
            'etag': 'af0983cb8fef30642bae9ba0010e7a77',
            'x-chain-total-time': '3.920',
            'x-nexe-cdr-line': (
                '3.920, 3.913, 0.11 3.37 1025 75943644 2 20 0 0 0 0'
            ),
            'x-nexe-etag': 'disabled',
            'x-nexe-policy': 'Policy-0',
            'x-nexe-retcode': '0',
            'x-nexe-status': 'ok',
            'x-nexe-system': 'hello',
            'x-nexe-validation': '0',
            'x-timestamp': '1409045228.85265',
            'x-trans-id': 'tx1d61239ed02a56fbbfe5d-0053fc52e9',
            'x-zerovm-device': 'stdout',
        }
        expected_total_t = '3.920'
        expected_table = [
            ['hello', 'ok', '0', '3.913', '0.11', '3.37', '1025', '75943644',
             '2', '20', '0', '0', '0', '0']
        ]

        actual_total_t, actual_table = zpm._get_exec_table_data(headers)

        assert actual_total_t == expected_total_t
        assert actual_table == expected_table

    def test__get_exec_table_data_many_rows(self):
        cdr_line = (
            '5.121, '
            '4.993, 0.13 3.84 1025 75943662 23 735 8 399 0 0,'
            '4.511, 0.12 4.00 1026 75943758 0 0 0 0 1 11,'
            '4.468, 0.10 3.96 1026 75943758 0 0 0 0 1 11,'
            '4.965, 0.18 4.20 1025 75943664 0 0 15 33 5 100,'
            '4.962, 0.13 3.94 1025 75943664 0 0 15 33 5 100'
        )
        headers = {
            'content-length': '0',
            'content-type': 'application/x-gtar',
            'date': 'Tue, 26 Aug 2014 09:29:44 GMT',
            'etag': '753e7eac4298c4994a7a19c7c783bad5',
            'x-chain-total-time': '5.121',
            'x-nexe-cdr-line': cdr_line,
            'x-nexe-etag': 'disabled,disabled,disabled,disabled,disabled',
            'x-nexe-policy': 'Policy-0,Policy-0,Policy-0,Policy-0,Policy-0',
            'x-nexe-retcode': '1,0,0,0,0',
            'x-nexe-status': 'some error,ok,ok,ok,ok',
            'x-nexe-system': 'combiner,mapper-1,mapper-2,reducer-1,reducer-2',
            'x-nexe-validation': '1,0,0,0,0',
            'x-timestamp': '1409045384.22744',
            'x-trans-id': 'txa881f777891648f4834d6-0053fc5382',
        }
        expected_total_t = '5.121'
        expected_table = [
            ['combiner', 'some error', '1', '4.993', '0.13', '3.84', '1025',
             '75943662', '23', '735', '8', '399', '0', '0'],
            ['mapper-1', 'ok', '0', '4.511', '0.12', '4.00', '1026',
             '75943758', '0', '0', '0', '0', '1', '11'],
            ['mapper-2', 'ok', '0', '4.468', '0.10', '3.96', '1026',
             '75943758', '0', '0', '0', '0', '1', '11'],
            ['reducer-1', 'ok', '0', '4.965', '0.18', '4.20', '1025',
             '75943664', '0', '0', '15', '33', '5', '100'],
            ['reducer-2', 'ok', '0', '4.962', '0.13', '3.94', '1025',
             '75943664', '0', '0', '15', '33', '5', '100'],
        ]

        actual_total_t, actual_table = zpm._get_exec_table_data(headers)

        assert actual_total_t == expected_total_t
        assert actual_table == expected_table<|fim▁end|>
#
#  Unless required by applicable law or agreed to in writing, software
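TestFindUIUploads at the top of this module pins down the contract of _find_ui_uploads: each 'ui' entry is a glob matched against the tar's member names, and the result is deduplicated and sortable. A hedged re-implementation of just that contract (not the actual zpmlib source), satisfying both test cases:

import fnmatch

def find_ui_uploads(zapp, names):
    # Each 'ui' entry is treated as a glob over the tar member names.
    matches = set()
    for pattern in zapp.get('ui', []):
        matches.update(fnmatch.filter(names, pattern))
    return sorted(matches)

assert find_ui_uploads({'ui': ['x']}, ['x', 'y']) == ['x']
assert find_ui_uploads({'ui': ['x', 'ui/*']},
                       ['x', 'y', 'ui/x', 'ui/y']) == ['ui/x', 'ui/y', 'x']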
<|file_name|>attributes.rs<|end_file_name|><|fim▁begin|>extern crate polish;

use polish::test_case::{TestRunner, TestCase, TestCaseStatus, TEST_RUNNER_ATTRIBUTES as attributes};
use polish::logger::Logger;

<|fim▁hole|>
fn main() {
    TestRunner::new()
        .set_attribute(attributes.minimize_output)
        .set_attribute(attributes.disable_final_stats)
        .set_attribute(attributes.bail_out_after_first_failure)
        .run_test(TestCase::new("title", "criteria",
                                Box::new(|_: &mut Logger| -> TestCaseStatus { TestCaseStatus::UNKNOWN })));
}<|fim▁end|>
<|file_name|>if_.rs<|end_file_name|><|fim▁begin|>//! Network interface name resolution.
//!
//! Uses Linux and/or POSIX functions to resolve interface names like "eth0"
//! or "socan1" into device numbers.

use libc;
use libc::c_uint;
use {Result, Error, NixPath};

/// Resolve an interface into an interface number.
pub fn if_nametoindex<P: ?Sized + NixPath>(name: &P) -> Result<c_uint> {
    let if_index = try!(name.with_nix_path(|name| unsafe { libc::if_nametoindex(name.as_ptr()) }));

    if if_index == 0 {
        Err(Error::last())
    } else {
        Ok(if_index)
    }
}

libc_bitflags!(
    /// Standard interface flags, used by `getifaddrs`
    pub struct InterfaceFlags: libc::c_int {
        /// Interface is running. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_UP;
        /// Valid broadcast address set. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_BROADCAST;
        /// Internal debugging flag. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_DEBUG;
        /// Interface is a loopback interface. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_LOOPBACK;
        /// Interface is a point-to-point link. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_POINTOPOINT;
        /// Avoid use of trailers. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "ios",
                  target_os = "linux",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "solaris"))]
        IFF_NOTRAILERS;
        /// Interface manages own routes.
        #[cfg(any(target_os = "dragonfly"))]
        IFF_SMART;
        /// Resources allocated. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "fuchsia",
                  target_os = "ios",
                  target_os = "linux",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "solaris"))]
        IFF_RUNNING;
        /// No arp protocol, L2 destination address not set. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_NOARP;
        /// Interface is in promiscuous mode. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_PROMISC;
        /// Receive all multicast packets. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_ALLMULTI;
        /// Master of a load balancing bundle. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_MASTER;
        /// transmission in progress, tx hardware queue is full
        #[cfg(any(target_os = "freebsd",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "ios"))]
        IFF_OACTIVE;
        /// Protocol code on board.
        #[cfg(target_os = "solaris")]
        IFF_INTELLIGENT;
        /// Slave of a load balancing bundle. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_SLAVE;
        /// Can't hear own transmissions.
        #[cfg(any(target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "osx"))]
        IFF_SIMPLEX;
        /// Supports multicast. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        IFF_MULTICAST;
        /// Per link layer defined bit.
        #[cfg(any(target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "ios"))]
        IFF_LINK0;
        /// Multicast using broadcast.
        #[cfg(any(target_os = "solaris"))]
        IFF_MULTI_BCAST;
        /// Is able to select media type via ifmap. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_PORTSEL;
        /// Per link layer defined bit.
        #[cfg(any(target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "ios"))]
        IFF_LINK1;
        /// Non-unique address.
        #[cfg(any(target_os = "solaris"))]
        IFF_UNNUMBERED;
        /// Auto media selection active. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_AUTOMEDIA;
        /// Per link layer defined bit.
        #[cfg(any(target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "macos",
                  target_os = "netbsd",
                  target_os = "openbsd",
                  target_os = "ios"))]
        IFF_LINK2;
        /// Use alternate physical connection.
        #[cfg(any(target_os = "dragonfly",
                  target_os = "freebsd",
                  target_os = "macos",
                  target_os = "ios"))]
        IFF_ALTPHYS;
        /// DHCP controls interface.
        #[cfg(any(target_os = "solaris"))]
        IFF_DHCPRUNNING;
        /// The addresses are lost when the interface goes down. (see
        /// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_DYNAMIC;
        /// Do not advertise.
        #[cfg(any(target_os = "solaris"))]
        IFF_PRIVATE;
        /// Driver signals L1 up. Volatile.
        #[cfg(any(target_os = "fuchsia", target_os = "linux"))]
        IFF_LOWER_UP;
        /// Interface is in polling mode.
        #[cfg(any(target_os = "dragonfly"))]
        IFF_POLLING_COMPAT;
        /// Unconfigurable using ioctl(2).
        #[cfg(any(target_os = "freebsd"))]
        IFF_CANTCONFIG;
        /// Do not transmit packets.
        #[cfg(any(target_os = "solaris"))]<|fim▁hole|>
        /// User-requested promisc mode.
        #[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
        IFF_PPROMISC;
        /// Just on-link subnet.
        #[cfg(any(target_os = "solaris"))]
        IFF_NOLOCAL;
        /// Echo sent packets. Volatile.
        #[cfg(any(target_os = "fuchsia", target_os = "linux"))]
        IFF_ECHO;
        /// User-requested monitor mode.
        #[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
        IFF_MONITOR;
        /// Address is deprecated.
        #[cfg(any(target_os = "solaris"))]
        IFF_DEPRECATED;
        /// Static ARP.
        #[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
        IFF_STATICARP;
        /// Address from stateless addrconf.
        #[cfg(any(target_os = "solaris"))]
        IFF_ADDRCONF;
        /// Interface is in polling mode.
        #[cfg(any(target_os = "dragonfly"))]
        IFF_NPOLLING;
        /// Router on interface.
        #[cfg(any(target_os = "solaris"))]
        IFF_ROUTER;
        /// Interface is in polling mode.
        #[cfg(any(target_os = "dragonfly"))]
        IFF_IDIRECT;
        /// Interface is winding down
        #[cfg(any(target_os = "freebsd"))]
        IFF_DYING;
        /// No NUD on interface.
        #[cfg(any(target_os = "solaris"))]
        IFF_NONUD;
        /// Interface is being renamed
        #[cfg(any(target_os = "freebsd"))]
        IFF_RENAMING;
        /// Anycast address.
        #[cfg(any(target_os = "solaris"))]
        IFF_ANYCAST;
        /// Don't exchange routing info.
        #[cfg(any(target_os = "solaris"))]
        IFF_NORTEXCH;
        /// Do not provide packet information
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_NO_PI as libc::c_int;
        /// TUN device (no Ethernet headers)
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_TUN as libc::c_int;
        /// TAP device
        #[cfg(any(target_os = "android",
                  target_os = "fuchsia",
                  target_os = "linux"))]
        IFF_TAP as libc::c_int;
        /// IPv4 interface.
        #[cfg(any(target_os = "solaris"))]
        IFF_IPV4;
        /// IPv6 interface.
        #[cfg(any(target_os = "solaris"))]
        IFF_IPV6;
        /// in.mpathd test address
        #[cfg(any(target_os = "solaris"))]
        IFF_NOFAILOVER;
        /// Interface has failed
        #[cfg(any(target_os = "solaris"))]
        IFF_FAILED;
        /// Interface is a hot-spare
        #[cfg(any(target_os = "solaris"))]
        IFF_STANDBY;
        /// Functioning but not used
        #[cfg(any(target_os = "solaris"))]
        IFF_INACTIVE;
        /// Interface is offline
        #[cfg(any(target_os = "solaris"))]
        IFF_OFFLINE;
        #[cfg(any(target_os = "solaris"))]
        IFF_COS_ENABLED;
        /// Prefer as source addr.
        #[cfg(any(target_os = "solaris"))]
        IFF_PREFERRED;
        /// RFC3041
        #[cfg(any(target_os = "solaris"))]
        IFF_TEMPORARY;
        /// MTU set with SIOCSLIFMTU
        #[cfg(any(target_os = "solaris"))]
        IFF_FIXEDMTU;
        /// Cannot send / receive packets
        #[cfg(any(target_os = "solaris"))]
        IFF_VIRTUAL;
        /// Local address in use
        #[cfg(any(target_os = "solaris"))]
        IFF_DUPLICATE;
        /// IPMP IP interface
        #[cfg(any(target_os = "solaris"))]
        IFF_IPMP;
    }
);
IFF_NOXMIT; /// Driver signals dormant. Volatile. #[cfg(any(target_os = "fuchsia", target_os = "linux"))] IFF_DORMANT;
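The if_.rs sample above wraps libc::if_nametoindex to turn names like "eth0" into device numbers. Python's standard library exposes the same POSIX calls, so a minimal sketch of the equivalent lookup looks like this (the interface name "lo" is an assumption that only holds on typical Linux hosts):

import socket

# "lo" is an assumed interface name; an unknown name raises OSError,
# mirroring the Err(Error::last()) path in the Rust sample.
idx = socket.if_nametoindex("lo")
print(idx)
print(socket.if_indextoname(idx))  # round-trips the index back to the name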
<|file_name|>ContextMapperWithControls.java<|end_file_name|><|fim▁begin|>/* * Copyright 2005-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.ldap.core.support; import org.springframework.ldap.core.ContextMapper; import javax.naming.NamingException; import javax.naming.ldap.HasControls; /** * Extension of the {@link org.springframework.ldap.core.ContextMapper} interface that allows * controls to be passed to the mapper implementation. Uses Java 5 covariant * return types to override the return type of the * {@link #mapFromContextWithControls(Object, javax.naming.ldap.HasControls)} method to be the * type parameter T.<|fim▁hole|> * @author Ulrik Sandberg * @param <T> return type of the * {@link #mapFromContextWithControls(Object, javax.naming.ldap.HasControls)} method */ public interface ContextMapperWithControls<T> extends ContextMapper<T> { T mapFromContextWithControls(final Object ctx, final HasControls hasControls) throws NamingException; }<|fim▁end|>
* * @author Tim Terry
<|file_name|>tutorial_pyPandora.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 import os, sys, random pandoraPath = os.getenv('PANDORAPATH', '/usr/local/pandora') sys.path.append(pandoraPath+'/bin') sys.path.append(pandoraPath+'/lib') from pyPandora import Config, World, Agent, SizeInt class MyAgent(Agent): gatheredResources = 0 def __init__(self, id): Agent.__init__( self, id) print('constructing agent: ',self.id) def updateState(self): print('updating state of: ',self.id) newPosition = self.position newPosition._x = newPosition._x + random.randint(-1,1) newPosition._y = newPosition._y + random.randint(-1,1) if self.getWorld().checkPosition(newPosition): self.position = newPosition self.gatheredResources = self.gatheredResources + self.getWorld().getValue('resources', self.position) self.getWorld().setValue('resources', self.position, 0) def registerAttributes(self): self.registerIntAttribute('resources') def serialize(self): print('serializing MyAgent: ',self.id) self.serializeIntAttribute('resources', self.gatheredResources)<|fim▁hole|> class MyWorld(World): def __init__(self, config): World.__init__( self, config) print('constructing MyWorld') def createRasters(self): print('creating rasters') self.registerDynamicRaster("resources", 1) self.getDynamicRaster("resources").setInitValues(0, 10, 0) return def createAgents(self): print('creating agents') for i in range (0, 10): newAgent = MyAgent('MyAgent_'+str(i)) self.addAgent(newAgent) newAgent.setRandomPosition() def main(): print('getting started with pyPandora') numTimeSteps = 10 worldSize = SizeInt(64,64) myConfig = Config(worldSize, numTimeSteps) myWorld = MyWorld(myConfig) myWorld.initialize() myWorld.run() print('simulation finished') if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for react-truncate 2.3 // Project: https://github.com/One-com/react-truncate // Definitions by: Matt Perry <https://github.com/mattvperry> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.8 import * as React from 'react';<|fim▁hole|>export interface TruncateProps extends React.HTMLProps<Truncate> { lines?: number | false; ellipsis?: React.ReactNode; trimWhitespace?: boolean; onTruncate?(isTruncated: boolean): void; } declare class Truncate extends React.Component<TruncateProps> { } export default Truncate;<|fim▁end|>
<|file_name|>hooks.py<|end_file_name|><|fim▁begin|>from contextlib import ContextDecorator from operator import itemgetter from wagtail.utils.apps import get_app_submodules _hooks = {} def register(hook_name, fn=None, order=0): """ Register hook for ``hook_name``. Can be used as a decorator:: @register('hook_name') def my_hook(...): pass or as a function call:: def my_hook(...): pass register('hook_name', my_hook) """ # Pretend to be a decorator if fn is not supplied if fn is None: def decorator(fn): register(hook_name, fn, order=order) return fn return decorator if hook_name not in _hooks: _hooks[hook_name] = [] _hooks[hook_name].append((fn, order)) class TemporaryHook(ContextDecorator): def __init__(self, hook_name, fn, order): self.hook_name = hook_name self.fn = fn self.order = order def __enter__(self): if self.hook_name not in _hooks:<|fim▁hole|> def __exit__(self, exc_type, exc_value, traceback): _hooks[self.hook_name].remove((self.fn, self.order)) def register_temporarily(hook_name, fn, order=0): """ Register hook for ``hook_name`` temporarily. This is useful for testing hooks. Can be used as a decorator:: def my_hook(...): pass class TestMyHook(Testcase): @hooks.register_temporarily('hook_name', my_hook) def test_my_hook(self): pass or as a context manager:: def my_hook(...): pass with hooks.register_temporarily('hook_name', my_hook): # Hook is registered here # Hook is unregistered here """ return TemporaryHook(hook_name, fn, order) _searched_for_hooks = False def search_for_hooks(): global _searched_for_hooks if not _searched_for_hooks: list(get_app_submodules('wagtail_hooks')) _searched_for_hooks = True def get_hooks(hook_name): """ Return the hooks function sorted by their order. """ search_for_hooks() hooks = _hooks.get(hook_name, []) hooks = sorted(hooks, key=itemgetter(1)) return [hook[0] for hook in hooks]<|fim▁end|>
_hooks[self.hook_name] = [] _hooks[self.hook_name].append((self.fn, self.order))
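The hooks.py sample above stores (fn, order) pairs, and get_hooks returns the callables sorted by order, so lower values run first. A hypothetical usage sketch; the hook name, both callbacks, and the wagtail.core import path are assumptions rather than part of the sample:

from wagtail.core import hooks  # assumed import path for the module above

@hooks.register('before_publish', order=10)   # 'before_publish' is a made-up hook name
def runs_second(page):
    print('second', page)

@hooks.register('before_publish', order=-10)
def runs_first(page):
    print('first', page)

# get_hooks sorts by order, so runs_first is invoked before runs_second
for fn in hooks.get_hooks('before_publish'):
    fn(page=None)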
<|file_name|>test.js<|end_file_name|><|fim▁begin|>/* eslint-disable flowtype/require-parameter-type, flowtype/require-return-type, no-magic-numbers */ import {test} from "tap" import {spy} from "sinon" import aside from "./" test(({equal, end}) => { const unction = spy(() => "b") equal(aside([unction])("a"), "a") end()<|fim▁hole|>}) test(({ok, end}) => { const unction = spy(() => "b") aside([unction])("a") ok(unction.calledWith("a")) end() }) test(({equal, end}) => { const unction = spy(() => "b") equal(aside([unction])("a"), "a") end() }) test(({ok, equal, end}) => { const unctionA = spy(() => "b") const unctionB = spy(() => "c") equal(aside([unctionA, unctionB])("a"), "a") ok(unctionA.calledWith("a")) ok(unctionB.calledWith("b")) end() })<|fim▁end|>
<|file_name|>output.js<|end_file_name|><|fim▁begin|><|fim▁hole|> function render() { return _ref; } var _ref2 = /*#__PURE__*/ <div className="foo"><input type="checkbox" checked={true} /></div>; function render() { return _ref2; }<|fim▁end|>
var _ref = /*#__PURE__*/ <foo />;
<|file_name|>form_page.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from jinja2 import TemplateNotFound form_page = Blueprint('form_page', __name__, template_folder='templates') @form_page.route('/<page>') def show(page): try: form = AppForms.query.filter_by(name=page).first() return render_template('form.html', form=form) except TemplateNotFound: abort(404)<|fim▁end|>
from flask import Blueprint, render_template, abort
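The form_page.py sample defines the blueprint but never attaches it to an application. A hypothetical wiring sketch; the module path and URL prefix are assumptions, and the AppForms model used by the view is presumed to be imported elsewhere in the real project:

from flask import Flask
from form_page import form_page  # assumed module path for the blueprint above

app = Flask(__name__)
app.register_blueprint(form_page, url_prefix='/forms')  # serves GET /forms/<page>

if __name__ == '__main__':
    app.run(debug=True)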
<|file_name|>phonelink-off.js<|end_file_name|><|fim▁begin|>/* */ 'use strict'; Object.defineProperty(exports, "__esModule", {value: true}); var _react = require('react'); var _react2 = _interopRequireDefault(_react); var _reactAddonsPureRenderMixin = require('react-addons-pure-render-mixin'); var _reactAddonsPureRenderMixin2 = _interopRequireDefault(_reactAddonsPureRenderMixin); var _svgIcon = require('../../svg-icon'); var _svgIcon2 = _interopRequireDefault(_svgIcon); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : {default: obj};<|fim▁hole|>} var HardwarePhonelinkOff = _react2.default.createClass({ displayName: 'HardwarePhonelinkOff', mixins: [_reactAddonsPureRenderMixin2.default], render: function render() { return _react2.default.createElement(_svgIcon2.default, this.props, _react2.default.createElement('path', {d: 'M22 6V4H6.82l2 2H22zM1.92 1.65L.65 2.92l1.82 1.82C2.18 5.08 2 5.52 2 6v11H0v3h17.73l2.35 2.35 1.27-1.27L3.89 3.62 1.92 1.65zM4 6.27L14.73 17H4V6.27zM23 8h-6c-.55 0-1 .45-1 1v4.18l2 2V10h4v7h-2.18l3 3H23c.55 0 1-.45 1-1V9c0-.55-.45-1-1-1z'})); } }); exports.default = HardwarePhonelinkOff; module.exports = exports['default'];<|fim▁end|>
<|file_name|>references.go<|end_file_name|><|fim▁begin|>// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package source import ( "context" "fmt" "go/ast" "go/types" "golang.org/x/tools/internal/span" ) // ReferenceInfo holds information about reference to an identifier in Go source. type ReferenceInfo struct { Name string Range span.Range ident *ast.Ident obj types.Object } // References returns a list of references for a given identifier within a package. func (i *IdentifierInfo) References(ctx context.Context) ([]*ReferenceInfo, error) { pkg := i.File.GetPackage(ctx) if pkg == nil || pkg.IsIllTyped() { return nil, fmt.Errorf("package for %s is ill typed", i.File.URI()) } pkgInfo := pkg.GetTypesInfo() if pkgInfo == nil { return nil, fmt.Errorf("package %s has no types info", pkg.PkgPath()) } // If the object declaration is nil, assume it is an import spec and do not look for references. if i.decl.obj == nil { return []*ReferenceInfo{}, nil } <|fim▁hole|> // This occurs when the variable is declared in a type switch statement // or is an implicit package name. references = append(references, &ReferenceInfo{ Name: i.decl.obj.Name(), Range: i.decl.rng, obj: i.decl.obj, }) } for ident, obj := range pkgInfo.Defs { if obj == nil || obj.Pos() != i.decl.obj.Pos() { continue } references = append(references, &ReferenceInfo{ Name: ident.Name, Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()), ident: ident, obj: obj, }) } for ident, obj := range pkgInfo.Uses { if obj == nil || obj.Pos() != i.decl.obj.Pos() { continue } references = append(references, &ReferenceInfo{ Name: ident.Name, Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()), ident: ident, obj: obj, }) } return references, nil }<|fim▁end|>
var references []*ReferenceInfo if i.decl.wasImplicit { // The definition is implicit, so we must add it separately.
<|file_name|>url.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use extra::url; use extra::url::Url; use collections::HashMap; use std::os; /** Create a URL object from a string. Does various helpful browsery things like * If there's no current url and the path looks like a file then it will create a file url based of the current working directory * If there's a current url and the new path is relative then the new url is based off the current url */ // TODO: about:failure-> pub fn parse_url(str_url: &str, base_url: Option<Url>) -> Url { let str_url = str_url.trim_chars(& &[' ', '\t', '\n', '\r', '\x0C']).to_owned(); let schm = url::get_scheme(str_url); let str_url = match schm { Err(_) => { if base_url.is_none() { // Assume we've been given a file path. If it's absolute just return // it, otherwise make it absolute with the cwd. if str_url.starts_with("/") { ~"file://" + str_url } else { let mut path = os::getcwd(); path.push(str_url); // FIXME (#1094): not the right way to transform a path ~"file://" + path.display().to_str() } } else { let base_url = base_url.unwrap(); debug!("parse_url: base_url: {:?}", base_url); let mut new_url = base_url.clone(); new_url.query = ~[]; new_url.fragment = None; if str_url.starts_with("//") { new_url.scheme + ":" + str_url } else if base_url.path.is_empty() || str_url.starts_with("/") { new_url.path = ~"/"; new_url.to_str() + str_url.trim_left_chars(&'/')<|fim▁hole|> let base_path = base_url.path.trim_right_chars(&|c: char| c != '/'); new_url.path = base_path.to_owned(); new_url.to_str() + str_url } } }, Ok((scheme, page)) => { match scheme.as_slice() { "about" => { match page.as_slice() { "crash" => { fail!("about:crash"); } "failure" => { let mut path = os::self_exe_path().expect("can't get exe path"); path.push("../src/test/html/failure.html"); // FIXME (#1094): not the right way to transform a path ~"file://" + path.display().to_str() } // TODO: handle the rest of the about: pages _ => str_url } }, "data" => { // Drop whitespace within data: URLs, e.g. newlines within a base64 // src="..." block. Whitespace intended as content should be // %-encoded or base64'd. 
str_url.chars().filter(|&c| !c.is_whitespace()).collect() }, _ => str_url } } }; // FIXME: Need to handle errors url::from_str(str_url).ok().expect("URL parsing failed") } #[cfg(test)] mod parse_url_tests { use super::parse_url; use std::os; #[test] fn should_create_absolute_file_url_if_base_url_is_none_and_str_url_looks_filey() { let file = "local.html"; let url = parse_url(file, None); debug!("url: {:?}", url); assert!(url.scheme == ~"file"); let path = os::getcwd(); // FIXME (#1094): not the right way to transform a path assert!(url.path.contains(path.display().to_str())); } #[test] fn should_create_url_based_on_old_url_1() { let old_str = "http://example.com"; let old_url = parse_url(old_str, None); let new_str = "index.html"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/index.html"); } #[test] fn should_create_url_based_on_old_url_2() { let old_str = "http://example.com/"; let old_url = parse_url(old_str, None); let new_str = "index.html"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/index.html"); } #[test] fn should_create_url_based_on_old_url_3() { let old_str = "http://example.com/index.html"; let old_url = parse_url(old_str, None); let new_str = "crumpet.html"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/crumpet.html"); } #[test] fn should_create_url_based_on_old_url_4() { let old_str = "http://example.com/snarf/index.html"; let old_url = parse_url(old_str, None); let new_str = "crumpet.html"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/snarf/crumpet.html"); } #[test] fn should_create_url_based_on_old_url_5() { let old_str = "http://example.com/index.html"; let old_url = parse_url(old_str, None); let new_str = "#top"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/index.html"); assert!(new_url.fragment == Some(~"top")); } #[test] fn should_create_url_based_on_old_url_6() { use extra::url::UserInfo; let old_str = "http://foo:[email protected]:8080/index.html"; let old_url = parse_url(old_str, None); let new_str = "#top"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"http"); assert!(new_url.user == Some(UserInfo { user: ~"foo", pass: Some(~"bar") })); assert!(new_url.host == ~"example.com"); assert!(new_url.port == Some(~"8080")); assert!(new_url.path == ~"/index.html"); assert!(new_url.fragment == Some(~"top")); } #[test] fn should_create_url_based_on_old_url_7() { let old_str = "https://example.com/snarf/index.html"; let old_url = parse_url(old_str, None); let new_str = "//example.com/crumpet.html"; let new_url = parse_url(new_str, Some(old_url)); assert!(new_url.scheme == ~"https"); assert!(new_url.host == ~"example.com"); assert!(new_url.path == ~"/crumpet.html"); } } pub type UrlMap<T> = HashMap<Url, T>; pub fn url_map<T: Clone + 'static>() -> UrlMap<T> { HashMap::new() } pub fn is_image_data(uri: &str) -> bool { static types: &'static [&'static str] = &[&"data:image/png", &"data:image/gif", &"data:image/jpeg"]; types.iter().any(|&type_| uri.starts_with(type_)) }<|fim▁end|>
} else if str_url.starts_with("#") { new_url.to_str() + str_url } else { // relative path
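Outside the file:// and about: special cases, the relative-resolution behaviour pinned down by the url.rs tests above matches standard RFC 3986 joining. For illustration only, Python's urllib.parse.urljoin reproduces the same cases; it implements none of the sample's browser conveniences:

from urllib.parse import urljoin

# same expectations as should_create_url_based_on_old_url_4, _5 and _7 above
print(urljoin("http://example.com/snarf/index.html", "crumpet.html"))
# -> http://example.com/snarf/crumpet.html
print(urljoin("http://example.com/index.html", "#top"))
# -> http://example.com/index.html#top
print(urljoin("https://example.com/snarf/index.html", "//example.com/crumpet.html"))
# -> https://example.com/crumpet.html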
<|file_name|>adminNotificationDataService.js<|end_file_name|><|fim▁begin|>/** * Copyright 2014 Pacific Controls Software Services LLC (PCSS). All Rights Reserved. * * This software is the property of Pacific Controls Software Services LLC and its * suppliers. The intellectual and technical concepts contained herein are proprietary * to PCSS. Dissemination of this information or reproduction of this material is * strictly forbidden unless prior written permission is obtained from Pacific * Controls Software Services. * * PCSS MAKES NO REPRESENTATION OR WARRANTIES ABOUT THE SUITABILITY OF THE SOFTWARE, * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF<|fim▁hole|>* OR DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES. */ /** * Version : 1.0 * User : pcseg306 * Function : Service for Super/client Admin Notification Functions */ gxMainApp.factory("adminFunctionsService", function($http,$rootScope,gxAPIServiceWrapper){ var _notificationArray = []; var _resultPromise; var _getNotificationArray = function() { _resultPromise = gxAPIServiceWrapper.get("models/superAdmin/dummySuperAdminNotification.json"); console.log(_resultPromise); return _resultPromise; } return{ notificationArray: _notificationArray, getNotificationArray: _getNotificationArray, resultPromise : _resultPromise }; });<|fim▁end|>
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. PCSS SHALL * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
<|file_name|>CAM-resnet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # File: CAM-resnet.py import cv2 import sys import argparse import numpy as np import os import multiprocessing<|fim▁hole|>import tensorflow as tf from tensorpack import * from tensorpack.dataflow import dataset from tensorpack.tfutils import optimizer, gradproc from tensorpack.tfutils.symbolic_functions import * from tensorpack.tfutils.summary import * from tensorpack.utils.gpu import get_num_gpu from tensorpack.utils import viz from imagenet_utils import ( fbresnet_augmentor, ImageNetModel) from resnet_model import ( preresnet_basicblock, preresnet_group) TOTAL_BATCH_SIZE = 256 DEPTH = None class Model(ImageNetModel): def get_logits(self, image): cfg = { 18: ([2, 2, 2, 2], preresnet_basicblock), 34: ([3, 4, 6, 3], preresnet_basicblock), } defs, block_func = cfg[DEPTH] with argscope(Conv2D, use_bias=False, kernel_initializer=tf.variance_scaling_initializer(scale=2.0, mode='fan_out')), \ argscope([Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm], data_format='channels_first'): convmaps = (LinearWrap(image) .Conv2D('conv0', 64, 7, strides=2, activation=BNReLU) .MaxPooling('pool0', 3, strides=2, padding='SAME') .apply2(preresnet_group, 'group0', block_func, 64, defs[0], 1) .apply2(preresnet_group, 'group1', block_func, 128, defs[1], 2) .apply2(preresnet_group, 'group2', block_func, 256, defs[2], 2) .apply2(preresnet_group, 'group3new', block_func, 512, defs[3], 1)()) print(convmaps) convmaps = GlobalAvgPooling('gap', convmaps) logits = FullyConnected('linearnew', convmaps, 1000) return logits def optimizer(self): lr = tf.get_variable('learning_rate', initializer=0.1, trainable=False) opt = tf.train.MomentumOptimizer(lr, 0.9, use_nesterov=True) gradprocs = [gradproc.ScaleGradient( [('conv0.*', 0.1), ('group[0-2].*', 0.1)])] return optimizer.apply_grad_processors(opt, gradprocs) def get_data(train_or_test): # completely copied from imagenet-resnet.py example isTrain = train_or_test == 'train' datadir = args.data ds = dataset.ILSVRC12(datadir, train_or_test, shuffle=isTrain) augmentors = fbresnet_augmentor(isTrain) augmentors.append(imgaug.ToUint8()) ds = AugmentImageComponent(ds, augmentors, copy=False) if isTrain: ds = PrefetchDataZMQ(ds, min(25, multiprocessing.cpu_count())) ds = BatchData(ds, BATCH_SIZE, remainder=not isTrain) return ds def get_config(): dataset_train = get_data('train') dataset_val = get_data('val') return TrainConfig( model=Model(), dataflow=dataset_train, callbacks=[ ModelSaver(), PeriodicTrigger(InferenceRunner(dataset_val, [ ClassificationError('wrong-top1', 'val-error-top1'), ClassificationError('wrong-top5', 'val-error-top5')]), every_k_epochs=2), ScheduledHyperParamSetter('learning_rate', [(30, 1e-2), (55, 1e-3), (75, 1e-4), (95, 1e-5)]), ], steps_per_epoch=5000, max_epoch=105, ) def viz_cam(model_file, data_dir): ds = get_data('val') pred_config = PredictConfig( model=Model(), session_init=get_model_loader(model_file), input_names=['input', 'label'], output_names=['wrong-top1', 'group3new/bnlast/Relu', 'linearnew/W'], return_input=True ) meta = dataset.ILSVRCMeta().get_synset_words_1000() pred = SimpleDatasetPredictor(pred_config, ds) cnt = 0 for inp, outp in pred.get_result(): images, labels = inp wrongs, convmaps, W = outp batch = wrongs.shape[0] for i in range(batch): if wrongs[i]: continue weight = W[:, [labels[i]]].T # 512x1 convmap = convmaps[i, :, :, :] # 512xhxw mergedmap = np.matmul(weight, convmap.reshape((512, -1))).reshape(14, 14) mergedmap = 
cv2.resize(mergedmap, (224, 224)) heatmap = viz.intensity_to_rgb(mergedmap, normalize=True) blend = images[i] * 0.5 + heatmap * 0.5 concat = np.concatenate((images[i], heatmap, blend), axis=1) classname = meta[labels[i]].split(',')[0] cv2.imwrite('cam{}-{}.jpg'.format(cnt, classname), concat) cnt += 1 if cnt == 500: return if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.') parser.add_argument('--data', help='ILSVRC dataset dir') parser.add_argument('--depth', type=int, default=18) parser.add_argument('--load', help='load model') parser.add_argument('--cam', action='store_true', help='run visualization') args = parser.parse_args() DEPTH = args.depth if args.gpu: os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu num_gpu = get_num_gpu() BATCH_SIZE = TOTAL_BATCH_SIZE // num_gpu if args.cam: BATCH_SIZE = 128 # something that can run on one gpu viz_cam(args.load, args.data) sys.exit() logger.auto_set_dir() config = get_config() if args.load: config.session_init = get_model_loader(args.load) launch_train_with_config(config, SyncMultiGPUTrainerParameterServer(num_gpu))<|fim▁end|>
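The class-activation map built in viz_cam above is a weighted sum over the last convolutional layer: the 512-entry classifier row for the target class multiplies the 512x14x14 feature block, and the 14x14 result is resized to 224x224 before blending. A shape-only restatement with random arrays standing in for network activations:

import numpy as np

convmap = np.random.rand(512, 14, 14)  # stands in for the final conv features of one image
weight = np.random.rand(1, 512)        # stands in for the W[:, [label]].T classifier row

cam = np.matmul(weight, convmap.reshape(512, -1)).reshape(14, 14)
print(cam.shape)  # (14, 14), upsampled to (224, 224) in the sample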
<|file_name|>integer_replacement.py<|end_file_name|><|fim▁begin|>""" Given a positive integer n and you can do operations as follow: If n is even, replace n with n/2. If n is odd, you can replace n with either n + 1 or n - 1. What is the minimum number of replacements needed for n to become 1? Example 1: Input: 8 Output: 3 Explanation: 8 -> 4 -> 2 -> 1 Example 2: Input: 7 Output: 4 Explanation: 7 -> 8 -> 4 -> 2 -> 1 or 7 -> 6 -> 3 -> 2 -> 1 """ class Solution(object): def integerReplacement(self, n): """ :type n: int :rtype: int """ count = 0 while n > 1: count += 1 if n % 2 == 0: n /= 2 elif (n+1) % 4 == 0 and (n-1) > 2: n += 1 else:<|fim▁hole|><|fim▁end|>
n -= 1 return count
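The completed solution's greedy rule: halve even numbers; for odd n, increment only when n + 1 is divisible by 4 and n > 3 (which exposes two consecutive halvings), otherwise decrement. A quick sanity check against the docstring examples, assuming the Solution class above is in scope:

s = Solution()
assert s.integerReplacement(8) == 3  # 8 -> 4 -> 2 -> 1
assert s.integerReplacement(7) == 4  # 7 -> 8 -> 4 -> 2 -> 1
assert s.integerReplacement(3) == 2  # 3 -> 2 -> 1; the (n-1) > 2 guard stops 3 from climbing to 4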
<|file_name|>test_plot.py<|end_file_name|><|fim▁begin|>import pytest import eagerpy as ep import foolbox as fbn def test_plot(dummy: ep.Tensor) -> None: # just tests that the calls don't throw any errors images = ep.zeros(dummy, (10, 3, 32, 32)) fbn.plot.images(images) fbn.plot.images(images, n=3) fbn.plot.images(images, n=3, data_format="channels_first") fbn.plot.images(images, nrows=4) fbn.plot.images(images, ncols=3) fbn.plot.images(images, nrows=2, ncols=6) fbn.plot.images(images, nrows=2, ncols=4) # test for single channel images images = ep.zeros(dummy, (10, 32, 32, 1)) fbn.plot.images(images) with pytest.raises(ValueError): images = ep.zeros(dummy, (10, 3, 3, 3)) fbn.plot.images(images) with pytest.raises(ValueError): images = ep.zeros(dummy, (10, 1, 1, 1)) fbn.plot.images(images) with pytest.raises(ValueError): images = ep.zeros(dummy, (10, 32, 32)) fbn.plot.images(images) with pytest.raises(ValueError):<|fim▁hole|><|fim▁end|>
images = ep.zeros(dummy, (10, 3, 32, 32)) fbn.plot.images(images, data_format="foo")
<|file_name|>UserProfileDB.py<|end_file_name|><|fim▁begin|>""" UserProfileDB class is a front-end to the User Profile Database """ from __future__ import print_function __RCSID__ = "$Id$" import os import sys import hashlib from DIRAC import S_OK, S_ERROR, gLogger, gConfig from DIRAC.Core.Utilities import Time from DIRAC.ConfigurationSystem.Client.Helpers import Registry from DIRAC.Core.Base.DB import DB class UserProfileDB(DB): """ UserProfileDB class is a front-end to the User Profile Database """ tableDict = {'up_Users': {'Fields': {'Id': 'INTEGER AUTO_INCREMENT NOT NULL', 'UserName': 'VARCHAR(32) NOT NULL', 'LastAccess': 'DATETIME', }, 'PrimaryKey': 'Id', 'UniqueIndexes': {'U': ['UserName']}, 'Engine': 'InnoDB', }, 'up_Groups': {'Fields': {'Id': 'INTEGER AUTO_INCREMENT NOT NULL', 'UserGroup': 'VARCHAR(32) NOT NULL', 'LastAccess': 'DATETIME', }, 'PrimaryKey': 'Id', 'UniqueIndexes': {'G': ['UserGroup']}, 'Engine': 'InnoDB', }, 'up_VOs': {'Fields': {'Id': 'INTEGER AUTO_INCREMENT NOT NULL', 'VO': 'VARCHAR(32) NOT NULL', 'LastAccess': 'DATETIME', }, 'PrimaryKey': 'Id', 'UniqueIndexes': {'VO': ['VO']}, 'Engine': 'InnoDB', }, 'up_ProfilesData': {'Fields': {'UserId': 'INTEGER', 'GroupId': 'INTEGER', 'VOId': 'INTEGER', 'Profile': 'VARCHAR(255) NOT NULL', 'VarName': 'VARCHAR(255) NOT NULL', 'Data': 'BLOB', 'ReadAccess': 'VARCHAR(10) DEFAULT "USER"', 'PublishAccess': 'VARCHAR(10) DEFAULT "USER"', }, 'PrimaryKey': ['UserId', 'GroupId', 'Profile', 'VarName'], 'Indexes': {'ProfileKey': ['UserId', 'GroupId', 'Profile'], 'UserKey': ['UserId'], }, 'Engine': 'InnoDB', }, 'up_HashTags': {'Fields': {'UserId': 'INTEGER', 'GroupId': 'INTEGER', 'VOId': 'INTEGER', 'HashTag': 'VARCHAR(32) NOT NULL', 'TagName': 'VARCHAR(255) NOT NULL', 'LastAccess': 'DATETIME', }, 'PrimaryKey': ['UserId', 'GroupId', 'TagName'], 'Indexes': {'HashKey': ['UserId', 'HashTag']}, 'Engine': 'InnoDB', }, } def __init__(self): """ Constructor """ self.__permValues = ['USER', 'GROUP', 'VO', 'ALL'] self.__permAttrs = ['ReadAccess', 'PublishAccess'] DB.__init__(self, 'UserProfileDB', 'Framework/UserProfileDB') retVal = self.__initializeDB() if not retVal['OK']: raise Exception("Can't create tables: %s" % retVal['Message']) def _checkTable(self): """ Make sure the tables are created """ return self.__initializeDB() def __initializeDB(self): """ Create the tables """ retVal = self._query("show tables") if not retVal['OK']: return retVal tablesInDB = [t[0] for t in retVal['Value']] tablesD = {} if 'up_Users' not in tablesInDB: tablesD['up_Users'] = self.tableDict['up_Users'] if 'up_Groups' not in tablesInDB: tablesD['up_Groups'] = self.tableDict['up_Groups'] if 'up_VOs' not in tablesInDB: tablesD['up_VOs'] = self.tableDict['up_VOs'] if 'up_ProfilesData' not in tablesInDB: tablesD['up_ProfilesData'] = self.tableDict['up_ProfilesData'] if 'up_HashTags' not in tablesInDB: tablesD['up_HashTags'] = self.tableDict['up_HashTags'] return self._createTables(tablesD) def __getUserId(self, userName, insertIfMissing=True): return self.__getObjId(userName, 'UserName', 'up_Users', insertIfMissing) def __getGroupId(self, groupName, insertIfMissing=True): return self.__getObjId(groupName, 'UserGroup', 'up_Groups', insertIfMissing) def __getVOId(self, voName, insertIfMissing=True): return self.__getObjId(voName, 'VO', 'up_VOs', insertIfMissing) def __getObjId(self, objValue, varName, tableName, insertIfMissing=True): result = self.getFields(tableName, ['Id'], {varName: objValue}) if not result['OK']: return result data = result['Value'] if len(data) > 0: 
objId = data[0][0] self.updateFields(tableName, ['LastAccess'], ['UTC_TIMESTAMP()'], {'Id': objId}) return S_OK(objId) if not insertIfMissing: return S_ERROR("No entry %s for %s defined in the DB" % (objValue, varName)) result = self.insertFields(tableName, [varName, 'LastAccess'], [objValue, 'UTC_TIMESTAMP()']) if not result['OK']: return result return S_OK(result['lastRowId']) def getUserGroupIds(self, userName, userGroup, insertIfMissing=True): result = self.__getUserId(userName, insertIfMissing) if not result['OK']: return result userId = result['Value'] result = self.__getGroupId(userGroup, insertIfMissing) if not result['OK']: return result groupId = result['Value'] userVO = Registry.getVOForGroup(userGroup) if not userVO: userVO = "undefined" result = self.__getVOId(userVO, insertIfMissing) if not result['OK']: return result voId = result['Value'] return S_OK((userId, groupId, voId)) def deleteUserProfile(self, userName, userGroup=False): """ Delete the profiles for a user """ result = self.__getUserId(userName) if not result['OK']: return result userId = result['Value'] condDict = {'UserId': userId} if userGroup: result = self.__getGroupId(userGroup) if not result['OK']: return result groupId = result['Value'] condDict['GroupId'] = groupId result = self.deleteEntries('up_ProfilesData', condDict) if not result['OK'] or not userGroup: return result return self.deleteEntries('up_Users', {'Id': userId}) def __webProfileUserDataCond(self, userIds, sqlProfileName=False, sqlVarName=False): condSQL = ['`up_ProfilesData`.UserId=%s' % userIds[0], '`up_ProfilesData`.GroupId=%s' % userIds[1], '`up_ProfilesData`.VOId=%s' % userIds[2]] if sqlProfileName: condSQL.append('`up_ProfilesData`.Profile=%s' % sqlProfileName) if sqlVarName: condSQL.append('`up_ProfilesData`.VarName=%s' % sqlVarName) return " AND ".join(condSQL) def __webProfileReadAccessDataCond(self, userIds, ownerIds, sqlProfileName, sqlVarName=False, match=False): permCondSQL = [] sqlCond = [] if match: sqlCond.append('`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % (ownerIds[0], ownerIds[1])) else: permCondSQL.append( '`up_ProfilesData`.UserId = %s AND `up_ProfilesData`.GroupId = %s' % (ownerIds[0], ownerIds[1])) permCondSQL.append('`up_ProfilesData`.GroupId=%s AND `up_ProfilesData`.ReadAccess="GROUP"' % userIds[1]) permCondSQL.append('`up_ProfilesData`.VOId=%s AND `up_ProfilesData`.ReadAccess="VO"' % userIds[2]) permCondSQL.append('`up_ProfilesData`.ReadAccess="ALL"') sqlCond.append('`up_ProfilesData`.Profile = %s' % sqlProfileName) if sqlVarName: sqlCond.append("`up_ProfilesData`.VarName = %s" % (sqlVarName)) # Perms sqlCond.append("( ( %s ) )" % " ) OR ( ".join(permCondSQL)) return " AND ".join(sqlCond) def __parsePerms(self, perms, addMissing=True): normPerms = {} for pName in self.__permAttrs: if not perms or pName not in perms: if addMissing: normPerms[pName] = self.__permValues[0] continue else: permVal = perms[pName].upper() for nV in self.__permValues: if nV == permVal: normPerms[pName] = nV break if pName not in normPerms and addMissing: normPerms[pName] = self.__permValues[0] return normPerms def retrieveVarById(self, userIds, ownerIds, profileName, varName): """ Get a data entry for a profile """ result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] result = self._escapeString(varName) if not result['OK']: return result sqlVarName = result['Value'] sqlCond = self.__webProfileReadAccessDataCond(userIds, ownerIds, sqlProfileName, sqlVarName, True) # 
when we retrieve the user profile we have to take into account the user. selectSQL = "SELECT data FROM `up_ProfilesData` WHERE %s" % sqlCond result = self._query(selectSQL) if not result['OK']: return result data = result['Value'] if len(data) > 0: return S_OK(data[0][0]) return S_ERROR("No data for userIds %s profileName %s varName %s" % (userIds, profileName, varName)) def retrieveAllUserVarsById(self, userIds, profileName): """ Get a data entry for a profile """ result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] sqlCond = self.__webProfileUserDataCond(userIds, sqlProfileName) selectSQL = "SELECT varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond result = self._query(selectSQL) if not result['OK']: return result data = result['Value'] return S_OK(dict(data)) def retrieveUserProfilesById(self, userIds): """ Get all profiles and data for a user """ sqlCond = self.__webProfileUserDataCond(userIds) selectSQL = "SELECT Profile, varName, data FROM `up_ProfilesData` WHERE %s" % sqlCond result = self._query(selectSQL) if not result['OK']: return result data = result['Value'] dataDict = {} for row in data: if row[0] not in dataDict: dataDict[row[0]] = {} dataDict[row[0]][row[1]] = row[2] return S_OK(dataDict) def retrieveVarPermsById(self, userIds, ownerIds, profileName, varName): """ Get a data entry for a profile """ result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] result = self._escapeString(varName) if not result['OK']: return result sqlVarName = result['Value'] sqlCond = self.__webProfileReadAccessDataCond(userIds, ownerIds, sqlProfileName, sqlVarName) selectSQL = "SELECT %s FROM `up_ProfilesData` WHERE %s" % (", ".join(self.__permAttrs), sqlCond) result = self._query(selectSQL) if not result['OK']: return result data = result['Value'] if len(data) > 0: permDict = {} for i in range(len(self.__permAttrs)): permDict[self.__permAttrs[i]] = data[0][i] return S_OK(permDict) return S_ERROR("No data for userIds %s profileName %s varName %s" % (userIds, profileName, varName)) def deleteVarByUserId(self, userIds, profileName, varName): """ Remove a data entry for a profile """ result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] result = self._escapeString(varName) if not result['OK']: return result sqlVarName = result['Value'] sqlCond = self.__webProfileUserDataCond(userIds, sqlProfileName, sqlVarName) selectSQL = "DELETE FROM `up_ProfilesData` WHERE %s" % sqlCond return self._update(selectSQL) def storeVarByUserId(self, userIds, profileName, varName, data, perms): """ Set a data entry for a profile """ sqlInsertValues = [] sqlInsertKeys = [] sqlInsertKeys.append(('UserId', userIds[0])) sqlInsertKeys.append(('GroupId', userIds[1])) sqlInsertKeys.append(('VOId', userIds[2])) result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] sqlInsertKeys.append(('Profile', sqlProfileName)) result = self._escapeString(varName) if not result['OK']: return result sqlVarName = result['Value'] sqlInsertKeys.append(('VarName', sqlVarName)) result = self._escapeString(data) if not result['OK']: return result sqlInsertValues.append(('Data', result['Value'])) normPerms = self.__parsePerms(perms) for k in normPerms: sqlInsertValues.append((k, '"%s"' % normPerms[k])) sqlInsert = sqlInsertKeys + sqlInsertValues insertSQL = "INSERT INTO `up_ProfilesData` ( %s ) VALUES ( %s )" % (", ".join([f[0] 
for f in sqlInsert]), ", ".join([str(f[1]) for f in sqlInsert])) result = self._update(insertSQL) if result['OK']: return result # If error and not duplicate -> real error if result['Message'].find("Duplicate entry") == -1: return result updateSQL = "UPDATE `up_ProfilesData` SET %s WHERE %s" % (", ".join(["%s=%s" % f for f in sqlInsertValues]), self.__webProfileUserDataCond(userIds, sqlProfileName, sqlVarName)) return self._update(updateSQL) def setUserVarPermsById(self, userIds, profileName, varName, perms): result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] result = self._escapeString(varName) if not result['OK']: return result sqlVarName = result['Value'] nPerms = self.__parsePerms(perms, False) if not nPerms: return S_OK() sqlPerms = ",".join(["%s='%s'" % (k, nPerms[k]) for k in nPerms]) updateSql = "UPDATE `up_ProfilesData` SET %s WHERE %s" % (sqlPerms, self.__webProfileUserDataCond(userIds, sqlProfileName, sqlVarName)) return self._update(updateSql) def retrieveVar(self, userName, userGroup, ownerName, ownerGroup, profileName, varName): """ Get a data entry for a profile """ result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] result = self.getUserGroupIds(ownerName, ownerGroup) if not result['OK']: return result ownerIds = result['Value'] return self.retrieveVarById(userIds, ownerIds, profileName, varName) def retrieveUserProfiles(self, userName, userGroup): """ Helper for getting data """ result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.retrieveUserProfilesById(userIds) def retrieveAllUserVars(self, userName, userGroup, profileName): """ Helper for getting data """ result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.retrieveAllUserVarsById(userIds, profileName) def retrieveVarPerms(self, userName, userGroup, ownerName, ownerGroup, profileName, varName): result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] result = self.getUserGroupIds(ownerName, ownerGroup, False) if not result['OK']: return result ownerIds = result['Value'] return self.retrieveVarPermsById(userIds, ownerIds, profileName, varName) def setUserVarPerms(self, userName, userGroup, profileName, varName, perms): result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.setUserVarPermsById(userIds, profileName, varName, perms) def storeVar(self, userName, userGroup, profileName, varName, data, perms=None): """ Helper for setting data """ try: result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.storeVarByUserId(userIds, profileName, varName, data, perms=perms) finally: pass def deleteVar(self, userName, userGroup, profileName, varName): """ Helper for deleting data """ try: result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.deleteVarByUserId(userIds, profileName, varName) finally: pass def __profilesCondGenerator(self, value, varType, initialValue=False): if isinstance(value, basestring): value = [value] ids = [] if initialValue: ids.append(initialValue) for val in value: if varType == 'user': result = self.__getUserId(val, insertIfMissing=False) elif varType == 'group': result = 
self.__getGroupId(val, insertIfMissing=False) else: result = self.__getVOId(val, insertIfMissing=False) if not result['OK']: continue ids.append(result['Value']) if varType == 'user': fieldName = 'UserId' elif varType == 'group': fieldName = 'GroupId' else: fieldName = 'VOId' return "`up_ProfilesData`.%s in ( %s )" % (fieldName, ", ".join([str(iD) for iD in ids])) def listVarsById(self, userIds, profileName, filterDict=None): result = self._escapeString(profileName) if not result['OK']: return result sqlProfileName = result['Value'] sqlCond = ["`up_Users`.Id = `up_ProfilesData`.UserId", "`up_Groups`.Id = `up_ProfilesData`.GroupId", "`up_VOs`.Id = `up_ProfilesData`.VOId", self.__webProfileReadAccessDataCond(userIds, userIds, sqlProfileName)] if filterDict: fD = {} for k in filterDict: fD[k.lower()] = filterDict[k] filterDict = fD for k in ('user', 'group', 'vo'): if k in filterDict: sqlCond.append(self.__profilesCondGenerator(filterDict[k], k)) sqlVars2Get = ["`up_Users`.UserName", "`up_Groups`.UserGroup", "`up_VOs`.VO", "`up_ProfilesData`.VarName"] sqlQuery = "SELECT %s FROM `up_Users`, `up_Groups`, `up_VOs`, `up_ProfilesData` WHERE %s" % (", ".join(sqlVars2Get), " AND ".join(sqlCond)) return self._query(sqlQuery) def listVars(self, userName, userGroup, profileName, filterDict=None): result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.listVarsById(userIds, profileName, filterDict) def storeHashTagById(self, userIds, tagName, hashTag=False): """ Set a data entry for a profile """ if not hashTag: hashTag = hashlib.md5() hashTag.update("%s;%s;%s" % (Time.dateTime(), userIds, tagName)) hashTag = hashTag.hexdigest() result = self.insertFields('up_HashTags', ['UserId', 'GroupId', 'VOId', 'TagName', 'HashTag'], [userIds[0], userIds[1], userIds[2], tagName, hashTag]) if result['OK']: return S_OK(hashTag) # If error and not duplicate -> real error if result['Message'].find("Duplicate entry") == -1: return result result = self.updateFields('up_HashTags', ['HashTag'], [hashTag], {'UserId': userIds[0], 'GroupId': userIds[1], 'VOId': userIds[2], 'TagName': tagName}) if not result['OK']: return result return S_OK(hashTag) def retrieveHashTagById(self, userIds, hashTag): """ Get a data entry for a profile """ result = self.getFields('up_HashTags', ['TagName'], {'UserId': userIds[0], 'GroupId': userIds[1], 'VOId': userIds[2], 'HashTag': hashTag}) if not result['OK']: return result data = result['Value'] if len(data) > 0: return S_OK(data[0][0]) return S_ERROR("No data for combo userId %s hashTag %s" % (userIds, hashTag)) def retrieveAllHashTagsById(self, userIds): """ Get a data entry for a profile """ result = self.getFields('up_HashTags', ['HashTag', 'TagName'], {'UserId': userIds[0], 'GroupId': userIds[1], 'VOId': userIds[2]}) if not result['OK']: return result data = result['Value'] return S_OK(dict(data)) def storeHashTag(self, userName, userGroup, tagName, hashTag=False): """ Helper for storing HASH """ try: result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.storeHashTagById(userIds, tagName, hashTag) finally: pass def retrieveHashTag(self, userName, userGroup, hashTag):<|fim▁hole|> """ try: result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.retrieveHashTagById(userIds, hashTag) finally: pass def retrieveAllHashTags(self, userName, userGroup): """ Helper for retrieving HASH """ try: 
result = self.getUserGroupIds(userName, userGroup) if not result['OK']: return result userIds = result['Value'] return self.retrieveAllHashTagsById(userIds) finally: pass def getUserProfileNames(self, permission): """ it returns the available profile names by not taking account the permission: ReadAccess and PublishAccess """ result = None permissions = self.__parsePerms(permission, False) if not permissions: return S_OK() condition = ",".join(["%s='%s'" % (k, permissions[k]) for k in permissions]) query = "SELECT distinct Profile from `up_ProfilesData` where %s" % condition retVal = self._query(query) if retVal['OK']: result = S_OK([i[0] for i in retVal['Value']]) else: result = retVal return result def testUserProfileDB(): """ Some test cases """ # building up some fake CS values gConfig.setOptionValue('DIRAC/Setup', 'Test') gConfig.setOptionValue('/DIRAC/Setups/Test/Framework', 'Test') host = '127.0.0.1' user = 'Dirac' pwd = 'Dirac' db = 'AccountingDB' gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/Host', host) gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/DBName', db) gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/User', user) gConfig.setOptionValue('/Systems/Framework/Test/Databases/UserProfileDB/Password', pwd) db = UserProfileDB() assert db._connect()['OK'] userName = 'testUser' userGroup = 'testGroup' profileName = 'testProfile' varName = 'testVar' tagName = 'testTag' hashTag = '237cadc4af90277e9524e6386e264630' data = 'testData' perms = 'USER' try: if False: for tableName in db.tableDict.keys(): result = db._update('DROP TABLE `%s`' % tableName) assert result['OK'] gLogger.info('\n Creating Table\n') # Make sure it is there and it has been created for this test result = db._checkTable() assert result == {'OK': True, 'Value': None} result = db._checkTable() assert result == {'OK': True, 'Value': 0} gLogger.info('\n Adding some data\n') result = db.storeVar(userName, userGroup, profileName, varName, data, perms) assert result['OK'] assert result['Value'] == 1 gLogger.info('\n Some queries\n') result = db.getUserGroupIds(userName, userGroup) assert result['OK'] assert result['Value'] == (1, 1, 1) result = db.listVars(userName, userGroup, profileName) assert result['OK'] assert result['Value'][0][3] == varName result = db.retrieveUserProfiles(userName, userGroup) assert result['OK'] assert result['Value'] == {profileName: {varName: data}} result = db.storeHashTag(userName, userGroup, tagName, hashTag) assert result['OK'] assert result['Value'] == hashTag result = db.retrieveAllHashTags(userName, userGroup) assert result['OK'] assert result['Value'] == {hashTag: tagName} result = db.retrieveHashTag(userName, userGroup, hashTag) assert result['OK'] assert result['Value'] == tagName gLogger.info('\n OK\n') except AssertionError: print('ERROR ', end=' ') if not result['OK']: print(result['Message']) else: print(result) sys.exit(1) if __name__ == '__main__': from DIRAC.Core.Base import Script Script.parseCommandLine() gLogger.setLevel('VERBOSE') if 'PYTHONOPTIMIZE' in os.environ and os.environ['PYTHONOPTIMIZE']: gLogger.info('Unset pyhthon optimization "PYTHONOPTIMIZE"') sys.exit(0) testUserProfileDB()<|fim▁end|>
""" Helper for retrieving HASH
<|file_name|>ILinePlot.py<|end_file_name|><|fim▁begin|># Copyright 2015-2017 ARM Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """This module contains the class for plotting and customizing Line/Linear Plots with :mod:`trappy.trace.BareTrace` or derived classes. This plot only works when run from an IPython notebook """ from collections import OrderedDict import matplotlib.pyplot as plt from trappy.plotter import AttrConf from trappy.plotter import Utils from trappy.plotter.Constraint import ConstraintManager from trappy.plotter.ILinePlotGen import ILinePlotGen from trappy.plotter.AbstractDataPlotter import AbstractDataPlotter from trappy.plotter.ColorMap import ColorMap from trappy.plotter import IPythonConf from trappy.utils import handle_duplicate_index import pandas as pd if not IPythonConf.check_ipython(): raise ImportError("Ipython Environment not Found") class ILinePlot(AbstractDataPlotter): """ This class uses :mod:`trappy.plotter.Constraint.Constraint` to represent different permutations of input parameters. These constraints are generated by creating an instance of :mod:`trappy.plotter.Constraint.ConstraintManager`. :param traces: The input data :type traces: a list of :mod:`trappy.trace.FTrace`, :mod:`trappy.trace.SysTrace`, :mod:`trappy.trace.BareTrace` or :mod:`pandas.DataFrame` or a single instance of them. :param column: specifies the name of the column to be plotted. :type column: (str, list(str)) :param templates: TRAPpy events .. note:: This is not required if a :mod:`pandas.DataFrame` is used :type templates: :mod:`trappy.base.Base` :param filters: Filter the column to be plotted as per the specified criteria. For Example: :: filters = { "pid": [ 3338 ], "cpu": [0, 2, 4], } :type filters: dict :param per_line: Used to control the number of graphs in each graph subplot row :type per_line: int :param concat: Draw all the pivots on a single graph :type concat: bool :param permute: Draw one plot for each of the traces specified :type permute: bool :param fill: Fill the area under the plots :type fill: bool :param fill_alpha: Opacity of filled area under the plots. Implies fill=True. :type fill_alpha: float :param xlim: A tuple representing the upper and lower xlimits :type xlim: tuple :param ylim: A tuple representing the upper and lower ylimits :type ylim: tuple :param drawstyle: Set the drawstyle to a matplotlib compatible drawing style. .. note:: Only "steps-post" is supported as a valid value for the drawstyle. This creates a step plot. :type drawstyle: str :param sync_zoom: Synchronize the zoom of a group of plots. Zooming in one plot of a group (see below) will zoom in every plot of that group. Defaults to False. :type sync_zoom: boolean :param group: Name given to the plots created by this ILinePlot instance. This name is only used for synchronized zoom. If you zoom on any plot in a group all plots will zoom at the same time. :type group: string :param signals: A string of the type event_name:column to indicate the value that needs to be plotted. 
You can add an additional parameter to specify the color of the lin in rgb: "event_name:column:color". The color is specified as a comma separated list of rgb values, from 0 to 255 or from 0x0 to 0xff. E.g. 0xff,0x0,0x0 is red and 100,40,32 is brown. .. note:: - Only one of `signals` or both `templates` and `columns` should be specified - Signals format won't work for :mod:`pandas.DataFrame` input :type signals: str """ def __init__(self, traces, templates=None, **kwargs): # Default keys, each can be overridden in kwargs self._layout = None super(ILinePlot, self).__init__(traces=traces, templates=templates) self.set_defaults() for key in kwargs: self._attr[key] = kwargs[key] if "signals" in self._attr: self._describe_signals() self._check_data() if "column" not in self._attr: raise RuntimeError("Value Column not specified") if self._attr["drawstyle"] and self._attr["drawstyle"].startswith("steps"): self._attr["step_plot"] = True zip_constraints = not self._attr["permute"] window = self._attr["xlim"] if "xlim" in self._attr else None self.c_mgr = ConstraintManager(traces, self._attr["column"], self.templates, self._attr["pivot"], self._attr["filters"], window=window, zip_constraints=zip_constraints) def savefig(self, *args, **kwargs):<|fim▁hole|> raise NotImplementedError("Not Available for ILinePlot") def view(self, max_datapoints=75000, test=False): """Displays the graph :param max_datapoints: Maximum number of datapoints to plot. Dygraph can make the browser unresponsive if it tries to plot too many datapoints. Chrome 50 chokes at around 75000 on an i7-4770 @ 3.4GHz, Firefox 47 can handle up to 200000 before becoming too slow in the same machine. You can increase this number if you know what you're doing and are happy to wait for the plot to render. :type max_datapoints: int :param test: For testing purposes. Only set to true if run from the testsuite. :type test: boolean """ # Defer installation of IPython components # to the .view call to avoid any errors at # when importing the module. 
This facilitates # the importing of the module from outside # an IPython notebook if not test: IPythonConf.iplot_install("ILinePlot") self._attr["max_datapoints"] = max_datapoints if self._attr["concat"]: self._plot_concat() else: self._plot(self._attr["permute"], test) def set_defaults(self): """Sets the default attrs""" self._attr["per_line"] = AttrConf.PER_LINE self._attr["concat"] = AttrConf.CONCAT self._attr["filters"] = {} self._attr["pivot"] = AttrConf.PIVOT self._attr["permute"] = False self._attr["drawstyle"] = None self._attr["step_plot"] = False self._attr["fill"] = AttrConf.FILL self._attr["scatter"] = AttrConf.PLOT_SCATTER self._attr["point_size"] = AttrConf.POINT_SIZE self._attr["map_label"] = {} self._attr["title"] = AttrConf.TITLE def _plot(self, permute, test): """Internal Method called to draw the plot""" pivot_vals, len_pivots = self.c_mgr.generate_pivots(permute) self._layout = ILinePlotGen(len_pivots, **self._attr) plot_index = 0 for p_val in pivot_vals: data_dict = OrderedDict() for constraint in self.c_mgr: if permute: trace_idx, pivot = p_val if constraint.trace_index != trace_idx: continue legend = constraint._template.name + ":" + constraint.column else: pivot = p_val legend = str(constraint) result = constraint.result if pivot in result: data_dict[legend] = result[pivot] if permute: title = self.traces[plot_index].name elif pivot != AttrConf.PIVOT_VAL: title = "{0}: {1}".format(self._attr["pivot"], self._attr["map_label"].get(pivot, pivot)) else: title = "" if len(data_dict) > 1: data_frame = self._fix_indexes(data_dict) else: data_frame = pd.DataFrame(data_dict) self._layout.add_plot(plot_index, data_frame, title, test=test) plot_index += 1 self._layout.finish() def _plot_concat(self): """Plot all lines on a single figure""" pivot_vals, _ = self.c_mgr.generate_pivots() plot_index = 0 self._layout = ILinePlotGen(len(self.c_mgr), **self._attr) for constraint in self.c_mgr: result = constraint.result title = str(constraint) data_dict = OrderedDict() for pivot in pivot_vals: if pivot in result: if pivot == AttrConf.PIVOT_VAL: key = ",".join(self._attr["column"]) else: key = "{0}: {1}".format(self._attr["pivot"], self._attr["map_label"].get(pivot, pivot)) data_dict[key] = result[pivot] if len(data_dict) > 1: data_frame = self._fix_indexes(data_dict) else: data_frame = pd.DataFrame(data_dict) self._layout.add_plot(plot_index, data_frame, title) plot_index += 1 self._layout.finish() def _fix_indexes(self, data_dict): """ In case of multiple traces with different indexes (i.e. x-axis values), create new ones with same indexes """ # 1) Check if we are processing multiple traces if len(data_dict) <= 1: raise ValueError("Cannot fix indexes for single trace. "\ "Expecting multiple traces!") # 2) Merge the data frames to obtain common indexes df_columns = list(data_dict.keys()) dedup_data = [handle_duplicate_index(s) for s in data_dict.values()] ret = pd.Series(dedup_data, index=df_columns) merged_df = pd.concat(ret.get_values(), axis=1) merged_df.columns = df_columns # 3) Fill NaN values depending on drawstyle if self._attr["drawstyle"] == "steps-post": merged_df = merged_df.ffill() elif self._attr["drawstyle"] == "steps-pre": merged_df = merged_df.bfill() elif self._attr["drawstyle"] == "steps-mid": merged_df = merged_df.ffill() else: # default merged_df = merged_df.interpolate() return merged_df<|fim▁end|>
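Per the constructor docstring above, ILinePlot takes traces plus either templates and column or a signals string, and view() renders the interactive plot. A hypothetical invocation sketch; the trace file path and the sched_load:load event:column pair are assumptions, not part of the sample:

import trappy

trace = trappy.FTrace("./trace.dat")  # assumed trace file
plot = trappy.ILinePlot(trace, signals="sched_load:load",  # assumed event:column pair
                        pivot="cpu", per_line=2)
plot.view()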
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*- from setuptools import setup setup( name = "mobileclick", description = "mobileclick provides baseline methods and utility scripts for the NTCIR-12 MobileClick-2 task", author = "Makoto P. Kato", author_email = "[email protected]", license = "MIT License",<|fim▁hole|> 'mobileclick.nlp', 'mobileclick.methods', 'mobileclick.scripts' ], install_requires = [ 'BeautifulSoup', 'nltk>=3.1', 'numpy'], entry_points = { 'console_scripts': [ 'mobileclick_download_training_data=mobileclick.scripts.mobileclick_download_training_data:main', 'mobileclick_download_test_data=mobileclick.scripts.mobileclick_download_test_data:main', 'mobileclick_random_ranking_method=mobileclick.scripts.mobileclick_random_ranking_method:main', 'mobileclick_lang_model_ranking_method=mobileclick.scripts.mobileclick_lang_model_ranking_method:main', 'mobileclick_random_summarization_method=mobileclick.scripts.mobileclick_random_summarization_method:main', 'mobileclick_lang_model_summarization_method=mobileclick.scripts.mobileclick_lang_model_summarization_method:main', 'mobileclick_lang_model_two_layer_summarization_method=mobileclick.scripts.mobileclick_lang_model_two_layer_summarization_method:main', ], }, tests_require=['nose'] )<|fim▁end|>
url = "https://github.com/mpkato/mobileclick", version='0.2.0', packages=[ 'mobileclick',
<|file_name|>models.py<|end_file_name|><|fim▁begin|>""" Database ORM models managed by this Django app Please do not integrate directly with these models!!! This app currently offers one programmatic API -- api.py for direct Python integration. """ import re from django.core.exceptions import ValidationError from django.db import models from django.utils.translation import gettext_lazy as _ from model_utils.models import TimeStampedModel from simple_history.models import HistoricalRecords class Organization(TimeStampedModel): """<|fim▁hole|> name = models.CharField(max_length=255, db_index=True) short_name = models.CharField( max_length=255, unique=True, verbose_name='Short Name', help_text=_( 'Unique, short string identifier for organization. ' 'Please do not use spaces or special characters. ' 'Only allowed special characters are period (.), hyphen (-) and underscore (_).' ), ) description = models.TextField(null=True, blank=True) logo = models.ImageField( upload_to='organization_logos', help_text=_('Please add only .PNG files for logo images. This logo will be used on certificates.'), null=True, blank=True, max_length=255 ) active = models.BooleanField(default=True) history = HistoricalRecords() def __str__(self): return f"{self.name} ({self.short_name})" def clean(self): if not re.match("^[a-zA-Z0-9._-]*$", self.short_name): raise ValidationError(_('Please do not use spaces or special characters in the short name ' 'field. Only allowed special characters are period (.), hyphen (-) ' 'and underscore (_).')) class OrganizationCourse(TimeStampedModel): """ An OrganizationCourse represents the link between an Organization and a Course (via course key). Because Courses are not true Open edX entities (in the Django/ORM sense) the modeling and integrity is limited to that of specifying course identifier strings in this model. """ course_id = models.CharField(max_length=255, db_index=True, verbose_name='Course ID') organization = models.ForeignKey(Organization, db_index=True, on_delete=models.CASCADE) active = models.BooleanField(default=True) history = HistoricalRecords() class Meta: """ Meta class for this Django model """ unique_together = (('course_id', 'organization'),) verbose_name = _('Link Course') verbose_name_plural = _('Link Courses')<|fim▁end|>
An Organization is a representation of an entity which publishes/provides one or more courses delivered by the LMS. Organizations have a base set of metadata describing the organization, including id, name, and description. """
<|file_name|>webrender_helpers.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // TODO(gw): This contains helper traits and implementations for converting Servo display lists // into WebRender display lists. In the future, this step should be completely removed. // This might be achieved by sharing types between WR and Servo display lists, or // completely converting layout to directly generate WebRender display lists, for example. use app_units::Au; use azure::azure_hl::Color; use euclid::{Point2D, Rect, Size2D}; use gfx::display_list::{BorderRadii, BoxShadowClipMode, ClippingRegion}; use gfx::display_list::{DisplayItem, DisplayList}; use gfx::display_list::{DisplayListTraversal, GradientStop, StackingContext, StackingContextType}; use gfx_traits::ScrollPolicy; use layout_traits::ConvertPipelineIdToWebRender; use style::computed_values::filter::{self, Filter}; use style::computed_values::{image_rendering, mix_blend_mode}; use style::values::computed::BorderStyle; use webrender_traits::{self, AuxiliaryListsBuilder, DisplayListId, PipelineId, StackingContextId}; trait WebRenderStackingContextConverter { fn convert_to_webrender<'a>(&self, traversal: &mut DisplayListTraversal<'a>, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, scroll_layer_id: Option<webrender_traits::ScrollLayerId>, scroll_policy: ScrollPolicy, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::StackingContextId; fn convert_children_to_webrender<'a>(&self, traversal: &mut DisplayListTraversal<'a>, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, scroll_layer_id: Option<webrender_traits::ScrollLayerId>, scroll_policy: ScrollPolicy, builder: &mut webrender_traits::DisplayListBuilder, frame_builder: &mut WebRenderFrameBuilder, force_positioned_stacking_level: bool); } pub trait WebRenderDisplayListConverter { fn convert_to_webrender(&self, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, scroll_layer_id: Option<webrender_traits::ScrollLayerId>, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::StackingContextId; } trait WebRenderDisplayItemConverter { fn convert_to_webrender(&self, builder: &mut webrender_traits::DisplayListBuilder, frame_builder: &mut WebRenderFrameBuilder); } trait ToBorderStyle { fn to_border_style(&self) -> webrender_traits::BorderStyle; } impl ToBorderStyle for BorderStyle { fn to_border_style(&self) -> webrender_traits::BorderStyle { match *self { BorderStyle::none => webrender_traits::BorderStyle::None, BorderStyle::solid => webrender_traits::BorderStyle::Solid, BorderStyle::double => webrender_traits::BorderStyle::Double, BorderStyle::dotted => webrender_traits::BorderStyle::Dotted, BorderStyle::dashed => webrender_traits::BorderStyle::Dashed, BorderStyle::hidden => webrender_traits::BorderStyle::Hidden, BorderStyle::groove => webrender_traits::BorderStyle::Groove, BorderStyle::ridge => webrender_traits::BorderStyle::Ridge, BorderStyle::inset => webrender_traits::BorderStyle::Inset, BorderStyle::outset => webrender_traits::BorderStyle::Outset, } } } trait ToBoxShadowClipMode { fn to_clip_mode(&self) -> webrender_traits::BoxShadowClipMode; } impl ToBoxShadowClipMode for BoxShadowClipMode { fn 
to_clip_mode(&self) -> webrender_traits::BoxShadowClipMode { match *self { BoxShadowClipMode::None => webrender_traits::BoxShadowClipMode::None, BoxShadowClipMode::Inset => webrender_traits::BoxShadowClipMode::Inset, BoxShadowClipMode::Outset => webrender_traits::BoxShadowClipMode::Outset, } } } trait ToSizeF { fn to_sizef(&self) -> Size2D<f32>; } trait ToPointF { fn to_pointf(&self) -> Point2D<f32>; } impl ToPointF for Point2D<Au> { fn to_pointf(&self) -> Point2D<f32> { Point2D::new(self.x.to_f32_px(), self.y.to_f32_px()) } } impl ToSizeF for Size2D<Au> { fn to_sizef(&self) -> Size2D<f32> { Size2D::new(self.width.to_f32_px(), self.height.to_f32_px()) } } trait ToRectF { fn to_rectf(&self) -> Rect<f32>; } impl ToRectF for Rect<Au> { fn to_rectf(&self) -> Rect<f32> { let x = self.origin.x.to_f32_px(); let y = self.origin.y.to_f32_px(); let w = self.size.width.to_f32_px(); let h = self.size.height.to_f32_px(); Rect::new(Point2D::new(x, y), Size2D::new(w, h)) } } trait ToColorF { fn to_colorf(&self) -> webrender_traits::ColorF; } impl ToColorF for Color { fn to_colorf(&self) -> webrender_traits::ColorF { webrender_traits::ColorF::new(self.r, self.g, self.b, self.a) } } trait ToGradientStop { fn to_gradient_stop(&self) -> webrender_traits::GradientStop; } impl ToGradientStop for GradientStop { fn to_gradient_stop(&self) -> webrender_traits::GradientStop { webrender_traits::GradientStop { offset: self.offset, color: self.color.to_colorf(), } } } trait ToClipRegion { fn to_clip_region(&self, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::ClipRegion; } impl ToClipRegion for ClippingRegion { fn to_clip_region(&self, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::ClipRegion { webrender_traits::ClipRegion::new(&self.main.to_rectf(), self.complex.iter().map(|complex_clipping_region| { webrender_traits::ComplexClipRegion::new( complex_clipping_region.rect.to_rectf(), complex_clipping_region.radii.to_border_radius(), ) }).collect(), &mut frame_builder.auxiliary_lists_builder) } } trait ToBorderRadius { fn to_border_radius(&self) -> webrender_traits::BorderRadius; } impl ToBorderRadius for BorderRadii<Au> { fn to_border_radius(&self) -> webrender_traits::BorderRadius { webrender_traits::BorderRadius { top_left: self.top_left.to_sizef(), top_right: self.top_right.to_sizef(), bottom_left: self.bottom_left.to_sizef(), bottom_right: self.bottom_right.to_sizef(), } } } trait ToBlendMode { fn to_blend_mode(&self) -> webrender_traits::MixBlendMode; } impl ToBlendMode for mix_blend_mode::T { fn to_blend_mode(&self) -> webrender_traits::MixBlendMode { match *self { mix_blend_mode::T::normal => webrender_traits::MixBlendMode::Normal, mix_blend_mode::T::multiply => webrender_traits::MixBlendMode::Multiply, mix_blend_mode::T::screen => webrender_traits::MixBlendMode::Screen, mix_blend_mode::T::overlay => webrender_traits::MixBlendMode::Overlay, mix_blend_mode::T::darken => webrender_traits::MixBlendMode::Darken, mix_blend_mode::T::lighten => webrender_traits::MixBlendMode::Lighten, mix_blend_mode::T::color_dodge => webrender_traits::MixBlendMode::ColorDodge, mix_blend_mode::T::color_burn => webrender_traits::MixBlendMode::ColorBurn, mix_blend_mode::T::hard_light => webrender_traits::MixBlendMode::HardLight, mix_blend_mode::T::soft_light => webrender_traits::MixBlendMode::SoftLight, mix_blend_mode::T::difference => webrender_traits::MixBlendMode::Difference, mix_blend_mode::T::exclusion => webrender_traits::MixBlendMode::Exclusion, mix_blend_mode::T::hue => 
webrender_traits::MixBlendMode::Hue, mix_blend_mode::T::saturation => webrender_traits::MixBlendMode::Saturation, mix_blend_mode::T::color => webrender_traits::MixBlendMode::Color, mix_blend_mode::T::luminosity => webrender_traits::MixBlendMode::Luminosity, } } } trait ToImageRendering { fn to_image_rendering(&self) -> webrender_traits::ImageRendering; } impl ToImageRendering for image_rendering::T { fn to_image_rendering(&self) -> webrender_traits::ImageRendering { match *self { image_rendering::T::CrispEdges => webrender_traits::ImageRendering::CrispEdges, image_rendering::T::Auto => webrender_traits::ImageRendering::Auto, image_rendering::T::Pixelated => webrender_traits::ImageRendering::Pixelated, } } } trait ToFilterOps { fn to_filter_ops(&self) -> Vec<webrender_traits::FilterOp>; } impl ToFilterOps for filter::T { fn to_filter_ops(&self) -> Vec<webrender_traits::FilterOp> { let mut result = Vec::with_capacity(self.filters.len()); for filter in self.filters.iter() { match *filter { Filter::Blur(radius) => result.push(webrender_traits::FilterOp::Blur(radius)), Filter::Brightness(amount) => result.push(webrender_traits::FilterOp::Brightness(amount)), Filter::Contrast(amount) => result.push(webrender_traits::FilterOp::Contrast(amount)), Filter::Grayscale(amount) => result.push(webrender_traits::FilterOp::Grayscale(amount)), Filter::HueRotate(angle) => result.push(webrender_traits::FilterOp::HueRotate(angle.0)), Filter::Invert(amount) => result.push(webrender_traits::FilterOp::Invert(amount)), Filter::Opacity(amount) => result.push(webrender_traits::FilterOp::Opacity(amount)), Filter::Saturate(amount) => result.push(webrender_traits::FilterOp::Saturate(amount)), Filter::Sepia(amount) => result.push(webrender_traits::FilterOp::Sepia(amount)), } } result } } impl WebRenderStackingContextConverter for StackingContext { fn convert_children_to_webrender<'a>(&self, traversal: &mut DisplayListTraversal<'a>, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, scroll_layer_id: Option<webrender_traits::ScrollLayerId>, scroll_policy: ScrollPolicy, builder: &mut webrender_traits::DisplayListBuilder, frame_builder: &mut WebRenderFrameBuilder, _force_positioned_stacking_level: bool) { for child in self.children.iter() { while let Some(item) = traversal.advance(self) { item.convert_to_webrender(builder, frame_builder); } if child.context_type == StackingContextType::Real { let scroll_layer_id_for_children = if self.scrolls_overflow_area { scroll_layer_id } else { None }; let stacking_context_id = child.convert_to_webrender(traversal, api, pipeline_id, epoch, scroll_layer_id_for_children, scroll_policy, frame_builder); builder.push_stacking_context(stacking_context_id); } else { child.convert_children_to_webrender(traversal, api, pipeline_id, epoch, scroll_layer_id, scroll_policy, builder, frame_builder, true); } } while let Some(item) = traversal.advance(self) { item.convert_to_webrender(builder, frame_builder); } } fn convert_to_webrender<'a>(&self, traversal: &mut DisplayListTraversal<'a>, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, mut scroll_layer_id: Option<webrender_traits::ScrollLayerId>, mut scroll_policy: ScrollPolicy, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::StackingContextId { if let Some(ref layer_info) = self.layer_info { scroll_policy = layer_info.scroll_policy } let webrender_scroll_policy = match scroll_policy { 
ScrollPolicy::Scrollable => webrender_traits::ScrollPolicy::Scrollable, ScrollPolicy::FixedPosition => webrender_traits::ScrollPolicy::Fixed, }; let mut sc = webrender_traits::StackingContext::new(scroll_layer_id, webrender_scroll_policy, self.bounds.to_rectf(), self.overflow.to_rectf(), self.z_index, &self.transform, &self.perspective, self.establishes_3d_context, self.blend_mode.to_blend_mode(), self.filters.to_filter_ops(), &mut frame_builder.auxiliary_lists_builder); let mut builder = webrender_traits::DisplayListBuilder::new(); if self.scrolls_overflow_area { scroll_layer_id = Some(frame_builder.next_scroll_layer_id()); } self.convert_children_to_webrender(traversal, api, pipeline_id, epoch, scroll_layer_id, scroll_policy, &mut builder, frame_builder, false); frame_builder.add_display_list(api, builder.finalize(), &mut sc); frame_builder.add_stacking_context(api, pipeline_id, sc) } } impl WebRenderDisplayListConverter for DisplayList { fn convert_to_webrender(&self, api: &mut webrender_traits::RenderApi, pipeline_id: webrender_traits::PipelineId, epoch: webrender_traits::Epoch, scroll_layer_id: Option<webrender_traits::ScrollLayerId>, frame_builder: &mut WebRenderFrameBuilder) -> webrender_traits::StackingContextId { let mut traversal = DisplayListTraversal { display_list: self, current_item_index: 0, last_item_index: self.list.len() - 1, }; self.root_stacking_context.convert_to_webrender(&mut traversal, api, pipeline_id, epoch, scroll_layer_id, ScrollPolicy::Scrollable, frame_builder) } } impl WebRenderDisplayItemConverter for DisplayItem { fn convert_to_webrender(&self, builder: &mut webrender_traits::DisplayListBuilder, frame_builder: &mut WebRenderFrameBuilder) { match *self { DisplayItem::SolidColorClass(ref item) => { let color = item.color.to_colorf(); if color.a > 0.0 { builder.push_rect(item.base.bounds.to_rectf(), item.base.clip.to_clip_region(frame_builder), color); } } DisplayItem::TextClass(ref item) => { let mut origin = item.baseline_origin.clone(); let mut glyphs = vec!(); for slice in item.text_run.natural_word_slices_in_visual_order(&item.range) { for glyph in slice.glyphs.iter_glyphs_for_byte_range(&slice.range) { let glyph_advance = if glyph.char_is_space() { glyph.advance() + item.text_run.extra_word_spacing } else { glyph.advance() }; if !slice.glyphs.is_whitespace() { let glyph_offset = glyph.offset().unwrap_or(Point2D::zero()); let glyph = webrender_traits::GlyphInstance { index: glyph.id(), x: (origin.x + glyph_offset.x).to_f32_px(), y: (origin.y + glyph_offset.y).to_f32_px(), }; glyphs.push(glyph); } origin.x = origin.x + glyph_advance; }; } if glyphs.len() > 0 { builder.push_text(item.base.bounds.to_rectf(), item.base.clip.to_clip_region(frame_builder), glyphs, item.text_run.font_key.expect("Font not added to webrender!"), item.text_color.to_colorf(), item.text_run.actual_pt_size, item.blur_radius, &mut frame_builder.auxiliary_lists_builder); } } DisplayItem::ImageClass(ref item) => { if let Some(id) = item.webrender_image.key { if item.stretch_size.width > Au(0) && item.stretch_size.height > Au(0) { builder.push_image(item.base.bounds.to_rectf(), item.base.clip.to_clip_region(frame_builder), item.stretch_size.to_sizef(), item.image_rendering.to_image_rendering(), id); } } } DisplayItem::WebGLClass(ref item) => { builder.push_webgl_canvas(item.base.bounds.to_rectf(), item.base.clip.to_clip_region(frame_builder), item.context_id); } DisplayItem::BorderClass(ref item) => { let rect = item.base.bounds.to_rectf(); let left = webrender_traits::BorderSide { width: 
item.border_widths.left.to_f32_px(), color: item.color.left.to_colorf(), style: item.style.left.to_border_style(), }; let top = webrender_traits::BorderSide { width: item.border_widths.top.to_f32_px(), color: item.color.top.to_colorf(), style: item.style.top.to_border_style(), }; let right = webrender_traits::BorderSide { width: item.border_widths.right.to_f32_px(), color: item.color.right.to_colorf(), style: item.style.right.to_border_style(), }; let bottom = webrender_traits::BorderSide { width: item.border_widths.bottom.to_f32_px(), color: item.color.bottom.to_colorf(), style: item.style.bottom.to_border_style(), }; let radius = item.radius.to_border_radius(); builder.push_border(rect, item.base.clip.to_clip_region(frame_builder), left, top, right, bottom, radius); } DisplayItem::GradientClass(ref item) => { let rect = item.base.bounds.to_rectf(); let start_point = item.start_point.to_pointf(); let end_point = item.end_point.to_pointf(); let mut stops = Vec::new(); for stop in &item.stops { stops.push(stop.to_gradient_stop()); } builder.push_gradient(rect, item.base.clip.to_clip_region(frame_builder), start_point, end_point, stops, &mut frame_builder.auxiliary_lists_builder); } DisplayItem::LineClass(..) => { println!("TODO DisplayItem::LineClass"); } DisplayItem::LayeredItemClass(..) => { panic!("Unexpected in webrender!"); } DisplayItem::BoxShadowClass(ref item) => {<|fim▁hole|> item.base.clip.to_clip_region(frame_builder), box_bounds, item.offset.to_pointf(), item.color.to_colorf(), item.blur_radius.to_f32_px(), item.spread_radius.to_f32_px(), item.border_radius.to_f32_px(), item.clip_mode.to_clip_mode()); } DisplayItem::IframeClass(ref item) => { let rect = item.base.bounds.to_rectf(); let pipeline_id = item.iframe.to_webrender(); builder.push_iframe(rect, item.base.clip.to_clip_region(frame_builder), pipeline_id); } } } } pub struct WebRenderFrameBuilder { pub stacking_contexts: Vec<(StackingContextId, webrender_traits::StackingContext)>, pub display_lists: Vec<(DisplayListId, webrender_traits::BuiltDisplayList)>, pub auxiliary_lists_builder: AuxiliaryListsBuilder, pub root_pipeline_id: PipelineId, pub next_scroll_layer_id: usize, } impl WebRenderFrameBuilder { pub fn new(root_pipeline_id: PipelineId) -> WebRenderFrameBuilder { WebRenderFrameBuilder { stacking_contexts: vec![], display_lists: vec![], auxiliary_lists_builder: AuxiliaryListsBuilder::new(), root_pipeline_id: root_pipeline_id, next_scroll_layer_id: 0, } } pub fn add_stacking_context(&mut self, api: &mut webrender_traits::RenderApi, pipeline_id: PipelineId, stacking_context: webrender_traits::StackingContext) -> StackingContextId { assert!(pipeline_id == self.root_pipeline_id); let id = api.next_stacking_context_id(); self.stacking_contexts.push((id, stacking_context)); id } pub fn add_display_list(&mut self, api: &mut webrender_traits::RenderApi, display_list: webrender_traits::BuiltDisplayList, stacking_context: &mut webrender_traits::StackingContext) -> DisplayListId { let id = api.next_display_list_id(); stacking_context.has_stacking_contexts = stacking_context.has_stacking_contexts || display_list.descriptor().has_stacking_contexts; stacking_context.display_lists.push(id); self.display_lists.push((id, display_list)); id } pub fn next_scroll_layer_id(&mut self) -> webrender_traits::ScrollLayerId { let scroll_layer_id = self.next_scroll_layer_id; self.next_scroll_layer_id += 1; webrender_traits::ScrollLayerId::new(self.root_pipeline_id, scroll_layer_id) } }<|fim▁end|>
let rect = item.base.bounds.to_rectf(); let box_bounds = item.box_bounds.to_rectf(); builder.push_box_shadow(rect,
<|file_name|>test_ip_lib.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at #<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import netaddr from neutron.agent.common import utils # noqa from neutron.agent.linux import ip_lib from neutron.common import exceptions from neutron.tests import base NETNS_SAMPLE = [ '12345678-1234-5678-abcd-1234567890ab', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'cccccccc-cccc-cccc-cccc-cccccccccccc'] LINK_SAMPLE = [ '1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue state UNKNOWN \\' 'link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00 promiscuity 0', '2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP ' 'qlen 1000\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff' '\ alias openvswitch', '3: br-int: <BROADCAST,MULTICAST> mtu 1500 qdisc noop state DOWN ' '\ link/ether aa:bb:cc:dd:ee:ff brd ff:ff:ff:ff:ff:ff promiscuity 0', '4: gw-ddc717df-49: <BROADCAST,MULTICAST> mtu 1500 qdisc noop ' 'state DOWN \ link/ether fe:dc:ba:fe:dc:ba brd ff:ff:ff:ff:ff:ff ' 'promiscuity 0', '5: foo:foo: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state ' 'UP qlen 1000\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff ' 'promiscuity 0', '6: foo@foo: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state ' 'UP qlen 1000\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff ' 'promiscuity 0', '7: foo:foo@foo: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq ' 'state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0', '8: foo@foo:foo: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq ' 'state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0', '9: bar.9@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc ' ' noqueue master brq0b24798c-07 state UP mode DEFAULT' '\ link/ether ab:04:49:b6:ab:a0 brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan protocol 802.1q id 9 <REORDER_HDR>', '10: bar@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc ' ' noqueue master brq0b24798c-07 state UP mode DEFAULT' '\ link/ether ab:04:49:b6:ab:a0 brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan protocol 802.1Q id 10 <REORDER_HDR>', '11: bar:bar@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq ' 'state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan id 11 <REORDER_HDR>', '12: bar@bar@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq ' 'state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan id 12 <REORDER_HDR>', '13: bar:bar@bar@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ' 'qdisc mq state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan protocol 802.1q id 13 <REORDER_HDR>', '14: bar@bar:bar@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 ' 'qdisc mq state UP qlen 1000' '\ link/ether cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff promiscuity 0' '\ vlan protocol 802.1Q id 14 <REORDER_HDR>'] ADDR_SAMPLE = (""" 2: eth0: 
<BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000 link/ether dd:cc:aa:b9:76:ce brd ff:ff:ff:ff:ff:ff inet 172.16.77.240/24 brd 172.16.77.255 scope global eth0 inet6 2001:470:9:1224:5595:dd51:6ba2:e788/64 scope global temporary dynamic valid_lft 14187sec preferred_lft 3387sec inet6 2001:470:9:1224:fd91:272:581e:3a32/64 scope global temporary """ """deprecated dynamic valid_lft 14187sec preferred_lft 0sec inet6 2001:470:9:1224:4508:b885:5fb:740b/64 scope global temporary """ """deprecated dynamic valid_lft 14187sec preferred_lft 0sec inet6 2001:470:9:1224:dfcc:aaff:feb9:76ce/64 scope global dynamic valid_lft 14187sec preferred_lft 3387sec inet6 fe80::dfcc:aaff:feb9:76ce/64 scope link valid_lft forever preferred_lft forever """) ADDR_SAMPLE2 = (""" 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000 link/ether dd:cc:aa:b9:76:ce brd ff:ff:ff:ff:ff:ff inet 172.16.77.240/24 scope global eth0 inet6 2001:470:9:1224:5595:dd51:6ba2:e788/64 scope global temporary dynamic valid_lft 14187sec preferred_lft 3387sec inet6 2001:470:9:1224:fd91:272:581e:3a32/64 scope global temporary """ """deprecated dynamic valid_lft 14187sec preferred_lft 0sec inet6 2001:470:9:1224:4508:b885:5fb:740b/64 scope global temporary """ """deprecated dynamic valid_lft 14187sec preferred_lft 0sec inet6 2001:470:9:1224:dfcc:aaff:feb9:76ce/64 scope global dynamic valid_lft 14187sec preferred_lft 3387sec inet6 fe80::dfcc:aaff:feb9:76ce/64 scope link valid_lft forever preferred_lft forever """) GATEWAY_SAMPLE1 = (""" default via 10.35.19.254 metric 100 10.35.16.0/22 proto kernel scope link src 10.35.17.97 """) GATEWAY_SAMPLE2 = (""" default via 10.35.19.254 metric 100 """) GATEWAY_SAMPLE3 = (""" 10.35.16.0/22 proto kernel scope link src 10.35.17.97 """) GATEWAY_SAMPLE4 = (""" default via 10.35.19.254 """) GATEWAY_SAMPLE5 = (""" default via 192.168.99.1 proto static """) GATEWAY_SAMPLE6 = (""" default via 192.168.99.1 proto static metric 100 """) IPv6_GATEWAY_SAMPLE1 = (""" default via 2001:470:9:1224:4508:b885:5fb:740b metric 100 2001:db8::/64 proto kernel scope link src 2001:470:9:1224:dfcc:aaff:feb9:76ce """) IPv6_GATEWAY_SAMPLE2 = (""" default via 2001:470:9:1224:4508:b885:5fb:740b metric 100 """) IPv6_GATEWAY_SAMPLE3 = (""" 2001:db8::/64 proto kernel scope link src 2001:470:9:1224:dfcc:aaff:feb9:76ce """) IPv6_GATEWAY_SAMPLE4 = (""" default via fe80::dfcc:aaff:feb9:76ce """) IPv6_GATEWAY_SAMPLE5 = (""" default via 2001:470:9:1224:4508:b885:5fb:740b metric 1024 """) DEVICE_ROUTE_SAMPLE = ("10.0.0.0/24 scope link src 10.0.0.2") SUBNET_SAMPLE1 = ("10.0.0.0/24 dev qr-23380d11-d2 scope link src 10.0.0.1\n" "10.0.0.0/24 dev tap1d7888a7-10 scope link src 10.0.0.2") SUBNET_SAMPLE2 = ("10.0.0.0/24 dev tap1d7888a7-10 scope link src 10.0.0.2\n" "10.0.0.0/24 dev qr-23380d11-d2 scope link src 10.0.0.1") RULE_V4_SAMPLE = (""" 0: from all lookup local 32766: from all lookup main 32767: from all lookup default 101: from 192.168.45.100 lookup 2 """) RULE_V6_SAMPLE = (""" 0: from all lookup local 32766: from all lookup main 32767: from all lookup default 201: from 2001:db8::1 lookup 3 """) class TestSubProcessBase(base.BaseTestCase): def setUp(self): super(TestSubProcessBase, self).setUp() self.execute_p = mock.patch('neutron.agent.common.utils.execute') self.execute = self.execute_p.start() def test_execute_wrapper(self): ip_lib.SubProcessBase._execute(['o'], 'link', ('list',), run_as_root=True) self.execute.assert_called_once_with(['ip', '-o', 'link', 'list'], run_as_root=True, 
log_fail_as_error=True) def test_execute_wrapper_int_options(self): ip_lib.SubProcessBase._execute([4], 'link', ('list',)) self.execute.assert_called_once_with(['ip', '-4', 'link', 'list'], run_as_root=False, log_fail_as_error=True) def test_execute_wrapper_no_options(self): ip_lib.SubProcessBase._execute([], 'link', ('list',)) self.execute.assert_called_once_with(['ip', 'link', 'list'], run_as_root=False, log_fail_as_error=True) def test_run_no_namespace(self): base = ip_lib.SubProcessBase() base._run([], 'link', ('list',)) self.execute.assert_called_once_with(['ip', 'link', 'list'], run_as_root=False, log_fail_as_error=True) def test_run_namespace(self): base = ip_lib.SubProcessBase(namespace='ns') base._run([], 'link', ('list',)) self.execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns', 'ip', 'link', 'list'], run_as_root=True, log_fail_as_error=True) def test_as_root_namespace(self): base = ip_lib.SubProcessBase(namespace='ns') base._as_root([], 'link', ('list',)) self.execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns', 'ip', 'link', 'list'], run_as_root=True, log_fail_as_error=True) class TestIpWrapper(base.BaseTestCase): def setUp(self): super(TestIpWrapper, self).setUp() self.execute_p = mock.patch.object(ip_lib.IPWrapper, '_execute') self.execute = self.execute_p.start() @mock.patch('os.path.islink') @mock.patch('os.listdir', return_value=['lo']) def test_get_devices(self, mocked_listdir, mocked_islink): retval = ip_lib.IPWrapper().get_devices() mocked_islink.assert_called_once_with('/sys/class/net/lo') self.assertEqual(retval, [ip_lib.IPDevice('lo')]) @mock.patch('neutron.agent.common.utils.execute') def test_get_devices_namespaces(self, mocked_execute): fake_str = mock.Mock() fake_str.split.return_value = ['lo'] mocked_execute.return_value = fake_str retval = ip_lib.IPWrapper(namespace='foo').get_devices() mocked_execute.assert_called_once_with( ['ip', 'netns', 'exec', 'foo', 'find', '/sys/class/net', '-maxdepth', '1', '-type', 'l', '-printf', '%f '], run_as_root=True, log_fail_as_error=True) self.assertTrue(fake_str.split.called) self.assertEqual(retval, [ip_lib.IPDevice('lo', namespace='foo')]) def test_get_namespaces(self): self.execute.return_value = '\n'.join(NETNS_SAMPLE) retval = ip_lib.IPWrapper.get_namespaces() self.assertEqual(retval, ['12345678-1234-5678-abcd-1234567890ab', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'cccccccc-cccc-cccc-cccc-cccccccccccc']) self.execute.assert_called_once_with([], 'netns', ('list',)) def test_add_tuntap(self): ip_lib.IPWrapper().add_tuntap('tap0') self.execute.assert_called_once_with([], 'tuntap', ('add', 'tap0', 'mode', 'tap'), run_as_root=True, namespace=None, log_fail_as_error=True) def test_add_veth(self): ip_lib.IPWrapper().add_veth('tap0', 'tap1') self.execute.assert_called_once_with([], 'link', ('add', 'tap0', 'type', 'veth', 'peer', 'name', 'tap1'), run_as_root=True, namespace=None, log_fail_as_error=True) def test_del_veth(self): ip_lib.IPWrapper().del_veth('fpr-1234') self.execute.assert_called_once_with([], 'link', ('del', 'fpr-1234'), run_as_root=True, namespace=None, log_fail_as_error=True) def test_add_veth_with_namespaces(self): ns2 = 'ns2' with mock.patch.object(ip_lib.IPWrapper, 'ensure_namespace') as en: ip_lib.IPWrapper().add_veth('tap0', 'tap1', namespace2=ns2) en.assert_has_calls([mock.call(ns2)]) self.execute.assert_called_once_with([], 'link', ('add', 'tap0', 'type', 'veth', 'peer', 'name', 'tap1', 'netns', ns2), run_as_root=True, namespace=None, log_fail_as_error=True) def test_get_device(self): 
dev = ip_lib.IPWrapper(namespace='ns').device('eth0') self.assertEqual(dev.namespace, 'ns') self.assertEqual(dev.name, 'eth0') def test_ensure_namespace(self): with mock.patch.object(ip_lib, 'IPDevice') as ip_dev: ip = ip_lib.IPWrapper() with mock.patch.object(ip.netns, 'exists') as ns_exists: with mock.patch('neutron.agent.common.utils.execute'): ns_exists.return_value = False ip.ensure_namespace('ns') self.execute.assert_has_calls( [mock.call([], 'netns', ('add', 'ns'), run_as_root=True, namespace=None, log_fail_as_error=True)]) ip_dev.assert_has_calls([mock.call('lo', namespace='ns'), mock.call().link.set_up()]) def test_ensure_namespace_existing(self): with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd: ip_ns_cmd.exists.return_value = True ns = ip_lib.IPWrapper().ensure_namespace('ns') self.assertFalse(self.execute.called) self.assertEqual(ns.namespace, 'ns') def test_namespace_is_empty_no_devices(self): ip = ip_lib.IPWrapper(namespace='ns') with mock.patch.object(ip, 'get_devices') as get_devices: get_devices.return_value = [] self.assertTrue(ip.namespace_is_empty()) get_devices.assert_called_once_with(exclude_loopback=True) def test_namespace_is_empty(self): ip = ip_lib.IPWrapper(namespace='ns') with mock.patch.object(ip, 'get_devices') as get_devices: get_devices.return_value = [mock.Mock()] self.assertFalse(ip.namespace_is_empty()) get_devices.assert_called_once_with(exclude_loopback=True) def test_garbage_collect_namespace_does_not_exist(self): with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls: ip_ns_cmd_cls.return_value.exists.return_value = False ip = ip_lib.IPWrapper(namespace='ns') with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty: self.assertFalse(ip.garbage_collect_namespace()) ip_ns_cmd_cls.assert_has_calls([mock.call().exists('ns')]) self.assertNotIn(mock.call().delete('ns'), ip_ns_cmd_cls.return_value.mock_calls) self.assertEqual(mock_is_empty.mock_calls, []) def test_garbage_collect_namespace_existing_empty_ns(self): with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls: ip_ns_cmd_cls.return_value.exists.return_value = True ip = ip_lib.IPWrapper(namespace='ns') with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty: mock_is_empty.return_value = True self.assertTrue(ip.garbage_collect_namespace()) mock_is_empty.assert_called_once_with() expected = [mock.call().exists('ns'), mock.call().delete('ns')] ip_ns_cmd_cls.assert_has_calls(expected) def test_garbage_collect_namespace_existing_not_empty(self): lo_device = mock.Mock() lo_device.name = 'lo' tap_device = mock.Mock() tap_device.name = 'tap1' with mock.patch.object(ip_lib, 'IpNetnsCommand') as ip_ns_cmd_cls: ip_ns_cmd_cls.return_value.exists.return_value = True ip = ip_lib.IPWrapper(namespace='ns') with mock.patch.object(ip, 'namespace_is_empty') as mock_is_empty: mock_is_empty.return_value = False self.assertFalse(ip.garbage_collect_namespace()) mock_is_empty.assert_called_once_with() expected = [mock.call(ip), mock.call().exists('ns')] self.assertEqual(ip_ns_cmd_cls.mock_calls, expected) self.assertNotIn(mock.call().delete('ns'), ip_ns_cmd_cls.mock_calls) def test_add_vxlan_valid_port_length(self): retval = ip_lib.IPWrapper().add_vxlan('vxlan0', 'vni0', group='group0', dev='dev0', ttl='ttl0', tos='tos0', local='local0', proxy=True, port=('1', '2')) self.assertIsInstance(retval, ip_lib.IPDevice) self.assertEqual(retval.name, 'vxlan0') self.execute.assert_called_once_with([], 'link', ['add', 'vxlan0', 'type', 'vxlan', 'id', 'vni0', 'group', 'group0', 
'dev', 'dev0', 'ttl', 'ttl0', 'tos', 'tos0', 'local', 'local0', 'proxy', 'port', '1', '2'], run_as_root=True, namespace=None, log_fail_as_error=True) def test_add_vxlan_invalid_port_length(self): wrapper = ip_lib.IPWrapper() self.assertRaises(exceptions.NetworkVxlanPortRangeError, wrapper.add_vxlan, 'vxlan0', 'vni0', group='group0', dev='dev0', ttl='ttl0', tos='tos0', local='local0', proxy=True, port=('1', '2', '3')) def test_add_device_to_namespace(self): dev = mock.Mock() ip_lib.IPWrapper(namespace='ns').add_device_to_namespace(dev) dev.assert_has_calls([mock.call.link.set_netns('ns')]) def test_add_device_to_namespace_is_none(self): dev = mock.Mock() ip_lib.IPWrapper().add_device_to_namespace(dev) self.assertEqual(dev.mock_calls, []) class TestIPDevice(base.BaseTestCase): def test_eq_same_name(self): dev1 = ip_lib.IPDevice('tap0') dev2 = ip_lib.IPDevice('tap0') self.assertEqual(dev1, dev2) def test_eq_diff_name(self): dev1 = ip_lib.IPDevice('tap0') dev2 = ip_lib.IPDevice('tap1') self.assertNotEqual(dev1, dev2) def test_eq_same_namespace(self): dev1 = ip_lib.IPDevice('tap0', 'ns1') dev2 = ip_lib.IPDevice('tap0', 'ns1') self.assertEqual(dev1, dev2) def test_eq_diff_namespace(self): dev1 = ip_lib.IPDevice('tap0', namespace='ns1') dev2 = ip_lib.IPDevice('tap0', namespace='ns2') self.assertNotEqual(dev1, dev2) def test_eq_other_is_none(self): dev1 = ip_lib.IPDevice('tap0', namespace='ns1') self.assertIsNotNone(dev1) def test_str(self): self.assertEqual(str(ip_lib.IPDevice('tap0')), 'tap0') class TestIPCommandBase(base.BaseTestCase): def setUp(self): super(TestIPCommandBase, self).setUp() self.ip = mock.Mock() self.ip.namespace = 'namespace' self.ip_cmd = ip_lib.IpCommandBase(self.ip) self.ip_cmd.COMMAND = 'foo' def test_run(self): self.ip_cmd._run([], ('link', 'show')) self.ip.assert_has_calls([mock.call._run([], 'foo', ('link', 'show'))]) def test_run_with_options(self): self.ip_cmd._run(['o'], ('link')) self.ip.assert_has_calls([mock.call._run(['o'], 'foo', ('link'))]) def test_as_root_namespace_false(self): self.ip_cmd._as_root([], ('link')) self.ip.assert_has_calls( [mock.call._as_root([], 'foo', ('link'), use_root_namespace=False)]) def test_as_root_namespace_true(self): self.ip_cmd._as_root([], ('link'), use_root_namespace=True) self.ip.assert_has_calls( [mock.call._as_root([], 'foo', ('link'), use_root_namespace=True)]) def test_as_root_namespace_true_with_options(self): self.ip_cmd._as_root('o', 'link', use_root_namespace=True) self.ip.assert_has_calls( [mock.call._as_root('o', 'foo', ('link'), use_root_namespace=True)]) class TestIPDeviceCommandBase(base.BaseTestCase): def setUp(self): super(TestIPDeviceCommandBase, self).setUp() self.ip_dev = mock.Mock() self.ip_dev.name = 'eth0' self.ip_dev._execute = mock.Mock(return_value='executed') self.ip_cmd = ip_lib.IpDeviceCommandBase(self.ip_dev) self.ip_cmd.COMMAND = 'foo' def test_name_property(self): self.assertEqual(self.ip_cmd.name, 'eth0') class TestIPCmdBase(base.BaseTestCase): def setUp(self): super(TestIPCmdBase, self).setUp() self.parent = mock.Mock() self.parent.name = 'eth0' def _assert_call(self, options, args): self.parent.assert_has_calls([ mock.call._run(options, self.command, args)]) def _assert_sudo(self, options, args, use_root_namespace=False): self.parent.assert_has_calls( [mock.call._as_root(options, self.command, args, use_root_namespace=use_root_namespace)]) class TestIpRuleCommand(TestIPCmdBase): def setUp(self): super(TestIpRuleCommand, self).setUp() self.parent._as_root.return_value = '' self.command = 'rule' 
self.rule_cmd = ip_lib.IpRuleCommand(self.parent) def _test_add_rule(self, ip, table, priority): ip_version = netaddr.IPNetwork(ip).version self.rule_cmd.add(ip, table, priority) self._assert_sudo([ip_version], (['show'])) self._assert_sudo([ip_version], ('add', 'from', ip, 'table', table, 'priority', priority)) def _test_add_rule_exists(self, ip, table, priority, output): self.parent._as_root.return_value = output ip_version = netaddr.IPNetwork(ip).version self.rule_cmd.add(ip, table, priority) self._assert_sudo([ip_version], (['show'])) def _test_delete_rule(self, ip, table, priority): ip_version = netaddr.IPNetwork(ip).version self.rule_cmd.delete(ip, table, priority) self._assert_sudo([ip_version], ('del', 'table', table, 'priority', priority)) def test_add_rule_v4(self): self._test_add_rule('192.168.45.100', 2, 100) def test_add_rule_v4_exists(self): self._test_add_rule_exists('192.168.45.100', 2, 101, RULE_V4_SAMPLE) def test_add_rule_v6(self): self._test_add_rule('2001:db8::1', 3, 200) def test_add_rule_v6_exists(self): self._test_add_rule_exists('2001:db8::1', 3, 201, RULE_V6_SAMPLE) def test_delete_rule_v4(self): self._test_delete_rule('192.168.45.100', 2, 100) def test_delete_rule_v6(self): self._test_delete_rule('2001:db8::1', 3, 200) class TestIpLinkCommand(TestIPCmdBase): def setUp(self): super(TestIpLinkCommand, self).setUp() self.parent._run.return_value = LINK_SAMPLE[1] self.command = 'link' self.link_cmd = ip_lib.IpLinkCommand(self.parent) def test_set_address(self): self.link_cmd.set_address('aa:bb:cc:dd:ee:ff') self._assert_sudo([], ('set', 'eth0', 'address', 'aa:bb:cc:dd:ee:ff')) def test_set_mtu(self): self.link_cmd.set_mtu(1500) self._assert_sudo([], ('set', 'eth0', 'mtu', 1500)) def test_set_up(self): self.link_cmd.set_up() self._assert_sudo([], ('set', 'eth0', 'up')) def test_set_down(self): self.link_cmd.set_down() self._assert_sudo([], ('set', 'eth0', 'down')) def test_set_netns(self): self.link_cmd.set_netns('foo') self._assert_sudo([], ('set', 'eth0', 'netns', 'foo')) self.assertEqual(self.parent.namespace, 'foo') def test_set_name(self): self.link_cmd.set_name('tap1') self._assert_sudo([], ('set', 'eth0', 'name', 'tap1')) self.assertEqual(self.parent.name, 'tap1') def test_set_alias(self): self.link_cmd.set_alias('openvswitch') self._assert_sudo([], ('set', 'eth0', 'alias', 'openvswitch')) def test_delete(self): self.link_cmd.delete() self._assert_sudo([], ('delete', 'eth0')) def test_address_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.address, 'cc:dd:ee:ff:ab:cd') def test_mtu_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.mtu, 1500) def test_qdisc_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.qdisc, 'mq') def test_qlen_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.qlen, 1000) def test_alias_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.alias, 'openvswitch') def test_state_property(self): self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) self.assertEqual(self.link_cmd.state, 'UP') def test_settings_property(self): expected = {'mtu': 1500, 'qlen': 1000, 'state': 'UP', 'qdisc': 'mq', 'brd': 'ff:ff:ff:ff:ff:ff', 'link/ether': 'cc:dd:ee:ff:ab:cd', 'alias': 'openvswitch'} self.parent._execute = mock.Mock(return_value=LINK_SAMPLE[1]) 
self.assertEqual(self.link_cmd.attributes, expected) self._assert_call(['o'], ('show', 'eth0')) class TestIpAddrCommand(TestIPCmdBase): def setUp(self): super(TestIpAddrCommand, self).setUp() self.parent.name = 'tap0' self.command = 'addr' self.addr_cmd = ip_lib.IpAddrCommand(self.parent) def test_add_address(self): self.addr_cmd.add('192.168.45.100/24') self._assert_sudo([4], ('add', '192.168.45.100/24', 'scope', 'global', 'dev', 'tap0', 'brd', '192.168.45.255')) def test_add_address_scoped(self): self.addr_cmd.add('192.168.45.100/24', scope='link') self._assert_sudo([4], ('add', '192.168.45.100/24', 'scope', 'link', 'dev', 'tap0', 'brd', '192.168.45.255')) def test_del_address(self): self.addr_cmd.delete('192.168.45.100/24') self._assert_sudo([4], ('del', '192.168.45.100/24', 'dev', 'tap0')) def test_flush(self): self.addr_cmd.flush(6) self._assert_sudo([6], ('flush', 'tap0')) def test_list(self): expected = [ dict(scope='global', dynamic=False, cidr='172.16.77.240/24'), dict(scope='global', dynamic=True, cidr='2001:470:9:1224:5595:dd51:6ba2:e788/64'), dict(scope='global', dynamic=True, cidr='2001:470:9:1224:fd91:272:581e:3a32/64'), dict(scope='global', dynamic=True, cidr='2001:470:9:1224:4508:b885:5fb:740b/64'), dict(scope='global', dynamic=True, cidr='2001:470:9:1224:dfcc:aaff:feb9:76ce/64'), dict(scope='link', dynamic=False, cidr='fe80::dfcc:aaff:feb9:76ce/64')] test_cases = [ADDR_SAMPLE, ADDR_SAMPLE2] for test_case in test_cases: self.parent._run = mock.Mock(return_value=test_case) self.assertEqual(self.addr_cmd.list(), expected) self._assert_call([], ('show', 'tap0')) def test_list_filtered(self): expected = [ dict(scope='global', dynamic=False, cidr='172.16.77.240/24')] test_cases = [ADDR_SAMPLE, ADDR_SAMPLE2] for test_case in test_cases: output = '\n'.join(test_case.split('\n')[0:4]) self.parent._run.return_value = output self.assertEqual(self.addr_cmd.list('global', filters=['permanent']), expected) self._assert_call([], ('show', 'tap0', 'permanent', 'scope', 'global')) class TestIpRouteCommand(TestIPCmdBase): def setUp(self): super(TestIpRouteCommand, self).setUp() self.parent.name = 'eth0' self.command = 'route' self.route_cmd = ip_lib.IpRouteCommand(self.parent) self.ip_version = 4 self.table = 14 self.metric = 100 self.cidr = '192.168.45.100/24' self.ip = '10.0.0.1' self.gateway = '192.168.45.100' self.test_cases = [{'sample': GATEWAY_SAMPLE1, 'expected': {'gateway': '10.35.19.254', 'metric': 100}}, {'sample': GATEWAY_SAMPLE2, 'expected': {'gateway': '10.35.19.254', 'metric': 100}}, {'sample': GATEWAY_SAMPLE3, 'expected': None}, {'sample': GATEWAY_SAMPLE4, 'expected': {'gateway': '10.35.19.254'}}, {'sample': GATEWAY_SAMPLE5, 'expected': {'gateway': '192.168.99.1'}}, {'sample': GATEWAY_SAMPLE6, 'expected': {'gateway': '192.168.99.1', 'metric': 100}}] def test_add_gateway(self): self.route_cmd.add_gateway(self.gateway, self.metric, self.table) self._assert_sudo([self.ip_version], ('replace', 'default', 'via', self.gateway, 'metric', self.metric, 'dev', self.parent.name, 'table', self.table)) def test_del_gateway(self): self.route_cmd.delete_gateway(self.gateway, table=self.table) self._assert_sudo([self.ip_version], ('del', 'default', 'via', self.gateway, 'dev', self.parent.name, 'table', self.table)) def test_get_gateway(self): for test_case in self.test_cases: self.parent._run = mock.Mock(return_value=test_case['sample']) self.assertEqual(self.route_cmd.get_gateway(), test_case['expected']) def test_pullup_route(self): # NOTE(brian-haley) Currently we do not have any 
IPv6-specific use case # for pullup_route, hence skipping. Revisit, if required, in the future. if self.ip_version == 6: return # interface is not the first in the list - requires # deleting and creating existing entries output = [DEVICE_ROUTE_SAMPLE, SUBNET_SAMPLE1] def pullup_side_effect(self, *args): result = output.pop(0) return result self.parent._run = mock.Mock(side_effect=pullup_side_effect) self.route_cmd.pullup_route('tap1d7888a7-10') self._assert_sudo([], ('del', '10.0.0.0/24', 'dev', 'qr-23380d11-d2')) self._assert_sudo([], ('append', '10.0.0.0/24', 'proto', 'kernel', 'src', '10.0.0.1', 'dev', 'qr-23380d11-d2')) def test_pullup_route_first(self): # NOTE(brian-haley) Currently we do not have any IPv6-specific use case # for pullup_route, hence skipping. Revisit, if required, in the future. if self.ip_version == 6: return # interface is first in the list - no changes output = [DEVICE_ROUTE_SAMPLE, SUBNET_SAMPLE2] def pullup_side_effect(self, *args): result = output.pop(0) return result self.parent._run = mock.Mock(side_effect=pullup_side_effect) self.route_cmd.pullup_route('tap1d7888a7-10') # Check two calls - device get and subnet get self.assertEqual(len(self.parent._run.mock_calls), 2) def test_add_route(self): self.route_cmd.add_route(self.cidr, self.ip, self.table) self._assert_sudo([self.ip_version], ('replace', self.cidr, 'via', self.ip, 'dev', self.parent.name, 'table', self.table)) def test_delete_route(self): self.route_cmd.delete_route(self.cidr, self.ip, self.table) self._assert_sudo([self.ip_version], ('del', self.cidr, 'via', self.ip, 'dev', self.parent.name, 'table', self.table)) class TestIPv6IpRouteCommand(TestIpRouteCommand): def setUp(self): super(TestIPv6IpRouteCommand, self).setUp() self.ip_version = 6 self.cidr = '2001:db8::/64' self.ip = '2001:db8::100' self.gateway = '2001:db8::1' self.test_cases = [{'sample': IPv6_GATEWAY_SAMPLE1, 'expected': {'gateway': '2001:470:9:1224:4508:b885:5fb:740b', 'metric': 100}}, {'sample': IPv6_GATEWAY_SAMPLE2, 'expected': {'gateway': '2001:470:9:1224:4508:b885:5fb:740b', 'metric': 100}}, {'sample': IPv6_GATEWAY_SAMPLE3, 'expected': None}, {'sample': IPv6_GATEWAY_SAMPLE4, 'expected': {'gateway': 'fe80::dfcc:aaff:feb9:76ce'}}, {'sample': IPv6_GATEWAY_SAMPLE5, 'expected': {'gateway': '2001:470:9:1224:4508:b885:5fb:740b', 'metric': 1024}}] class TestIpNetnsCommand(TestIPCmdBase): def setUp(self): super(TestIpNetnsCommand, self).setUp() self.command = 'netns' self.netns_cmd = ip_lib.IpNetnsCommand(self.parent) def test_add_namespace(self): with mock.patch('neutron.agent.common.utils.execute') as execute: ns = self.netns_cmd.add('ns') self._assert_sudo([], ('add', 'ns'), use_root_namespace=True) self.assertEqual(ns.namespace, 'ns') execute.assert_called_once_with( ['ip', 'netns', 'exec', 'ns', 'sysctl', '-w', 'net.ipv4.conf.all.promote_secondaries=1'], run_as_root=True, check_exit_code=True, extra_ok_codes=None, log_fail_as_error=True) def test_delete_namespace(self): with mock.patch('neutron.agent.common.utils.execute'): self.netns_cmd.delete('ns') self._assert_sudo([], ('delete', 'ns'), use_root_namespace=True) def test_namespace_exists_use_helper(self): self.config(group='AGENT', use_helper_for_ns_read=True) retval = '\n'.join(NETNS_SAMPLE) # need another instance to avoid mocking netns_cmd = ip_lib.IpNetnsCommand(ip_lib.SubProcessBase()) with mock.patch('neutron.agent.common.utils.execute') as execute: execute.return_value = retval self.assertTrue( netns_cmd.exists('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb')) 
execute.assert_called_once_with(['ip', '-o', 'netns', 'list'], run_as_root=True, log_fail_as_error=True) def test_namespace_does_not_exist_no_helper(self): self.config(group='AGENT', use_helper_for_ns_read=False) retval = '\n'.join(NETNS_SAMPLE) # need another instance to avoid mocking netns_cmd = ip_lib.IpNetnsCommand(ip_lib.SubProcessBase()) with mock.patch('neutron.agent.common.utils.execute') as execute: execute.return_value = retval self.assertFalse( netns_cmd.exists('bbbbbbbb-1111-2222-3333-bbbbbbbbbbbb')) execute.assert_called_once_with(['ip', '-o', 'netns', 'list'], run_as_root=False, log_fail_as_error=True) def test_execute(self): self.parent.namespace = 'ns' with mock.patch('neutron.agent.common.utils.execute') as execute: self.netns_cmd.execute(['ip', 'link', 'list']) execute.assert_called_once_with(['ip', 'netns', 'exec', 'ns', 'ip', 'link', 'list'], run_as_root=True, check_exit_code=True, extra_ok_codes=None, log_fail_as_error=True) def test_execute_env_var_prepend(self): self.parent.namespace = 'ns' with mock.patch('neutron.agent.common.utils.execute') as execute: env = dict(FOO=1, BAR=2) self.netns_cmd.execute(['ip', 'link', 'list'], env) execute.assert_called_once_with( ['ip', 'netns', 'exec', 'ns', 'env'] + ['%s=%s' % (k, v) for k, v in env.items()] + ['ip', 'link', 'list'], run_as_root=True, check_exit_code=True, extra_ok_codes=None, log_fail_as_error=True) def test_execute_nosudo_with_no_namespace(self): with mock.patch('neutron.agent.common.utils.execute') as execute: self.parent.namespace = None self.netns_cmd.execute(['test']) execute.assert_called_once_with(['test'], check_exit_code=True, extra_ok_codes=None, run_as_root=False, log_fail_as_error=True) class TestDeviceExists(base.BaseTestCase): def test_device_exists(self): with mock.patch.object(ip_lib.IPDevice, '_execute') as _execute: _execute.return_value = LINK_SAMPLE[1] self.assertTrue(ip_lib.device_exists('eth0')) _execute.assert_called_once_with(['o'], 'link', ('show', 'eth0'), log_fail_as_error=False) def test_device_does_not_exist(self): with mock.patch.object(ip_lib.IPDevice, '_execute') as _execute: _execute.return_value = '' _execute.side_effect = RuntimeError self.assertFalse(ip_lib.device_exists('eth0')) def test_ensure_device_is_ready(self): ip_lib_mock = mock.Mock() with mock.patch.object(ip_lib, 'IPDevice', return_value=ip_lib_mock): self.assertTrue(ip_lib.ensure_device_is_ready("eth0")) self.assertTrue(ip_lib_mock.link.set_up.called) ip_lib_mock.reset_mock() # device doesn't exist ip_lib_mock.link.set_up.side_effect = RuntimeError self.assertFalse(ip_lib.ensure_device_is_ready("eth0")) class TestIpNeighCommand(TestIPCmdBase): def setUp(self): super(TestIpNeighCommand, self).setUp() self.parent.name = 'tap0' self.command = 'neigh' self.neigh_cmd = ip_lib.IpNeighCommand(self.parent) def test_add_entry(self): self.neigh_cmd.add('192.168.45.100', 'cc:dd:ee:ff:ab:cd') self._assert_sudo([4], ('replace', '192.168.45.100', 'lladdr', 'cc:dd:ee:ff:ab:cd', 'nud', 'permanent', 'dev', 'tap0')) def test_delete_entry(self): self.neigh_cmd.delete('192.168.45.100', 'cc:dd:ee:ff:ab:cd') self._assert_sudo([4], ('del', '192.168.45.100', 'lladdr', 'cc:dd:ee:ff:ab:cd', 'dev', 'tap0')) class TestArpPing(TestIPCmdBase): @mock.patch.object(ip_lib, 'IPWrapper') @mock.patch('eventlet.spawn_n') def test_send_ipv4_addr_adv_notif(self, spawn_n, mIPWrapper): spawn_n.side_effect = lambda f: f() ARPING_COUNT = 3 address = '20.0.0.1' config = mock.Mock() config.send_arp_for_ha = ARPING_COUNT 
ip_lib.send_ip_addr_adv_notif(mock.sentinel.ns_name, mock.sentinel.iface_name, address, config) self.assertTrue(spawn_n.called) mIPWrapper.assert_called_once_with(namespace=mock.sentinel.ns_name) ip_wrapper = mIPWrapper(namespace=mock.sentinel.ns_name) # Just test that arping is called with the right arguments arping_cmd = ['arping', '-A', '-I', mock.sentinel.iface_name, '-c', ARPING_COUNT, '-w', mock.ANY, address] ip_wrapper.netns.execute.assert_any_call(arping_cmd, check_exit_code=True) @mock.patch('eventlet.spawn_n') def test_no_ipv6_addr_notif(self, spawn_n): ipv6_addr = 'fd00::1' config = mock.Mock() config.send_arp_for_ha = 3 ip_lib.send_ip_addr_adv_notif(mock.sentinel.ns_name, mock.sentinel.iface_name, ipv6_addr, config) self.assertFalse(spawn_n.called) class TestAddNamespaceToCmd(base.BaseTestCase): def test_add_namespace_to_cmd_with_namespace(self): cmd = ['ping', '8.8.8.8'] self.assertEqual(['ip', 'netns', 'exec', 'tmp'] + cmd, ip_lib.add_namespace_to_cmd(cmd, 'tmp')) def test_add_namespace_to_cmd_without_namespace(self): cmd = ['ping', '8.8.8.8'] self.assertEqual(cmd, ip_lib.add_namespace_to_cmd(cmd, None))<|fim▁end|>
<|file_name|>fn_params.ts<|end_file_name|><|fim▁begin|>// Optional Parameters let optParams = function(n: number, s?: string, b?: boolean) {}; // Variadic parameters let restParams = function(n: number, ...r: any[]) {}; let restParamsTyped = function(n: number, ...br: boolean[]) {};<|fim▁hole|><|fim▁end|>
let complex = function(n: number, o?: boolean, ...r: any[]): number { return n; };
<|file_name|>str.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[test] fn test_bool_from_str() { assert_eq!("true".parse().ok(), Some(true)); assert_eq!("false".parse().ok(), Some(false)); assert_eq!("not even a boolean".parse::<bool>().ok(), None); } fn check_contains_all_substrings(s: &str) { assert!(s.contains("")); for i in 0..s.len() { for j in range(i+1, s.len() + 1) { assert!(s.contains(&s[i..j])); } } } #[test] fn strslice_issue_16589() { assert!("bananas".contains("nana")); // prior to the fix for #16589, x.contains("abcdabcd") returned false // test all substrings for good measure check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd"); } #[test] fn strslice_issue_16878() { assert!(!"1234567ah012345678901ah".contains("hah")); assert!(!"00abc01234567890123456789abc".contains("bcabc")); } #[test] fn test_strslice_contains() { let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'"; check_contains_all_substrings(x); } #[test] fn test_rsplitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let mut split: Vec<&str> = data.rsplitn(3, ' ').collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); let mut split: Vec<&str> = data.rsplitn(3, |c: char| c == ' ').collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let mut split: Vec<&str> = data.rsplitn(3, 'ä').collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); let mut split: Vec<&str> = data.rsplitn(3, |c: char| c == 'ä').collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn test_split_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split(' ').collect(); assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let split: Vec<&str> = data.split(|c: char| c == ' ').collect(); assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let split: Vec<&str> = data.split('ä').collect(); assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split('ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let split: Vec<&str> = data.split(|c: char| c == 'ä').collect(); assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn 
test_rev_split_char_iterator_no_trailing() {<|fim▁hole|> let mut split: Vec<&str> = data.split('\n').rev().collect(); split.reverse(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]); let mut split: Vec<&str> = data.split_terminator('\n').rev().collect(); split.reverse(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]); } #[test] fn test_utf16_code_units() { use unicode::str::Utf16Encoder; assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(), vec![0xE9, 0xD83D, 0xDCA9]) }<|fim▁end|>
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
<|file_name|>setup.go<|end_file_name|><|fim▁begin|>package loadbalance import ( "fmt"<|fim▁hole|> "github.com/coredns/caddy" "github.com/inverse-inc/packetfence/go/coredns/core/dnsserver" "github.com/inverse-inc/packetfence/go/coredns/plugin" clog "github.com/inverse-inc/packetfence/go/coredns/plugin/pkg/log" ) var log = clog.NewWithPlugin("loadbalance") func init() { plugin.Register("loadbalance", setup) } func setup(c *caddy.Controller) error { err := parse(c) if err != nil { return plugin.Error("loadbalance", err) } dnsserver.GetConfig(c).AddPlugin(func(next plugin.Handler) plugin.Handler { return RoundRobin{Next: next} }) return nil } func parse(c *caddy.Controller) error { for c.Next() { args := c.RemainingArgs() switch len(args) { case 0: return nil case 1: if args[0] != "round_robin" { return fmt.Errorf("unknown policy: %s", args[0]) } return nil } } return c.ArgErr() }<|fim▁end|>
<|file_name|>DeleteDir.java<|end_file_name|><|fim▁begin|>package ncku.hpds.hadoop.fedhdfs.shell; import java.io.BufferedOutputStream; import java.io.ObjectInputStream; import java.net.InetSocketAddress; import java.net.Socket; import ncku.hpds.hadoop.fedhdfs.GlobalNamespaceObject; public class DeleteDir { private String SNaddress = "127.0.0.1"; private int SNport = 8765; private int port = 8764; public void rmGlobalFileFromGN(String command, String globalFileName) { Socket client = new Socket(); ObjectInputStream ObjectIn; InetSocketAddress isa = new InetSocketAddress(this.SNaddress, this.port); try { client.connect(isa, 10000); ObjectIn = new ObjectInputStream(client.getInputStream()); // received object GlobalNamespaceObject GN = new GlobalNamespaceObject(); try { GN = (GlobalNamespaceObject) ObjectIn.readObject();<|fim▁hole|> } //doing... if (GN.getGlobalNamespace().getLogicalDrive().getLogicalMappingTable().containsKey(globalFileName)) { Socket SNclient = new Socket(); InetSocketAddress SNisa = new InetSocketAddress(this.SNaddress, this.SNport); try { SNclient.connect(SNisa, 10000); BufferedOutputStream out = new BufferedOutputStream(SNclient .getOutputStream()); // send message String message = command + " " + globalFileName; out.write(message.getBytes()); out.flush(); out.close(); out = null; SNclient.close(); SNclient = null; } catch (java.io.IOException e) { System.out.println("Socket connect error"); System.out.println("IOException :" + e.toString()); } } else { System.out.println("Error: " + globalFileName + " not found "); } ObjectIn.close(); ObjectIn = null; client.close(); } catch (java.io.IOException e) { System.out.println("Socket connection error"); System.out.println("IOException :" + e.toString()); } System.out.println("globalFileName : " + globalFileName); } }<|fim▁end|>
} catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace();
<|file_name|>configuration.py<|end_file_name|><|fim▁begin|>"""Global variables for testing.""" from pathlib import Path from calcipy.file_helpers import delete_dir, ensure_dir from calcipy.log_helpers import activate_debug_logging from recipes import __pkg_name__ <|fim▁hole|>activate_debug_logging(pkg_names=[__pkg_name__], clear_log=True) TEST_DIR = Path(__file__).resolve().parent """Path to the `test` directory that contains this file and all other tests.""" TEST_DATA_DIR = TEST_DIR / 'data' """Path to subdirectory with test data within the Test Directory.""" TEST_TMP_CACHE = TEST_DIR / '_tmp_cache' """Path to the temporary cache folder in the Test directory.""" def clear_test_cache() -> None: """Remove the test cache directory if present.""" delete_dir(TEST_TMP_CACHE) ensure_dir(TEST_TMP_CACHE)<|fim▁end|>
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package http import ( "net/http" "time" "crypto/tls" "net" "sync" "github.com/Tecsisa/foulkon/api" "github.com/Tecsisa/foulkon/foulkon" "github.com/julienschmidt/httprouter" "github.com/kylelemons/godebug/pretty" ) type ReloadHandlerFunc func(watch *ProxyServer) bool // ProxyServer struct with reload Handler extension type ProxyServer struct { certFile string keyFile string resourceLock sync.Mutex reloadFunc ReloadHandlerFunc refreshTime time.Duration reloadServe chan struct{} currentResources []api.ProxyResource http.Server } // WorkerServer struct type WorkerServer struct { certFile string keyFile string http.Server } // Server interface that WorkerServer and ProxyServer have to implement type Server interface { Run() error Configuration() error } // Run starts an HTTP WorkerServer func (ws *WorkerServer) Run() error { var err error if ws.certFile != "" || ws.keyFile != "" { err = ws.ListenAndServeTLS(ws.certFile, ws.keyFile) } else { err = ws.ListenAndServe() } return err } // Configuration an HTTP ProxyServer with a given address func (ps *ProxyServer) Configuration() error { if ps.certFile != "" || ps.keyFile != "" { if ps.Addr == "" { ps.Addr = ":https" } if !strSliceContains(ps.TLSConfig.NextProtos, "http/1.1") { ps.TLSConfig.NextProtos = append(ps.TLSConfig.NextProtos, "http/1.1") } configHasCert := len(ps.TLSConfig.Certificates) > 0 || ps.TLSConfig.GetCertificate != nil if !configHasCert || ps.certFile != "" || ps.keyFile != "" { var err error ps.TLSConfig.Certificates = make([]tls.Certificate, 1) ps.TLSConfig.Certificates[0], err = tls.LoadX509KeyPair(ps.certFile, ps.keyFile) if err != nil { return err } } } if ps.Addr == "" { ps.Addr = ":http" } return nil } // Configuration an HTTP WorkerServer func (ws *WorkerServer) Configuration() error { return nil } // Run starts an HTTP ProxyServer func (ps *ProxyServer) Run() error { // Call reloadFunc every refreshTime timer := time.NewTicker(ps.refreshTime) // now wait for the other times when we needed to go func() { for range timer.C { // change the handler if ps.reloadFunc(ps) { ps.reloadServe <- struct{}{} // reset the listening binding } } }() var err error ln, err := net.Listen("tcp", ps.Addr) if err != nil { return err } for { l := ln.(*net.TCPListener) defer l.Close() go func(l net.Listener) { err = ps.Serve(l) }(l) if err != nil { return err } <-ps.reloadServe } } // NewProxy returns a new ProxyServer func NewProxy(proxy *foulkon.Proxy) Server { // Initialization ps := new(ProxyServer) ps.reloadServe = make(chan struct{}, 1) ps.TLSConfig = &tls.Config{} // Set Proxy parameters ps.certFile = proxy.CertFile ps.keyFile = proxy.KeyFile ps.Addr = proxy.Host + ":" + proxy.Port ps.refreshTime = proxy.RefreshTime ps.reloadFunc = ps.RefreshResources(proxy) ps.reloadFunc(ps) return ps } // NewWorker returns a new WorkerServer func NewWorker(worker *foulkon.Worker, h http.Handler) Server { ws := new(WorkerServer) ws.certFile = worker.CertFile ws.keyFile = worker.KeyFile ws.Addr = worker.Host + ":" + worker.Port ws.Handler = h return ws } // RefreshResources implements reloadFunc func (ps *ProxyServer) RefreshResources(proxy *foulkon.Proxy) func(s *ProxyServer) bool { return func(srv *ProxyServer) bool { proxyHandler := ProxyHandler{proxy: proxy, client: http.DefaultClient} // Get proxy resources newProxyResources, err := proxy.ProxyApi.GetProxyResources() if err != nil { api.Log.Errorf("Unexpected error reading proxy resources from database %v", err) return false } if diff := 
pretty.Compare(srv.currentResources, newProxyResources); diff != "" { router := httprouter.New() defer srv.resourceLock.Unlock() srv.resourceLock.Lock() // writer lock ps.currentResources = newProxyResources api.Log.Info("Updating resources ...") for _, pr := range newProxyResources { // Clean path pr.Resource.Path = httprouter.CleanPath(pr.Resource.Path) // Attach resource safeRouterAdderHandler(router, pr, &proxyHandler) } // TODO: test when resources are empty // If we had resources and those were deleted then handler must be // created with empty router. ps.Server.Handler = router return true } return false } } // Method to control when router has a resource already defined that collides with another func safeRouterAdderHandler(router *httprouter.Router, pr api.ProxyResource, ph *ProxyHandler) {<|fim▁hole|> }() router.Handle(pr.Resource.Method, pr.Resource.Path, ph.HandleRequest(pr)) } func strSliceContains(ss []string, s string) bool { for _, v := range ss { if v == s { return true } } return false }<|fim▁end|>
defer func() { if r := recover(); r != nil { api.Log.Errorf("There was a problem adding proxy resource with name %v and org %v: %v", pr.Name, pr.Org, r) }
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """Assorted base data structures. Assorted base data structures provide a generic communicator with V-REP simulator. """ from vrepsim.simulator import get_default_simulator <|fim▁hole|> class Communicator(object): """Generic communicator with V-REP simulator.""" def __init__(self, vrep_sim): if vrep_sim is not None: self._vrep_sim = vrep_sim else: self._vrep_sim = get_default_simulator(raise_on_none=True) @property def client_id(self): """Client ID.""" return self._vrep_sim.client_id @property def vrep_sim(self): """Interface to V-REP remote API server.""" return self._vrep_sim<|fim▁end|>
<|file_name|>repo_test.go<|end_file_name|><|fim▁begin|><|fim▁hole|> import ( jc "github.com/juju/testing/checkers" gc "gopkg.in/check.v1" "gopkg.in/juju/charm.v6-unstable" "gopkg.in/juju/charmrepo.v0" "gopkg.in/juju/charmrepo.v0/csclient" charmtesting "gopkg.in/juju/charmrepo.v0/testing" ) var TestCharms = charmtesting.NewRepo("internal/test-charm-repo", "quantal") type inferRepoSuite struct{} var _ = gc.Suite(&inferRepoSuite{}) var inferRepositoryTests = []struct { url string localRepoPath string err string }{{ url: "cs:trusty/django", }, { url: "local:precise/wordpress", err: "path to local repository not specified", }, { url: "local:precise/haproxy-47", localRepoPath: "/tmp/repo-path", }} func (s *inferRepoSuite) TestInferRepository(c *gc.C) { for i, test := range inferRepositoryTests { c.Logf("test %d: %s", i, test.url) ref := charm.MustParseReference(test.url) repo, err := charmrepo.InferRepository( ref, charmrepo.NewCharmStoreParams{}, test.localRepoPath) if test.err != "" { c.Assert(err, gc.ErrorMatches, test.err) c.Assert(repo, gc.IsNil) continue } c.Assert(err, jc.ErrorIsNil) switch store := repo.(type) { case *charmrepo.LocalRepository: c.Assert(store.Path, gc.Equals, test.localRepoPath) case *charmrepo.CharmStore: c.Assert(store.URL(), gc.Equals, csclient.ServerURL) default: c.Fatal("unknown repository type") } } }<|fim▁end|>
// Copyright 2012, 2013 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package charmrepo_test
<|file_name|>04_dE_VCM_bending.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- r""" Bending of collimating mirror ----------------------------- Uses :mod:`shadow` backend. File: `\\examples\\withShadow\\03\\03_DCM_energy.py` Influence onto energy resolution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pictures after monochromator, :ref:`type 2 of global normalization<globalNorm>`. The nominal radius is 7.4 km. Watch the energy distribution when the bending radius is smaller or greater than the nominal one. +---------+---------+---------+---------+ | |VCMR1| | |VCMR2| | |VCMR3| | | +---------+---------+---------+ |VCMR4| | | |VCMR7| | |VCMR6| | |VCMR5| | | +---------+---------+---------+---------+ .. |VCMR1| image:: _images/03VCM_R0496453_norm2.* :scale: 35 % .. |VCMR2| image:: _images/03VCM_R0568297_norm2.* :scale: 35 % .. |VCMR3| image:: _images/03VCM_R0650537_norm2.* :scale: 35 % .. |VCMR4| image:: _images/03VCM_R0744680_norm2.* :scale: 35 % :align: middle .. |VCMR5| image:: _images/03VCM_R0852445_norm2.* :scale: 35 % .. |VCMR6| image:: _images/03VCM_R0975806_norm2.* :scale: 35 % .. |VCMR7| image:: _images/03VCM_R1117020_norm2.* :scale: 35 % Influence onto focusing ~~~~~~~~~~~~~~~~~~~~~~~ Pictures at the sample position, :ref:`type 1 of global normalization<globalNorm>` +----------+----------+----------+----------+ | |VCMRF1| | |VCMRF2| | |VCMRF3| | | +----------+----------+----------+ |VCMRF4| | | |VCMRF7| | |VCMRF6| | |VCMRF5| | | +----------+----------+----------+----------+ .. |VCMRF1| image:: _images/04VCM_R0496453_norm1.* :scale: 35 % .. |VCMRF2| image:: _images/04VCM_R0568297_norm1.* :scale: 35 % .. |VCMRF3| image:: _images/04VCM_R0650537_norm1.* :scale: 35 % .. |VCMRF4| image:: _images/04VCM_R0744680_norm1.* :scale: 35 % :align: middle .. |VCMRF5| image:: _images/04VCM_R0852445_norm1.* :scale: 35 % .. |VCMRF6| image:: _images/04VCM_R0975806_norm1.* :scale: 35 % .. |VCMRF7| image:: _images/04VCM_R1117020_norm1.* :scale: 35 % """ __author__ = "Konstantin Klementiev" __date__ = "1 Mar 2012" import sys sys.path.append(r"c:\Alba\Ray-tracing\with Python") import numpy as np import xrt.plotter as xrtp import xrt.runner as xrtr import xrt.backends.shadow as shadow def main(): plot1 = xrtp.XYCPlot('star.03') plot1.caxis.offset = 6000 plot2 = xrtp.XYCPlot('star.04') plot2.caxis.offset = 6000 plot1.xaxis.limits = [-15, 15] plot1.yaxis.limits = [-15, 15] plot1.yaxis.factor *= -1 plot2.xaxis.limits = [-1, 1] plot2.yaxis.limits = [-1, 1] plot2.yaxis.factor *= -1 textPanel1 = plot1.fig.text( 0.89, 0.82, '', transform=plot1.fig.transFigure, size=14, color='r', ha='center') textPanel2 = plot2.fig.text( 0.89, 0.82, '', transform=plot2.fig.transFigure, size=14, color='r', ha='center') #========================================================================== threads = 4 #========================================================================== start01 = shadow.files_in_tmp_subdirs('start.01', threads) start04 = shadow.files_in_tmp_subdirs('start.04', threads) rmaj0 = 476597.0 shadow.modify_input(start04, ('R_MAJ', str(rmaj0))) angle = 4.7e-3 tIncidence = 90 - angle * 180 / np.pi shadow.modify_input( start01, ('T_INCIDENCE', str(tIncidence)), ('T_REFLECTION', str(tIncidence))) shadow.modify_input( start04, ('T_INCIDENCE', str(tIncidence)), ('T_REFLECTION', str(tIncidence))) rmirr0 = 744680. 
def plot_generator(): for rmirr in np.logspace(-1., 1., 7, base=1.5) * rmirr0: shadow.modify_input(start01, ('RMIRR', str(rmirr))) filename = 'VCM_R%07i' % rmirr filename03 = '03' + filename<|fim▁hole|> plot2.title = filename04 plot1.saveName = [filename03 + '.pdf', filename03 + '.png'] plot2.saveName = [filename04 + '.pdf', filename04 + '.png'] # plot1.persistentName = filename03 + '.pickle' # plot2.persistentName = filename04 + '.pickle' textToSet = 'collimating\nmirror\n$R =$ %.1f km' % (rmirr * 1e-5) textPanel1.set_text(textToSet) textPanel2.set_text(textToSet) yield def after(): # import subprocess # subprocess.call(["python", "05-VFM-bending.py"], # cwd='/home/kklementiev/Alba/Ray-tracing/with Python/05-VFM-bending') pass xrtr.run_ray_tracing( [plot1, plot2], repeats=640, updateEvery=2, energyRange=[5998, 6002], generator=plot_generator, threads=threads, globalNorm=True, afterScript=after, backend='shadow') #this is necessary to use multiprocessing in Windows, otherwise the new Python #contexts cannot be initialized: if __name__ == '__main__': main()<|fim▁end|>
filename04 = '04' + filename plot1.title = filename03
<|file_name|>fig6.py<|end_file_name|><|fim▁begin|>"""Produce contact map for Figure 5D from the PySB publication""" from __future__ import print_function import pysb.integrate import pysb.util import numpy as np import scipy.optimize import scipy.interpolate import matplotlib.pyplot as plt import os import sys import inspect from earm.lopez_embedded import model # List of model observables and corresponding data file columns for # point-by-point fitting obs_names = ['mBid', 'cPARP'] data_names = ['norm_ICRP', 'norm_ECRP'] var_names = ['nrm_var_ICRP', 'nrm_var_ECRP'] # Load experimental data file data_path = os.path.join(os.path.dirname(__file__), 'fig6_data.csv') exp_data = np.genfromtxt(data_path, delimiter=',', names=True) # Model observable corresponding to the IMS-RP reporter (MOMP timing) momp_obs = 'aSmac' # Mean and variance of Td (delay time) and Ts (switching time) of MOMP, and # yfinal (the last value of the IMS-RP trajectory) momp_data = np.array([9810.0, 180.0, 1.0]) momp_var = np.array([7245000.0, 3600.0, 1e-9]) # Build time points for the integrator, using the same time scale as the # experimental data but with greater resolution to help the integrator converge. ntimes = len(exp_data['Time']) # Factor by which to increase time resolution tmul = 10 # Do the sampling such that the original experimental timepoints can be # extracted with a slice expression instead of requiring interpolation. tspan = np.linspace(exp_data['Time'][0], exp_data['Time'][-1], (ntimes-1) * tmul + 1) # Initialize solver object solver = pysb.integrate.Solver(model, tspan, rtol=1e-5, atol=1e-5) # Get parameters for rates only rate_params = model.parameters_rules() # Build a boolean mask for those params against the entire param list rate_mask = np.array([p in rate_params for p in model.parameters]) # Build vector of nominal parameter values from the model nominal_values = np.array([p.value for p in model.parameters]) # Set the radius of a hypercube bounding the search space bounds_radius = 2 def objective_func(x, rate_mask, lb, ub): caller_frame, _, _, caller_func, _, _ = inspect.stack()[1] if caller_func in {'anneal', '_minimize_anneal'}: caller_locals = caller_frame.f_locals if caller_locals['n'] == 1: print(caller_locals['best_state'].cost, caller_locals['current_state'].cost) # Apply hard bounds if np.any((x < lb) | (x > ub)): print("bounds-check failed") return np.inf # Simulate model with rates taken from x (which is log transformed) param_values = np.array([p.value for p in model.parameters]) param_values[rate_mask] = 10 ** x solver.run(param_values) # Calculate error for point-by-point trajectory comparisons e1 = 0 for obs_name, data_name, var_name in zip(obs_names, data_names, var_names): # Get model observable trajectory (this is the slice expression # mentioned above in the comment for tspan) ysim = solver.yobs[obs_name][::tmul] # Normalize it to 0-1 ysim_norm = ysim / np.nanmax(ysim) # Get experimental measurement and variance ydata = exp_data[data_name] yvar = exp_data[var_name] # Compute error between simulation and experiment (chi-squared) e1 += np.sum((ydata - ysim_norm) ** 2 / (2 * yvar)) / len(ydata) # Calculate error for Td, Ts, and final value for IMS-RP reporter # ===== # Normalize trajectory<|fim▁hole|> ysim_momp = solver.yobs[momp_obs] ysim_momp_norm = ysim_momp / np.nanmax(ysim_momp) # Build a spline to interpolate it st, sc, sk = scipy.interpolate.splrep(solver.tspan, ysim_momp_norm) # Use root-finding to find the point where trajectory reaches 10% and 90% t10 = 
scipy.interpolate.sproot((st, sc-0.10, sk))[0] t90 = scipy.interpolate.sproot((st, sc-0.90, sk))[0] # Calculate Td as the mean of these times td = (t10 + t90) / 2 # Calculate Ts as their difference ts = t90 - t10 # Get yfinal, the last element from the trajectory yfinal = ysim_momp_norm[-1] # Build a vector of the 3 variables to fit momp_sim = [td, ts, yfinal] # Perform chi-squared calculation against mean and variance vectors e2 = np.sum((momp_data - momp_sim) ** 2 / (2 * momp_var)) / 3 # Calculate error for final cPARP value (ensure all PARP is cleaved) cparp_final = model.parameters['PARP_0'].value cparp_final_var = .01 cparp_final_sim = solver.yobs['cPARP'][-1] e3 = (cparp_final - cparp_final_sim) ** 2 / (2 * cparp_final_var) error = e1 + e2 + e3 return error def estimate(start_values=None): """Estimate parameter values by fitting to data. Parameters ========== parameter_values : numpy array of floats, optional Starting parameter values. Taken from model's nominal parameter values if not specified. Returns ======= numpy array of floats, containing fitted parameter values. """ # Set starting position to nominal parameter values if not specified if start_values is None: start_values = nominal_values else: assert start_values.shape == nominal_values.shape # Log-transform the starting position x0 = np.log10(start_values[rate_mask]) # Displacement size for annealing moves dx = .02 # The default 'fast' annealing schedule uses the 'lower' and 'upper' # arguments in a somewhat counterintuitive way. See # http://projects.scipy.org/scipy/ticket/1126 for more information. This is # how to get the search to start at x0 and use a displacement on the order # of dx (note that this will affect the T0 estimation which *does* expect # lower and upper to be the absolute expected bounds on x). lower = x0 - dx / 2 upper = x0 + dx / 2 # Log-transform the rate parameter values xnominal = np.log10(nominal_values[rate_mask]) # Hard lower and upper bounds on x lb = xnominal - bounds_radius ub = xnominal + bounds_radius # Perform the annealing args = [rate_mask, lb, ub] (xmin, Jmin, Tfinal, feval, iters, accept, retval) = \ scipy.optimize.anneal(objective_func, x0, full_output=True, maxiter=4000, quench=0.5, lower=lower, upper=upper, args=args) # Construct vector with resulting parameter values (un-log-transformed) params_estimated = start_values.copy() params_estimated[rate_mask] = 10 ** xmin # Display annealing results for v in ('xmin', 'Jmin', 'Tfinal', 'feval', 'iters', 'accept', 'retval'): print("%s: %s" % (v, locals()[v])) return params_estimated def display(params_estimated): # Simulate model with nominal parameters and construct a matrix of the # trajectories of the observables of interest, normalized to 0-1. 
solver.run() obs_names_disp = ['mBid', 'aSmac', 'cPARP'] obs_totals = [model.parameters[n].value for n in ('Bid_0', 'Smac_0', 'PARP_0')] sim_obs = solver.yobs[obs_names_disp].view(float).reshape(len(solver.yobs), -1) sim_obs_norm = (sim_obs / obs_totals).T # Do the same with the estimated parameters solver.run(params_estimated) sim_est_obs = solver.yobs[obs_names_disp].view(float).reshape(len(solver.yobs), -1) sim_est_obs_norm = (sim_est_obs / obs_totals).T # Plot data with simulation trajectories both before and after fitting color_data = '#C0C0C0' color_orig = '#FAAA6A' color_est = '#83C98E' plt.subplot(311) plt.errorbar(exp_data['Time'], exp_data['norm_ICRP'], yerr=exp_data['nrm_var_ICRP']**0.5, c=color_data, linewidth=2, elinewidth=0.5) plt.plot(solver.tspan, sim_obs_norm[0], color_orig, linewidth=2) plt.plot(solver.tspan, sim_est_obs_norm[0], color_est, linewidth=2) plt.ylabel('Fraction of\ncleaved IC-RP/Bid', multialignment='center') plt.axis([0, 20000, -0.2, 1.2]) plt.subplot(312) plt.vlines(momp_data[0], -0.2, 1.2, color=color_data, linewidth=2) plt.plot(solver.tspan, sim_obs_norm[1], color_orig, linewidth=2) plt.plot(solver.tspan, sim_est_obs_norm[1], color_est, linewidth=2) plt.ylabel('Td / Fraction of\nreleased Smac', multialignment='center') plt.axis([0, 20000, -0.2, 1.2]) plt.subplot(313) plt.errorbar(exp_data['Time'], exp_data['norm_ECRP'], yerr=exp_data['nrm_var_ECRP']**0.5, c=color_data, linewidth=2, elinewidth=0.5) plt.plot(solver.tspan, sim_obs_norm[2], color_orig, linewidth=2) plt.plot(solver.tspan, sim_est_obs_norm[2], color_est, linewidth=2) plt.ylabel('Fraction of\ncleaved EC-RP/PARP', multialignment='center') plt.xlabel('Time (s)') plt.axis([0, 20000, -0.2, 1.2]) plt.show() if __name__ == '__main__': params_estimated = None try: earm_path = sys.modules['earm'].__path__[0] fit_file = os.path.join(earm_path, '..', 'EARM_2_0_M1a_fitted_params.txt') params_estimated = np.genfromtxt(fit_file)[:,1].copy() except IOError: pass if params_estimated is None: np.random.seed(1) params_estimated = estimate() display(params_estimated)<|fim▁end|>
<|file_name|>date-from-array.js<|end_file_name|><|fim▁begin|>export function createDate(y, m, d, h, M, s, ms) { // can't just apply() to create a date: // https://stackoverflow.com/q/181348 var date = new Date(y, m, d, h, M, s, ms); // the date constructor remaps years 0-99 to 1900-1999 if (y < 100 && y >= 0 && isFinite(date.getFullYear())) { date.setFullYear(y); } return date;<|fim▁hole|> var date = new Date(Date.UTC.apply(null, arguments)); // the Date.UTC function remaps years 0-99 to 1900-1999 if (y < 100 && y >= 0 && isFinite(date.getUTCFullYear())) { date.setUTCFullYear(y); } return date; }<|fim▁end|>
} export function createUTCDate(y) {
<|file_name|>0006_auto_20151211_0747.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import markupfield.fields class Migration(migrations.Migration): dependencies = [ ('cal', '0005_attachment_comment'), ] operations = [ migrations.AddField( model_name='task', name='_body_rendered', field=models.TextField(default='', editable=False), preserve_default=False, ), migrations.AddField( model_name='task', name='body', field=markupfield.fields.MarkupField(verbose_name='Body', default='', rendered_field=True),<|fim▁hole|> name='body_markup_type', field=models.CharField(default='markdown', choices=[('', '--'), ('markdown', 'markdown')], max_length=30), ), ]<|fim▁end|>
preserve_default=False, ), migrations.AddField( model_name='task',
<|file_name|>tool_consumer.py<|end_file_name|><|fim▁begin|>from requests import Request from oauthlib.common import unquote from requests_oauthlib import OAuth1 from requests_oauthlib.oauth1_auth import SIGNATURE_TYPE_BODY from tool_base import ToolBase from launch_params import LAUNCH_PARAMS_REQUIRED from utils import parse_qs, InvalidLTIConfigError, generate_identifier class ToolConsumer(ToolBase): def __init__(self, consumer_key, consumer_secret, params=None, launch_url=None): ''' Create new ToolConsumer. ''' # allow launch_url to be specified in launch_params for # backwards compatibility if launch_url is None: if 'launch_url' not in params: raise InvalidLTIConfigError('missing \'launch_url\' arg!') else: launch_url = params['launch_url'] del params['launch_url'] self.launch_url = launch_url super(ToolConsumer, self).__init__(consumer_key, consumer_secret, params=params) def has_required_params(self): return all([ self.launch_params.get(x) for x in LAUNCH_PARAMS_REQUIRED ]) def generate_launch_request(self, **kwargs): """ returns a Oauth v1 "signed" requests.PreparedRequest instance """ if not self.has_required_params(): raise InvalidLTIConfigError( 'Consumer\'s launch params missing one of ' \ + str(LAUNCH_PARAMS_REQUIRED) ) # if 'oauth_consumer_key' not in self.launch_params: # self.launch_params['oauth_consumer_key'] = self.consumer_key params = self.to_params() r = Request('POST', self.launch_url, data=params).prepare() sign = OAuth1(self.consumer_key, self.consumer_secret, signature_type=SIGNATURE_TYPE_BODY, **kwargs) return sign(r) <|fim▁hole|> def generate_launch_data(self, **kwargs): """ Provided for backwards compatibility """ r = self.generate_launch_request(**kwargs) return parse_qs(unquote(r.body)) def set_config(self, config): ''' Set launch data from a ToolConfig. ''' if self.launch_url == None: self.launch_url = config.launch_url self.launch_params.update(config.custom_params)<|fim▁end|>
<|file_name|>darknet53_test.py<|end_file_name|><|fim▁begin|>import pytoolkit as tk module = tk.applications.darknet53<|fim▁hole|> def test_model(): model = module.create(input_shape=(256, 256, 3), weights=None) assert tuple(module.get_1_over_1(model).shape[1:3]) == (256, 256) assert tuple(module.get_1_over_2(model).shape[1:3]) == (128, 128) assert tuple(module.get_1_over_4(model).shape[1:3]) == (64, 64) assert tuple(module.get_1_over_8(model).shape[1:3]) == (32, 32) assert tuple(module.get_1_over_16(model).shape[1:3]) == (16, 16) assert tuple(module.get_1_over_32(model).shape[1:3]) == (8, 8) def test_save_load(tmpdir): model = module.create(input_shape=(256, 256, 3), weights=None) tk.models.save(model, str(tmpdir / "model.h5")) tk.models.load(str(tmpdir / "model.h5"))<|fim▁end|>
<|file_name|>StackTraceSimplifier.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package brooklyn.util.javalang; import java.io.PrintWriter; import java.io.StringWriter; import java.util.Arrays; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.util.text.Strings; import com.google.common.collect.ImmutableSet; /** * Utility class for cleaning up stacktraces. */ public class StackTraceSimplifier { private static final Logger log = LoggerFactory.getLogger(StackTraceSimplifier.class); /** comma-separated prefixes (not regexes) */ public static final String DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME = "brooklyn.util.javalang.StackTraceSimplifier.blacklist"; /** @deprecated since 0.6.0 use {@link #DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME} */ @Deprecated public static final String LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME = "groovy.sanitized.stacktraces"; private static final Collection<String> DEFAULT_BLACKLIST; static { ImmutableSet.Builder<String> blacklist = ImmutableSet.builder(); blacklist.addAll(Arrays.asList( System.getProperty(DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME, "java.," + "javax.," + "sun.," + "groovy.," + "org.codehaus.groovy.," + "gjdk.groovy.," ).split("(\\s|,)+"))); String legacyDefaults = System.getProperty(LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME);<|fim▁hole|> if (Strings.isNonBlank(legacyDefaults)) { log.warn("Detected ude of legacy system property "+LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME); blacklist.addAll(Arrays.asList(legacyDefaults.split("(\\s|,)+"))); } DEFAULT_BLACKLIST = blacklist.build(); } private static final StackTraceSimplifier DEFAULT_INSTACE = newInstance(); private final Collection<String> blacklist; protected StackTraceSimplifier() { this(true); } protected StackTraceSimplifier(boolean includeDefaultBlacklist, String ...packages) { ImmutableSet.Builder<String> blacklistB = ImmutableSet.builder(); if (includeDefaultBlacklist) blacklistB.addAll(DEFAULT_BLACKLIST); blacklistB.add(packages); blacklist = blacklistB.build(); } public static StackTraceSimplifier newInstance() { return new StackTraceSimplifier(); } public static StackTraceSimplifier newInstance(String ...additionalBlacklistPackagePrefixes) { return new StackTraceSimplifier(true, additionalBlacklistPackagePrefixes); } public static StackTraceSimplifier newInstanceExcludingOnly(String ...blacklistPackagePrefixes) { return new StackTraceSimplifier(false, blacklistPackagePrefixes); } /** @return whether the given element is useful, that is, not in the blacklist */ public boolean isUseful(StackTraceElement el) { for (String s: blacklist){ if (el.getClassName().startsWith(s)) return false;; // gets underscores in some contexts ? 
if (el.getClassName().replace('_', '.').startsWith(s)) return false; } return true; } /** @return new list containing just the {@link #isUseful(StackTraceElement)} stack trace elements */ public List<StackTraceElement> clean(Iterable<StackTraceElement> st) { List<StackTraceElement> result = new LinkedList<StackTraceElement>(); for (StackTraceElement element: st){ if (isUseful(element)){ result.add(element); } } return result; } /** @return new array containing just the {@link #isUseful(StackTraceElement)} stack trace elements */ public StackTraceElement[] clean(StackTraceElement[] st) { List<StackTraceElement> result = clean(Arrays.asList(st)); return result.toArray(new StackTraceElement[result.size()]); } /** @return first {@link #isUseful(StackTraceElement)} stack trace elements, or null */ public StackTraceElement firstUseful(StackTraceElement[] st) { return nthUseful(0, st); } /** @return (n+1)th {@link #isUseful(StackTraceElement)} stack trace elements (ie 0 is {@link #firstUseful(StackTraceElement[])}), or null */ public StackTraceElement nthUseful(int n, StackTraceElement[] st) { for (StackTraceElement element: st){ if (isUseful(element)) { if (n==0) return element; n--; } } return null; } /** {@link #clean(StackTraceElement[])} the given throwable instance, returning the same instance for convenience */ public <T extends Throwable> T cleaned(T t) { t.setStackTrace(clean(t.getStackTrace())); return t; } // ---- statics /** static convenience for {@link #isUseful(StackTraceElement)} */ public static boolean isStackTraceElementUseful(StackTraceElement el) { return DEFAULT_INSTACE.isUseful(el); } /** static convenience for {@link #clean(Iterable)} */ public static List<StackTraceElement> cleanStackTrace(Iterable<StackTraceElement> st) { return DEFAULT_INSTACE.clean(st); } /** static convenience for {@link #clean(StackTraceElement[])} */ public static StackTraceElement[] cleanStackTrace(StackTraceElement[] st) { return DEFAULT_INSTACE.clean(st); } /** static convenience for {@link #cleaned(Throwable)} */ public static <T extends Throwable> T cleanedStackTrace(T t) { return DEFAULT_INSTACE.cleaned(t); } public static String toString(Throwable t) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); t.printStackTrace(pw); return sw.getBuffer().toString(); } }<|fim▁end|>
<|file_name|>foo.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(lang_items, no_std, libc)] #![no_std] #![crate_type = "dylib"] extern crate libc;<|fim▁hole|>pub extern fn foo() {} #[lang = "stack_exhausted"] fn stack_exhausted() {} #[lang = "eh_personality"] fn eh_personality() {} #[lang = "panic_fmt"] fn panic_fmt() -> ! { loop {} }<|fim▁end|>
#[no_mangle]
<|file_name|>database_reset.rs<|end_file_name|><|fim▁begin|>#[cfg(feature = "postgres")] extern crate url; use crate::support::{database, project}; #[test] fn reset_drops_the_database() { let p = project("reset_drops_the_database") .folder("migrations") .build(); let db = database(&p.database_url()).create(); db.execute("CREATE TABLE posts ( id INTEGER )"); assert!(db.table_exists("posts")); let result = p.command("database").arg("reset").run(); assert!(result.is_success(), "Result was unsuccessful {:?}", result); assert!(!db.table_exists("posts")); } #[test] fn reset_runs_database_setup() { let p = project("reset_runs_database_setup") .folder("migrations") .build(); let db = database(&p.database_url()).create(); db.execute("CREATE TABLE posts ( id INTEGER )"); db.execute("CREATE TABLE users ( id INTEGER )"); p.create_migration( "12345_create_users_table", "CREATE TABLE users ( id INTEGER )", "DROP TABLE users", ); assert!(db.table_exists("posts")); assert!(db.table_exists("users")); let result = p.command("database").arg("reset").run(); assert!(result.is_success(), "Result was unsuccessful {:?}", result); assert!(!db.table_exists("posts")); assert!(db.table_exists("users")); assert!(db.table_exists("__diesel_schema_migrations")); } #[test] #[cfg(feature = "postgres")] fn reset_handles_postgres_urls_with_username_and_password() { let p = project("handles_postgres_urls") .folder("migrations") .build(); let db = database(&p.database_url()).create(); db.execute("DROP ROLE IF EXISTS foo"); db.execute("CREATE ROLE foo WITH LOGIN SUPERUSER PASSWORD 'password'"); let database_url = { let mut new_url = url::Url::parse(&p.database_url()).expect("invalid url"); new_url.set_username("foo").expect("could not set username"); new_url .set_password(Some("password")) .expect("could not set password"); new_url.to_string() }; let result = p .command("database") .arg("reset") .env("DATABASE_URL", &database_url) .run(); assert!( result.is_success(), "Result was unsuccessful {:?}", result.stdout() ); assert!( result.stdout().contains("Dropping database:"), "Unexpected stdout {}", result.stdout() ); assert!( result.stdout().contains("Creating database:"), "Unexpected stdout {}", result.stdout() ); } #[test] fn reset_works_with_migration_dir_by_arg() { let p = project("reset_works_with_migration_dir_by_arg") .folder("foo") .build(); let db = database(&p.database_url()).create(); db.execute("CREATE TABLE posts ( id INTEGER )"); db.execute("CREATE TABLE users ( id INTEGER )"); p.create_migration_in_directory( "foo", "12345_create_users_table", "CREATE TABLE users ( id INTEGER )", "DROP TABLE users", ); assert!(db.table_exists("posts")); assert!(db.table_exists("users")); let result = p .command("database") .arg("reset") .arg("--migration-dir=foo") .run(); assert!(result.is_success(), "Result was unsuccessful {:?}", result); assert!(!db.table_exists("posts")); assert!(db.table_exists("users")); assert!(db.table_exists("__diesel_schema_migrations")); } #[test] fn reset_works_with_migration_dir_by_env() { let p = project("reset_works_with_migration_dir_by_env") .folder("bar") .build(); let db = database(&p.database_url()).create(); db.execute("CREATE TABLE posts ( id INTEGER )"); db.execute("CREATE TABLE users ( id INTEGER )"); p.create_migration_in_directory( "bar", "12345_create_users_table", "CREATE TABLE users ( id INTEGER )", "DROP TABLE users", ); assert!(db.table_exists("posts")); assert!(db.table_exists("users")); let result = p .command("database") .arg("reset") .env("MIGRATION_DIRECTORY", "bar") .run(); 
assert!(result.is_success(), "Result was unsuccessful {:?}", result);<|fim▁hole|> #[test] fn reset_sanitize_database_name() { let p = project("name-with-dashes").folder("migrations").build(); let _db = database(&p.database_url()).create(); let result = p.command("database").arg("reset").run(); assert!( result.is_success(), "Result was unsuccessful {:?}", result.stdout() ); assert!( result.stdout().contains("Dropping database:"), "Unexpected stdout {}", result.stdout() ); assert!( result.stdout().contains("Creating database:"), "Unexpected stdout {}", result.stdout() ); } #[test] fn reset_updates_schema_if_config_present() { let p = project("reset_updates_schema_if_config_present") .folder("migrations") .file( "diesel.toml", r#" [print_schema] file = "src/my_schema.rs" "#, ) .build(); let result = p.command("database").arg("reset").run(); assert!(result.is_success(), "Result was unsuccessful {:?}", result); assert!(p.has_file("src/my_schema.rs")); } #[test] fn reset_respects_migrations_dir_from_diesel_toml() { let p = project("reset_respects_migrations_dir_from_diesel_toml") .folder("custom_migrations") .file( "diesel.toml", r#" [migrations_directory] dir = "custom_migrations" "#, ) .build(); let db = database(&p.database_url()).create(); db.execute("CREATE TABLE users ( id INTEGER )"); p.create_migration_in_directory( "custom_migrations", "12345_create_users_table", "CREATE TABLE users ( id INTEGER )", "DROP TABLE users", ); assert!(db.table_exists("users")); let result = p.command("database").arg("reset").run(); assert!(result.is_success(), "Result was unsuccessful {:?}", result); assert!(db.table_exists("users")); assert!(db.table_exists("__diesel_schema_migrations")); }<|fim▁end|>
assert!(!db.table_exists("posts")); assert!(db.table_exists("users")); assert!(db.table_exists("__diesel_schema_migrations")); }
<|file_name|>models.go<|end_file_name|><|fim▁begin|>// +build go1.9 // Copyright 2019 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // This code was auto-generated by: // github.com/Azure/azure-sdk-for-go/tools/profileBuilder <|fim▁hole|> import original "github.com/Azure/azure-sdk-for-go/services/marketplaceordering/mgmt/2015-06-01/marketplaceordering/marketplaceorderingapi" type MarketplaceAgreementsClientAPI = original.MarketplaceAgreementsClientAPI type OperationsClientAPI = original.OperationsClientAPI<|fim▁end|>
package marketplaceorderingapi
<|file_name|>validate.js.uncompressed.js<|end_file_name|><|fim▁begin|><|fim▁hole|>define( "dijit/form/nls/ca/validate", ({ invalidMessage: "El valor introduït no és vàlid", missingMessage: "Aquest valor és necessari", rangeMessage: "Aquest valor és fora de l'interval" }) );<|fim▁end|>
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/* eslint-disable @typescript-eslint/no-explicit-any */ // eslint-disable-next-line ember/no-computed-properties-in-native-classes import ComputedProperty from '@ember/object/computed'; export type TaskGenerator<T> = Generator<any, T, any>; export type TaskFunction<T, Args extends any[]> = ( ...args: Args ) => TaskGenerator<T>; export type TaskFunctionArgs<T extends TaskFunction<any, any[]>> = T extends ( ...args: infer A ) => TaskGenerator<any> ? A : []; export type TaskFunctionReturnType<T extends TaskFunction<any, any[]>> = T extends (...args: any[]) => TaskGenerator<infer R> ? R : unknown; export type TaskForTaskFunction<T extends TaskFunction<any, any[]>> = Task< TaskFunctionReturnType<T>, TaskFunctionArgs<T> >; export type TaskInstanceForTaskFunction<T extends TaskFunction<any, any[]>> = TaskInstance<TaskFunctionReturnType<T>>; export interface EncapsulatedTaskDescriptor<T, Args extends any[]> { perform(...args: Args): TaskGenerator<T>; } export type EncapsulatedTaskDescriptorArgs< T extends EncapsulatedTaskDescriptor<any, any[]> > = T extends { perform(...args: infer A): TaskGenerator<any> } ? A : []; export type EncapsulatedTaskDescriptorReturnType< T extends EncapsulatedTaskDescriptor<any, any[]> > = T extends { perform(...args: any[]): TaskGenerator<infer R> } ? R : unknown; // eslint-disable-next-line @typescript-eslint/ban-types export type EncapsulatedTaskState<T extends object> = Omit< T, 'perform' | keyof TaskInstance<any> >; export type TaskForEncapsulatedTaskDescriptor< T extends EncapsulatedTaskDescriptor<any, any[]> > = EncapsulatedTask< EncapsulatedTaskDescriptorReturnType<T>, EncapsulatedTaskDescriptorArgs<T>, EncapsulatedTaskState<T> >; export type TaskInstanceForEncapsulatedTaskDescriptor< T extends EncapsulatedTaskDescriptor<any, any[]> > = EncapsulatedTaskInstance< EncapsulatedTaskDescriptorReturnType<T>, EncapsulatedTaskState<T> >; interface TaskState<T extends TaskInstance<any>> { /** * `true` if any current task instances are running. */ readonly isRunning: boolean; /** * `true` if any future task instances are queued. */ readonly isQueued: boolean; /** * `true` if the task or task group is not in the running or queued state. */ readonly isIdle: boolean; /** * The current state of the task or task group: `"running"`, `"queued"` or `"idle"`. */ readonly state: 'running' | 'queued' | 'idle'; /** * The most recently started task instance. */ readonly last: T | null; /** * The most recent task instance that is currently running. */ readonly lastRunning: T | null; /** * The most recently performed task instance. */ readonly lastPerformed: T | null; /** * The most recent task instance that succeeded. */ readonly lastSuccessful: T | null; /** * The most recently completed task instance. */ readonly lastComplete: T | null; /** * The most recent task instance that errored. */ readonly lastErrored: T | null; /** * The most recently canceled task instance. */ readonly lastCanceled: T | null; /** * The most recent task instance that is incomplete. */ readonly lastIncomplete: T | null; /** * The number of times this task or task group has been performed. */ readonly performCount: number; } interface AbstractTask<Args extends any[], T extends TaskInstance<any>> extends TaskState<T> { /** * Cancels all running or queued `TaskInstance`s for this Task. * If you're trying to cancel a specific TaskInstance (rather * than all of the instances running under this task) call * `.cancel()` on the specific TaskInstance. 
* * @param options.reason A descriptive reason the task was * cancelled. Defaults to `".cancelAll() was explicitly called * on the Task"`. * @param options.resetState If true, will clear the task state * (`last*` and `performCount` properties will be set to initial * values). Defaults to false. */ cancelAll(options?: { reason?: string; resetState?: boolean }): Promise<void>; /** * Creates a new {@linkcode TaskInstance} and attempts to run it right away. * If running this task instance would increase the task's concurrency * to a number greater than the task's maxConcurrency, this task * instance might be immediately canceled (dropped), or enqueued * to run at later time, after the currently running task(s) have finished. * * @param args Arguments to pass to the task function. */ perform(...args: Args): T; /** * Flags the task as linked to the parent task's lifetime. Must be called * within another task's perform function. The task will be cancelled if the * parent task is canceled as well. * * ember-concurrency will indicate when this may be needed. */ linked(): this; /** * Flags the task as not linked to the parent task's lifetime. Must be called * within another task's perform function. The task will NOT be cancelled if the * parent task is canceled. * * This is useful for avoiding the so-called "self-cancel loop" for tasks. * ember-concurrency will indicate when this may be needed. */ unlinked(): this; } /** * The `Task` object lives on a host Ember object (e.g. * a Component, Route, or Controller). You call the * {@linkcode Task#perform .perform()} method on this object * to create run individual {@linkcode TaskInstance}s, * and at any point, you can call the {@linkcode Task#cancelAll .cancelAll()} * method on this object to cancel all running or enqueued * {@linkcode TaskInstance}s. */ // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface Task<T, Args extends any[]> extends AbstractTask<Args, TaskInstance<T>> {} // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface EncapsulatedTask< T, Args extends any[], // eslint-disable-next-line @typescript-eslint/ban-types State extends object > extends AbstractTask<Args, EncapsulatedTaskInstance<T, State>> {} /** * "Task Groups" provide a means for applying * task modifiers to groups of tasks. Once a {@linkcode Task} is declared * as part of a group task, modifiers like `drop` or `restartable` * will no longer affect the individual `Task`. Instead those * modifiers can be applied to the entire group. * * ```js * import { task, taskGroup } from 'ember-concurrency'; * * export default class MyController extends Controller { * @taskGroup({ drop: true }) chores; * * @task({ group: 'chores' }) mowLawn = taskFn; * @task({ group: 'chores' }) doDishes = taskFn; * @task({ group: 'chores' }) changeDiapers = taskFn; * } * ``` */ export interface TaskGroup<T> extends TaskState<TaskInstance<T>> { /** * Cancels all running or queued `TaskInstance`s for this task group. * If you're trying to cancel a specific TaskInstance (rather * than all of the instances running under this task group) call * `.cancel()` on the specific TaskInstance. * * @param options.reason A descriptive reason the task group was * cancelled. Defaults to `".cancelAll() was explicitly called * on the Task"`. * @param options.resetState If true, will clear the task group state * (`last*` and `performCount` properties will be set to initial * values). Defaults to false. 
*/ cancelAll(options?: { reason?: string; resetState?: boolean }): Promise<void>; } /** * A `TaskInstance` represent a single execution of a * {@linkcode Task}. Every call to {@linkcode Task#perform} returns * a `TaskInstance`. * * `TaskInstance`s are cancelable, either explicitly * via {@linkcode TaskInstance#cancel} or {@linkcode Task#cancelAll}, * or automatically due to the host object being destroyed, or * because concurrency policy enforced by a * {@linkcode TaskProperty Task Modifier} canceled the task instance. */ export interface TaskInstance<T> extends Promise<T> { /** * If this TaskInstance runs to completion by returning a property * other than a rejecting promise, this property will be set * with that value. */ readonly value: T | null; /** * If this TaskInstance is canceled or throws an error (or yields * a promise that rejects), this property will be set with that error. * Otherwise, it is null. */ readonly error: unknown; /** * True if the task instance is fulfilled. */ readonly isSuccessful: boolean; /** * True if the task instance resolves to a rejection. */ readonly isError: boolean; /** * True if the task instance was canceled before it could run to completion. */ readonly isCanceled: boolean; /** * True if the task instance has started, else false. */ readonly hasStarted: boolean; /** * True if the task has run to completion. */ readonly isFinished: boolean; /** * True if the task is still running. */ readonly isRunning: boolean; /** * Describes the state that the task instance is in. Can be used for debugging, * or potentially driving some UI state. Possible values are: * * - `"dropped"`: task instance was canceled before it started * - `"canceled"`: task instance was canceled before it could finish * - `"finished"`: task instance ran to completion (even if an exception was thrown) * - `"running"`: task instance is currently running (returns true even if * is paused on a yielded promise) * - `"waiting"`: task instance hasn't begun running yet (usually * because the task is using the {@linkcode TaskProperty#enqueue enqueue} * task modifier) * * The animated timeline examples on the [Task Concurrency](/docs/task-concurrency) * docs page make use of this property. */ readonly state: 'dropped' | 'canceled' | 'finished' | 'running' | 'waiting'; /** * True if the TaskInstance was canceled before it could * ever start running. For example, calling * {@linkcode Task#perform .perform()} twice on a * task with the {@linkcode TaskProperty#drop .drop} modifier applied * will result in the second task instance being dropped. */ readonly isDropped: boolean; /** * Cancels the task instance. Has no effect if the task instance has * already been canceled or has already finished running. * * @param cancelReason Defaults to `".cancel() was explicitly called"`. */ cancel(cancelReason?: string): Promise<void>; /** * Returns a promise that resolves with the value returned * from the task's (generator) function, or rejects with * either the exception thrown from the task function, or * an error with a `.name` property with value `"TaskCancelation"`. 
*/ then<TResult1 = T, TResult2 = never>( onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null ): Promise<TResult1 | TResult2>; catch<TResult = never>( onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null ): Promise<T | TResult>; finally(onfinally?: (() => void) | null): Promise<T>; } // eslint-disable-next-line @typescript-eslint/ban-types type EncapsulatedTaskInstance<T, State extends object> = TaskInstance<T> & EncapsulatedTaskState<State>; interface OnStateCallback<T> { (state: TaskState<TaskInstance<any>>, taskable: T): void; } interface AbstractTaskProperty<T extends Task<any, any[]>> extends ComputedProperty<T> { volatile: never; readOnly: never; property: never; meta: never; /** * Calling `task(...).on(eventName)` configures the task to be * automatically performed when the specified events fire. In * this way, it behaves like * [Ember.on](http://emberjs.com/api/classes/Ember.html#method_on). * * You can use `task(...).on('init')` to perform the task * when the host object is initialized. * * ```js * export default Component.extend({ * pollForUpdates: task(function * () { * // ... this runs when the Component is first created * // because we specified .on('init') * }).on('init'), * * handleFoo: task(function * (a, b, c) { * // this gets performed automatically if the 'foo' * // event fires on this Component, * // e.g., if someone called component.trigger('foo') * }).on('foo'), * }); * ``` * * [See the Writing Tasks Docs for more info](/docs/writing-tasks) */ on(...eventNames: string[]): this; /** * This behaves like the {@linkcode TaskProperty#on task(...).on() modifier}, * but instead will cause the task to be canceled if any of the * specified events fire on the parent object. * * [See the Live Example](/docs/examples/route-tasks/1) */ cancelOn(...eventNames: string[]): this; /** * This behaves like the {@linkcode TaskProperty#on task(...).on() modifier}, * but instead will cause the task to be performed if any of the * specified properties on the parent object change. */ observes(...keys: string[]): this; /** * Configures the task to cancel old currently task instances * to make room for a new one to perform. Sets default * maxConcurrency to 1. * * [See the Live Example](/docs/examples/route-tasks/1) */ restartable(): this; /** * Configures the task to run task instances one-at-a-time in * the order they were `.perform()`ed. Sets default * maxConcurrency to 1. */ enqueue(): this; /** * Configures the task to immediately cancel (i.e. drop) any * task instances performed when the task is already running * at maxConcurrency. Sets default maxConcurrency to 1. */ drop(): this; /** * Configures the task to drop all but the most recently * performed {@linkcode TaskInstance}. */ keepLatest(): this; /** * Sets the maximum number of task instances that are allowed * to run at the same time. By default, with no task modifiers * applied, this number is Infinity (there is no limit * to the number of tasks that can run at the same time). * {@linkcode TaskProperty#restartable restartable}, * {@linkcode TaskProperty#enqueue enqueue}, and * {@linkcode TaskProperty#drop drop} set the default * maxConcurrency to 1, but you can override this value * to set the maximum number of concurrently running tasks * to a number greater than 1. 
* * [See the AJAX Throttling example](/docs/examples/ajax-throttling) * * The example below uses a task with `maxConcurrency(3)` to limit * the number of concurrent AJAX requests (for anyone using this task) * to 3. * * ```js * doSomeAjax: task(function * (url) { * return fetch(url); * }).maxConcurrency(3), * * elsewhere() { * this.get('doSomeAjax').perform("http://www.example.com/json"); * }, * ``` * * @param n The maximum number of concurrently running tasks. */ maxConcurrency(n: number): this; /** * Adds this task to a TaskGroup so that concurrency constraints * can be shared between multiple tasks. * * [See the Task Group docs for more information](/docs/task-groups) * * @param groupPath A path to the TaskGroup property. */ group(groupPath: string): this; /** * Activates lifecycle events, allowing Evented host objects to react to task state * changes. * * ```js * * export default Component.extend({ * uploadTask: task(function* (file) { * // ... file upload stuff * }).evented(), * * uploadedStarted: on('uploadTask:started', function(taskInstance) { * this.analytics.track("User Photo: upload started"); * }), * }); * ``` */ evented(): this; /** * Logs lifecycle events to aid in debugging unexpected Task behavior. * Presently only logs cancelation events and the reason for the cancelation, * e.g. "TaskInstance 'doStuff' was canceled because the object it lives on was destroyed or unrendered" */ debug(): this; /** * Configures the task to call the passed in callback for derived state updates, * overriding the default derived state tracking. You may call with `null` to * completely opt-out of derived state tracking. * * @param {function?} callback Callback to be called. Receives an object argument with the new state. * @instance */ onState(callback: OnStateCallback<T> | null): this; } /** * A {@link TaskProperty} is the Computed Property-like object returned * from the {@linkcode task} function. You can call Task Modifier methods * on this object to configure the behavior of the {@link Task}. * * See [Managing Task Concurrency](/docs/task-concurrency) for an * overview of all the different task modifiers you can use and how * they impact automatic cancelation / enqueueing of task instances. */<|fim▁hole|>// eslint-disable-next-line @typescript-eslint/no-empty-interface export interface EncapsulatedTaskProperty< T, Args extends any[], // eslint-disable-next-line @typescript-eslint/ban-types State extends object > extends AbstractTaskProperty<EncapsulatedTask<T, Args, State>> {} export interface TaskGroupProperty<T> extends ComputedProperty<TaskGroup<T>> { volatile: never; readOnly: never; property: never; meta: never; /** * Configures the task group to cancel old currently task * instances to make room for a new one to perform. Sets * default maxConcurrency to 1. * * [See the Live Example](/docs/examples/route-tasks/1) * * @method restartable * @memberof TaskGroupProperty * @instance */ restartable(): this; /** * Configures the task group to run task instances * one-at-a-time in the order they were `.perform()`ed. * Sets default maxConcurrency to 1. * * @method enqueue * @memberof TaskGroupProperty * @instance */ enqueue(): this; /** * Configures the task group to immediately cancel (i.e. * drop) any task instances performed when the task group * is already running at maxConcurrency. Sets default * maxConcurrency to 1. 
   *
   * @method drop
   * @memberof TaskGroupProperty
   * @instance
   */
  drop(): this;

  /**
   * Configures the task group to drop all but the most
   * recently performed {@linkcode TaskInstance}.
   *
   * @method keepLatest
   * @memberof TaskGroupProperty
   * @instance
   */
  keepLatest(): this;

  /**
   * Sets the maximum number of task instances that are
   * allowed to run in this task group at the same time.
   * By default, with no task modifiers applied, this number
   * is Infinity (there is no limit to the number of tasks
   * that can run at the same time).
   * {@linkcode TaskGroupProperty#restartable .restartable},
   * {@linkcode TaskGroupProperty#enqueue .enqueue}, and
   * {@linkcode TaskGroupProperty#drop .drop} set the
   * default maxConcurrency to 1, but you can override this
   * value to set the maximum number of concurrently running
   * tasks to a number greater than 1.
   *
   * [See the AJAX Throttling example](/docs/examples/ajax-throttling)
   *
   * The example below uses a task group with `maxConcurrency(3)`
   * to limit the number of concurrent AJAX requests (for anyone
   * using tasks in this group) to 3.
   *
   * ```js
   * ajax: taskGroup().maxConcurrency(3),
   *
   * doSomeAjax: task(function * (url) {
   *   return Ember.$.getJSON(url).promise();
   * }).group('ajax'),
   *
   * doSomeMoreAjax: task(function * (url) {
   *   return Ember.$.getJSON(url).promise();
   * }).group('ajax'),
   *
   * elsewhere() {
   *   this.get('doSomeAjax').perform("http://www.example.com/json");
   * },
   * ```
   *
   * @method maxConcurrency
   * @memberof TaskGroupProperty
   * @param {Number} n The maximum number of concurrently running tasks
   * @instance
   */
  maxConcurrency(n: number): this;
}

export type TaskCancelation = Error & { name: 'TaskCancelation' };

export type TaskDefinition<T, Args extends any[]> =
  | TaskFunction<T, Args>
  | EncapsulatedTaskDescriptor<T, Args>;

export interface TaskModifier<T, Args extends any[]> {
  (factory: AbstractTaskFactory<T, Args>, taskModifierOption: any): void;
}

interface AbstractTaskFactory<T, Args extends any[]> {
  readonly name: string;
  readonly taskDefinition: TaskDefinition<T, Args>;

  getOptions(): Record<string, any>;
  setDebug(isDebug: boolean): this;
  setEvented(isEvented: boolean): this;
  setGroup(groupName: string): this;
  setMaxConcurrency(maxConcurrency: number): this;
  setName(name: string): this;
  setOnState(onStateCallback: OnStateCallback<T> | null): this;
  setTaskDefinition(taskDefinition: TaskDefinition<T, Args>): this;
}

// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface TaskFactory<T, Args extends any[]>
  extends AbstractTaskFactory<T, Args> {}

/**
 * Registers a new modifier with the modifier registry
 */
export function registerModifier(
  name: string,
  definition: TaskModifier<any, any[]>
): void;

/**
 * Returns a specified modifier, if it exists in the registry
 */
export function getModifier(
  name: string
): TaskModifier<unknown, unknown[]> | null;

/**
 * Returns whether a specified modifier exists in the registry
 */
export function hasModifier(name: string): boolean;

export interface YieldableState {
  /**
   * Return yielded TaskInstance. Useful for introspection on instance state.
   * @method getTaskInstance
   * @memberof YieldableState
   */
  getTaskInstance(): TaskInstance<any>;

  /**
   * Cancel the yielded TaskInstance.
   * @method cancel
   * @memberof YieldableState
   */
  cancel(): void;

  /**
   * Cause the TaskInstance to return from its yield with an optional value,
   * and continue executing.
   * @method next
   * @param value
   */
  next(value: any): void;

  /**
   * Short-circuit TaskInstance execution and have it return with an optional
   * value.
   * @param value
   */
  return(value: any): void;

  /**
   * Raise a given error within the given task instance and halt execution
   * @param error
   */
  throw(error: any): void;
}

export abstract class Yieldable<T> implements PromiseLike<T> {
  /**
   * Defines what happens when the task encounters `yield myYieldable` and returns
   * a disposer function that handles any cleanup.
   *
   * The state parameter is provided by the runtime, and provides operations for
   * interacting with the yielding task instance and advancing, returning,
   * throwing, or canceling its execution.
   *
   * @param {YieldableState} state
   */
  abstract onYield(state: YieldableState): () => void;

  then<TResult1 = T, TResult2 = never>(
    onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null,
    onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null
  ): Promise<TResult1 | TResult2>;

  catch<TResult = never>(
    onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null
  ): Promise<T | TResult>;

  finally(onfinally?: (() => void) | null): Promise<T>;
}

type Evented =
  | {
      on(event: string, callback: (...args: any[]) => void): void;
      off(event: string, callback: (...args: any[]) => void): void;
    }
  | {
      one(event: string, callback: (...args: any[]) => void): void;
    }
  | {
      addEventListener(event: string, callback: (...args: any[]) => void): void;
      removeEventListener(
        event: string,
        callback: (...args: any[]) => void
      ): void;
    };

type Resolved<T> = T extends PromiseLike<infer R> ? R : T;

type Settlement<T> =
  | { state: 'fulfilled'; value: T }
  | { state: 'rejected'; reason: any };

type Settled<T> = Settlement<Resolved<T>>;

// Decorator option types from ember-concurrency-decorators

// eslint-disable-next-line @typescript-eslint/ban-types
type OptionsFor<T extends object> = {
  [K in OptionKeysFor<T>]?: OptionTypeFor<T, T[K]>;
};

// eslint-disable-next-line @typescript-eslint/ban-types
type OptionKeysFor<T extends object> = {
  [K in keyof T]: OptionKeyFor<T, K, T[K]>;
}[keyof T];

type OptionKeyFor<T, K, F> = F extends (...args: any[]) => T ? K : never;

type OptionTypeFor<T, F> = F extends (...args: infer Args) => T
  ? Args[0] extends undefined
    ? true
    : Args[0]
  : never;

type TaskOptions = OptionsFor<TaskProperty<unknown, unknown[]>>;
type TaskGroupOptions = OptionsFor<TaskGroupProperty<unknown>>;

type MethodOrPropertyDecoratorWithParams<Params extends unknown[]> =
  MethodDecorator &
    PropertyDecorator &
    ((...params: Params) => MethodDecorator & PropertyDecorator);

/**
 * A Task is a cancelable, restartable, asynchronous operation that
 * is driven by a generator function. Tasks are automatically canceled
 * when the object they live on is destroyed (e.g. a Component
 * is unrendered).
 *
 * Turns the decorated generator function into a task.
 *
 * Optionally takes a hash of options that will be applied as modifiers to the
 * task. For instance `maxConcurrency`, `on`, `group` or `keepLatest`.
 *
 * By default, tasks have no concurrency constraints
 * (multiple instances of a task can be running at the same time)
 * but much of the power of tasks lies in proper usage of Task Modifiers
 * that you can apply to a task.
 *
 * You can also define an
 * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a>
 * by decorating an object that defines a `perform` generator
 * method.
 *
 * ```js
 * import Component from '@glimmer/component';
 * import { task } from 'ember-concurrency';
 *
 * class MyComponent extends Component {
 *   @task
 *   *plainTask() {}
 *
 *   @task({ maxConcurrency: 5, keepLatest: true, cancelOn: 'click' })
 *   *taskWithModifiers() {}
 * }
 * ```
 *
 * @function
 * @param {object?} [options={}]
 * @return {Task}
 */
export function task<T extends TaskOptions>(
  baseOptions?: T
): MethodOrPropertyDecoratorWithParams<[T]>;
export function task<T>(
  target: Object,
  propertyKey: string,
  descriptor: TypedPropertyDescriptor<T>
): TypedPropertyDescriptor<T>;
export function task(target: Object, propertyKey: string): void;

/**
 * A Task is a cancelable, restartable, asynchronous operation that
 * is driven by a generator function. Tasks are automatically canceled
 * when the object they live on is destroyed (e.g. a Component
 * is unrendered).
 *
 * To define a task, use the `task(...)` function, and pass in
 * a generator function, which will be invoked when the task
 * is performed. The reason generator functions are used is
 * that they (like the proposed ES7 async-await syntax) can
 * be used to elegantly express asynchronous, cancelable
 * operations.
 *
 * You can also define an
 * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a>
 * by passing in an object that defines a `perform` generator
 * method.
 *
 * The following Component defines a task called `myTask` that,
 * when performed, prints a message to the console, sleeps for 1 second,
 * prints a final message to the console, and then completes.
 *
 * ```js
 * import { task, timeout } from 'ember-concurrency';
 * export default Component.extend({
 *   myTask: task(function * () {
 *     console.log("Pausing for a second...");
 *     yield timeout(1000);
 *     console.log("Done!");
 *   })
 * });
 * ```
 *
 * ```hbs
 * <button {{action myTask.perform}}>Perform Task</button>
 * ```
 *
 * By default, tasks have no concurrency constraints
 * (multiple instances of a task can be running at the same time)
 * but much of the power of tasks lies in proper usage of Task Modifiers
 * that you can apply to a task.
 *
 * @param taskFn A generator function backing the task or an encapsulated task descriptor object with a `perform` generator method.
 */
export function task<T extends TaskFunction<any, any[]>>(
  taskFn: T
): TaskProperty<TaskFunctionReturnType<T>, TaskFunctionArgs<T>>;
export function task<T extends EncapsulatedTaskDescriptor<any, any[]>>(
  taskFn: T
): EncapsulatedTaskProperty<
  EncapsulatedTaskDescriptorReturnType<T>,
  EncapsulatedTaskDescriptorArgs<T>,
  EncapsulatedTaskState<T>
>;

/**
 * Turns the decorated generator function into a task and applies the
 * `drop` modifier.
 *
 * Optionally takes a hash of options that will be applied as modifiers to the
 * task. For instance `maxConcurrency`, `on`, or `group`.
 *
 * You can also define an
 * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a>
 * by decorating an object that defines a `perform` generator
 * method.
* * ```js * import Component from '@ember/component'; * import { task, dropTask } from 'ember-concurrency'; * * class MyComponent extends Component { * @task * *plainTask() {} * * @dropTask({ cancelOn: 'click' }) * *myDropTask() {} * } * ``` * * @function * @param {object?} [options={}] * @return {Task} */ export function dropTask<T extends TaskOptions>( baseOptions?: T ): MethodOrPropertyDecoratorWithParams<[T]>; export function dropTask<T>( target: Object, propertyKey: string, descriptor: TypedPropertyDescriptor<T> ): TypedPropertyDescriptor<T>; export function dropTask(target: Object, propertyKey: string): void; /** * Turns the decorated generator function into a task and applies the * `enqueue` modifier. * * Optionally takes a hash of options that will be applied as modifiers to the * task. For instance `maxConcurrency`, `on`, or `group`. * * You can also define an * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a> * by decorating an object that defines a `perform` generator * method. * * ```js * import Component from '@ember/component'; * import { task, enqueueTask } from 'ember-concurrency'; * * class MyComponent extends Component { * @task * *plainTask() {} * * @enqueueTask({ cancelOn: 'click' }) * *myEnqueueTask() {} * } * ``` * * @function * @param {object?} [options={}] * @return {Task} */ export function enqueueTask<T extends TaskOptions>( baseOptions?: T ): MethodOrPropertyDecoratorWithParams<[T]>; export function enqueueTask<T>( target: Object, propertyKey: string, descriptor: TypedPropertyDescriptor<T> ): TypedPropertyDescriptor<T>; export function enqueueTask(target: Object, propertyKey: string): void; /** * Turns the decorated generator function into a task and applies the * `keepLatest` modifier. * * Optionally takes a hash of options that will be applied as modifiers to the * task. For instance `maxConcurrency`, `on`, or `group`. * * You can also define an * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a> * by decorating an object that defines a `perform` generator * method. * * ```js * import Component from '@ember/component'; * import { task, keepLatestTask } from 'ember-concurrency'; * * class MyComponent extends Component { * @task * *plainTask() {} * * @keepLatestTask({ cancelOn: 'click' }) * *myKeepLatestTask() {} * } * ``` * * @function * @param {object?} [options={}] * @return {Task} */ export function keepLatestTask<T extends TaskOptions>( baseOptions?: T ): MethodOrPropertyDecoratorWithParams<[T]>; export function keepLatestTask<T>( target: Object, propertyKey: string, descriptor: TypedPropertyDescriptor<T> ): TypedPropertyDescriptor<T>; export function keepLatestTask(target: Object, propertyKey: string): void; /** * Turns the decorated generator function into a task and applies the * `restartable` modifier. * * Optionally takes a hash of options that will be applied as modifiers to the * task. For instance `maxConcurrency`, `on`, or `group`. * * You can also define an * <a href="/docs/advanced/encapsulated-task">Encapsulated Task</a> * by decorating an object that defines a `perform` generator * method. 
* * ```js * import Component from '@ember/component'; * import { task, restartableTask } from 'ember-concurrency'; * * class MyComponent extends Component { * @task * *plainTask() {} * * @restartableTask({ cancelOn: 'click' }) * *myRestartableTask() {} * } * ``` * * @function * @param {object?} [options={}] * @return {Task} */ export function restartableTask<T extends TaskOptions>( baseOptions?: T ): MethodOrPropertyDecoratorWithParams<[T]>; export function restartableTask<T>( target: Object, propertyKey: string, descriptor: TypedPropertyDescriptor<T> ): TypedPropertyDescriptor<T>; export function restartableTask(target: Object, propertyKey: string): void; /** * "Task Groups" provide a means for applying * task modifiers to groups of tasks. Once a {@linkcode Task} is declared * as part of a group task, modifiers like `drop` or `restartable` * will no longer affect the individual `Task`. Instead those * modifiers can be applied to the entire group. * * Turns the decorated property into a task group. * * Optionally takes a hash of options that will be applied as modifiers to the * task group. For instance `maxConcurrency` or `keepLatest`. * * ```js * import Component from '@glimmer/component'; * import { task, taskGroup } from 'ember-concurrency'; * * class MyComponent extends Component { * @taskGroup({ maxConcurrency: 5 }) chores; * * @task({ group: 'chores' }) * *mowLawn() {} * * @task({ group: 'chores' }) * *doDishes() {} * } * ``` * * @function * @param {object?} [options={}] * @return {TaskGroup} */ export function taskGroup<T extends TaskGroupOptions>( baseOptions: T ): PropertyDecorator; export function taskGroup(target: Object, propertyKey: string): void; /** * "Task Groups" provide a means for applying * task modifiers to groups of tasks. Once a {@linkcode Task} is declared * as part of a group task, modifiers like `drop` or `restartable` * will no longer affect the individual `Task`. Instead those * modifiers can be applied to the entire group. * * ```js * import { task, taskGroup } from 'ember-concurrency'; * * export default Controller.extend({ * chores: taskGroup().drop(), * * mowLawn: task(taskFn).group('chores'), * doDishes: task(taskFn).group('chores'), * changeDiapers: task(taskFn).group('chores') * }); * ``` * * @returns {TaskGroupProperty} */ export function taskGroup<T>(): TaskGroupProperty<T>; /** * Turns the decorated property into a task group and applies the * `drop` modifier. * * Optionally takes a hash of further options that will be applied as modifiers * to the task group. * * @function * @param {object?} [options={}] * @return {TaskGroup} */ export function dropTaskGroup<T extends TaskGroupOptions>( baseOptions: T ): PropertyDecorator; export function dropTaskGroup(target: Object, propertyKey: string): void; /** * Turns the decorated property into a task group and applies the * `enqueue` modifier. * * Optionally takes a hash of further options that will be applied as modifiers * to the task group. * * @function * @param {object?} [options={}] * @return {TaskGroup} */ export function enqueueTaskGroup<T extends TaskGroupOptions>( baseOptions: T ): PropertyDecorator; export function enqueueTaskGroup(target: Object, propertyKey: string): void; /** * Turns the decorated property into a task group and applies the * `keepLatest` modifier. * * Optionally takes a hash of further options that will be applied as modifiers * to the task group. 
 *
 * @function
 * @param {object?} [options={}]
 * @return {TaskGroup}
 */
export function keepLatestTaskGroup<T extends TaskGroupOptions>(
  baseOptions: T
): PropertyDecorator;
export function keepLatestTaskGroup(target: Object, propertyKey: string): void;

/**
 * Turns the decorated property into a task group and applies the
 * `restartable` modifier.
 *
 * Optionally takes a hash of further options that will be applied as modifiers
 * to the task group.
 *
 * @function
 * @param {object?} [options={}]
 * @return {TaskGroup}
 */
export function restartableTaskGroup<T extends TaskGroupOptions>(
  baseOptions: T
): PropertyDecorator;
export function restartableTaskGroup(target: Object, propertyKey: string): void;

/**
 * A cancelation-aware variant of [Promise.all](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all).
 * The normal version of a `Promise.all` just returns a regular, uncancelable
 * Promise. The `ember-concurrency` variant of `all()` has the following
 * additional behavior:
 *
 * - if the task that `yield`ed `all()` is canceled, any of the
 *   {@linkcode TaskInstance}s passed in to `all` will be canceled
 * - if any of the {@linkcode TaskInstance}s (or regular promises) passed in reject (or
 *   are canceled), all of the other unfinished `TaskInstance`s will
 *   be automatically canceled.
 *
 * [Check out the "Awaiting Multiple Child Tasks example"](/docs/examples/joining-tasks)
 */
export function all<T extends readonly unknown[] | readonly [unknown]>(
  values: T
): Promise<{ -readonly [K in keyof T]: Resolved<T[K]> }>;
export function all<T>(values: Iterable<T>): Promise<Array<Resolved<T>>>;

/**
 * A cancelation-aware variant of [RSVP.allSettled](https://api.emberjs.com/ember/release/functions/rsvp/allSettled).
 * The normal version of a `RSVP.allSettled` just returns a regular, uncancelable
 * Promise. The `ember-concurrency` variant of `allSettled()` has the following
 * additional behavior:
 *
 * - if the task that `yield`ed `allSettled()` is canceled, any of the
 *   {@linkcode TaskInstance}s passed in to `allSettled` will be canceled
 */
export function allSettled<T extends readonly unknown[] | readonly [unknown]>(
  values: T
): Promise<{ -readonly [K in keyof T]: Settled<T[K]> }>;
export function allSettled<T>(values: Iterable<T>): Promise<Array<Settled<T>>>;

/**
 * Yielding `animationFrame()` will pause a task until after the next animation
 * frame using the native `requestAnimationFrame()` browser API.
 *
 * The task below, when performed, will print the time since the last loop run
 * for every animation frame.
 *
 * ```js
 * export default class MyComponent extends Component {
 *   @task *myTask() {
 *     let lastNow = performance.now();
 *     while (true) {
 *       yield animationFrame();
 *
 *       let now = performance.now();
 *       let dt = now - lastNow;
 *       lastNow = now;
 *
 *       console.log(dt);
 *     }
 *   }
 * }
 * ```
 */
export function animationFrame(): Yieldable<void>;

/**
 * Returns true if the object passed to it is a TaskCancelation error.
 * If you call `someTask.perform().catch(...)` or otherwise treat
 * a {@linkcode TaskInstance} like a promise, you may need to
 * handle the cancelation of a TaskInstance differently from
 * other kinds of errors it might throw, and you can use this
 * convenience function to distinguish cancelation from errors.
 *
 * ```js
 * click() {
 *   this.get('myTask').perform().catch(e => {
 *     if (!didCancel(e)) { throw e; }
 *   });
 * }
 * ```
 *
 * @param error The caught error, which might be a TaskCancelation.
 */
export function didCancel(error: unknown): error is TaskCancelation;

/**
 * A cancelation-aware variant of [RSVP.hash](https://api.emberjs.com/ember/release/functions/rsvp/hash).
 * The normal version of a `RSVP.hash` just returns a regular, uncancelable
 * Promise. The `ember-concurrency` variant of `hash()` has the following
 * additional behavior:
 *
 * - if the task that `yield`ed `hash()` is canceled, any of the
 *   {@linkcode TaskInstance}s passed in to `hash` will be canceled
 * - if any of the items rejects/cancels, all other cancelable items
 *   (e.g. {@linkcode TaskInstance}s) will be canceled
 */
export function hash<T extends Record<string, unknown>>(
  values: T
): Promise<{ [K in keyof T]: Resolved<T[K]> }>;
export function hash<T>(
  values: Record<string, T>
): Promise<Record<string, Resolved<T>>>;

/**
 * A cancelation-aware variant of [RSVP.hashSettled](https://api.emberjs.com/ember/release/functions/rsvp/hashSettled).
 * The normal version of a `RSVP.hashSettled` just returns a regular, uncancelable
 * Promise. The `ember-concurrency` variant of `hashSettled()` has the following
 * additional behavior:
 *
 * - if the task that `yield`ed `hashSettled()` is canceled, any of the
 *   {@linkcode TaskInstance}s passed in to `hashSettled` will be canceled
 */
export function hashSettled<T extends Record<string, unknown>>(
  values: T
): Promise<{ [K in keyof T]: Settled<T[K]> }>;
export function hashSettled<T>(
  values: Record<string, T>
): Promise<Record<string, Settled<T>>>;

/**
 * A cancelation-aware variant of [Promise.race](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race).
 * The normal version of a `Promise.race` just returns a regular, uncancelable
 * Promise. The `ember-concurrency` variant of `race()` has the following
 * additional behavior:
 *
 * - if the task that `yield`ed `race()` is canceled, any of the
 *   {@linkcode TaskInstance}s passed in to `race` will be canceled
 * - once any of the tasks/promises passed in complete (either success, failure,
 *   or cancelation), all of the other unfinished {@linkcode TaskInstance}s passed
 *   in will be canceled
 *
 * [Check out the "Awaiting Multiple Child Tasks example"](/docs/examples/joining-tasks)
 */
export function race<T>(values: readonly T[]): Promise<Resolved<T>>;
export function race<T>(values: Iterable<T>): Promise<Resolved<T>>;

/**
 * Yielding `timeout(ms)` will pause a task for the duration
 * of time passed in, in milliseconds.
 *
 * This timeout will be scheduled on the Ember runloop, which
 * means that test helpers will wait for it to complete before
 * continuing with the test. See `rawTimeout()` if you need
 * different behavior.
 *
 * The task below, when performed, will print a message to the
 * console every second.
 *
 * ```js
 * export default class MyComponent extends Component {
 *   @task *myTask() {
 *     while (true) {
 *       console.log("Hello!");
 *       yield timeout(1000);
 *     }
 *   }
 * }
 * ```
 *
 * @param ms The amount of time to sleep before resuming
 *   the task, in milliseconds.
 */
export function timeout(ms: number): Yieldable<void>;

/**
 * Yielding `rawTimeout(ms)` will pause a task for the duration
 * of time passed in, in milliseconds.
 *
 * The timeout will use the native `setTimeout()` browser API,
 * instead of the Ember runloop, which means that test helpers
 * will *not* wait for it to complete.
 *
 * The task below, when performed, will print a message to the
 * console every second.
* * ```js * export default class MyComponent extends Component { * @task *myTask() { * while (true) { * console.log("Hello!"); * yield rawTimeout(1000); * } * } * } * ``` * * @param ms The amount of time to sleep before resuming * the task, in milliseconds. */ export function rawTimeout(ms: number): Yieldable<void>; /** * Use `waitForQueue` to pause the task until a certain run loop queue is reached. * * ```js * import { task, waitForQueue } from 'ember-concurrency'; * export default Component.extend({ * myTask: task(function * () { * yield waitForQueue('afterRender'); * console.log("now we're in the afterRender queue"); * }) * }); * ``` * * @param queueName The name of the Ember run loop queue. */ export function waitForQueue(queueName: string): Yieldable<void>; /** * Use `waitForEvent` to pause the task until an event is fired. The event * can either be a jQuery event or an Ember.Evented event (or any event system * where the object supports `.on()` `.one()` and `.off()`). * * ```js * import { task, waitForEvent } from 'ember-concurrency'; * export default Component.extend({ * myTask: task(function * () { * console.log("Please click anywhere.."); * let clickEvent = yield waitForEvent($('body'), 'click'); * console.log("Got event", clickEvent); * * let emberEvent = yield waitForEvent(this, 'foo'); * console.log("Got foo event", emberEvent); * * // somewhere else: component.trigger('foo', { value: 123 }); * }) * }); * ``` * * @param object The Ember Object, jQuery element, or other object with .on() and .off() APIs * that the event fires from. * @param eventName The name of the event to wait for. */ export function waitForEvent( object: Evented, eventName: string ): Yieldable<void>; /** * Use `waitForProperty` to pause the task until a property on an object * changes to some expected value. This can be used for a variety of use * cases, including synchronizing with another task by waiting for it * to become idle, or change state in some other way. If you omit the * callback, `waitForProperty` will resume execution when the observed * property becomes truthy. If you provide a callback, it'll be called * immediately with the observed property's current value, and multiple * times thereafter whenever the property changes, until you return * a truthy value from the callback, or the current task is canceled. * You can also pass in a non-Function value in place of the callback, * in which case the task will continue executing when the property's * value becomes the value that you passed in. * * ```js * import { task, waitForProperty } from 'ember-concurrency'; * export default Component.extend({ * foo: 0, * * myTask: task(function * () { * console.log("Waiting for `foo` to become 5"); * * yield waitForProperty(this, 'foo', v => v === 5); * // alternatively: yield waitForProperty(this, 'foo', 5); * * // somewhere else: this.set('foo', 5) * * console.log("`foo` is 5!"); * * // wait for another task to be idle before running: * yield waitForProperty(this, 'otherTask.isIdle'); * console.log("otherTask is idle!"); * }) * }); * ``` * * @param object An object (most likely an Ember Object). * @param key The property name that is observed for changes. * @param callbackOrValue a Function that should return a truthy value * when the task should continue executing, or * a non-Function value that the watched property * needs to equal before the task will continue running. 
*/ export function waitForProperty<O extends object, K extends keyof O>( object: O, key: K, callbackOrValue: (value: O[K]) => boolean ): Yieldable<void>; export function waitForProperty( object: object, key: string, callbackOrValue: (value: unknown) => boolean ): Yieldable<void>; export function waitForProperty<O extends object, K extends keyof O>( object: O, key: K, callbackOrValue: O[K] ): Yieldable<void>; /** * * Yielding `forever` will pause a task indefinitely until * it is cancelled (i.e. via host object destruction, the restartable modifier, * or manual cancellation). * * This is often useful in cases involving animation: if you're * using Liquid Fire, or some other animation scheme, sometimes you'll * notice buttons visibly reverting to their inactive states during * a route transition. By yielding `forever` in a Component task that drives a * button's active state, you can keep a task indefinitely running * until the animation runs to completion. * * NOTE: Liquid Fire also includes a useful `waitUntilIdle()` method * on the `liquid-fire-transitions` service that you can use in a lot * of these cases, but it won't cover cases of asynchrony that are * unrelated to animation, in which case `forever` might be better suited * to your needs. * * ```js * import { task, forever } from 'ember-concurrency'; * * export default class MyComponent extends Component { * @service myService; * * @task *myTask() { * yield this.myService.doSomethingThatCausesATransition(); * yield forever; * } * } * ``` */ export function forever(): Yieldable<never>; /** * This decorator allows you to alias a property to the result of a task. * You can also provide a default value to use before the task has completed. * * ```js * import Component from '@glimmer/component'; * import { task, lastValue } from 'ember-concurrency'; * * export default class ExampleComponent extends Component { * @task * someTask = function*() { * // ... * }; * * @lastValue('someTask') * someTaskValue; * * @lastValue('someTask') * someTaskValueWithDefault = 'A default value'; * } * ``` * * @function * @param {string} taskName the name of the task to read a value from */ export function lastValue(taskName: string): PropertyDecorator;<|fim▁end|>
// eslint-disable-next-line @typescript-eslint/no-empty-interface export interface TaskProperty<T, Args extends any[]> extends AbstractTaskProperty<Task<T, Args>> {}
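
Taken together, the declarations above describe the consumer-facing surface: the `task` decorator and its options hash, modifiers such as `restartable`, the `timeout` yieldable, and the `didCancel` guard. A minimal sketch of how they compose follows; the component, endpoint, and property names are invented for illustration, and only the imported symbols come from the typings above (the loose `any`-typed yields are deliberate, since generator yields are not inferred here).

```ts
import Component from '@glimmer/component';
import { task, timeout, didCancel } from 'ember-concurrency';

export default class SearchBox extends Component {
  results: unknown[] = [];

  // restartable: performing again cancels the in-flight instance,
  // so at most one search runs at a time (debounce falls out for free).
  @task({ restartable: true })
  *search(term: string) {
    yield timeout(250); // scheduled on the Ember runloop
    const response = yield fetch(`/api/search?q=${encodeURIComponent(term)}`);
    this.results = yield response.json();
  }

  onInput(term: string) {
    // Cancelation rejects the returned promise; filter it out with didCancel.
    this.search.perform(term).catch((e: unknown) => {
      if (!didCancel(e)) {
        throw e;
      }
    });
  }
}
```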
<|file_name|>SoapCallReturn.java<|end_file_name|><|fim▁begin|>// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.common.lib.soap; /** * Used to package a SOAP call's return. Contains the return value, the request * and response info, and the originating {@link SoapCall}. * * @author Adam Rogal */ public class SoapCallReturn { private Object returnValue; private RequestInfo requestInfo; private ResponseInfo responseInfo; private Throwable exception; /** * Constructor. */ public SoapCallReturn(){ requestInfo = new RequestInfo(); responseInfo = new ResponseInfo(); } /** * Gets the return value from the SOAP call that was made. * * @return the return value from the SOAP call that was made or {@code null} * if there was an exception */ public Object getReturnValue() { return returnValue; } /** * Gets the request info from the SOAP call that was made. */ public RequestInfo getRequestInfo() { return requestInfo; } /** * Gets the response info from the SOAP call that was made. */ public ResponseInfo getResponseInfo() { return responseInfo; } /** * Gets the exception from the SOAP call that was made if one occurred. * * @return the exception from the SOAP call that was made or {@code null} * if there was no exception */ public Throwable getException() { return exception; } /** * Builder for {@link SoapCallReturn} objects. * * @author Adam Rogal */ public static class Builder { private SoapCallReturn soapCallReturn; /** * Constructor. */ public Builder() { this.soapCallReturn = new SoapCallReturn(); } /** * Adds a return value to the SoapCallReturn under construction. * * @param returnValue the return value to add to the SoapCallReturn * @return this builder */ public Builder withReturnValue(Object returnValue) { soapCallReturn.returnValue = returnValue; return this; } /** * Adds a response info to the SoapCallReturn under construction. * * @param responseInfo the response info to add to the SoapCallReturn * @return this builder */ public Builder withResponseInfo(ResponseInfo responseInfo) { soapCallReturn.responseInfo = responseInfo; return this; } /** * Adds a request info to the SoapCallReturn under construction. * * @param requestInfo the request info to add to the SoapCallReturn * @return this builder */ public Builder withRequestInfo(RequestInfo requestInfo) { soapCallReturn.requestInfo = requestInfo; return this; } /** * Adds an exception to the SoapCallReturn under construction. * * @param exception the exception to add to the SoapCallReturn * @return this builder */ public Builder withException(Throwable exception) { soapCallReturn.exception = exception; return this; } /** * Returns the SoapCallReturn this Builder has been constructing.<|fim▁hole|> */ public SoapCallReturn build() { return soapCallReturn; } } }<|fim▁end|>
* * @return the built SoapCallReturn object
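
For context, a hypothetical call site for the builder above might look like the sketch below; the `SoapCallExample` wrapper and its failure handling are invented, and only `SoapCallReturn`, its `Builder`, and the info classes come from the code itself.

```java
// Hypothetical wrapper that packages a SOAP invocation's outcome.
public final class SoapCallExample {

  public static SoapCallReturn wrap(Object returnValue, Throwable error,
      RequestInfo requestInfo, ResponseInfo responseInfo) {
    SoapCallReturn.Builder builder = new SoapCallReturn.Builder()
        .withRequestInfo(requestInfo)
        .withResponseInfo(responseInfo);
    if (error != null) {
      builder.withException(error); // returnValue stays null on failure
    } else {
      builder.withReturnValue(returnValue);
    }
    return builder.build();
  }
}
```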
<|file_name|>freq.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import matplotlib.pyplot as plt

CHUNK = 1 << 8


def play(filename):
    # Fire-and-forget playback via the Windows winsound API.
    PlaySound(filename, SND_FILENAME | SND_ASYNC)


fn = r"D:\b.wav"
f = wave.open(fn)
print(f.getparams())
ch = f.getnchannels()
sw = f.getsampwidth()
n = f.getnframes()

# Read the raw PCM data in CHUNK-sized slices until all frames are in.
data = bytearray()
while len(data) < n * ch * sw:
    data.extend(f.readframes(CHUNK))

# Interpret the buffer as 16-bit signed samples ('h' assumes sampwidth == 2),
# one value per channel per frame.
data = np.array(struct.unpack('{n}h'.format(n=n * ch), data))

# The dominant frequency is the FFT bin with the largest magnitude;
# fftfreq() yields cycles per sample, so scale by the frame rate to get Hz.
w = np.fft.fft(data)
freqs = np.fft.fftfreq(len(w))
module = np.abs(w)
idmax = module.argmax()
print(abs(freqs[idmax]) * f.getframerate())

plt.specgram(data)
plt.show()<|fim▁end|>
import numpy as np import struct import wave from winsound import PlaySound, SND_FILENAME, SND_ASYNC
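
The peak-picking recipe in `freq.py` (largest FFT magnitude, scaled by the frame rate) can be sanity-checked without a WAV file by feeding it a synthetic tone; the 440 Hz / 44.1 kHz figures below are arbitrary test values, not anything from the original script.

```python
import numpy as np

def dominant_freq(samples, framerate):
    """Same recipe as freq.py: FFT, take magnitudes, map the peak bin to Hz."""
    w = np.fft.fft(samples)
    freqs = np.fft.fftfreq(len(w))        # cycles per sample
    idmax = np.abs(w).argmax()
    return abs(freqs[idmax]) * framerate  # convert to Hz

rate = 44100
t = np.arange(rate) / rate                # one second of samples
tone = np.sin(2 * np.pi * 440.0 * t)      # pure 440 Hz sine
print(dominant_freq(tone, rate))          # ~440.0
```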
<|file_name|>jquery-showcase.js<|end_file_name|><|fim▁begin|>// ----------------------------------------------------------------------- // Eros Fratini - [email protected] // jquery.showcase 2.0.1 // // 02/02/2010 - Wow, a fix about 10 minute after release.... // 02/02/2010 - Debugging and demos // 27/12/2009 - Optimized animation, added functions to pause and resume autocycling // - Tested external ease functions // 24/12/2009 - Added a lot of settings, redefine css // 21/12/2009 - Begin to write v2.0 // 27/07/2009 - Added asynchronous loading of images // 26/06/2009 - some new implementations // 19/06/2009 - standardization // 08/06/2009 - Initial sketch // // requires jQuery 1.3.x //------------------------------------------------------------------------ (function($) { $.fn.showcase = function (options) { var $container = this; // Retrieve options var opt; opt = $.extend({}, $.fn.showcase.defaults, options); if (!/images|titles/.test(opt.linksOn)) { opt.linksOn = "images"; } if (options && options.css) { opt.css = $.extend({}, $.fn.showcase.defaults.css, options.css); } if (options && options.animation) { opt.animation = $.extend({}, $.fn.showcase.defaults.animation, options.animation); if (!/horizontal-slider|vertical-slider|fade/.test(opt.animation.type)) { opt.animation.type = "horizontal-slider"; } } if (options && options.navigator) { opt.navigator = $.extend({}, $.fn.showcase.defaults.navigator, options.navigator); if (!/top-left|top-right|bottom-left|bottom-right/.test(opt.navigator.position)) { opt.navigator.position = "top-right"; } if (!/horizontal|vertical/.test(opt.navigator.orientation)) { opt.navigator.orientation = "horizontal"; } if (options.navigator.css) { opt.navigator.css = $.extend({}, $.fn.showcase.defaults.navigator.css, options.navigator.css); } if (options.navigator.item) { opt.navigator.item = $.extend({}, $.fn.showcase.defaults.navigator.item, options.navigator.item); // Progressive extensions of hover and selected states, inherited by standard css properties opt.navigator.item.cssHover = $.extend({}, $.fn.showcase.defaults.navigator.item.css, $.fn.showcase.defaults.navigator.item.cssHover); opt.navigator.item.cssSelected = $.extend({}, $.fn.showcase.defaults.navigator.item.css, $.fn.showcase.defaults.navigator.item.cssSelected); if (options.navigator.item.css) { opt.navigator.item.css = $.extend({}, $.fn.showcase.defaults.navigator.item.css, options.navigator.item.css); opt.navigator.item.cssHover = $.extend({}, $.fn.showcase.defaults.navigator.item.cssHover, options.navigator.item.css); opt.navigator.item.cssSelected = $.extend({}, $.fn.showcase.defaults.navigator.item.cssSelected, options.navigator.item.css); } if (options.navigator.item.cssHover) { opt.navigator.item.cssHover = $.extend({}, $.fn.showcase.defaults.navigator.item.cssHover, options.navigator.item.cssHover); } if (options.navigator.item.cssSelected) { opt.navigator.item.cssSelected = $.extend({}, $.fn.showcase.defaults.navigator.item.cssSelected, options.navigator.item.cssSelected); } } } if (options && options.titleBar) { opt.titleBar = $.extend({}, $.fn.showcase.defaults.titleBar, options.titleBar); if (!/bottom|top/.test(opt.titleBar.position)) { opt.titleBar.position = "bottom"; } if (options.titleBar.css) { opt.titleBar.css = $.extend({}, $.fn.showcase.defaults.titleBar.css, options.titleBar.css); } } // Check loading mode. 
	// If there's something in opt.images[], I'll load them asynchronously;
	// it will be nice to have width and height set, in order to define the $container sizes
	if (opt.images.length != 0) {
		$container.css({ width: opt.css.width, height: opt.css.height, overflow: "hidden" });
		for (var i in opt.images) {
			var img = new Image();
			img.src = opt.images[i].url;
			img.alt = opt.images[i].description || "";
			var $link = $("<a />").attr({
				"href": opt.images[i].link || "#",
				"target": opt.images[i].target || "_self"
			});
			$link.append(img);
			$container.append($link);
		}
	}

	// Check the loading state of the first image
	if ($container.find("img:first")[0].complete) {
		$.fn.showcase.start($container, opt);
	} else {
		$container.find("img:first").load( function() {
			$.fn.showcase.start($container, opt);
		});
	}

	// Functions to control the playback of the showcase.
	// Note: these close over the $container of the most recent .showcase() call.
	$.fn.extend({
		pause: function() {
			$container.data("stopped", true);
		},
		go: function() {
			$container.data("stopped", false);
		}
	});
}

// This will start all of the showcase's stuff
$.fn.showcase.start = function($container, opt) {
	// Define local vars
	var index = 0;
	var nImages = $container.find("img").length;
	var $fi = $container.find("img:first");
	var imagesize = { width: $fi.removeAttr("width").width(), height: $fi.removeAttr("height").height() };
	opt.css.width = imagesize.width;
	opt.css.height = imagesize.height;

	// setup container
	$container.css(opt.css)
		.find("a").css({ position: "absolute", top: "0", left: "0" })
		.find("img").css("border", "0px");

	// setup navigator
	var $slider = $("<div id='slider' />").css({ position:"absolute" });
	var $divNavigator = $("<div id='navigator' />").css(opt.navigator.css);
	switch (opt.navigator.position) {
		case "top-left": $divNavigator.css({ top: "0px", left: "0px" }); break;
		case "top-right": $divNavigator.css({ top: "0px", right: "0px" }); break;
		case "bottom-left": $divNavigator.css({ bottom: "0px", left: "0px" }); break;
		case "bottom-right": $divNavigator.css({ bottom: "0px", right: "0px" }); break;
	}

	$container.find("a").wrapAll($slider).each( function(i) {
		switch (opt.animation.type) {
			case "horizontal-slider": $(this).css("left", i*imagesize.width); break;
			case "vertical-slider": $(this).css("top", i*imagesize.height); break;
			case "fade": $(this).css({ top: "0", left: "0", opacity:1, "z-index": 1000-i }); break;
		}

		// Create navigation bar item
		var $navElement = $("<a href='#'>" + (opt.navigator.showNumber ?
(i + 1) : "") + "</a>") .css({ display: "block", "text-decoration": "none", "-moz-outline-style": "none" }) .click( function() { if (opt.animation.autoCycle) { clearInterval(opt.animation.intervalID); } // stop the current automatic animation $.fn.showcase.showImage(i, $container, imagesize, opt); index = i; if (opt.animation.autoCycle) { opt.animation.intervalID = showcaseCycler(index, nImages, $container, imagesize, opt); } // restart the automatic animation return false; }) .hover( function() { if (!$(this).data("selected")) { if (opt.navigator.item.cssClassHover) { $(this).addClass(opt.navigator.item.cssClassHover); } else { $(this).css(opt.navigator.item.cssHover); } } }, function() { if (!$(this).data("selected")) { if (opt.navigator.item.cssClassHover) { $(this).removeClass(opt.navigator.item.cssClassHover); } else { $(this).css(opt.navigator.item.css); } } } ) .appendTo($divNavigator); if (opt.navigator.item.cssClass) { $navElement.attr("class", opt.navigator.item.cssClass); } else { $.extend({}, $navElement.css, opt.navigator.item); $navElement.css(opt.navigator.item.css); } switch (opt.navigator.orientation) { case "horizontal": $navElement.css("float", "left"); break; case "vertical": $navElement.css("float", "none"); break; } if (opt.navigator.showMiniature) { $("<img />").attr({ src: $(this).find("img").attr("src"), width: $navElement.css("width").replace("px", ""), height: $navElement.css("height").replace("px", ""), border: "0px" }).appendTo($navElement); } }); if (opt.navigator.autoHide) { $divNavigator.css("opacity", 0); } $container.append($divNavigator).hover( function() { if (opt.titleBar.autoHide && opt.titleBar.enabled) { $($titleBar).stop().animate({ opacity: opt.titleBar.css.opacity, left: 0, right: 0, height: opt.titleBar.css.height }, 250); } if (opt.navigator.autoHide) { $($divNavigator).stop().animate({ opacity: 1 }, 250); } $(this).data("isMouseHover", true); }, function() { if (opt.titleBar.autoHide && opt.titleBar.enabled) { $titleBar.stop().animate({ opacity: 0, height: "0px" }, 400); } if (opt.navigator.autoHide) { $divNavigator.stop().animate({ opacity: 0 }, 250); } $(this).data("isMouseHover", false); } ); // Create titleBar if (opt.titleBar.enabled) { if (opt.linksOn == "images") { var $titleBar = $("<div id='subBar' />").html( $("<span />").html($container.find("a:first img").attr("alt")) ); } else { var $a = $("<a />").attr("href", $container.find("a:first").attr("href")).html("<span>" + $container.find("a:first img").attr("alt") + "</span>"); var $titleBar = $("<div id='subBar' />").html($a) $container.find("#slider a").each( function() { $(this).attr("rel", $(this).attr("href")); }); $container.find("#slider a").removeAttr("href"); } $titleBar.css({ opacity: 0.50, width: "100%", overflow: "hidden", "z-index": 10002, position: "absolute" }); if(opt.titleBar.position == "top") { $titleBar.css("top", "0"); } else { $titleBar.css("bottom", "0"); } if (opt.titleBar.cssClass) { $titleBar.attr("class", opt.titleBar.cssClass); } else { $titleBar.css(opt.titleBar.css); $("a", $titleBar).css("color", opt.titleBar.css.color); } if (opt.titleBar.autoHide) { $titleBar.css({ "height": "0px", "opacity": 0 }); } $titleBar.appendTo($container); } // set first image as selected $.fn.showcase.setNavigationItem(0, $container, opt); // startup cycling if (opt.animation.autoCycle) { opt.animation.intervalID = showcaseCycler(index, nImages, $container, imagesize, opt); } } var showcaseCycler = function(index, nImages, $container, imagesize, opt) { return setInterval( 
function() { if (!$container.data("stopped")){ if (!$container.data("isMouseHover") || !opt.animation.stopOnHover) $.fn.showcase.showImage(++index % nImages, $container, imagesize, opt); } }, opt.animation.interval); }; $.fn.showcase.showImage = function(i, $container, imagesize, opt) { var $a = $container.find("a"); var $a_this = $container.find("a").eq(i); switch (opt.animation.type) { <|fim▁hole|> case "vertical-slider": $container.find("#slider").stop().animate({ top: - (i*imagesize.height) }, opt.animation.speed, opt.animation.easefunction); break; case "fade": $container.css({ "z-index": "1001" }); if ($a_this.css("z-index") != "1000") { $a_this.css({ "z-index": "1000", opacity: 0 }); $a.not($a_this).each( function() { if ($(this).css("z-index") != "auto") $(this).css("z-index", parseInt($(this).css("z-index"), 10) - 1); }); $a_this.stop().animate({ opacity: 1 }, opt.animation.speed, opt.animation.easefunction); } break; } if (opt.titleBar.enabled) { if (opt.linksOn == "titles") { $("#subBar a", $container).attr({ "href": $a_this.attr("rel"), "target": $a_this.attr("target") }); } } $("#subBar span", $container).html($a_this.find("img").attr("alt")); // Setting selected navigationItem $.fn.showcase.setNavigationItem(i, $container, opt); }; // Highlight the navigationItem related to image $.fn.showcase.setNavigationItem = function(i, $container, opt) { if (opt.navigator.item.cssClassSelected) { $container.find("#navigator a").removeClass(opt.navigator.item.cssClassSelected).data("selected", false); $container.find("#navigator a").eq(i).addClass(opt.navigator.item.cssClassSelected).data("selected", true); } else { if (opt.navigator.item.cssClass) { //$container.find("#navigator a").removeAttr("style").data("selected", false); $container.find("#navigator a").eq(i).css(opt.navigator.item.cssSelected).data("selected", true); } else { $container.find("#navigator a").css(opt.navigator.item.css).data("selected", false); $container.find("#navigator a").eq(i).css(opt.navigator.item.cssSelected).data("selected", true); } } }; $.fn.showcase.defaults = { images: [], linksOn: "images", css: { position: "relative", overflow: "hidden", border: "none", width: "", height: "" }, animation: { autoCycle: true, stopOnHover: true, interval: 4000, speed: 500, easefunction: "swing", type: "horizontal-slider" }, navigator: { css: { border: "none", padding: "5px", margin: "0px", position: "absolute", "z-index": 1000 }, position: "top-right", orientation: "horizontal", autoHide: false, showNumber: false, showMiniature: false, item: { cssClass: null, cssClassHover: null, cssClassSelected: null, css: { color: "#000000", "text-decoration": "none", "text-align": "center", "-moz-outline-style": "none", width: "12px", height: "12px", lineHeight: "12px", verticalAlign: "middle", backgroundColor: "#878787", margin: "0px 3px 3px 0px", border: "solid 1px #acacac", "-moz-border-radius": "4px", "-webkit-border-radius": "4px" }, cssHover: { backgroundColor: "#767676", border: "solid 1px #676767" }, cssSelected: { backgroundColor: "#00FF00", border: "solid 1px #acacac" } } }, titleBar: { enabled: true, autoHide: true, position: "bottom", cssClass: null, css: { opacity: 0.50, color: "#ffffff", backgroundColor: "#000000", height: "40px", padding: "4px", fontColor: "#444444", fontStyle: "italic", fontWeight: "bold", fontSize: "1em" } } }; })(jQuery);<|fim▁end|>
case "horizontal-slider": $container.find("#slider").stop().animate({ left: - (i*imagesize.width) }, opt.animation.speed, opt.animation.easefunction); break;
<|file_name|>prosiebensat1.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import unicode_literals import re from hashlib import sha1 from .common import InfoExtractor from ..compat import compat_str from ..utils import ( ExtractorError, determine_ext, float_or_none, int_or_none, unified_strdate, ) class ProSiebenSat1BaseIE(InfoExtractor): def _extract_video_info(self, url, clip_id): client_location = url video = self._download_json( 'http://vas.sim-technik.de/vas/live/v2/videos', clip_id, 'Downloading videos JSON', query={ 'access_token': self._TOKEN, 'client_location': client_location, 'client_name': self._CLIENT_NAME, 'ids': clip_id, })[0] if video.get('is_protected') is True: raise ExtractorError('This video is DRM protected.', expected=True) duration = float_or_none(video.get('duration')) source_ids = [compat_str(source['id']) for source in video['sources']] client_id = self._SALT[:2] + sha1(''.join([clip_id, self._SALT, self._TOKEN, client_location, self._SALT, self._CLIENT_NAME]).encode('utf-8')).hexdigest() sources = self._download_json( 'http://vas.sim-technik.de/vas/live/v2/videos/%s/sources' % clip_id, clip_id, 'Downloading sources JSON', query={ 'access_token': self._TOKEN, 'client_id': client_id, 'client_location': client_location, 'client_name': self._CLIENT_NAME, }) server_id = sources['server_id'] def fix_bitrate(bitrate): bitrate = int_or_none(bitrate) if not bitrate: return None return (bitrate // 1000) if bitrate % 1000 == 0 else bitrate formats = [] for source_id in source_ids: client_id = self._SALT[:2] + sha1(''.join([self._SALT, clip_id, self._TOKEN, server_id, client_location, source_id, self._SALT, self._CLIENT_NAME]).encode('utf-8')).hexdigest() urls = self._download_json( 'http://vas.sim-technik.de/vas/live/v2/videos/%s/sources/url' % clip_id, clip_id, 'Downloading urls JSON', fatal=False, query={ 'access_token': self._TOKEN, 'client_id': client_id, 'client_location': client_location, 'client_name': self._CLIENT_NAME, 'server_id': server_id, 'source_ids': source_id, }) if not urls: continue if urls.get('status_code') != 0: raise ExtractorError('This video is unavailable', expected=True) urls_sources = urls['sources'] if isinstance(urls_sources, dict): urls_sources = urls_sources.values() for source in urls_sources: source_url = source.get('url') if not source_url: continue protocol = source.get('protocol') mimetype = source.get('mimetype') if mimetype == 'application/f4m+xml' or 'f4mgenerator' in source_url or determine_ext(source_url) == 'f4m': formats.extend(self._extract_f4m_formats( source_url, clip_id, f4m_id='hds', fatal=False)) elif mimetype == 'application/x-mpegURL': formats.extend(self._extract_m3u8_formats( source_url, clip_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)) elif mimetype == 'application/dash+xml': formats.extend(self._extract_mpd_formats( source_url, clip_id, mpd_id='dash', fatal=False)) else: tbr = fix_bitrate(source['bitrate']) if protocol in ('rtmp', 'rtmpe'): mobj = re.search(r'^(?P<url>rtmpe?://[^/]+)/(?P<path>.+)$', source_url) if not mobj: continue path = mobj.group('path') mp4colon_index = path.rfind('mp4:') app = path[:mp4colon_index] play_path = path[mp4colon_index:] formats.append({ 'url': '%s/%s' % (mobj.group('url'), app), 'app': app, 'play_path': play_path, 'player_url': 'http://livepassdl.conviva.com/hf/ver/2.79.0.17083/LivePassModuleMain.swf', 'page_url': 'http://www.prosieben.de', 'tbr': tbr, 'ext': 'flv', 'format_id': 'rtmp%s' % ('-%d' % tbr if tbr else ''), }) else: formats.append({ 'url': 
source_url, 'tbr': tbr, 'format_id': 'http%s' % ('-%d' % tbr if tbr else ''), }) self._sort_formats(formats) return { 'duration': duration, 'formats': formats, } class ProSiebenSat1IE(ProSiebenSat1BaseIE): IE_NAME = 'prosiebensat1' IE_DESC = 'ProSiebenSat.1 Digital' _VALID_URL = r'''(?x) https?:// (?:www\.)? (?: (?: prosieben(?:maxx)?|sixx|sat1(?:gold)?|kabeleins(?:doku)?|the-voice-of-germany|7tv|advopedia )\.(?:de|at|ch)| ran\.de|fem\.com|advopedia\.de ) /(?P<id>.+) ''' _TESTS = [ { # Tests changes introduced in https://github.com/rg3/youtube-dl/pull/6242 # in response to fixing https://github.com/rg3/youtube-dl/issues/6215: # - malformed f4m manifest support # - proper handling of URLs starting with `https?://` in 2.0 manifests # - recursive child f4m manifests extraction 'url': 'http://www.prosieben.de/tv/circus-halligalli/videos/218-staffel-2-episode-18-jahresrueckblick-ganze-folge', 'info_dict': { 'id': '2104602', 'ext': 'mp4', 'title': 'Episode 18 - Staffel 2', 'description': 'md5:8733c81b702ea472e069bc48bb658fc1', 'upload_date': '20131231', 'duration': 5845.04, }, }, { 'url': 'http://www.prosieben.de/videokatalog/Gesellschaft/Leben/Trends/video-Lady-Umstyling-f%C3%BCr-Audrina-Rebekka-Audrina-Fergen-billig-aussehen-Battal-Modica-700544.html', 'info_dict': { 'id': '2570327', 'ext': 'mp4', 'title': 'Lady-Umstyling für Audrina', 'description': 'md5:4c16d0c17a3461a0d43ea4084e96319d', 'upload_date': '20131014', 'duration': 606.76,<|fim▁hole|> 'params': { # rtmp download 'skip_download': True, }, 'skip': 'Seems to be broken', }, { 'url': 'http://www.prosiebenmaxx.de/tv/experience/video/144-countdown-fuer-die-autowerkstatt-ganze-folge', 'info_dict': { 'id': '2429369', 'ext': 'mp4', 'title': 'Countdown für die Autowerkstatt', 'description': 'md5:809fc051a457b5d8666013bc40698817', 'upload_date': '20140223', 'duration': 2595.04, }, 'params': { # rtmp download 'skip_download': True, }, 'skip': 'This video is unavailable', }, { 'url': 'http://www.sixx.de/stars-style/video/sexy-laufen-in-ugg-boots-clip', 'info_dict': { 'id': '2904997', 'ext': 'mp4', 'title': 'Sexy laufen in Ugg Boots', 'description': 'md5:edf42b8bd5bc4e5da4db4222c5acb7d6', 'upload_date': '20140122', 'duration': 245.32, }, 'params': { # rtmp download 'skip_download': True, }, 'skip': 'This video is unavailable', }, { 'url': 'http://www.sat1.de/film/der-ruecktritt/video/im-interview-kai-wiesinger-clip', 'info_dict': { 'id': '2906572', 'ext': 'mp4', 'title': 'Im Interview: Kai Wiesinger', 'description': 'md5:e4e5370652ec63b95023e914190b4eb9', 'upload_date': '20140203', 'duration': 522.56, }, 'params': { # rtmp download 'skip_download': True, }, 'skip': 'This video is unavailable', }, { 'url': 'http://www.kabeleins.de/tv/rosins-restaurants/videos/jagd-auf-fertigkost-im-elsthal-teil-2-ganze-folge', 'info_dict': { 'id': '2992323', 'ext': 'mp4', 'title': 'Jagd auf Fertigkost im Elsthal - Teil 2', 'description': 'md5:2669cde3febe9bce13904f701e774eb6', 'upload_date': '20141014', 'duration': 2410.44, }, 'params': { # rtmp download 'skip_download': True, }, 'skip': 'This video is unavailable', }, { 'url': 'http://www.ran.de/fussball/bundesliga/video/schalke-toennies-moechte-raul-zurueck-ganze-folge', 'info_dict': { 'id': '3004256', 'ext': 'mp4', 'title': 'Schalke: Tönnies möchte Raul zurück', 'description': 'md5:4b5b271d9bcde223b54390754c8ece3f', 'upload_date': '20140226', 'duration': 228.96, }, 'params': { # rtmp download 'skip_download': True, }, 'skip': 'This video is unavailable', }, { 'url': 
'http://www.the-voice-of-germany.de/video/31-andreas-kuemmert-rocket-man-clip', 'info_dict': { 'id': '2572814', 'ext': 'mp4', 'title': 'Andreas Kümmert: Rocket Man', 'description': 'md5:6ddb02b0781c6adf778afea606652e38', 'upload_date': '20131017', 'duration': 469.88, }, 'params': { 'skip_download': True, }, }, { 'url': 'http://www.fem.com/wellness/videos/wellness-video-clip-kurztripps-zum-valentinstag.html', 'info_dict': { 'id': '2156342', 'ext': 'mp4', 'title': 'Kurztrips zum Valentinstag', 'description': 'Romantischer Kurztrip zum Valentinstag? Nina Heinemann verrät, was sich hier wirklich lohnt.', 'duration': 307.24, }, 'params': { 'skip_download': True, }, }, { 'url': 'http://www.prosieben.de/tv/joko-gegen-klaas/videos/playlists/episode-8-ganze-folge-playlist', 'info_dict': { 'id': '439664', 'title': 'Episode 8 - Ganze Folge - Playlist', 'description': 'md5:63b8963e71f481782aeea877658dec84', }, 'playlist_count': 2, 'skip': 'This video is unavailable', }, { 'url': 'http://www.7tv.de/circus-halligalli/615-best-of-circus-halligalli-ganze-folge', 'info_dict': { 'id': '4187506', 'ext': 'mp4', 'title': 'Best of Circus HalliGalli', 'description': 'md5:8849752efd90b9772c9db6fdf87fb9e9', 'upload_date': '20151229', }, 'params': { 'skip_download': True, }, }, { # geo restricted to Germany 'url': 'http://www.kabeleinsdoku.de/tv/mayday-alarm-im-cockpit/video/102-notlandung-im-hudson-river-ganze-folge', 'only_matching': True, }, { # geo restricted to Germany 'url': 'http://www.sat1gold.de/tv/edel-starck/video/11-staffel-1-episode-1-partner-wider-willen-ganze-folge', 'only_matching': True, }, { 'url': 'http://www.sat1gold.de/tv/edel-starck/playlist/die-gesamte-1-staffel', 'only_matching': True, }, { 'url': 'http://www.advopedia.de/videos/lenssen-klaert-auf/lenssen-klaert-auf-folge-8-staffel-3-feiertage-und-freie-tage', 'only_matching': True, }, ] _TOKEN = 'prosieben' _SALT = '01!8d8F_)r9]4s[qeuXfP%' _CLIENT_NAME = 'kolibri-2.0.19-splec4' _CLIPID_REGEXES = [ r'"clip_id"\s*:\s+"(\d+)"', r'clipid: "(\d+)"', r'clip[iI]d=(\d+)', r'clip[iI]d\s*=\s*["\'](\d+)', r"'itemImageUrl'\s*:\s*'/dynamic/thumbnails/full/\d+/(\d+)", ] _TITLE_REGEXES = [ r'<h2 class="subtitle" itemprop="name">\s*(.+?)</h2>', r'<header class="clearfix">\s*<h3>(.+?)</h3>', r'<!-- start video -->\s*<h1>(.+?)</h1>', r'<h1 class="att-name">\s*(.+?)</h1>', r'<header class="module_header">\s*<h2>([^<]+)</h2>\s*</header>', r'<h2 class="video-title" itemprop="name">\s*(.+?)</h2>', r'<div[^>]+id="veeseoTitle"[^>]*>(.+?)</div>', ] _DESCRIPTION_REGEXES = [ r'<p itemprop="description">\s*(.+?)</p>', r'<div class="videoDecription">\s*<p><strong>Beschreibung</strong>: (.+?)</p>', r'<div class="g-plusone" data-size="medium"></div>\s*</div>\s*</header>\s*(.+?)\s*<footer>', r'<p class="att-description">\s*(.+?)\s*</p>', r'<p class="video-description" itemprop="description">\s*(.+?)</p>', r'<div[^>]+id="veeseoDescription"[^>]*>(.+?)</div>', ] _UPLOAD_DATE_REGEXES = [ r'<meta property="og:published_time" content="(.+?)">', r'<span>\s*(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}) \|\s*<span itemprop="duration"', r'<footer>\s*(\d{2}\.\d{2}\.\d{4}) \d{2}:\d{2} Uhr', r'<span style="padding-left: 4px;line-height:20px; color:#404040">(\d{2}\.\d{2}\.\d{4})</span>', r'(\d{2}\.\d{2}\.\d{4}) \| \d{2}:\d{2} Min<br/>', ] _PAGE_TYPE_REGEXES = [ r'<meta name="page_type" content="([^"]+)">', r"'itemType'\s*:\s*'([^']*)'", ] _PLAYLIST_ID_REGEXES = [ r'content[iI]d=(\d+)', r"'itemId'\s*:\s*'([^']*)'", ] _PLAYLIST_CLIP_REGEXES = [ r'(?s)data-qvt=.+?<a href="([^"]+)"', ] def 
_extract_clip(self, url, webpage): clip_id = self._html_search_regex( self._CLIPID_REGEXES, webpage, 'clip id') title = self._html_search_regex(self._TITLE_REGEXES, webpage, 'title') info = self._extract_video_info(url, clip_id) description = self._html_search_regex( self._DESCRIPTION_REGEXES, webpage, 'description', default=None) if description is None: description = self._og_search_description(webpage) thumbnail = self._og_search_thumbnail(webpage) upload_date = unified_strdate(self._html_search_regex( self._UPLOAD_DATE_REGEXES, webpage, 'upload date', default=None)) info.update({ 'id': clip_id, 'title': title, 'description': description, 'thumbnail': thumbnail, 'upload_date': upload_date, }) return info def _extract_playlist(self, url, webpage): playlist_id = self._html_search_regex( self._PLAYLIST_ID_REGEXES, webpage, 'playlist id') playlist = self._parse_json( self._search_regex( r'var\s+contentResources\s*=\s*(\[.+?\]);\s*</script', webpage, 'playlist'), playlist_id) entries = [] for item in playlist: clip_id = item.get('id') or item.get('upc') if not clip_id: continue info = self._extract_video_info(url, clip_id) info.update({ 'id': clip_id, 'title': item.get('title') or item.get('teaser', {}).get('headline'), 'description': item.get('teaser', {}).get('description'), 'thumbnail': item.get('poster'), 'duration': float_or_none(item.get('duration')), 'series': item.get('tvShowTitle'), 'uploader': item.get('broadcastPublisher'), }) entries.append(info) return self.playlist_result(entries, playlist_id) def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) page_type = self._search_regex( self._PAGE_TYPE_REGEXES, webpage, 'page type', default='clip').lower() if page_type == 'clip': return self._extract_clip(url, webpage) elif page_type == 'playlist': return self._extract_playlist(url, webpage) else: raise ExtractorError( 'Unsupported page type %s' % page_type, expected=True)<|fim▁end|>
},
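The playlist branch of the extractor above lifts a JSON array out of an inline script tag with a regex and then walks its items, falling back from 'id' to 'upc' as the clip identifier. A minimal standalone sketch of that pattern in Python — the HTML snippet and field values here are invented for illustration:

import json
import re

# Invented stand-in for a downloaded webpage; real pages embed the same structure.
webpage = '<script>var contentResources = [{"id": "123", "title": "Clip 1"}, {"upc": "456"}];</script>'

raw = re.search(r'var\s+contentResources\s*=\s*(\[.+?\]);\s*</script', webpage).group(1)
playlist = json.loads(raw)

# Mirror the extractor: accept either 'id' or 'upc' and skip items with neither.
clip_ids = [item.get('id') or item.get('upc') for item in playlist]
print(clip_ids)  # ['123', '456']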
<|file_name|>EaselPlugin.js<|end_file_name|><|fim▁begin|>/*! * VERSION: 0.2.1 * DATE: 2017-01-17 * UPDATES AND DOCS AT: http://greensock.com * * @license Copyright (c) 2008-2017, GreenSock. All rights reserved. * This work is subject to the terms at http://greensock.com/standard-license or for * Club GreenSock members, the software agreement that was issued with your membership. * * @author: Jack Doyle, [email protected] **/ var _gsScope = (typeof(module) !== "undefined" && module.exports && typeof(global) !== "undefined") ? global : this || window; //helps ensure compatibility with AMD/RequireJS and CommonJS/Node (_gsScope._gsQueue || (_gsScope._gsQueue = [])).push( function() { "use strict"; var _numExp = /(\d|\.)+/g, _ColorFilter, _ColorMatrixFilter, _colorProps = ["redMultiplier","greenMultiplier","blueMultiplier","alphaMultiplier","redOffset","greenOffset","blueOffset","alphaOffset"], _colorLookup = {aqua:[0,255,255], lime:[0,255,0], silver:[192,192,192], black:[0,0,0], maroon:[128,0,0], teal:[0,128,128], blue:[0,0,255], navy:[0,0,128], white:[255,255,255], fuchsia:[255,0,255], olive:[128,128,0], yellow:[255,255,0], orange:[255,165,0], gray:[128,128,128], purple:[128,0,128], green:[0,128,0], red:[255,0,0], pink:[255,192,203], cyan:[0,255,255], transparent:[255,255,255,0]}, _parseColor = function(color) { if (color === "" || color == null || color === "none") { return _colorLookup.transparent; } else if (_colorLookup[color]) { return _colorLookup[color]; } else if (typeof(color) === "number") { return [color >> 16, (color >> 8) & 255, color & 255]; } else if (color.charAt(0) === "#") { if (color.length === 4) { //for shorthand like #9F0 color = "#" + color.charAt(1) + color.charAt(1) + color.charAt(2) + color.charAt(2) + color.charAt(3) + color.charAt(3); } color = parseInt(color.substr(1), 16); return [color >> 16, (color >> 8) & 255, color & 255]; } return color.match(_numExp) || _colorLookup.transparent; }, _parseColorFilter = function(t, v, pg) { if (!_ColorFilter) { _ColorFilter = (_gsScope.ColorFilter || _gsScope.createjs.ColorFilter); if (!_ColorFilter) { throw("EaselPlugin error: The EaselJS ColorFilter JavaScript file wasn't loaded."); } } var filters = t.filters || [], i = filters.length, c, s, e, a, p; while (--i > -1) { if (filters[i] instanceof _ColorFilter) { s = filters[i]; break; } } if (!s) { s = new _ColorFilter(); filters.push(s); t.filters = filters; } e = s.clone(); if (v.tint != null) { c = _parseColor(v.tint); a = (v.tintAmount != null) ? Number(v.tintAmount) : 1; e.redOffset = Number(c[0]) * a; e.greenOffset = Number(c[1]) * a; e.blueOffset = Number(c[2]) * a; e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1 - a; } else { for (p in v) { if (p !== "exposure") if (p !== "brightness") { e[p] = Number(v[p]); } } } if (v.exposure != null) { e.redOffset = e.greenOffset = e.blueOffset = 255 * (Number(v.exposure) - 1); e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1; } else if (v.brightness != null) { a = Number(v.brightness) - 1; e.redOffset = e.greenOffset = e.blueOffset = (a > 0) ? a * 255 : 0; e.redMultiplier = e.greenMultiplier = e.blueMultiplier = 1 - Math.abs(a); } i = 8; while (--i > -1) { p = _colorProps[i]; if (s[p] !== e[p]) { pg._addTween(s, p, s[p], e[p], "easel_colorFilter"); } } pg._overwriteProps.push("easel_colorFilter"); if (!t.cacheID) { throw("EaselPlugin warning: for filters to display in EaselJS, you must call the object's cache() method first. 
" + t); } }, _idMatrix = [1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0], _lumR = 0.212671, _lumG = 0.715160, _lumB = 0.072169, _applyMatrix = function(m, m2) { if (!(m instanceof Array) || !(m2 instanceof Array)) { return m2; } var temp = [], i = 0, z = 0, y, x; for (y = 0; y < 4; y++) { for (x = 0; x < 5; x++) { z = (x === 4) ? m[i + 4] : 0; temp[i + x] = m[i] * m2[x] + m[i+1] * m2[x + 5] + m[i+2] * m2[x + 10] + m[i+3] * m2[x + 15] + z; } i += 5; } return temp; }, _setSaturation = function(m, n) { if (isNaN(n)) { return m; } var inv = 1 - n, r = inv * _lumR, g = inv * _lumG, b = inv * _lumB; return _applyMatrix([r + n, g, b, 0, 0, r, g + n, b, 0, 0, r, g, b + n, 0, 0, 0, 0, 0, 1, 0], m); }, _colorize = function(m, color, amount) { if (isNaN(amount)) { amount = 1; } var c = _parseColor(color), r = c[0] / 255, g = c[1] / 255, b = c[2] / 255, inv = 1 - amount; return _applyMatrix([inv + amount * r * _lumR, amount * r * _lumG, amount * r * _lumB, 0, 0, amount * g * _lumR, inv + amount * g * _lumG, amount * g * _lumB, 0, 0, amount * b * _lumR, amount * b * _lumG, inv + amount * b * _lumB, 0, 0, 0, 0, 0, 1, 0], m); }, _setHue = function(m, n) { if (isNaN(n)) { return m; } n *= Math.PI / 180; var c = Math.cos(n), s = Math.sin(n); return _applyMatrix([(_lumR + (c * (1 - _lumR))) + (s * (-_lumR)), (_lumG + (c * (-_lumG))) + (s * (-_lumG)), (_lumB + (c * (-_lumB))) + (s * (1 - _lumB)), 0, 0, (_lumR + (c * (-_lumR))) + (s * 0.143), (_lumG + (c * (1 - _lumG))) + (s * 0.14), (_lumB + (c * (-_lumB))) + (s * -0.283), 0, 0, (_lumR + (c * (-_lumR))) + (s * (-(1 - _lumR))), (_lumG + (c * (-_lumG))) + (s * _lumG), (_lumB + (c * (1 - _lumB))) + (s * _lumB), 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1], m); }, _setContrast = function(m, n) { if (isNaN(n)) { return m; } n += 0.01; return _applyMatrix([n,0,0,0,128 * (1 - n), 0,n,0,0,128 * (1 - n), 0,0,n,0,128 * (1 - n), 0,0,0,1,0], m); }, _parseColorMatrixFilter = function(t, v, pg) { if (!_ColorMatrixFilter) { _ColorMatrixFilter = (_gsScope.ColorMatrixFilter || _gsScope.createjs.ColorMatrixFilter); if (!_ColorMatrixFilter) { throw("EaselPlugin error: The EaselJS ColorMatrixFilter JavaScript file wasn't loaded."); } } var filters = t.filters || [], i = filters.length, matrix, startMatrix, s; while (--i > -1) { if (filters[i] instanceof _ColorMatrixFilter) { s = filters[i]; break; } } if (!s) { s = new _ColorMatrixFilter(_idMatrix.slice()); filters.push(s); t.filters = filters; } startMatrix = s.matrix; matrix = _idMatrix.slice(); if (v.colorize != null) { matrix = _colorize(matrix, v.colorize, Number(v.colorizeAmount)); } if (v.contrast != null) { matrix = _setContrast(matrix, Number(v.contrast)); } if (v.hue != null) { matrix = _setHue(matrix, Number(v.hue)); } if (v.saturation != null) { matrix = _setSaturation(matrix, Number(v.saturation)); } i = matrix.length; while (--i > -1) { if (matrix[i] !== startMatrix[i]) { pg._addTween(startMatrix, i, startMatrix[i], matrix[i], "easel_colorMatrixFilter"); } } pg._overwriteProps.push("easel_colorMatrixFilter"); if (!t.cacheID) { throw("EaselPlugin warning: for filters to display in EaselJS, you must call the object's cache() method first. " + t); } pg._matrix = startMatrix; }; _gsScope._gsDefine.plugin({ propName: "easel", priority: -1, version: "0.2.1", API: 2, //called when the tween renders for the first time. This is where initial values should be recorded and any setup routines should run. 
init: function(target, value, tween, index) { this._target = target; var p, pt, tint, colorMatrix, end, labels, i; for (p in value) { end = value[p]; if (typeof(end) === "function") { end = end(index, target); } if (p === "colorFilter" || p === "tint" || p === "tintAmount" || p === "exposure" || p === "brightness") { if (!tint) { _parseColorFilter(target, value.colorFilter || value, this); tint = true; } <|fim▁hole|> } else if (p === "saturation" || p === "contrast" || p === "hue" || p === "colorize" || p === "colorizeAmount") { if (!colorMatrix) { _parseColorMatrixFilter(target, value.colorMatrixFilter || value, this); colorMatrix = true; } } else if (p === "frame") { this._firstPT = pt = {_next:this._firstPT, t:target, p:"gotoAndStop", s:target.currentFrame, f:true, n:"frame", pr:0, type:0, m:Math.round}; if (typeof(end) === "string" && end.charAt(1) !== "=" && (labels = target.labels)) { for (i = 0; i < labels.length; i++) { if (labels[i].label === end) { end = labels[i].position; } } } pt.c = (typeof(end) === "number") ? end - pt.s : parseFloat((end+"").split("=").join("")); if (pt._next) { pt._next._prev = pt; } } else if (target[p] != null) { this._firstPT = pt = {_next:this._firstPT, t:target, p:p, f:(typeof(target[p]) === "function"), n:p, pr:0, type:0}; pt.s = (!pt.f) ? parseFloat(target[p]) : target[ ((p.indexOf("set") || typeof(target["get" + p.substr(3)]) !== "function") ? p : "get" + p.substr(3)) ](); pt.c = (typeof(end) === "number") ? end - pt.s : (typeof(end) === "string") ? parseFloat(end.split("=").join("")) : 0; if (pt._next) { pt._next._prev = pt; } } } return true; }, //called each time the values should be updated, and the ratio gets passed as the only parameter (typically it's a value between 0 and 1, but it can exceed those when using an ease like Elastic.easeOut or Back.easeOut, etc.) set: function(v) { var pt = this._firstPT, min = 0.000001, val; while (pt) { val = pt.c * v + pt.s; if (pt.m) { val = pt.m(val, pt.t); } else if (val < min && val > -min) { val = 0; } if (pt.f) { pt.t[pt.p](val); } else { pt.t[pt.p] = val; } pt = pt._next; } if (this._target.cacheID) { this._target.updateCache(); } } }); }); if (_gsScope._gsDefine) { _gsScope._gsQueue.pop()(); } //export to AMD/RequireJS and CommonJS/Node (precursor to full modular build system coming at a later date) (function(name) { "use strict"; var getGlobal = function() { return (_gsScope.GreenSockGlobals || _gsScope)[name]; }; if (typeof(define) === "function" && define.amd) { //AMD define(["./TweenLite"], getGlobal); } else if (typeof(module) !== "undefined" && module.exports) { //node require("./TweenLite.js"); module.exports = getGlobal(); } }("EaselPlugin"));<|fim▁end|>
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|>
 var jsonschema = require("jsonschema"); var utils = {}; utils.validateJSON = function(schema, json){ var result = jsonschema.validate(json, schema); if(result.errors.length == 0){ return json; }else{ throw new Error("message not valid, " + result.errors.join()); } }; utils.validateRawString = function(schema, message){ var self = this; var json = JSON.parse(message); return self.validateJSON(schema, json); }; /** * load and initialize an object from the specified path, and check that each required function exists on this object * @param filePath * @param checkFuncs * @constructor */ utils.loadAndCheck = function(filePath, checkFuncs){ var loadCls = require(filePath); var loadObj = new loadCls(); checkFuncs.forEach(function(checkFunc){ if (typeof(loadObj[checkFunc]) != "function") { throw new Error(filePath + " doesn't have " + checkFunc + "()"); } }); return loadObj; }; module.exports = utils;<|fim▁end|>
* Created by kliu on 10/10/2015. */
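utils.js above wraps the jsonschema library so that a validation failure raises instead of returning a list of errors. The same validate-or-throw pattern expressed in Python, assuming the `jsonschema` package and an invented schema:

import json
import jsonschema  # pip install jsonschema

schema = {"type": "object", "required": ["id"], "properties": {"id": {"type": "string"}}}

def validate_raw_string(schema, message):
    payload = json.loads(message)
    # raises jsonschema.exceptions.ValidationError on failure, mirroring the throw above
    jsonschema.validate(payload, schema)
    return payload

print(validate_raw_string(schema, '{"id": "abc"}'))  # {'id': 'abc'}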
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Meta Platforms, Inc. and affiliates. *<|fim▁hole|>mod constants; mod hash_supported_argument; mod match_transform; mod split_module_import; mod split_operation_metadata; mod subscription_transform; mod validation_message; pub use constants::MATCH_CONSTANTS; pub use hash_supported_argument::hash_supported_argument; pub use match_transform::{transform_match, ModuleMetadata}; pub use split_module_import::split_module_import; pub use split_operation_metadata::{SplitOperationMetadata, DIRECTIVE_SPLIT_OPERATION}; pub use subscription_transform::transform_subscriptions;<|fim▁end|>
* This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */
<|file_name|>test_compute.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from datetime import datetime from functools import lru_cache import inspect import pickle import pytest import random import textwrap import numpy as np import pyarrow as pa import pyarrow.compute as pc all_array_types = [ ('bool', [True, False, False, True, True]), ('uint8', np.arange(5)), ('int8', np.arange(5)), ('uint16', np.arange(5)), ('int16', np.arange(5)), ('uint32', np.arange(5)), ('int32', np.arange(5)), ('uint64', np.arange(5, 10)), ('int64', np.arange(5, 10)), ('float', np.arange(0, 0.5, 0.1)), ('double', np.arange(0, 0.5, 0.1)), ('string', ['a', 'b', None, 'ddd', 'ee']), ('binary', [b'a', b'b', b'c', b'ddd', b'ee']), (pa.binary(3), [b'abc', b'bcd', b'cde', b'def', b'efg']), (pa.list_(pa.int8()), [[1, 2], [3, 4], [5, 6], None, [9, 16]]), (pa.large_list(pa.int16()), [[1], [2, 3, 4], [5, 6], None, [9, 16]]), (pa.struct([('a', pa.int8()), ('b', pa.int8())]), [ {'a': 1, 'b': 2}, None, {'a': 3, 'b': 4}, None, {'a': 5, 'b': 6}]), ] exported_functions = [ func for (name, func) in sorted(pc.__dict__.items()) if hasattr(func, '__arrow_compute_function__')] exported_option_classes = [ cls for (name, cls) in sorted(pc.__dict__.items()) if (isinstance(cls, type) and cls is not pc.FunctionOptions and issubclass(cls, pc.FunctionOptions))] numerical_arrow_types = [ pa.int8(), pa.int16(), pa.int64(), pa.uint8(), pa.uint16(), pa.uint64(), pa.float32(), pa.float64() ] def test_exported_functions(): # Check that all exported concrete functions can be called with # the right number of arguments. # Note that unregistered functions (e.g. with a mismatching name) # will raise KeyError. functions = exported_functions assert len(functions) >= 10 for func in functions: args = [object()] * func.__arrow_compute_function__['arity'] with pytest.raises(TypeError, match="Got unexpected argument type " "<class 'object'> for compute function"): func(*args) def test_exported_option_classes(): classes = exported_option_classes assert len(classes) >= 10 for cls in classes: # Option classes must have an introspectable constructor signature, # and that signature should not have any *args or **kwargs. 
sig = inspect.signature(cls) for param in sig.parameters.values(): assert param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD) def test_list_functions(): assert len(pc.list_functions()) > 10 assert "add" in pc.list_functions() def _check_get_function(name, expected_func_cls, expected_ker_cls, min_num_kernels=1): func = pc.get_function(name) assert isinstance(func, expected_func_cls) n = func.num_kernels assert n >= min_num_kernels assert n == len(func.kernels) assert all(isinstance(ker, expected_ker_cls) for ker in func.kernels) def test_get_function_scalar(): _check_get_function("add", pc.ScalarFunction, pc.ScalarKernel, 8) def test_get_function_vector(): _check_get_function("unique", pc.VectorFunction, pc.VectorKernel, 8) def test_get_function_aggregate(): _check_get_function("mean", pc.ScalarAggregateFunction, pc.ScalarAggregateKernel, 8) def test_call_function_with_memory_pool(): arr = pa.array(["foo", "bar", "baz"]) indices = np.array([2, 2, 1]) result1 = arr.take(indices) result2 = pc.call_function('take', [arr, indices], memory_pool=pa.default_memory_pool()) expected = pa.array(["baz", "baz", "bar"]) assert result1.equals(expected) assert result2.equals(expected) result3 = pc.take(arr, indices, memory_pool=pa.default_memory_pool()) assert result3.equals(expected) def test_pickle_functions(): # Pickle registered functions for name in pc.list_functions(): func = pc.get_function(name) reconstructed = pickle.loads(pickle.dumps(func)) assert type(reconstructed) is type(func) assert reconstructed.name == func.name assert reconstructed.arity == func.arity assert reconstructed.num_kernels == func.num_kernels def test_pickle_global_functions(): # Pickle global wrappers (manual or automatic) of registered functions for name in pc.list_functions(): func = getattr(pc, name) reconstructed = pickle.loads(pickle.dumps(func)) assert reconstructed is func def test_function_attributes(): # Sanity check attributes of registered functions for name in pc.list_functions(): func = pc.get_function(name) assert isinstance(func, pc.Function) assert func.name == name kernels = func.kernels assert func.num_kernels == len(kernels) assert all(isinstance(ker, pc.Kernel) for ker in kernels) assert func.arity >= 1 # no varargs functions for now repr(func) for ker in kernels: repr(ker) def test_input_type_conversion(): # Automatic array conversion from Python arr = pc.add([1, 2], [4, None]) assert arr.to_pylist() == [5, None] # Automatic scalar conversion from Python arr = pc.add([1, 2], 4) assert arr.to_pylist() == [5, 6] # Other scalar type assert pc.equal(["foo", "bar", None], "foo").to_pylist() == [True, False, None] @pytest.mark.parametrize('arrow_type', numerical_arrow_types) def test_sum_array(arrow_type): arr = pa.array([1, 2, 3, 4], type=arrow_type) assert arr.sum().as_py() == 10 assert pc.sum(arr).as_py() == 10 arr = pa.array([], type=arrow_type) assert arr.sum().as_py() is None # noqa: E711 @pytest.mark.parametrize('arrow_type', numerical_arrow_types) def test_sum_chunked_array(arrow_type): arr = pa.chunked_array([pa.array([1, 2, 3, 4], type=arrow_type)]) assert pc.sum(arr).as_py() == 10 arr = pa.chunked_array([ pa.array([1, 2], type=arrow_type), pa.array([3, 4], type=arrow_type) ]) assert pc.sum(arr).as_py() == 10 arr = pa.chunked_array([ pa.array([1, 2], type=arrow_type), pa.array([], type=arrow_type), pa.array([3, 4], type=arrow_type) ]) assert pc.sum(arr).as_py() == 10 arr = pa.chunked_array((), type=arrow_type) assert arr.num_chunks == 0 assert pc.sum(arr).as_py() is None # noqa: E711 def 
test_mode_array(): # ARROW-9917 arr = pa.array([1, 1, 3, 4, 3, 5], type='int64') mode = pc.mode(arr) assert len(mode) == 1 assert mode[0].as_py() == {"mode": 1, "count": 2} mode = pc.mode(arr, 2) assert len(mode) == 2 assert mode[0].as_py() == {"mode": 1, "count": 2} assert mode[1].as_py() == {"mode": 3, "count": 2} arr = pa.array([], type='int64') assert len(pc.mode(arr)) == 0 def test_mode_chunked_array(): # ARROW-9917 arr = pa.chunked_array([pa.array([1, 1, 3, 4, 3, 5], type='int64')]) mode = pc.mode(arr) assert len(mode) == 1 assert mode[0].as_py() == {"mode": 1, "count": 2} mode = pc.mode(arr, 2) assert len(mode) == 2 assert mode[0].as_py() == {"mode": 1, "count": 2} assert mode[1].as_py() == {"mode": 3, "count": 2} arr = pa.chunked_array((), type='int64') assert arr.num_chunks == 0 assert len(pc.mode(arr)) == 0 def test_variance(): data = [1, 2, 3, 4, 5, 6, 7, 8] assert pc.variance(data).as_py() == 5.25 assert pc.variance(data, ddof=0).as_py() == 5.25 assert pc.variance(data, ddof=1).as_py() == 6.0 def test_match_substring(): arr = pa.array(["ab", "abc", "ba", None]) result = pc.match_substring(arr, "ab") expected = pa.array([True, True, False, None]) assert expected.equals(result) def test_split_pattern(): arr = pa.array(["-foo---bar--", "---foo---b"]) result = pc.split_pattern(arr, pattern="---") expected = pa.array([["-foo", "bar--"], ["", "foo", "b"]]) assert expected.equals(result) result = pc.split_pattern(arr, pattern="---", max_splits=1) expected = pa.array([["-foo", "bar--"], ["", "foo---b"]]) assert expected.equals(result) result = pc.split_pattern(arr, pattern="---", max_splits=1, reverse=True) expected = pa.array([["-foo", "bar--"], ["---foo", "b"]]) assert expected.equals(result) def test_split_whitespace_utf8(): arr = pa.array(["foo bar", " foo \u3000\tb"]) result = pc.utf8_split_whitespace(arr) expected = pa.array([["foo", "bar"], ["", "foo", "b"]]) assert expected.equals(result) result = pc.utf8_split_whitespace(arr, max_splits=1) expected = pa.array([["foo", "bar"], ["", "foo \u3000\tb"]]) assert expected.equals(result) result = pc.utf8_split_whitespace(arr, max_splits=1, reverse=True) expected = pa.array([["foo", "bar"], [" foo", "b"]]) assert expected.equals(result) def test_split_whitespace_ascii(): arr = pa.array(["foo bar", " foo \u3000\tb"]) result = pc.ascii_split_whitespace(arr) expected = pa.array([["foo", "bar"], ["", "foo", "\u3000", "b"]]) assert expected.equals(result) result = pc.ascii_split_whitespace(arr, max_splits=1) expected = pa.array([["foo", "bar"], ["", "foo \u3000\tb"]]) assert expected.equals(result) result = pc.ascii_split_whitespace(arr, max_splits=1, reverse=True) expected = pa.array([["foo", "bar"], [" foo \u3000", "b"]]) assert expected.equals(result) def test_min_max(): # An example generated function wrapper with possible options data = [4, 5, 6, None, 1] s = pc.min_max(data) assert s.as_py() == {'min': 1, 'max': 6} s = pc.min_max(data, options=pc.MinMaxOptions()) assert s.as_py() == {'min': 1, 'max': 6} s = pc.min_max(data, options=pc.MinMaxOptions(null_handling='skip')) assert s.as_py() == {'min': 1, 'max': 6} s = pc.min_max(data, options=pc.MinMaxOptions(null_handling='emit_null')) assert s.as_py() == {'min': None, 'max': None} # Options as dict of kwargs s = pc.min_max(data, options={'null_handling': 'emit_null'}) assert s.as_py() == {'min': None, 'max': None} # Options as named functions arguments s = pc.min_max(data, null_handling='emit_null') assert s.as_py() == {'min': None, 'max': None} # Both options and named arguments with 
pytest.raises(TypeError): s = pc.min_max(data, options=pc.MinMaxOptions(), null_handling='emit_null') # Wrong options type options = pc.TakeOptions() with pytest.raises(TypeError): s = pc.min_max(data, options=options) # Missing argument with pytest.raises( TypeError, match=r"min_max\(\) missing 1 required positional argument"): s = pc.min_max() def test_is_valid(): # An example generated function wrapper without options data = [4, 5, None] assert pc.is_valid(data).to_pylist() == [True, True, False] with pytest.raises(TypeError): pc.is_valid(data, options=None) def test_generated_docstrings(): assert pc.min_max.__doc__ == textwrap.dedent("""\ Compute the minimum and maximum values of a numeric array. Null values are ignored by default. This can be changed through MinMaxOptions. Parameters ---------- array : Array-like Argument to compute function memory_pool : pyarrow.MemoryPool, optional If not passed, will allocate memory from the default memory pool. options : pyarrow.compute.MinMaxOptions, optional Parameters altering compute function semantics **kwargs: optional Parameters for MinMaxOptions constructor. Either `options` or `**kwargs` can be passed, but not both at the same time.<|fim▁hole|> Add the arguments element-wise. Results will wrap around on integer overflow. Use function "add_checked" if you want overflow to return an error. Parameters ---------- x : Array-like or scalar-like Argument to compute function y : Array-like or scalar-like Argument to compute function memory_pool : pyarrow.MemoryPool, optional If not passed, will allocate memory from the default memory pool. """) # We use isprintable to find about codepoints that Python doesn't know, but # utf8proc does (or in a future version of Python the other way around). # These codepoints cannot be compared between Arrow and the Python # implementation. 
@lru_cache() def find_new_unicode_codepoints(): new = set() characters = [chr(c) for c in range(0x80, 0x11000) if not (0xD800 <= c < 0xE000)] is_printable = pc.utf8_is_printable(pa.array(characters)).to_pylist() for i, c in enumerate(characters): if is_printable[i] != c.isprintable(): new.add(ord(c)) return new # Python claims there are not alpha, not sure why, they are in # gc='Other Letter': https://graphemica.com/%E1%B3%B2 unknown_issue_is_alpha = {0x1cf2, 0x1cf3} # utf8proc does not know if codepoints are lower case utf8proc_issue_is_lower = { 0xaa, 0xba, 0x2b0, 0x2b1, 0x2b2, 0x2b3, 0x2b4, 0x2b5, 0x2b6, 0x2b7, 0x2b8, 0x2c0, 0x2c1, 0x2e0, 0x2e1, 0x2e2, 0x2e3, 0x2e4, 0x37a, 0x1d2c, 0x1d2d, 0x1d2e, 0x1d2f, 0x1d30, 0x1d31, 0x1d32, 0x1d33, 0x1d34, 0x1d35, 0x1d36, 0x1d37, 0x1d38, 0x1d39, 0x1d3a, 0x1d3b, 0x1d3c, 0x1d3d, 0x1d3e, 0x1d3f, 0x1d40, 0x1d41, 0x1d42, 0x1d43, 0x1d44, 0x1d45, 0x1d46, 0x1d47, 0x1d48, 0x1d49, 0x1d4a, 0x1d4b, 0x1d4c, 0x1d4d, 0x1d4e, 0x1d4f, 0x1d50, 0x1d51, 0x1d52, 0x1d53, 0x1d54, 0x1d55, 0x1d56, 0x1d57, 0x1d58, 0x1d59, 0x1d5a, 0x1d5b, 0x1d5c, 0x1d5d, 0x1d5e, 0x1d5f, 0x1d60, 0x1d61, 0x1d62, 0x1d63, 0x1d64, 0x1d65, 0x1d66, 0x1d67, 0x1d68, 0x1d69, 0x1d6a, 0x1d78, 0x1d9b, 0x1d9c, 0x1d9d, 0x1d9e, 0x1d9f, 0x1da0, 0x1da1, 0x1da2, 0x1da3, 0x1da4, 0x1da5, 0x1da6, 0x1da7, 0x1da8, 0x1da9, 0x1daa, 0x1dab, 0x1dac, 0x1dad, 0x1dae, 0x1daf, 0x1db0, 0x1db1, 0x1db2, 0x1db3, 0x1db4, 0x1db5, 0x1db6, 0x1db7, 0x1db8, 0x1db9, 0x1dba, 0x1dbb, 0x1dbc, 0x1dbd, 0x1dbe, 0x1dbf, 0x2071, 0x207f, 0x2090, 0x2091, 0x2092, 0x2093, 0x2094, 0x2095, 0x2096, 0x2097, 0x2098, 0x2099, 0x209a, 0x209b, 0x209c, 0x2c7c, 0x2c7d, 0xa69c, 0xa69d, 0xa770, 0xa7f8, 0xa7f9, 0xab5c, 0xab5d, 0xab5e, 0xab5f, } # utf8proc does not store if a codepoint is numeric numeric_info_missing = { 0x3405, 0x3483, 0x382a, 0x3b4d, 0x4e00, 0x4e03, 0x4e07, 0x4e09, 0x4e5d, 0x4e8c, 0x4e94, 0x4e96, 0x4ebf, 0x4ec0, 0x4edf, 0x4ee8, 0x4f0d, 0x4f70, 0x5104, 0x5146, 0x5169, 0x516b, 0x516d, 0x5341, 0x5343, 0x5344, 0x5345, 0x534c, 0x53c1, 0x53c2, 0x53c3, 0x53c4, 0x56db, 0x58f1, 0x58f9, 0x5e7a, 0x5efe, 0x5eff, 0x5f0c, 0x5f0d, 0x5f0e, 0x5f10, 0x62fe, 0x634c, 0x67d2, 0x6f06, 0x7396, 0x767e, 0x8086, 0x842c, 0x8cae, 0x8cb3, 0x8d30, 0x9621, 0x9646, 0x964c, 0x9678, 0x96f6, 0xf96b, 0xf973, 0xf978, 0xf9b2, 0xf9d1, 0xf9d3, 0xf9fd, 0x10fc5, 0x10fc6, 0x10fc7, 0x10fc8, 0x10fc9, 0x10fca, 0x10fcb, } # utf8proc has no no digit/numeric information digit_info_missing = { 0xb2, 0xb3, 0xb9, 0x1369, 0x136a, 0x136b, 0x136c, 0x136d, 0x136e, 0x136f, 0x1370, 0x1371, 0x19da, 0x2070, 0x2074, 0x2075, 0x2076, 0x2077, 0x2078, 0x2079, 0x2080, 0x2081, 0x2082, 0x2083, 0x2084, 0x2085, 0x2086, 0x2087, 0x2088, 0x2089, 0x2460, 0x2461, 0x2462, 0x2463, 0x2464, 0x2465, 0x2466, 0x2467, 0x2468, 0x2474, 0x2475, 0x2476, 0x2477, 0x2478, 0x2479, 0x247a, 0x247b, 0x247c, 0x2488, 0x2489, 0x248a, 0x248b, 0x248c, 0x248d, 0x248e, 0x248f, 0x2490, 0x24ea, 0x24f5, 0x24f6, 0x24f7, 0x24f8, 0x24f9, 0x24fa, 0x24fb, 0x24fc, 0x24fd, 0x24ff, 0x2776, 0x2777, 0x2778, 0x2779, 0x277a, 0x277b, 0x277c, 0x277d, 0x277e, 0x2780, 0x2781, 0x2782, 0x2783, 0x2784, 0x2785, 0x2786, 0x2787, 0x2788, 0x278a, 0x278b, 0x278c, 0x278d, 0x278e, 0x278f, 0x2790, 0x2791, 0x2792, 0x10a40, 0x10a41, 0x10a42, 0x10a43, 0x10e60, 0x10e61, 0x10e62, 0x10e63, 0x10e64, 0x10e65, 0x10e66, 0x10e67, 0x10e68, } numeric_info_missing = { 0x3405, 0x3483, 0x382a, 0x3b4d, 0x4e00, 0x4e03, 0x4e07, 0x4e09, 0x4e5d, 0x4e8c, 0x4e94, 0x4e96, 0x4ebf, 0x4ec0, 0x4edf, 0x4ee8, 0x4f0d, 0x4f70, 0x5104, 0x5146, 0x5169, 0x516b, 0x516d, 0x5341, 0x5343, 0x5344, 0x5345, 
0x534c, 0x53c1, 0x53c2, 0x53c3, 0x53c4, 0x56db, 0x58f1, 0x58f9, 0x5e7a, 0x5efe, 0x5eff, 0x5f0c, 0x5f0d, 0x5f0e, 0x5f10, 0x62fe, 0x634c, 0x67d2, 0x6f06, 0x7396, 0x767e, 0x8086, 0x842c, 0x8cae, 0x8cb3, 0x8d30, 0x9621, 0x9646, 0x964c, 0x9678, 0x96f6, 0xf96b, 0xf973, 0xf978, 0xf9b2, 0xf9d1, 0xf9d3, 0xf9fd, } codepoints_ignore = { 'is_alnum': numeric_info_missing | digit_info_missing | unknown_issue_is_alpha, 'is_alpha': unknown_issue_is_alpha, 'is_digit': digit_info_missing, 'is_numeric': numeric_info_missing, 'is_lower': utf8proc_issue_is_lower } @pytest.mark.parametrize('function_name', ['is_alnum', 'is_alpha', 'is_ascii', 'is_decimal', 'is_digit', 'is_lower', 'is_numeric', 'is_printable', 'is_space', 'is_upper', ]) @pytest.mark.parametrize('variant', ['ascii', 'utf8']) def test_string_py_compat_boolean(function_name, variant): arrow_name = variant + "_" + function_name py_name = function_name.replace('_', '') ignore = codepoints_ignore.get(function_name, set()) |\ find_new_unicode_codepoints() for i in range(128 if ascii else 0x11000): if i in range(0xD800, 0xE000): continue # bug? pyarrow doesn't allow utf16 surrogates # the issues we know of, we skip if i in ignore: continue # Compare results with the equivalent Python predicate # (except "is_space" where functions are known to be incompatible) c = chr(i) if hasattr(pc, arrow_name) and function_name != 'is_space': ar = pa.array([c]) arrow_func = getattr(pc, arrow_name) assert arrow_func(ar)[0].as_py() == getattr(c, py_name)() @pytest.mark.parametrize(('ty', 'values'), all_array_types) def test_take(ty, values): arr = pa.array(values, type=ty) for indices_type in [pa.int8(), pa.int64()]: indices = pa.array([0, 4, 2, None], type=indices_type) result = arr.take(indices) result.validate() expected = pa.array([values[0], values[4], values[2], None], type=ty) assert result.equals(expected) # empty indices indices = pa.array([], type=indices_type) result = arr.take(indices) result.validate() expected = pa.array([], type=ty) assert result.equals(expected) indices = pa.array([2, 5]) with pytest.raises(IndexError): arr.take(indices) indices = pa.array([2, -1]) with pytest.raises(IndexError): arr.take(indices) def test_take_indices_types(): arr = pa.array(range(5)) for indices_type in ['uint8', 'int8', 'uint16', 'int16', 'uint32', 'int32', 'uint64', 'int64']: indices = pa.array([0, 4, 2, None], type=indices_type) result = arr.take(indices) result.validate() expected = pa.array([0, 4, 2, None]) assert result.equals(expected) for indices_type in [pa.float32(), pa.float64()]: indices = pa.array([0, 4, 2], type=indices_type) with pytest.raises(NotImplementedError): arr.take(indices) def test_take_on_chunked_array(): # ARROW-9504 arr = pa.chunked_array([ [ "a", "b", "c", "d", "e" ], [ "f", "g", "h", "i", "j" ] ]) indices = np.array([0, 5, 1, 6, 9, 2]) result = arr.take(indices) expected = pa.chunked_array([["a", "f", "b", "g", "j", "c"]]) assert result.equals(expected) indices = pa.chunked_array([[1], [9, 2]]) result = arr.take(indices) expected = pa.chunked_array([ [ "b" ], [ "j", "c" ] ]) assert result.equals(expected) @pytest.mark.parametrize('ordered', [False, True]) def test_take_dictionary(ordered): arr = pa.DictionaryArray.from_arrays([0, 1, 2, 0, 1, 2], ['a', 'b', 'c'], ordered=ordered) result = arr.take(pa.array([0, 1, 3])) result.validate() assert result.to_pylist() == ['a', 'b', 'a'] assert result.dictionary.to_pylist() == ['a', 'b', 'c'] assert result.type.ordered is ordered def test_take_null_type(): # ARROW-10027 arr = pa.array([None] * 10) 
chunked_arr = pa.chunked_array([[None] * 5] * 2) batch = pa.record_batch([arr], names=['a']) table = pa.table({'a': arr}) indices = pa.array([1, 3, 7, None]) assert len(arr.take(indices)) == 4 assert len(chunked_arr.take(indices)) == 4 assert len(batch.take(indices).column(0)) == 4 assert len(table.take(indices).column(0)) == 4 @pytest.mark.parametrize(('ty', 'values'), all_array_types) def test_filter(ty, values): arr = pa.array(values, type=ty) mask = pa.array([True, False, False, True, None]) result = arr.filter(mask, null_selection_behavior='drop') result.validate() assert result.equals(pa.array([values[0], values[3]], type=ty)) result = arr.filter(mask, null_selection_behavior='emit_null') result.validate() assert result.equals(pa.array([values[0], values[3], None], type=ty)) # non-boolean dtype mask = pa.array([0, 1, 0, 1, 0]) with pytest.raises(NotImplementedError): arr.filter(mask) # wrong length mask = pa.array([True, False, True]) with pytest.raises(ValueError, match="must all be the same length"): arr.filter(mask) def test_filter_chunked_array(): arr = pa.chunked_array([["a", None], ["c", "d", "e"]]) expected_drop = pa.chunked_array([["a"], ["e"]]) expected_null = pa.chunked_array([["a"], [None, "e"]]) for mask in [ # mask is array pa.array([True, False, None, False, True]), # mask is chunked array pa.chunked_array([[True, False, None], [False, True]]), # mask is python object [True, False, None, False, True] ]: result = arr.filter(mask) assert result.equals(expected_drop) result = arr.filter(mask, null_selection_behavior="emit_null") assert result.equals(expected_null) def test_filter_record_batch(): batch = pa.record_batch( [pa.array(["a", None, "c", "d", "e"])], names=["a'"]) # mask is array mask = pa.array([True, False, None, False, True]) result = batch.filter(mask) expected = pa.record_batch([pa.array(["a", "e"])], names=["a'"]) assert result.equals(expected) result = batch.filter(mask, null_selection_behavior="emit_null") expected = pa.record_batch([pa.array(["a", None, "e"])], names=["a'"]) assert result.equals(expected) def test_filter_table(): table = pa.table([pa.array(["a", None, "c", "d", "e"])], names=["a"]) expected_drop = pa.table([pa.array(["a", "e"])], names=["a"]) expected_null = pa.table([pa.array(["a", None, "e"])], names=["a"]) for mask in [ # mask is array pa.array([True, False, None, False, True]), # mask is chunked array pa.chunked_array([[True, False], [None, False, True]]), # mask is python object [True, False, None, False, True] ]: result = table.filter(mask) assert result.equals(expected_drop) result = table.filter(mask, null_selection_behavior="emit_null") assert result.equals(expected_null) def test_filter_errors(): arr = pa.chunked_array([["a", None], ["c", "d", "e"]]) batch = pa.record_batch( [pa.array(["a", None, "c", "d", "e"])], names=["a'"]) table = pa.table([pa.array(["a", None, "c", "d", "e"])], names=["a"]) for obj in [arr, batch, table]: # non-boolean dtype mask = pa.array([0, 1, 0, 1, 0]) with pytest.raises(NotImplementedError): obj.filter(mask) # wrong length mask = pa.array([True, False, True]) with pytest.raises(pa.ArrowInvalid, match="must all be the same length"): obj.filter(mask) def test_filter_null_type(): # ARROW-10027 arr = pa.array([None] * 10) chunked_arr = pa.chunked_array([[None] * 5] * 2) batch = pa.record_batch([arr], names=['a']) table = pa.table({'a': arr}) mask = pa.array([True, False] * 5) assert len(arr.filter(mask)) == 5 assert len(chunked_arr.filter(mask)) == 5 assert len(batch.filter(mask).column(0)) == 5 assert 
len(table.filter(mask).column(0)) == 5 @pytest.mark.parametrize("typ", ["array", "chunked_array"]) def test_compare_array(typ): if typ == "array": def con(values): return pa.array(values) else: def con(values): return pa.chunked_array([values]) arr1 = con([1, 2, 3, 4, None]) arr2 = con([1, 1, 4, None, 4]) result = pc.equal(arr1, arr2) assert result.equals(con([True, False, False, None, None])) result = pc.not_equal(arr1, arr2) assert result.equals(con([False, True, True, None, None])) result = pc.less(arr1, arr2) assert result.equals(con([False, False, True, None, None])) result = pc.less_equal(arr1, arr2) assert result.equals(con([True, False, True, None, None])) result = pc.greater(arr1, arr2) assert result.equals(con([False, True, False, None, None])) result = pc.greater_equal(arr1, arr2) assert result.equals(con([True, True, False, None, None])) @pytest.mark.parametrize("typ", ["array", "chunked_array"]) def test_compare_scalar(typ): if typ == "array": def con(values): return pa.array(values) else: def con(values): return pa.chunked_array([values]) arr = con([1, 2, 3, None]) # TODO this is a hacky way to construct a scalar .. scalar = pa.array([2]).sum() result = pc.equal(arr, scalar) assert result.equals(con([False, True, False, None])) result = pc.not_equal(arr, scalar) assert result.equals(con([True, False, True, None])) result = pc.less(arr, scalar) assert result.equals(con([True, False, False, None])) result = pc.less_equal(arr, scalar) assert result.equals(con([True, True, False, None])) result = pc.greater(arr, scalar) assert result.equals(con([False, False, True, None])) result = pc.greater_equal(arr, scalar) assert result.equals(con([False, True, True, None])) def test_compare_chunked_array_mixed(): arr = pa.array([1, 2, 3, 4, None]) arr_chunked = pa.chunked_array([[1, 2, 3], [4, None]]) arr_chunked2 = pa.chunked_array([[1, 2], [3, 4, None]]) expected = pa.chunked_array([[True, True, True, True, None]]) for left, right in [ (arr, arr_chunked), (arr_chunked, arr), (arr_chunked, arr_chunked2), ]: result = pc.equal(left, right) assert result.equals(expected) def test_arithmetic_add(): left = pa.array([1, 2, 3, 4, 5]) right = pa.array([0, -1, 1, 2, 3]) result = pc.add(left, right) expected = pa.array([1, 1, 4, 6, 8]) assert result.equals(expected) def test_arithmetic_subtract(): left = pa.array([1, 2, 3, 4, 5]) right = pa.array([0, -1, 1, 2, 3]) result = pc.subtract(left, right) expected = pa.array([1, 3, 2, 2, 2]) assert result.equals(expected) def test_arithmetic_multiply(): left = pa.array([1, 2, 3, 4, 5]) right = pa.array([0, -1, 1, 2, 3]) result = pc.multiply(left, right) expected = pa.array([0, -2, 3, 8, 15]) assert result.equals(expected) def test_is_null(): arr = pa.array([1, 2, 3, None]) result = arr.is_null() result = arr.is_null() expected = pa.array([False, False, False, True]) assert result.equals(expected) assert result.equals(pc.is_null(arr)) result = arr.is_valid() expected = pa.array([True, True, True, False]) assert result.equals(expected) assert result.equals(pc.is_valid(arr)) arr = pa.chunked_array([[1, 2], [3, None]]) result = arr.is_null() expected = pa.chunked_array([[False, False], [False, True]]) assert result.equals(expected) result = arr.is_valid() expected = pa.chunked_array([[True, True], [True, False]]) assert result.equals(expected) def test_fill_null(): arr = pa.array([1, 2, None, 4], type=pa.int8()) fill_value = pa.array([5], type=pa.int8()) with pytest.raises(pa.ArrowInvalid, match="tried to convert to int"): arr.fill_null(fill_value) arr = 
pa.array([None, None, None, None], type=pa.null()) fill_value = pa.scalar(None, type=pa.null()) result = arr.fill_null(fill_value) expected = pa.array([None, None, None, None]) assert result.equals(expected) arr = pa.array(['a', 'bb', None]) result = arr.fill_null('ccc') expected = pa.array(['a', 'bb', 'ccc']) assert result.equals(expected) arr = pa.array([b'a', b'bb', None], type=pa.large_binary()) result = arr.fill_null('ccc') expected = pa.array([b'a', b'bb', b'ccc'], type=pa.large_binary()) assert result.equals(expected) @pytest.mark.parametrize('arrow_type', numerical_arrow_types) def test_fill_null_array(arrow_type): arr = pa.array([1, 2, None, 4], type=arrow_type) fill_value = pa.scalar(5, type=arrow_type) result = arr.fill_null(fill_value) expected = pa.array([1, 2, 5, 4], type=arrow_type) assert result.equals(expected) # Implicit conversions result = arr.fill_null(5) assert result.equals(expected) # ARROW-9451: Unsigned integers allow this for some reason if not pa.types.is_unsigned_integer(arr.type): with pytest.raises((ValueError, TypeError)): arr.fill_null('5') result = arr.fill_null(pa.scalar(5, type='int8')) assert result.equals(expected) @pytest.mark.parametrize('arrow_type', numerical_arrow_types) def test_fill_null_chunked_array(arrow_type): fill_value = pa.scalar(5, type=arrow_type) arr = pa.chunked_array([pa.array([None, 2, 3, 4], type=arrow_type)]) result = arr.fill_null(fill_value) expected = pa.chunked_array([pa.array([5, 2, 3, 4], type=arrow_type)]) assert result.equals(expected) arr = pa.chunked_array([ pa.array([1, 2], type=arrow_type), pa.array([], type=arrow_type), pa.array([None, 4], type=arrow_type) ]) expected = pa.chunked_array([ pa.array([1, 2], type=arrow_type), pa.array([], type=arrow_type), pa.array([5, 4], type=arrow_type) ]) result = arr.fill_null(fill_value) assert result.equals(expected) # Implicit conversions result = arr.fill_null(5) assert result.equals(expected) result = arr.fill_null(pa.scalar(5, type='int8')) assert result.equals(expected) def test_logical(): a = pa.array([True, False, False, None]) b = pa.array([True, True, False, True]) assert pc.and_(a, b) == pa.array([True, False, False, None]) assert pc.and_kleene(a, b) == pa.array([True, False, False, None]) assert pc.or_(a, b) == pa.array([True, True, False, None]) assert pc.or_kleene(a, b) == pa.array([True, True, False, True]) assert pc.xor(a, b) == pa.array([False, True, False, None]) assert pc.invert(a) == pa.array([False, True, True, None]) def test_cast(): arr = pa.array([2**63 - 1], type='int64') with pytest.raises(pa.ArrowInvalid): pc.cast(arr, 'int32') assert pc.cast(arr, 'int32', safe=False) == pa.array([-1], type='int32') arr = pa.array([datetime(2010, 1, 1), datetime(2015, 1, 1)]) expected = pa.array([1262304000000, 1420070400000], type='timestamp[ms]') assert pc.cast(arr, 'timestamp[ms]') == expected def test_strptime(): arr = pa.array(["5/1/2020", None, "12/13/1900"]) got = pc.strptime(arr, format='%m/%d/%Y', unit='s') expected = pa.array([datetime(2020, 5, 1), None, datetime(1900, 12, 13)], type=pa.timestamp('s')) assert got == expected def test_count(): arr = pa.array([1, 2, 3, None, None]) assert pc.count(arr).as_py() == 3 assert pc.count(arr, count_mode='count_non_null').as_py() == 3 assert pc.count(arr, count_mode='count_null').as_py() == 2 with pytest.raises(ValueError, match="'zzz' is not a valid count_mode"): pc.count(arr, count_mode='zzz') def test_partition_nth(): data = list(range(100, 140)) random.shuffle(data) pivot = 10 indices = pc.partition_nth_indices(data, 
pivot=pivot).to_pylist() assert len(indices) == len(data) assert sorted(indices) == list(range(len(data))) assert all(data[indices[i]] <= data[indices[pivot]] for i in range(pivot)) assert all(data[indices[i]] >= data[indices[pivot]] for i in range(pivot, len(data)))<|fim▁end|>
""") assert pc.add.__doc__ == textwrap.dedent("""\
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>import factory from ...models import Dashboard, Link, ModuleType, Module from ....organisation.tests.factories import NodeFactory, NodeTypeFactory class DashboardFactory(factory.DjangoModelFactory): class Meta: model = Dashboard status = 'published' title = "title" slug = factory.Sequence(lambda n: 'slug%s' % n) class LinkFactory(factory.DjangoModelFactory): class Meta: model = Link url = factory.Sequence(lambda n: 'https://www.gov.uk/link-%s' % n) title = 'Link title' link_type = 'transaction' dashboard = factory.SubFactory(DashboardFactory) class ModuleTypeFactory(factory.DjangoModelFactory): <|fim▁hole|> name = factory.Sequence(lambda n: 'name %s' % n) schema = {} class ModuleFactory(factory.DjangoModelFactory): class Meta: model = Module type = factory.SubFactory(ModuleTypeFactory) dashboard = factory.SubFactory(DashboardFactory) slug = factory.Sequence(lambda n: 'slug{}'.format(n)) title = 'title' info = [] options = {} order = factory.Sequence(lambda n: n) class DepartmentTypeFactory(NodeTypeFactory): name = 'department' class AgencyTypeFactory(NodeTypeFactory): name = 'agency' class ServiceTypeFactory(NodeTypeFactory): name = 'service' class DepartmentFactory(NodeFactory): name = factory.Sequence(lambda n: 'department-%s' % n) typeOf = factory.SubFactory(DepartmentTypeFactory) class AgencyFactory(NodeFactory): name = factory.Sequence(lambda n: 'agency-%s' % n) typeOf = factory.SubFactory(AgencyTypeFactory) class AgencyWithDepartmentFactory(AgencyFactory): parent = factory.SubFactory(DepartmentFactory) class ServiceFactory(NodeFactory): parent = factory.SubFactory(AgencyWithDepartmentFactory) name = factory.Sequence(lambda n: 'service-%s' % n) typeOf = factory.SubFactory(ServiceTypeFactory)<|fim▁end|>
class Meta: model = ModuleType
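Because the factories above chain `SubFactory` relations, a single call builds the whole organisation tree. A hedged usage sketch — it assumes a configured Django test database, and all names refer to the factories defined in this file:

# inside a Django TestCase using the factories defined above
service = ServiceFactory()
# SubFactory chain: service -> agency -> department -> department node type
assert service.parent.parent.typeOf.name == 'department'

dashboard = DashboardFactory(title='Transactions')
link = LinkFactory(dashboard=dashboard)  # link_type defaults to 'transaction'
assert link.dashboard.status == 'published'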
<|file_name|>bolt.go<|end_file_name|><|fim▁begin|>/* Vuls - Vulnerability Scanner Copyright (C) 2016 Future Architect, Inc. Japan. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package cache import ( "encoding/json" "fmt" "time" "github.com/Sirupsen/logrus" "github.com/boltdb/bolt" "github.com/future-architect/vuls/util" ) // Bolt holds a pointer to a bolt.DB. // boltdb is used to store a cache of Changelogs of Ubuntu/Debian type Bolt struct { Path string Log *logrus.Entry db *bolt.DB } // SetupBolt opens a boltdb and creates a meta bucket if not exists. func SetupBolt(path string, l *logrus.Entry) error { l.Infof("Open boltDB: %s", path) db, err := bolt.Open(path, 0600, nil) if err != nil { return err } b := Bolt{ Path: path, Log: l, db: db, } if err = b.createBucketIfNotExists(metabucket); err != nil { return err } DB = b return nil } // Close a db. func (b Bolt) Close() error { if b.db == nil { return nil } return b.db.Close() } // createBucketIfNotExists creates a bucket with the name specified by the argument. func (b *Bolt) createBucketIfNotExists(name string) error { return b.db.Update(func(tx *bolt.Tx) error { _, err := tx.CreateBucketIfNotExists([]byte(name)) if err != nil { return fmt.Errorf("Failed to create bucket: %s", err) } return nil }) } // GetMeta gets the Meta information of the given server name from boltdb. func (b Bolt) GetMeta(serverName string) (meta Meta, found bool, err error) { err = b.db.View(func(tx *bolt.Tx) error { bkt := tx.Bucket([]byte(metabucket)) v := bkt.Get([]byte(serverName)) if len(v) == 0 { found = false return nil } if e := json.Unmarshal(v, &meta); e != nil { return e } found = true return nil }) return } // RefreshMeta puts updated Meta information of the server name into boltdb. func (b Bolt) RefreshMeta(meta Meta) error { meta.CreatedAt = time.Now() jsonBytes, err := json.Marshal(meta) if err != nil { return fmt.Errorf("Failed to marshal to JSON: %s", err) } return b.db.Update(func(tx *bolt.Tx) error { bkt := tx.Bucket([]byte(metabucket)) if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil { return err } b.Log.Debugf("Refreshed Meta: %s", meta.Name) return nil }) } // EnsureBuckets puts the Meta information and creates a bucket that holds changelogs. 
func (b Bolt) EnsureBuckets(meta Meta) error { jsonBytes, err := json.Marshal(meta) if err != nil { return fmt.Errorf("Failed to marshal to JSON: %s", err) } return b.db.Update(func(tx *bolt.Tx) error { b.Log.Debugf("Put to meta: %s", meta.Name) bkt := tx.Bucket([]byte(metabucket)) if err := bkt.Put([]byte(meta.Name), jsonBytes); err != nil { return err } // re-create a bucket (bucket name: servername) bkt = tx.Bucket([]byte(meta.Name)) if bkt != nil { b.Log.Debugf("Delete bucket: %s", meta.Name) if err := tx.DeleteBucket([]byte(meta.Name)); err != nil { return err } b.Log.Debugf("Bucket deleted: %s", meta.Name) } b.Log.Debugf("Create bucket: %s", meta.Name) if _, err := tx.CreateBucket([]byte(meta.Name)); err != nil { return err } b.Log.Debugf("Bucket created: %s", meta.Name) return nil }) } // PrettyPrint is for debugging func (b Bolt) PrettyPrint(meta Meta) error { return b.db.View(func(tx *bolt.Tx) error { bkt := tx.Bucket([]byte(metabucket)) v := bkt.Get([]byte(meta.Name)) b.Log.Debugf("Meta: key:%s, value:%s", meta.Name, v) bkt = tx.Bucket([]byte(meta.Name))<|fim▁hole|> for k, v := c.First(); k != nil; k, v = c.Next() { b.Log.Debugf("key:%s, len: %d, %s...", k, len(v), util.Truncate(string(v), 30)) } return nil }) } // GetChangelog gets the changelog of the specified packName from the bucket func (b Bolt) GetChangelog(servername, packName string) (changelog string, err error) { err = b.db.View(func(tx *bolt.Tx) error { bkt := tx.Bucket([]byte(servername)) if bkt == nil { return fmt.Errorf("Failed to get Bucket: %s", servername) } v := bkt.Get([]byte(packName)) if v == nil { changelog = "" return nil } changelog = string(v) return nil }) return } // PutChangelog puts the changelog of the specified packName into the bucket func (b Bolt) PutChangelog(servername, packName, changelog string) error { return b.db.Update(func(tx *bolt.Tx) error { bkt := tx.Bucket([]byte(servername)) if bkt == nil { return fmt.Errorf("Failed to get Bucket: %s", servername) } if err := bkt.Put([]byte(packName), []byte(changelog)); err != nil { return err } return nil }) }<|fim▁end|>
c := bkt.Cursor()
<|file_name|>WowCrypt.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2014-2022 AscEmu Team <http://www.ascemu.org> This file is released under the MIT license. See README-MIT for more information. */ #include "WowCrypt.hpp" #include <algorithm> #include <openssl/hmac.h> WowCrypt::WowCrypt() { m_isInitialized = false; m_clientWotlkDecryptKey.x = 0; m_clientWotlkDecryptKey.y = 0; m_serverWotlkEncryptKey.x = 0; m_serverWotlkEncryptKey.y = 0; m_sendI = 0; m_sendJ = 0; m_recvI = 0; m_recvJ = 0; } WowCrypt::~WowCrypt() { } bool WowCrypt::isInitialized() { return m_isInitialized; } ////////////////////////////////////////////////////////////////////////////////////////// // WotLK void WowCrypt::initWotlkCrypt(uint8_t* key) { static const uint8_t send[seedLenght] = { 0xC2, 0xB3, 0x72, 0x3C, 0xC6, 0xAE, 0xD9, 0xB5, 0x34, 0x3C, 0x53, 0xEE, 0x2F, 0x43, 0x67, 0xCE }; static const uint8_t recv[seedLenght] = { 0xCC, 0x98, 0xAE, 0x04, 0xE8, 0x97, 0xEA, 0xCA, 0x12, 0xDD, 0xC0, 0x93, 0x42, 0x91, 0x53, 0x57 }; uint8_t encryptHash[SHA_DIGEST_LENGTH]; uint8_t decryptHash[SHA_DIGEST_LENGTH]; uint8_t pass[1024]; uint32_t mdLength; HMAC(EVP_sha1(), send, seedLenght, key, 40, decryptHash, &mdLength); assert(mdLength == SHA_DIGEST_LENGTH); HMAC(EVP_sha1(), recv, seedLenght, key, 40, encryptHash, &mdLength); assert(mdLength == SHA_DIGEST_LENGTH); RC4_set_key(&m_clientWotlkDecryptKey, SHA_DIGEST_LENGTH, decryptHash); RC4_set_key(&m_serverWotlkEncryptKey, SHA_DIGEST_LENGTH, encryptHash); RC4(&m_serverWotlkEncryptKey, 1024, pass, pass); RC4(&m_clientWotlkDecryptKey, 1024, pass, pass); m_isInitialized = true; } void WowCrypt::initMopCrypt(uint8_t* key) { static const uint8_t send[seedLenght] = { 0x40, 0xAA, 0xD3, 0x92, 0x26, 0x71, 0x43, 0x47, 0x3A, 0x31, 0x08, 0xA6, 0xE7, 0xDC, 0x98, 0x2A }; static const uint8_t recv[seedLenght] = { 0x08, 0xF1, 0x95, 0x9F, 0x47, 0xE5, 0xD2, 0xDB, 0xA1, 0x3D, 0x77, 0x8F, 0x3F, 0x3E, 0xE7, 0x00 }; <|fim▁hole|> uint8_t pass[1024]; uint32_t mdLength; HMAC(EVP_sha1(), send, seedLenght, key, 40, decryptHash, &mdLength); assert(mdLength == SHA_DIGEST_LENGTH); HMAC(EVP_sha1(), recv, seedLenght, key, 40, encryptHash, &mdLength); assert(mdLength == SHA_DIGEST_LENGTH); RC4_set_key(&m_clientWotlkDecryptKey, SHA_DIGEST_LENGTH, decryptHash); RC4_set_key(&m_serverWotlkEncryptKey, SHA_DIGEST_LENGTH, encryptHash); RC4(&m_serverWotlkEncryptKey, 1024, pass, pass); RC4(&m_clientWotlkDecryptKey, 1024, pass, pass); m_isInitialized = true; } void WowCrypt::decryptWotlkReceive(uint8_t* data, size_t length) { if (!m_isInitialized) return; RC4(&m_clientWotlkDecryptKey, (unsigned long)length, data, data); } void WowCrypt::encryptWotlkSend(uint8_t* data, size_t length) { if (!m_isInitialized) return; RC4(&m_serverWotlkEncryptKey, (unsigned long)length, data, data); } ////////////////////////////////////////////////////////////////////////////////////////// // Legacy void WowCrypt::initLegacyCrypt() { m_isInitialized = true; } void WowCrypt::decryptLegacyReceive(uint8_t* data, size_t length) { if (!m_isInitialized) return; if (length < cryptedReceiveLength) return; uint8_t x; for (size_t t = 0; t < cryptedReceiveLength; ++t) { m_recvI %= crypKeyVector.size(); x = (data[t] - m_recvJ) ^ crypKeyVector[m_recvI]; ++m_recvI; m_recvJ = data[t]; data[t] = x; } } void WowCrypt::encryptLegacySend(uint8_t* data, size_t length) { if (!m_isInitialized) return; if (length < cryptedSendLength) return; for (size_t t = 0; t < cryptedSendLength; ++t) { m_sendI %= crypKeyVector.size(); data[t] = m_sendJ = (data[t] ^ 
crypKeyVector[m_sendI]) + m_sendJ; ++m_sendI; } } void WowCrypt::setLegacyKey(uint8_t* key, size_t length) { crypKeyVector.resize(length); std::copy(key, key + length, crypKeyVector.begin()); } void WowCrypt::generateTbcKey(uint8_t* key, uint8_t* sessionkey) { uint8_t seedKey[seedLenght] = { 0x38, 0xA7, 0x83, 0x15, 0xF8, 0x92, 0x25, 0x30, 0x71, 0x98, 0x67, 0xB1, 0x8C, 0x4, 0xE2, 0xAA }; uint8_t firstBuffer[64]; uint8_t secondBuffer[64]; memset(firstBuffer, 0x36, 64); memset(secondBuffer, 0x5C, 64); for (uint8_t i = 0; i < seedLenght; ++i) { firstBuffer[i] = (uint8_t)(seedKey[i] ^ firstBuffer[i]); secondBuffer[i] = (uint8_t)(seedKey[i] ^ secondBuffer[i]); } Sha1Hash sha1; sha1.UpdateData(firstBuffer, 64); sha1.UpdateData(sessionkey, 40); sha1.Finalize(); uint8_t* tempDigest = sha1.GetDigest(); Sha1Hash sha2; sha2.UpdateData(secondBuffer, 64); sha2.UpdateData(tempDigest, SHA_DIGEST_LENGTH); sha2.Finalize(); memcpy(key, sha2.GetDigest(), SHA_DIGEST_LENGTH); }<|fim▁end|>
uint8_t encryptHash[SHA_DIGEST_LENGTH]; uint8_t decryptHash[SHA_DIGEST_LENGTH];
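initWotlkCrypt above derives each RC4 key via HMAC-SHA1: the fixed 16-byte seed is the HMAC key and the 40-byte session key is the message (note OpenSSL's HMAC argument order), after which 1024 bytes of keystream are discarded. A cross-language Python sketch of the receive-side key derivation — the zeroed session key is a placeholder, not a real handshake value:

import hashlib
import hmac

SEND_SEED = bytes([0xC2, 0xB3, 0x72, 0x3C, 0xC6, 0xAE, 0xD9, 0xB5,
                   0x34, 0x3C, 0x53, 0xEE, 0x2F, 0x43, 0x67, 0xCE])

def rc4_key_schedule(key):
    # standard RC4 KSA; returns the initial 256-byte permutation
    s = list(range(256))
    j = 0
    for i in range(256):
        j = (j + s[i] + key[i % len(key)]) & 0xFF
        s[i], s[j] = s[j], s[i]
    return s

session_key = bytes(40)  # placeholder; the real 40-byte key comes from the auth handshake
decrypt_key = hmac.new(SEND_SEED, session_key, hashlib.sha1).digest()  # 20 bytes, like decryptHash
state = rc4_key_schedule(decrypt_key)
# The C++ then pumps a throwaway 1024-byte buffer through RC4 ("RC4-drop1024")
# before any packet data is decrypted.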
<|file_name|>noncebased.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Tink-Rust Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////////// //! Provide a reusable streaming AEAD framework. //! //! It tackles the segment handling portions of the nonce based online //! encryption scheme proposed in "Online Authenticated-Encryption and its //! Nonce-Reuse Misuse-Resistance" by Hoang, Reyhanitabar, Rogaway and Vizár //! (<https://eprint.iacr.org/2015/189.pdf>). //! //! In this scheme, the format of a ciphertext is: //! //! header || segment_0 || segment_1 || ... || segment_k. //! //! The format of header is: //! //! header_length || salt || nonce_prefix //! //! header_length is 1 byte which documents the size of the header and can be //! obtained via header_length(). In principle, header_length is redundant //! information, since the length of the header can be determined from the key //! size. //! //! salt is a salt used in the key derivation. //! //! nonce_prefix is a prefix for all per-segment nonces. //! //! segment_i is the i-th segment of the ciphertext. The size of segment_1 .. //! segment_{k-1} is ciphertextSegmentSize. segment_0 is shorter, so that //! segment_0 plus additional data of size firstCiphertextSegmentOffset (e.g. //! the header) aligns with ciphertextSegmentSize. //! //! The first segment size will be: //! //! ciphertext_segment_size - header_length() - first_ciphertext_segment_offset. use std::{convert::TryFrom, io}; use tink_core::{utils::wrap_err, EncryptingWrite, TinkError}; /// `SegmentEncrypter` facilitates implementing various streaming AEAD encryption modes. pub trait SegmentEncrypter { fn encrypt_segment(&self, segment: &[u8], nonce: &[u8]) -> Result<Vec<u8>, TinkError>; } /// `Writer` provides a framework for ingesting plaintext data and /// writing encrypted data to the wrapped [`io::Write`]. The scheme used for /// encrypting segments is specified by providing a `SegmentEncrypter` /// implementation. pub struct Writer { w: Box<dyn io::Write>, segment_encrypter: Box<dyn SegmentEncrypter>, encrypted_segment_cnt: u64, first_ciphertext_segment_offset: usize, nonce_size: usize, nonce_prefix: Vec<u8>, /// Buffer to hold incomplete segments of plaintext, until they are complete and /// ready for encryption. plaintext: Vec<u8>, /// Next free position in `plaintext`. plaintext_pos: usize, /// A final smaller segment can be written by calling `close()`, but after that /// no more data can be written. closed: bool, } /// `WriterParams` contains the options for instantiating a `Writer` via `Writer::new()`. pub struct WriterParams { /// `w` is the underlying writer being wrapped. pub w: Box<dyn io::Write>, /// `segment_encrypter` provides a method for encrypting segments. pub segment_encrypter: Box<dyn SegmentEncrypter>, /// `nonce_size` is the length of generated nonces. It must be at least 5 + /// `nonce_prefix.len()`. 
It can be longer, but longer nonces introduce more /// overhead in the resultant ciphertext. pub nonce_size: usize, /// `nonce_prefix` is a constant that all nonces throughout the ciphertext will /// start with. Its length must be at least 5 bytes shorter than `nonce_size`. pub nonce_prefix: Vec<u8>, /// The size of the segments which the plaintext will be split into. pub plaintext_segment_size: usize, /// `first_ciphertext_segment_offset` indicates where the ciphertext should begin in /// `w`. This allows for the existence of overhead in the stream unrelated to /// this encryption scheme. pub first_ciphertext_segment_offset: usize, } impl Writer { /// Create a new Writer instance. pub fn new(params: WriterParams) -> Result<Writer, TinkError> { if params.nonce_size - params.nonce_prefix.len() < 5 { return Err("nonce size too short".into()); } let ct_size = params.plaintext_segment_size + params.nonce_size; match ct_size.checked_sub(params.first_ciphertext_segment_offset) { None => { return Err( "first ciphertext segment offset bigger than ciphertext segment size".into(), ) } Some(sz) if sz <= params.nonce_size => { return Err("first ciphertext segment not large enough for full nonce".into()) } _ => {} } Ok(Writer { w: params.w, segment_encrypter: params.segment_encrypter, encrypted_segment_cnt: 0, first_ciphertext_segment_offset: params.first_ciphertext_segment_offset, nonce_size: params.nonce_size, nonce_prefix: params.nonce_prefix, plaintext: vec![0; params.plaintext_segment_size], plaintext_pos: 0, closed: false, }) } } impl io::Write for Writer { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { if self.closed { return Err(io::Error::new( io::ErrorKind::InvalidInput, "write on closed writer", )); } let mut pos = 0; // read position in input plaintext (`buf`) loop { // Move a chunk of the input plaintext into the internal buffer. let mut pt_lim = self.plaintext.len(); if self.encrypted_segment_cnt == 0 { pt_lim -= self.first_ciphertext_segment_offset } let n = std::cmp::min(pt_lim - self.plaintext_pos, buf.len() - pos); self.plaintext[self.plaintext_pos..self.plaintext_pos + n] .copy_from_slice(&buf[pos..pos + n]); self.plaintext_pos += n; pos += n; if pos == buf.len() { // All of the input plaintext has been consumed, but some (less than a segment's // worth) may still be pending encryption, held in // `self.plaintext`. It will be emitted on another `write()` (or by // `close()`). break; } // At this point there is a full segment's worth of plaintext in // `self.plaintext[..pt_lim]`, ready to encrypt and write out. if self.plaintext_pos != pt_lim { return Err(io::Error::new( io::ErrorKind::Other, format!( "internal error: pos={} != pt_lim={}", self.plaintext_pos, pt_lim ), )); } let nonce = generate_segment_nonce( self.nonce_size, &self.nonce_prefix, self.encrypted_segment_cnt, /* last= */ false, )?; let ciphertext = self .segment_encrypter .encrypt_segment(&self.plaintext[..pt_lim], &nonce) .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, format!("{:?}", e)))?; self.w.write_all(&ciphertext)?; // Ready to accumulate next segment. self.plaintext_pos = 0; self.encrypted_segment_cnt += 1; } Ok(pos) } /// Flushing an encrypting writer does nothing even when there is buffered plaintext, /// because only complete segments can be written. 
fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

impl EncryptingWrite for Writer {
    fn close(&mut self) -> Result<(), TinkError> {
        if self.closed {
            return Ok(());
        }

        let nonce = generate_segment_nonce(
            self.nonce_size,
            &self.nonce_prefix,
            self.encrypted_segment_cnt,
            /* last= */ true,
        )
        .map_err(|e| wrap_err("internal error", e))?;
        let ciphertext = self
            .segment_encrypter
            .encrypt_segment(&self.plaintext[..self.plaintext_pos], &nonce)?;
        self.w
            .write_all(&ciphertext)
            .map_err(|e| wrap_err("write failure", e))?;

        self.plaintext_pos = 0;
        self.encrypted_segment_cnt += 1;
        self.closed = true;

        Ok(())
    }
}

/// Manual [`Drop`] implementation which ensures the stream is closed.
impl Drop for Writer {
    fn drop(&mut self) {
        let _ = self.close();
    }
}

/// `SegmentDecrypter` facilitates implementing various streaming AEAD encryption modes.
pub trait SegmentDecrypter {
    fn decrypt_segment(&self, segment: &[u8], nonce: &[u8]) -> Result<Vec<u8>, TinkError>;
}

/// `Reader` facilitates the decryption of ciphertexts created using a [`Writer`].
///
/// The scheme used for decrypting segments is specified by providing a
/// [`SegmentDecrypter`] implementation. The implementation must align
/// with the [`SegmentEncrypter`] used in the [`Writer`].
pub struct Reader {
    r: Box<dyn io::Read>,
    segment_decrypter: Box<dyn SegmentDecrypter>,
    decrypted_segment_cnt: u64,
    first_ciphertext_segment_offset: usize,
    nonce_size: usize,
    nonce_prefix: Vec<u8>,
    /// `plaintext` holds data that has already been decrypted, and `plaintext_pos`
    /// indicates the part of it that has not yet been returned from a `read` operation.
    plaintext: Vec<u8>,
    plaintext_pos: usize,
    /// `ciphertext` is a fixed-size buffer that holds encrypted data that has already been read
    /// from `r`.
    ciphertext: Vec<u8>,
    ciphertext_pos: usize,
}

/// `ReaderParams` contains the options for instantiating a [`Reader`] via `Reader::new()`.
pub struct ReaderParams {
    /// `r` is the underlying reader being wrapped.
    pub r: Box<dyn io::Read>,

    /// `segment_decrypter` provides a method for decrypting segments.
    pub segment_decrypter: Box<dyn SegmentDecrypter>,

    /// `nonce_size` is the length of generated nonces. It must match the `nonce_size`
    /// of the [`Writer`] used to create the ciphertext, and must be somewhat larger
    /// than the size of the common `nonce_prefix`.
    pub nonce_size: usize,

    /// `nonce_prefix` is a constant that all nonces throughout the ciphertext start
    /// with. It's extracted from the header of the ciphertext.
    pub nonce_prefix: Vec<u8>,

    /// The size of the ciphertext segments, equal to `nonce_size` plus the
    /// size of the plaintext segment.
    pub ciphertext_segment_size: usize,

    /// `first_ciphertext_segment_offset` indicates where the ciphertext actually begins
    /// in `r`. This allows for the existence of overhead in the stream unrelated to
    /// this encryption scheme.
    pub first_ciphertext_segment_offset: usize,
}

impl Reader {
    /// Create a new `Reader` instance.
pub fn new(params: ReaderParams) -> Result<Reader, TinkError> { if params.nonce_size - params.nonce_prefix.len() < 5 { return Err("nonce size too short".into()); } match params .ciphertext_segment_size .checked_sub(params.first_ciphertext_segment_offset) { None => { return Err( "first ciphertext segment offset bigger than ciphertext segment size".into(), )<|fim▁hole|> Some(sz) if sz <= params.nonce_size => { return Err("first ciphertext segment not large enough for full nonce".into()) } _ => {} } Ok(Reader { r: params.r, segment_decrypter: params.segment_decrypter, decrypted_segment_cnt: 0, first_ciphertext_segment_offset: params.first_ciphertext_segment_offset, nonce_size: params.nonce_size, nonce_prefix: params.nonce_prefix, plaintext: vec![], plaintext_pos: 0, // Allocate an extra byte to detect the last segment. ciphertext: vec![0; params.ciphertext_segment_size + 1], // Offset of data in `ciphertext`. Only ever set to: // - 0 (for first segment), or // - 1 (for all subsequent segments). ciphertext_pos: 0, }) } } /// Extension trait for [`std::io::Read`] to support `read_full()` method. trait ReadFullExt { /// Read the exact number of bytes required to fill `buf`, if possible. /// /// This function reads as many bytes as necessary to completely fill the /// specified buffer `buf`. /// /// If this function encounters an error of the kind /// [`std::io::ErrorKind::Interrupted`] then the error is ignored and the /// operation will continue. /// /// If this function encounters an "end of file" before completely filling /// the buffer, it returns an `Ok(n)` value holding the number of bytes read /// into `buf`. /// /// If any other read error is encountered then this function immediately /// returns. The contents of `buf` are unspecified in this case. /// /// (This is similar to `Read::read_exact` except for partial read behaviour, /// and also behaves like Go's `io::ReadFull`, as used in the upstream Go code.) fn read_full(&mut self, buf: &mut [u8]) -> std::io::Result<usize>; } impl ReadFullExt for dyn std::io::Read { fn read_full(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> { let mut count = 0; while !buf.is_empty() { match self.read(buf) { Ok(0) => break, Ok(n) => { count += n; let tmp = buf; buf = &mut tmp[n..]; } Err(e) if e.kind() == std::io::ErrorKind::Interrupted => {} Err(e) => return Err(e), } } Ok(count) } } impl io::Read for Reader { /// Read decrypts data from underlying reader and passes it to `buf`. fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { if self.plaintext_pos < self.plaintext.len() { // There is already-decrypted plaintext available -- return it first before attempting // any more decryption. let n = std::cmp::min(buf.len(), self.plaintext.len() - self.plaintext_pos); buf[..n].copy_from_slice(&self.plaintext[self.plaintext_pos..(self.plaintext_pos + n)]); self.plaintext_pos += n; return Ok(n); } // No available plaintext. self.plaintext_pos = 0; // Read up to a segment's worth of ciphertext. let mut ct_lim = self.ciphertext.len(); if self.decrypted_segment_cnt == 0 { // The first segment of ciphertext might be offset in the stream. ct_lim -= self.first_ciphertext_segment_offset; } let n = self .r .read_full(&mut self.ciphertext[self.ciphertext_pos..ct_lim])?; if n == 0 { // No ciphertext available, so therefore no plaintext available for now. return Ok(0); } let last_segment; let segment; if n != (ct_lim - self.ciphertext_pos) { // Read less than a full segment, so this should be the last segment. 
last_segment = true; segment = self.ciphertext_pos + n; } else { last_segment = false; if (self.ciphertext_pos + n) < 1 { return Err(io::Error::new( io::ErrorKind::InvalidInput, "ciphertext segment too short", )); } segment = self.ciphertext_pos + n - 1; } // Calculate the expected segment nonce and decrypt a segment. let nonce = generate_segment_nonce( self.nonce_size, &self.nonce_prefix, self.decrypted_segment_cnt, last_segment, )?; self.plaintext = self .segment_decrypter .decrypt_segment(&self.ciphertext[..segment], &nonce) .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, format!("{:?}", e)))?; // Copy 1 byte remainder to the beginning of `self.ciphertext`. if !last_segment { let remainder_offset = segment; self.ciphertext[0] = self.ciphertext[remainder_offset]; self.ciphertext_pos = 1; } self.decrypted_segment_cnt += 1; // A segment's worth of plaintext is now available in `self.plaintext`; // copy from this to the caller's buffer. let n = std::cmp::min(buf.len(), self.plaintext.len()); buf[..n].copy_from_slice(&self.plaintext[..n]); self.plaintext_pos = n; Ok(n) } } /// Return a nonce for a segment. /// /// The format of the nonce is: /// /// nonce_prefix || ctr || last_block. /// /// nonce_prefix is a constant prefix used throughout the whole ciphertext. /// /// The ctr is a 32 bit counter. /// /// last_block is 1 byte which is set to 1 for the last segment and 0 /// otherwise. fn generate_segment_nonce( size: usize, prefix: &[u8], segment_num: u64, last: bool, ) -> io::Result<Vec<u8>> { let segment_num = match u32::try_from(segment_num) { Ok(v) => v, Err(_) => { return Err(io::Error::new( io::ErrorKind::InvalidInput, "too many segments", )) } }; let mut nonce = vec![0; size]; nonce[..prefix.len()].copy_from_slice(prefix); let mut offset = prefix.len(); nonce[offset..offset + 4].copy_from_slice(&segment_num.to_be_bytes()[..]); offset += 4; if last { nonce[offset] = 1; } Ok(nonce) }<|fim▁end|>
}
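The per-segment nonce layout used by `generate_segment_nonce` in the Rust module above is: prefix || 32-bit big-endian counter || 1-byte last-segment flag, zero-padded out to the nonce size. A minimal Python sketch of that layout follows; the function name and the assertion messages are illustrative only, not part of the tink-rust API.

import struct

def segment_nonce(prefix: bytes, segment_num: int, last: bool, size: int) -> bytes:
    # Layout: prefix || 4-byte big-endian counter || 1-byte last flag, zero-padded to `size`.
    assert size >= len(prefix) + 5, "nonce size too short"
    assert segment_num < 2**32, "too many segments"
    nonce = prefix + struct.pack(">I", segment_num) + (b"\x01" if last else b"\x00")
    return nonce.ljust(size, b"\x00")

# The first plaintext segment is shorter, so that header-plus-segment_0 aligns
# with the ciphertext segment size:
#   first_pt_len = plaintext_segment_size - first_ciphertext_segment_offset
print(segment_nonce(b"\xaa\xbb", 1, False, 12).hex())  # aabb00000001000000000000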
<|file_name|>directive_bind_html_with_js.js<|end_file_name|><|fim▁begin|>(function () { "use strict"; angular.module('BuDirectives', []).<|fim▁hole|> /** * It removes script tags from html and inserts it into DOM. * * Testing: * html += '<script>alert(1234)</script><script type="text/javascript">alert(12345)</script><script type="asdf">alert(1234)</script><script src="/js/alert.js">alert(1234)</script><span style="color: red;">1234</span>'; * or * html += '<script src="/js/alert.js"></script><script type="text/javascript">console.log(window.qwerqwerqewr1234)</script><span style="color: red;">1234</span>'; * * @param html {String} * @returns {String} */ function handleScripts(html) { // html must start with tag - it's angularjs' jqLite bug/feature html = '<i></i>' + html; var originElements = angular.element(html), elements = angular.element('<div></div>'); if (originElements.length) { // start from 1 for removing first tag we just added for (var i = 1, l = originElements.length; i < l; i ++) { var $el = originElements.eq(i), el = $el[0]; if (el.nodeName == 'SCRIPT' && ((! el.type) || el.type == 'text/javascript')) { evalScript($el[0]); } else { elements.append($el); } } } // elements = elements.contents(); html = elements.html(); return html; } /** * It's taken from AngularJS' jsonpReq function. * It's not ie < 9 compatible. * @param {DOMElement} element */ function evalScript(element) { var script = document.createElement('script'), body = document.body, doneWrapper = function() { script.onload = script.onerror = null; body.removeChild(script); }; script.type = 'text/javascript'; if (element.src) { script.src = element.src; script.async = element.async; script.onload = script.onerror = function () { doneWrapper(); }; } else { // doesn't work on ie... try { script.appendChild(document.createTextNode(element.innerText)); } // IE has funky script nodes catch (e) { script.text = element.innerText; } setTimeout(function () {doneWrapper()}, 10); } body.appendChild(script); } return function ($scope, element, attr) { element.addClass('ng-binding').data('$binding', attr.bindHtmlWithJs); var parsed = $parse(attr.bindHtmlWithJs); function getStringValue() { return (parsed($scope) || '').toString(); } $scope.$watch(getStringValue, function bindHtmlWithJsWatchAction(value) { var html = value ? $sce.getTrustedHtml(parsed($scope)) : ''; if (html) { html = handleScripts(html); } element.html(html || ''); }); }; }]). /* This filter is for demo only */ filter('trustAsHtml', ['$sce', function ($sce) { return function trustAsHtml(value) { return $sce.trustAsHtml(value); } }]); }());<|fim▁end|>
directive('bindHtmlWithJs', ['$sce', '$parse', function ($sce, $parse) {
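The directive above works by splitting `<script>` nodes out of the bound HTML and evaluating them separately from the markup that is inserted into the DOM. A rough Python sketch of that split step follows; it is regex-based, so only a loose approximation of real HTML parsing, and the helper name is made up for illustration.

import re

SCRIPT_RE = re.compile(r"<script\b[^>]*>.*?</script>", re.I | re.S)

def split_scripts(html: str):
    # Separate <script> blocks from the rest of the markup.
    scripts = SCRIPT_RE.findall(html)
    remainder = SCRIPT_RE.sub("", html)
    return remainder, scripts

body, scripts = split_scripts('<script>alert(1)</script><span style="color: red;">1234</span>')
# body == '<span style="color: red;">1234</span>'; scripts holds the script blocks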
<|file_name|>apsim75.py<|end_file_name|><|fim▁begin|>import glob import os import shutil import sys import tarfile import traceback from model import Model from subprocess import Popen, PIPE class Apsim75(Model): def run(self, latidx, lonidx): try: apsim_bin = self.config.get('executable') # The apsim 'executable' is a gzipped tarball that needs to be extracted into the current working directory tar = tarfile.open(apsim_bin) tar.extractall() tar.close() model_dir = 'Model' for xml_file in glob.glob('*.xml'): if os.path.basename(xml_file) == 'Apsim.xml': continue old_xml = '%s/%s' % (model_dir, os.path.basename(xml_file)) if os.path.isfile(old_xml): os.remove(old_xml) if os.path.islink(xml_file): link = os.readlink(xml_file) shutil.copy(link, model_dir) else: shutil.copy(xml_file, model_dir) # Create sim files p = Popen('source paths.sh ; mono Model/ApsimToSim.exe Generic.apsim', shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE) stdout, stderr = p.communicate() stdout_file = open('RESULT.OUT', 'w') stdout_file.write(stdout) if p.returncode != 0: rc = p.returncode # Run apsim for each sim file for sim in glob.glob('*.sim'): p = Popen('source paths.sh ; Model/ApsimModel.exe %s' % sim, shell=True, executable='/bin/bash', stdout=PIPE, stderr=PIPE) stdout, stderr = p.communicate() stdout_file.write(stdout) if p.returncode != 0: rc = p.returncode stdout_file.close() return True<|fim▁hole|> return False<|fim▁end|>
except: print "[%s]: %s" % (os.path.basename(__file__), traceback.format_exc())
<|file_name|>numeric.ts<|end_file_name|><|fim▁begin|>import { isEmpty } from './utils'; const ar = /^[٠١٢٣٤٥٦٧٨٩]+$/; const en = /^[0-9]+$/; const numericValidator = (value: unknown) => { if (isEmpty(value)) { return true; } const testValue = (val: unknown) => {<|fim▁hole|> return en.test(strValue) || ar.test(strValue); }; if (Array.isArray(value)) { return value.every(testValue); } return testValue(value); }; export default numericValidator;<|fim▁end|>
const strValue = String(val);
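For reference, the behaviour of the validator above — empty values pass, arrays are checked element-wise, and both ASCII and Arabic-Indic digit strings count as numeric — can be mirrored in a few lines of Python. This is an approximation of the original `isEmpty` semantics, for illustration only.

import re

AR = re.compile(r"^[٠١٢٣٤٥٦٧٨٩]+$")  # Arabic-Indic digits U+0660..U+0669
EN = re.compile(r"^[0-9]+$")

def is_numeric(value) -> bool:
    if value is None or value == "":  # empty values pass, mirroring isEmpty()
        return True
    vals = value if isinstance(value, list) else [value]
    return all(EN.match(str(v)) or AR.match(str(v)) for v in vals)

print(is_numeric(["12", "٣٤"]))  # True: each element matches one digit set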
<|file_name|>write_records_to_file.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- mode: python; coding: utf-8 -*- # Copyright 2018 the HERA Collaboration # Licensed under the 2-clause BSD license. """script to write M&C records to a CSV file """ from astropy.time import Time, TimeDelta from hera_mc import mc, cm_utils valid_tables = { "hera_obs": {"method": "get_obs_by_time"}, "rtp_server_status": { "method": "get_rtp_server_status", "filter_column": "hostname", "arg_name": "hostname", }, "lib_server_status": { "method": "get_librarian_server_status", "filter_column": "hostname", "arg_name": "hostname", }, "subsystem_error": {"method": "get_subsystem_error"}, "daemon_status": { "method": "get_daemon_status", "filter_column": "name", "arg_name": "daemon_name", }, "lib_status": {"method": "get_lib_status"}, "lib_raid_status": {"method": "get_lib_raid_status"}, "lib_raid_errors": { "method": "get_lib_raid_error", "filter_column": "hostname", "arg_name": "hostname", }, "lib_remote_status": { "method": "get_lib_remote_status", "filter_column": "remote_name", "arg_name": "remote_name", }, "lib_files": { "method": "get_lib_files", "filter_column": "obsid", "arg_name": "obsid", }, "rtp_status": {"method": "get_rtp_status"}, "rtp_process_event": { "method": "get_rtp_process_event", "filter_column": "obsid", "arg_name": "obsid", }, "rtp_process_record": { "method": "get_rtp_process_record", "filter_column": "obsid", "arg_name": "obsid", }, "rtp_task_resource_record": { "method": "get_rtp_task_resource_record", "filter_column": "obsid", "arg_name": "obsid", }, "weather_data": { "method": "get_weather_data", "filter_column": "variable", "arg_name": "variable", }, "node_sensor": { "method": "get_node_sensor_readings", "filter_column": "nodeID", "arg_name": "node", }, "node_power_status": { "method": "get_node_power_status", "filter_column": "nodeID", "arg_name": "node",<|fim▁hole|> "method": "get_node_power_command", "filter_column": "nodeID", "arg_name": "node", }, "correlator_control_state": { "method": "get_correlator_control_state", "filter_column": "state_type", "arg_name": "state_type", }, "correlator_config_status": { "method": "get_correlator_config_status", "filter_column": "config_hash", "arg_name": "config_hash", }, "correlator_control_command": { "method": "get_correlator_control_command", "filter_column": "command", "arg_name": "command", }, "correlator_take_data_arguments": {"method": "get_correlator_take_data_arguments"}, "correlator_config_command": { "method": "get_correlator_config_command", "filter_column": "config_hash", "arg_name": "config_hash", }, "correlator_software_version": { "method": "get_correlator_software_versions", "filter_column": "package", "arg_name": "package", }, "snap_config_version": {"method": "get_snap_config_version"}, "snap_status": { "method": "get_snap_status", "filter_column": "nodeID", "arg_name": "node", }, "antenna_status": { "method": "get_antenna_status", "filter_column": "antenna_number", "arg_name": "antenna_number", }, } # get commands without write_to_file options: # get_correlator_config_file, get_ant_metric, get_array_metric, get_metric_desc if __name__ == "__main__": parser = mc.get_mc_argument_parser() parser.description = """Write M&C records to a CSV file""" parser.add_argument("table", help="table to get info from") list_of_filter_args = [] for table, table_dict in valid_tables.items(): if "arg_name" in table_dict: arg_name = table_dict["arg_name"] if arg_name not in list_of_filter_args: list_of_filter_args.append(arg_name) 
parser.add_argument( "--" + arg_name, help="only include the specified " + arg_name, default=None, ) parser.add_argument("--filename", help="filename to save data to") parser.add_argument( "--start-date", dest="start_date", help="Start date YYYY/MM/DD", default=None ) parser.add_argument( "--start-time", dest="start_time", help="Start time in HH:MM", default="17:00" ) parser.add_argument( "--stop-date", dest="stop_date", help="Stop date YYYY/MM/DD", default=None ) parser.add_argument( "--stop-time", dest="stop_time", help="Stop time in HH:MM", default="7:00" ) parser.add_argument( "-l", "--last-period", dest="last_period", default=None, help="Time period from present for data (in minutes). " "If present ignores start/stop.", ) args = parser.parse_args() if args.last_period: stop_time = Time.now() start_time = stop_time - TimeDelta( float(args.last_period) / (60.0 * 24.0), format="jd" ) else: start_time = cm_utils.get_astropytime(args.start_date, args.start_time) stop_time = cm_utils.get_astropytime(args.stop_date, args.stop_time) db = mc.connect_to_mc_db(args) session = db.sessionmaker() relevant_arg_name = valid_tables[args.table]["arg_name"] for arg in list_of_filter_args: if getattr(args, arg) is not None and arg != relevant_arg_name: print( "{arg} is specified but does not apply to table {table}, " "so it will be ignored.".format(arg=arg, table=args.table) ) method_kwargs = { "starttime": start_time, "stoptime": stop_time, valid_tables[args.table]["filter_column"]: getattr( args, valid_tables[args.table]["arg_name"] ), "write_to_file": True, "filename": args.filename, } getattr(session, valid_tables[args.table]["method"])(**method_kwargs)<|fim▁end|>
}, "node_power_command": {
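The script above never hard-codes a query call: it looks the method name and filter column up in `valid_tables` and dispatches with `getattr`. A stripped-down sketch of that pattern, with a toy session object standing in for the real `hera_mc` session (all names here are illustrative):

class Session:
    def get_node_sensor_readings(self, nodeID=None, write_to_file=False, filename=None):
        print("querying node sensors for node", nodeID)

info = {"method": "get_node_sensor_readings",
        "filter_column": "nodeID", "arg_name": "node"}
args = {"node": 7}  # stands in for the parsed CLI arguments

# Build kwargs from the table entry, then resolve and call the method by name.
kwargs = {info["filter_column"]: args[info["arg_name"]], "write_to_file": True}
getattr(Session(), info["method"])(**kwargs)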
<|file_name|>TriplesAdder.java<|end_file_name|><|fim▁begin|>package it.polito.nexa.pc; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.Statement;<|fim▁hole|>import java.util.List; /** * Created by giuseppe on 19/05/15. */ public interface TriplesAdder { public Model addTriples(Model model, List<Statement> statementList); }<|fim▁end|>
<|file_name|>PropertySheet.cpp<|end_file_name|><|fim▁begin|>//========= Copyright © 1996-2005, Valve Corporation, All rights reserved. ============// // // Purpose: // // $NoKeywords: $ //=============================================================================// #include <vgui/IBorder.h> #include <vgui/IInputInternal.h> #include <vgui/IPanel.h> #include <vgui/IScheme.h> #include <vgui/IVGui.h> #include <vgui/KeyCode.h> #include <KeyValues.h> #include <vgui/MouseCode.h> #include <vgui/ISurface.h> #include <vgui_controls/Button.h> #include <vgui_controls/Controls.h> #include <vgui_controls/Label.h> #include <vgui_controls/PropertySheet.h> #include <vgui_controls/ComboBox.h> #include <vgui_controls/Panel.h> #include <vgui_controls/ToolWindow.h> #include <vgui_controls/TextImage.h> #include <vgui_controls/ImagePanel.h> #include <vgui_controls/PropertyPage.h> #include "vgui_controls/AnimationController.h" #include "strtools_local.h" // TODO: temp // memdbgon must be the last include file in a .cpp file!!! #include <tier0/memdbgon.h> using namespace vgui2; namespace vgui2 { class ContextLabel : public Label { DECLARE_CLASS_SIMPLE(ContextLabel, Label); public: ContextLabel(Button *parent, char const *panelName, char const *text) : BaseClass((Panel *)parent, panelName, text), m_pTabButton(parent) { SetBlockDragChaining(true); } virtual void OnMousePressed(MouseCode code) { if(m_pTabButton) { m_pTabButton->FireActionSignal(); } } virtual void OnMouseReleased(MouseCode code) { BaseClass::OnMouseReleased(code); if(GetParent()) { GetParent()->OnCommand("ShowContextMenu"); } } virtual void ApplySchemeSettings(IScheme *pScheme) { BaseClass::ApplySchemeSettings(pScheme); HFont marlett = pScheme->GetFont("Marlett"); SetFont(marlett); SetTextInset(0, 0); SetContentAlignment(Label::a_northwest); if(GetParent()) { SetFgColor(pScheme->GetColor("Button.TextColor", GetParent()->GetFgColor())); SetBgColor(GetParent()->GetBgColor()); } } private: Button *m_pTabButton; }; //----------------------------------------------------------------------------- // Purpose: Helper for drag drop // Input : msglist - // Output : static PropertySheet //----------------------------------------------------------------------------- static PropertySheet *IsDroppingSheet(CUtlVector<KeyValues *> &msglist) { if(msglist.Count() == 0) return NULL; KeyValues *data = msglist[0]; PropertySheet *sheet = reinterpret_cast<PropertySheet *>(data->GetPtr("propertysheet")); if(sheet) return sheet; return NULL; } //----------------------------------------------------------------------------- // Purpose: A single tab //----------------------------------------------------------------------------- class PageTab : public Button { DECLARE_CLASS_SIMPLE(PageTab, Button); private: bool _active; SDK_Color _textColor; SDK_Color _dimTextColor; int m_bMaxTabWidth; IBorder *m_pActiveBorder; IBorder *m_pNormalBorder; PropertySheet *m_pParent; Panel *m_pPage; ImagePanel *m_pImage; char *m_pszImageName; bool m_bShowContextLabel; ContextLabel *m_pContextLabel; public: PageTab(PropertySheet *parent, const char *panelName, const char *text, char const *imageName, int maxTabWidth, Panel *page, bool showContextButton) : Button((Panel *)parent, panelName, text), m_pParent(parent), m_pPage(page), m_pImage(0), m_pszImageName(0), m_bShowContextLabel(showContextButton) { SetCommand(new KeyValues("TabPressed")); _active = false; m_bMaxTabWidth = maxTabWidth; SetDropEnabled(true); SetDragEnabled(m_pParent->IsDraggableTab()); if(imageName) { m_pImage = new 
ImagePanel(this, text); int buflen = Q_strlen(imageName) + 1; m_pszImageName = new char[buflen]; Q_strncpy(m_pszImageName, imageName, buflen); } SetMouseClickEnabled(MOUSE_RIGHT, true); m_pContextLabel = m_bShowContextLabel ? new ContextLabel(this, "Context", "9") : NULL; REGISTER_COLOR_AS_OVERRIDABLE(_textColor, "selectedcolor"); REGISTER_COLOR_AS_OVERRIDABLE(_dimTextColor, "unselectedcolor"); } ~PageTab() { delete[] m_pszImageName; } virtual void Paint() { BaseClass::Paint(); } virtual bool IsDroppable(CUtlVector<KeyValues *> &msglist) { // It's never droppable, but should activate FireActionSignal(); SetSelected(true); Repaint(); if(!GetParent()) return false; PropertySheet *sheet = IsDroppingSheet(msglist); if(sheet) { return GetParent()->IsDroppable(msglist); } // Defer to active page... Panel *active = m_pParent->GetActivePage(); if(!active || !active->IsDroppable(msglist)) return false; return active->IsDroppable(msglist); } virtual void OnDroppablePanelPaint(CUtlVector<KeyValues *> &msglist, CUtlVector<Panel *> &dragPanels) { PropertySheet *sheet = IsDroppingSheet(msglist); if(sheet) { Panel *target = GetParent()->GetDropTarget(msglist); if(target) { // Fixme, mouse pos could be wrong... target->OnDroppablePanelPaint(msglist, dragPanels); return; } } // Just highlight the tab if dropping onto active page via the tab BaseClass::OnDroppablePanelPaint(msglist, dragPanels); } virtual void OnPanelDropped(CUtlVector<KeyValues *> &msglist) { PropertySheet *sheet = IsDroppingSheet(msglist); if(sheet) { Panel *target = GetParent()->GetDropTarget(msglist); if(target) { // Fixme, mouse pos could be wrong... target->OnPanelDropped(msglist); } } // Defer to active page... Panel *active = m_pParent->GetActivePage(); if(!active || !active->IsDroppable(msglist)) return; active->OnPanelDropped(msglist); } virtual void OnDragFailed(CUtlVector<KeyValues *> &msglist) { PropertySheet *sheet = IsDroppingSheet(msglist); if(!sheet) return; // Create a new property sheet if(m_pParent->IsDraggableTab()) { if(msglist.Count() == 1) { KeyValues *data = msglist[0]; int screenx = data->GetInt("screenx"); int screeny = data->GetInt("screeny"); // m_pParent->ScreenToLocal( screenx, screeny ); if(!m_pParent->IsWithin(screenx, screeny)) { Panel *page = reinterpret_cast<Panel *>(data->GetPtr("propertypage")); PropertySheet *sheet = reinterpret_cast<PropertySheet *>(data->GetPtr("propertysheet")); char const *title = data->GetString("tabname", ""); if(!page || !sheet) return; // Can only create if sheet was part of a ToolWindow derived object ToolWindow *tw = dynamic_cast<ToolWindow *>(sheet->GetParent()); if(tw) { IToolWindowFactory *factory = tw->GetToolWindowFactory(); if(factory) { bool hasContextMenu = sheet->PageHasContextMenu(page); sheet->RemovePage(page); factory->InstanceToolWindow(tw->GetParent(), sheet->ShouldShowContextButtons(), page, title, hasContextMenu); if(sheet->GetNumPages() == 0) { tw->MarkForDeletion(); } } } } } } } virtual void OnCreateDragData(KeyValues *msg) { Assert(m_pParent->IsDraggableTab()); msg->SetPtr("propertypage", m_pPage); msg->SetPtr("propertysheet", m_pParent); char sz[256]; GetText(sz, sizeof(sz)); msg->SetString("tabname", sz); msg->SetString("text", sz); } virtual void ApplySchemeSettings(IScheme *pScheme) { // set up the scheme settings Button::ApplySchemeSettings(pScheme); _textColor = GetSchemeColor("PropertySheet.SelectedTextColor", GetFgColor(), pScheme); _dimTextColor = GetSchemeColor("PropertySheet.TextColor", GetFgColor(), pScheme); m_pActiveBorder = 
pScheme->GetBorder("TabActiveBorder"); m_pNormalBorder = pScheme->GetBorder("TabBorder"); if(m_pImage) { ClearImages(); m_pImage->SetImage(scheme()->GetImage(m_pszImageName, false)); AddImage(m_pImage->GetImage(), 2); int w, h; m_pImage->GetSize(w, h); w += m_pContextLabel ? 10 : 0; if(m_pContextLabel) { m_pImage->SetPos(10, 0); } SetSize(w + 4, h + 2); } else { int wide, tall; int contentWide, contentTall; GetSize(wide, tall); GetContentSize(contentWide, contentTall); wide = std::max(m_bMaxTabWidth, contentWide + 10); // 10 = 5 pixels margin on each side wide += m_pContextLabel ? 10 : 0; SetSize(wide, tall); } if(m_pContextLabel) { SetTextInset(12, 0); } } virtual void OnCommand(char const *cmd) { if(!Q_stricmp(cmd, "ShowContextMenu")) { KeyValues *kv = new KeyValues("OpenContextMenu"); kv->SetPtr("page", m_pPage); kv->SetPtr("contextlabel", m_pContextLabel); PostActionSignal(kv); return; } BaseClass::OnCommand(cmd); } IBorder *GetBorder(bool depressed, bool armed, bool selected, bool keyfocus) { if(_active) { return m_pActiveBorder; } return m_pNormalBorder; } virtual SDK_Color GetButtonFgColor() { if(_active) { return _textColor; } else { return _dimTextColor; } } virtual void SetActive(bool state) { _active = state; InvalidateLayout(); Repaint(); } virtual bool CanBeDefaultButton(void) { return false; } //Fire action signal when mouse is pressed down instead of on release. virtual void OnMousePressed(MouseCode code) { // check for context menu open if(!IsEnabled()) return; if(!IsMouseClickEnabled(code)) return; if(IsUseCaptureMouseEnabled()) { { RequestFocus(); FireActionSignal(); SetSelected(true); Repaint(); } // lock mouse input to going to this button input()->SetMouseCapture(GetVPanel()); } } virtual void OnMouseReleased(MouseCode code) { // ensure mouse capture gets released if(IsUseCaptureMouseEnabled()) { input()->SetMouseCapture(NULL_HANDLE); } // make sure the button gets unselected SetSelected(false); Repaint(); if(code == MOUSE_RIGHT) { KeyValues *kv = new KeyValues("OpenContextMenu"); kv->SetPtr("page", m_pPage); kv->SetPtr("contextlabel", m_pContextLabel); PostActionSignal(kv); } } virtual void PerformLayout() { BaseClass::PerformLayout(); if(m_pContextLabel) { int w, h; GetSize(w, h); m_pContextLabel->SetBounds(0, 0, 10, h); } } }; }; // namespace vgui2 //----------------------------------------------------------------------------- // Purpose: Constructor //----------------------------------------------------------------------------- PropertySheet::PropertySheet( Panel *parent, const char *panelName, bool draggableTabs /*= false*/) : BaseClass(parent, panelName) { _activePage = NULL; _activeTab = NULL; _tabWidth = 64; _activeTabIndex = 0; _showTabs = true; _combo = NULL; _tabFocus = false; m_flPageTransitionEffectTime = 0.0f; m_bSmallTabs = false; m_tabFont = 0; m_bDraggableTabs = draggableTabs; if(m_bDraggableTabs) { SetDropEnabled(true); } m_bKBNavigationEnabled = true; } //----------------------------------------------------------------------------- // Purpose: Constructor, associates pages with a combo box //----------------------------------------------------------------------------- PropertySheet::PropertySheet(Panel *parent, const char *panelName, ComboBox *combo) : BaseClass(parent, panelName) { _activePage = NULL; _activeTab = NULL; _tabWidth = 64; _activeTabIndex = 0; _combo = combo; _combo->AddActionSignalTarget(this); _showTabs = false; _tabFocus = false; m_flPageTransitionEffectTime = 0.0f; m_bSmallTabs = false; m_tabFont = 0; m_bDraggableTabs = false; } 
//----------------------------------------------------------------------------- // Purpose: Destructor //----------------------------------------------------------------------------- PropertySheet::~PropertySheet() { } //----------------------------------------------------------------------------- // Purpose: ToolWindow uses this to drag tools from container to container by dragging the tab // Input : - // Output : Returns true on success, false on failure. //----------------------------------------------------------------------------- bool PropertySheet::IsDraggableTab() const { return m_bDraggableTabs; } void PropertySheet::SetDraggableTabs(bool state) { m_bDraggableTabs = state; } //----------------------------------------------------------------------------- // Purpose: Lower profile tabs // Input : state - //----------------------------------------------------------------------------- void PropertySheet::SetSmallTabs(bool state) { m_bSmallTabs = state; m_tabFont = scheme()->GetIScheme(GetScheme())->GetFont(m_bSmallTabs ? "DefaultVerySmall" : "Default"); int c = m_PageTabs.Count(); for(int i = 0; i < c; ++i) { PageTab *tab = m_PageTabs[i]; Assert(tab); tab->SetFont(m_tabFont); } } //----------------------------------------------------------------------------- // Purpose: // Input : - // Output : Returns true on success, false on failure. //----------------------------------------------------------------------------- bool PropertySheet::IsSmallTabs() const { return m_bSmallTabs; } //----------------------------------------------------------------------------- // Purpose: // Input : state - //----------------------------------------------------------------------------- void PropertySheet::ShowContextButtons(bool state) { m_bContextButton = state; } //----------------------------------------------------------------------------- // Purpose: // Input : - // Output : Returns true on success, false on failure. 
//----------------------------------------------------------------------------- bool PropertySheet::ShouldShowContextButtons() const { return m_bContextButton; } int PropertySheet::FindPage(Panel *page) const { int c = m_Pages.Count(); for(int i = 0; i < c; ++i) { if(m_Pages[i].page == page) return i; } return m_Pages.InvalidIndex(); } //----------------------------------------------------------------------------- // Purpose: adds a page to the sheet //----------------------------------------------------------------------------- void PropertySheet::AddPage(Panel *page, const char *title, char const *imageName /*= NULL*/, bool bHasContextMenu /*= false*/) { if(!page) return; // don't add the page if we already have it if(FindPage(page) != m_Pages.InvalidIndex()) return; PageTab *tab = new PageTab(this, "tab", title, imageName, _tabWidth, page, m_bContextButton && bHasContextMenu); if(m_bDraggableTabs) { tab->SetDragEnabled(true); } tab->SetFont(m_tabFont); if(_showTabs) { tab->AddActionSignalTarget(this); } else if(_combo) { _combo->AddItem(title, NULL); } m_PageTabs.AddToTail(tab); Page_t info; info.page = page; info.contextMenu = m_bContextButton && bHasContextMenu; m_Pages.AddToTail(info); page->SetParent(this); page->AddActionSignalTarget(this); PostMessage(page, new KeyValues("ResetData")); page->SetVisible(false); InvalidateLayout(); if(!_activePage) { // first page becomes the active page ChangeActiveTab(0); if(_activePage) { _activePage->RequestFocus(0); } } } //----------------------------------------------------------------------------- // Purpose: //----------------------------------------------------------------------------- void PropertySheet::SetActivePage(Panel *page) { // walk the list looking for this page int index = FindPage(page); if(!m_Pages.IsValidIndex(index)) return; ChangeActiveTab(index); } //----------------------------------------------------------------------------- // Purpose: //----------------------------------------------------------------------------- void PropertySheet::SetTabWidth(int pixels) { _tabWidth = pixels; InvalidateLayout(); } //----------------------------------------------------------------------------- // Purpose: reloads the data in all the property page //----------------------------------------------------------------------------- void PropertySheet::ResetAllData() { // iterate all the dialogs resetting them for(int i = 0; i < m_Pages.Count(); i++) { ipanel()->SendMessage(m_Pages[i].page->GetVPanel(), new KeyValues("ResetData"), GetVPanel()); } } //----------------------------------------------------------------------------- // Purpose: Applies any changes made by the dialog //----------------------------------------------------------------------------- void PropertySheet::ApplyChanges() { // iterate all the dialogs resetting them for(int i = 0; i < m_Pages.Count(); i++) { ipanel()->SendMessage(m_Pages[i].page->GetVPanel(), new KeyValues("ApplyChanges"), GetVPanel()); } } //----------------------------------------------------------------------------- // Purpose: gets a pointer to the currently active page //----------------------------------------------------------------------------- Panel *PropertySheet::GetActivePage() { return _activePage; } //----------------------------------------------------------------------------- // Purpose: gets a pointer to the currently active tab //----------------------------------------------------------------------------- Panel *PropertySheet::GetActiveTab() { return _activeTab; } 
//-----------------------------------------------------------------------------
// Purpose: returns the number of panels in the sheet
//-----------------------------------------------------------------------------
int PropertySheet::GetNumPages()
{
	return m_Pages.Count();
}

//-----------------------------------------------------------------------------
// Purpose: returns the name contained in the active tab
// Input  : a text buffer to contain the output
//-----------------------------------------------------------------------------
void PropertySheet::GetActiveTabTitle(char *textOut, int bufferLen)
{
	if(_activeTab)
		_activeTab->GetText(textOut, bufferLen);
}

//-----------------------------------------------------------------------------
// Purpose: returns the name contained in the tab at the given index
// Input  : a text buffer to contain the output
//-----------------------------------------------------------------------------
bool PropertySheet::GetTabTitle(int i, char *textOut, int bufferLen)
{
	if(i < 0 || i >= m_PageTabs.Count())
	{
		return false;
	}

	m_PageTabs[i]->GetText(textOut, bufferLen);
	return true;
}

//-----------------------------------------------------------------------------
// Purpose: Returns the index of the currently active page
//-----------------------------------------------------------------------------
int PropertySheet::GetActivePageNum()
{
	for(int i = 0; i < m_Pages.Count(); i++)
	{
		if(m_Pages[i].page == _activePage)
		{
			return i;
		}
	}
	return -1;
}

//-----------------------------------------------------------------------------
// Purpose: Forwards focus requests to current active page
//-----------------------------------------------------------------------------
void PropertySheet::RequestFocus(int direction)
{
	if(direction == -1 || direction == 0)
	{
		if(_activePage)
		{
			_activePage->RequestFocus(direction);
			_tabFocus = false;
		}
	}
	else
	{
		if(_showTabs && _activeTab)
		{
			_activeTab->RequestFocus(direction);
			_tabFocus = true;
		}
		else if(_activePage)
		{
			_activePage->RequestFocus(direction);
			_tabFocus = false;
		}
	}
}

//-----------------------------------------------------------------------------
// Purpose: moves focus back
//-----------------------------------------------------------------------------
bool PropertySheet::RequestFocusPrev(VPANEL panel)
{
	if(_tabFocus || !_showTabs || !_activeTab)
	{
		_tabFocus = false;
		return BaseClass::RequestFocusPrev(panel);
	}
	else
	{
		if(GetVParent())
		{
			PostMessage(GetVParent(), new KeyValues("FindDefaultButton"));
		}
		_activeTab->RequestFocus(-1);
		_tabFocus = true;
		return true;
	}
}

//-----------------------------------------------------------------------------
// Purpose: moves focus forward
//-----------------------------------------------------------------------------
bool PropertySheet::RequestFocusNext(VPANEL panel)
{
	if(!_tabFocus || !_activePage)
	{
		return BaseClass::RequestFocusNext(panel);
	}
	else
	{
		if(!_activeTab)
		{
			return BaseClass::RequestFocusNext(panel);
		}
		else
		{
			_activePage->RequestFocus(1);
			_tabFocus = false;
			return true;
		}
	}
}

//-----------------------------------------------------------------------------
// Purpose: Gets scheme settings
//-----------------------------------------------------------------------------
void PropertySheet::ApplySchemeSettings(IScheme *pScheme)
{
	BaseClass::ApplySchemeSettings(pScheme);

	// a little backwards-compatibility with old scheme files
	IBorder *pBorder = pScheme->GetBorder("PropertySheetBorder");
	if(pBorder == pScheme->GetBorder("Default"))
	{
		// get the old name
		pBorder = pScheme->GetBorder("RaisedBorder");
	}
SetBorder(pBorder);
	m_flPageTransitionEffectTime = atof(pScheme->GetResourceString("PropertySheet.TransitionEffectTime"));

	m_tabFont = pScheme->GetFont(m_bSmallTabs ? "DefaultVerySmall" : "Default");
}

//-----------------------------------------------------------------------------
// Purpose: Paint our border specially, with the tabs in mind
//-----------------------------------------------------------------------------
void PropertySheet::PaintBorder()
{
	IBorder *border = GetBorder();
	if(!border)
		return;

	// draw the border, but with a break at the active tab
	int px = 0, py = 0, pwide = 0, ptall = 0;
	if(_activeTab)
	{
		_activeTab->GetBounds(px, py, pwide, ptall);
		ptall -= 1;
	}

	// draw the border underneath the buttons, with a break
	int wide, tall;
	GetSize(wide, tall);
	border->Paint(0, py + ptall, wide, tall, IBorder::SIDE_TOP, px + 1, px + pwide - 1);
}

//-----------------------------------------------------------------------------
// Purpose: Lays out the dialog
//-----------------------------------------------------------------------------
void PropertySheet::PerformLayout()
{
	BaseClass::PerformLayout();

	int x, y, wide, tall;
	GetBounds(x, y, wide, tall);
	if(_activePage)
	{
		int tabHeight = IsSmallTabs() ? 14 : 28;
		if(_showTabs)
		{
			_activePage->SetBounds(0, tabHeight, wide, tall - tabHeight);
		}
		else
		{
			_activePage->SetBounds(0, 0, wide, tall);
		}
		_activePage->InvalidateLayout();
	}

	int xtab;
	int limit = m_PageTabs.Count();
	xtab = 0;

	// draw the visible tabs
	if(_showTabs)
	{
		for(int i = 0; i < limit; i++)
		{
			int tabHeight = IsSmallTabs() ? 13 : 27;

			int width, tall;
			m_PageTabs[i]->GetSize(width, tall);
			if(m_PageTabs[i] == _activeTab)
			{
				// active tab is taller
				_activeTab->SetBounds(xtab, 2, width, tabHeight);
			}
			else
			{
				m_PageTabs[i]->SetBounds(xtab, 4, width, tabHeight - 2);
			}
			m_PageTabs[i]->SetVisible(true);
			xtab += (width + 1);
		}
	}
	else
	{
		for(int i = 0; i < limit; i++)
		{
			m_PageTabs[i]->SetVisible(false);
		}
	}

	// ensure draw order (page drawing over all the tabs except one)
	if(_activePage)
	{
		_activePage->MoveToFront();
		_activePage->Repaint();
	}
	if(_activeTab)
	{
		_activeTab->MoveToFront();
		_activeTab->Repaint();
	}
}

//-----------------------------------------------------------------------------
// Purpose: Switches the active panel
//-----------------------------------------------------------------------------
void PropertySheet::OnTabPressed(Panel *panel)
{
	// look for the tab in the list
	for(int i = 0; i < m_PageTabs.Count(); i++)
	{
		if(m_PageTabs[i] == panel)
		{
			// flip to the new tab
			ChangeActiveTab(i);
			return;
		}
	}
}

//-----------------------------------------------------------------------------
// Purpose: returns the panel associated with index i
// Input  : the index of the panel to return
//-----------------------------------------------------------------------------
Panel *PropertySheet::GetPage(int i)
{
	if(i < 0 || i >= m_Pages.Count())
	{
		return NULL;
	}

	return m_Pages[i].page;
}

//-----------------------------------------------------------------------------
// Purpose: disables page by name
//-----------------------------------------------------------------------------
void PropertySheet::DisablePage(const char *title)
{
	SetPageEnabled(title, false);
}

//-----------------------------------------------------------------------------
// Purpose: enables page by name
//-----------------------------------------------------------------------------
void PropertySheet::EnablePage(const char *title)
{
	SetPageEnabled(title, true);
}
//-----------------------------------------------------------------------------
// Purpose: enables or disables page by name
//-----------------------------------------------------------------------------
void PropertySheet::SetPageEnabled(const char *title, bool state)
{
	for(int i = 0; i < m_PageTabs.Count(); i++)
	{
		if(_showTabs)
		{
			char tmp[50];
			m_PageTabs[i]->GetText(tmp, 50);
			if(!strnicmp(title, tmp, strlen(tmp)))
			{
				m_PageTabs[i]->SetEnabled(state);
			}
		}
		else
		{
			_combo->SetItemEnabled(title, state);
		}
	}
}

void PropertySheet::RemoveAllPages()
{
	int c = m_Pages.Count();
	for(int i = c - 1; i >= 0; --i)
	{
		RemovePage(m_Pages[i].page);
	}
}

//-----------------------------------------------------------------------------
// Purpose: removes the page associated with panel from the sheet
// Input  : *panel - the panel of the page to remove
//-----------------------------------------------------------------------------
void PropertySheet::RemovePage(Panel *panel)
{
	int location = FindPage(panel);
	if(location == m_Pages.InvalidIndex())
		return;

	// Since it's being deleted, don't animate!!!
	m_hPreviouslyActivePage = NULL;
	_activeTab = NULL;

	// ASSUMPTION = that the number of pages equals number of tabs
	if(_showTabs)
	{
		m_PageTabs[location]->RemoveActionSignalTarget(this);
	}

	// now remove the tab
	PageTab *tab = m_PageTabs[location];
	m_PageTabs.Remove(location);
	tab->MarkForDeletion();

	// Remove from page list
	m_Pages.Remove(location);

	// Unparent
	panel->SetParent((Panel *)NULL);

	if(_activePage == panel)
	{
		_activePage = NULL;
		// if this page is currently active, backup to the page before this.
		ChangeActiveTab(std::max(location - 1, 0));
	}

	PerformLayout();
}

//-----------------------------------------------------------------------------
// Purpose: deletes the page associated with panel
// Input  : *panel - the panel of the page to remove
//-----------------------------------------------------------------------------
void PropertySheet::DeletePage(Panel *panel)
{
	Assert(panel);
	RemovePage(panel);
	panel->MarkForDeletion();
}

//-----------------------------------------------------------------------------
// Purpose: flips to the new tab, sending out all the right notifications
//			flipping to a tab activates the tab.
//-----------------------------------------------------------------------------
void PropertySheet::ChangeActiveTab(int index)
{
	if(!m_Pages.IsValidIndex(index))
	{
		_activeTab = NULL;
		if(m_Pages.Count() > 0)
		{
			_activePage = NULL;
			ChangeActiveTab(0);
		}
		return;
	}

	if(m_Pages[index].page == _activePage)
	{
		if(_activeTab)
		{
			_activeTab->RequestFocus();
		}
		_tabFocus = true;
		return;
	}

	int c = m_Pages.Count();
	for(int i = 0; i < c; ++i)
	{
		m_Pages[i].page->SetVisible(false);
	}

	m_hPreviouslyActivePage = _activePage;
	// notify old page
	if(_activePage)
	{
		ivgui()->PostMessage(_activePage->GetVPanel(), new KeyValues("PageHide"), GetVPanel());
		KeyValues *msg = new KeyValues("PageTabActivated");
		msg->SetPtr("panel", (Panel *)NULL);
		ivgui()->PostMessage(_activePage->GetVPanel(), msg, GetVPanel());
	}
	if(_activeTab)
	{
		//_activeTabIndex=index;
		_activeTab->SetActive(false);

		// does the old tab have the focus?
_tabFocus = _activeTab->HasFocus();
	}
	else
	{
		_tabFocus = false;
	}

	// flip page
	_activePage = m_Pages[index].page;
	_activeTab = m_PageTabs[index];
	_activeTabIndex = index;

	_activePage->SetVisible(true);
	_activePage->MoveToFront();

	_activeTab->SetVisible(true);
	_activeTab->MoveToFront();
	_activeTab->SetActive(true);

	if(_tabFocus)
	{
		// if a tab already has focus, give the new tab the focus
		_activeTab->RequestFocus();
	}
	else
	{
		// otherwise, give the focus to the page
		_activePage->RequestFocus();
	}

	if(!_showTabs)
	{
		_combo->ActivateItemByRow(index);
	}

	_activePage->MakeReadyForUse();

	// transition effect
	if(m_flPageTransitionEffectTime)
	{
		if(m_hPreviouslyActivePage.Get())
		{
			// fade out the previous page
			GetAnimationController()->RunAnimationCommand(m_hPreviouslyActivePage, "Alpha", 0.0f, 0.0f, m_flPageTransitionEffectTime / 2, AnimationController::INTERPOLATOR_LINEAR);
		}

		// fade in the new page
		_activePage->SetAlpha(0);
		GetAnimationController()->RunAnimationCommand(_activePage, "Alpha", 255.0f, m_flPageTransitionEffectTime / 2, m_flPageTransitionEffectTime / 2, AnimationController::INTERPOLATOR_LINEAR);
	}
	else
	{
		if(m_hPreviouslyActivePage.Get())
		{
			// no transition, just hide the previous page
			m_hPreviouslyActivePage->SetVisible(false);
		}
		_activePage->SetAlpha(255);
	}

	// notify
	ivgui()->PostMessage(_activePage->GetVPanel(), new KeyValues("PageShow"), GetVPanel());

	KeyValues *msg = new KeyValues("PageTabActivated");
	msg->SetPtr("panel", (Panel *)_activeTab);
	ivgui()->PostMessage(_activePage->GetVPanel(), msg, GetVPanel());

	// tell parent
	PostActionSignal(new KeyValues("PageChanged"));

	// Repaint
	InvalidateLayout();
	Repaint();
}

//-----------------------------------------------------------------------------
// Purpose: Gets the panel with the specified hotkey, from the current page
//-----------------------------------------------------------------------------
Panel *PropertySheet::HasHotkey(wchar_t key)
{
	if(!_activePage)
		return NULL;

	for(int i = 0; i < _activePage->GetChildCount(); i++)
	{
		Panel *hot = _activePage->GetChild(i)->HasHotkey(key);
		if(hot)
		{
			return hot;
		}
	}

	return NULL;
}

//-----------------------------------------------------------------------------
// Purpose: catches the opencontextmenu event
//-----------------------------------------------------------------------------
void PropertySheet::OnOpenContextMenu(KeyValues *params)
{
	// tell parent
	KeyValues *kv = params->MakeCopy();
	PostActionSignal(kv);
	Panel *page = reinterpret_cast<Panel *>(params->GetPtr("page"));
	if(page)
	{
		PostMessage(page->GetVPanel(), params->MakeCopy());
	}
}

//-----------------------------------------------------------------------------
// Purpose: Handle key presses, through tabs.
//-----------------------------------------------------------------------------
void PropertySheet::OnKeyCodeTyped(KeyCode code)
{
	bool shift = (input()->IsKeyDown(KEY_LSHIFT) || input()->IsKeyDown(KEY_RSHIFT));
	bool ctrl = (input()->IsKeyDown(KEY_LCONTROL) || input()->IsKeyDown(KEY_RCONTROL));
	bool alt = (input()->IsKeyDown(KEY_LALT) || input()->IsKeyDown(KEY_RALT));

	if(ctrl && shift && alt && code == KEY_B)
	{
		// enable build mode
		EditablePanel *ep = dynamic_cast<EditablePanel *>(GetActivePage());
		if(ep)
		{
			ep->ActivateBuildMode();
			return;
		}
	}

	if(IsKBNavigationEnabled())
	{
		switch(code)
		{
		// left and right arrows cycle through the tabs
case KEY_RIGHT:
		{
			ChangeActiveTab(_activeTabIndex + 1);
			break;
		}
		case KEY_LEFT:
		{
			ChangeActiveTab(_activeTabIndex - 1);
			break;
		}
		default:
			BaseClass::OnKeyCodeTyped(code);
			break;
		}
	}
	else
	{
		BaseClass::OnKeyCodeTyped(code);
	}
}

//-----------------------------------------------------------------------------
// Purpose: Called by the associated combo box (if in that mode), changes the current panel
//-----------------------------------------------------------------------------
void PropertySheet::OnTextChanged(Panel *panel, const wchar_t *wszText)
{
	if(panel == _combo)
	{
		wchar_t tabText[30];
		for(int i = 0; i < m_PageTabs.Count(); i++)
		{
			tabText[0] = 0;
			m_PageTabs[i]->GetText(tabText, 30);
			if(!wcsicmp(wszText, tabText))
			{
				ChangeActiveTab(i);
			}
		}
	}
}

//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
void PropertySheet::OnCommand(const char *command)
{
	// propagate the close command to our parent
	if(!stricmp(command, "Close") && GetVParent())
	{
		CallParentFunction(new KeyValues("Command", "command", command));
	}
}

//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
void PropertySheet::OnApplyButtonEnable()
{
	// tell parent
	PostActionSignal(new KeyValues("ApplyButtonEnable"));
}

//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
void PropertySheet::OnCurrentDefaultButtonSet(Panel *defaultButton)
{
	// forward the message up
	if(GetVParent())
	{
		KeyValues *msg = new KeyValues("CurrentDefaultButtonSet");
		msg->SetPtr("button", defaultButton);
		PostMessage(GetVParent(), msg);
	}
}

//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
void PropertySheet::OnDefaultButtonSet(Panel *defaultButton)
{
	// forward the message up
	if(GetVParent())
	{
		KeyValues *msg = new KeyValues("DefaultButtonSet");
		msg->SetPtr("button", defaultButton);
		PostMessage(GetVParent(), msg);
	}
}

//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
void PropertySheet::OnFindDefaultButton()
{
	if(GetVParent())
	{
		PostMessage(GetVParent(), new KeyValues("FindDefaultButton"));
	}
}

bool PropertySheet::PageHasContextMenu(Panel *page) const
{
	int pageNum = FindPage(page);
	if(pageNum == m_Pages.InvalidIndex())
		return false;

	return m_Pages[pageNum].contextMenu;
}

void PropertySheet::OnPanelDropped(CUtlVector<KeyValues *> &msglist)
{
	if(msglist.Count() != 1)
	{
		return;
	}

	PropertySheet *sheet = IsDroppingSheet(msglist);
	if(!sheet)
	{
		// Defer to active page
		if(_activePage && _activePage->IsDropEnabled())
		{
			return _activePage->OnPanelDropped(msglist);
		}
		return;
	}

	KeyValues *data = msglist[0];

	Panel *page = reinterpret_cast<Panel *>(data->GetPtr("propertypage"));
	char const *title = data->GetString("tabname", "");
	if(!page || !sheet)
		return;

	// Can only create if sheet was part of a ToolWindow derived object
	ToolWindow *tw = dynamic_cast<ToolWindow *>(sheet->GetParent());
	if(tw)<|fim▁hole|>
tw->MarkForDeletion(); } AddPage(page, title, NULL, showContext); } } } bool PropertySheet::IsDroppable(CUtlVector<KeyValues *> &msglist) { if(!m_bDraggableTabs) return false; if(msglist.Count() != 1) { return false; } int mx, my; input()->GetCursorPos(mx, my); ScreenToLocal(mx, my); int tabHeight = IsSmallTabs() ? 14 : 28; if(my > tabHeight) return false; PropertySheet *sheet = IsDroppingSheet(msglist); if(!sheet) { return false; } if(sheet == this) return false; return true; } // Mouse is now over a droppable panel void PropertySheet::OnDroppablePanelPaint(CUtlVector<KeyValues *> &msglist, CUtlVector<Panel *> &dragPanels) { // Convert this panel's bounds to screen space int x, y, w, h; GetSize(w, h); int tabHeight = IsSmallTabs() ? 14 : 28; h = tabHeight + 4; x = y = 0; LocalToScreen(x, y); surface()->DrawSetColor(GetDropFrameColor()); // Draw 2 pixel frame surface()->DrawOutlinedRect(x, y, x + w, y + h); surface()->DrawOutlinedRect(x + 1, y + 1, x + w - 1, y + h - 1); if(!IsDroppable(msglist)) { return; } if(!_showTabs) { return; } // Draw a fake new tab... x = 0; y = 2; w = 1; h = tabHeight; int last = m_PageTabs.Count(); if(last != 0) { m_PageTabs[last - 1]->GetBounds(x, y, w, h); } // Compute left edge of "fake" tab x += (w + 1); // Compute size of new panel KeyValues *data = msglist[0]; char const *text = data->GetString("tabname", ""); Assert(text); PageTab *fakeTab = new PageTab(this, "FakeTab", text, NULL, _tabWidth, NULL, false); fakeTab->SetBounds(x, 4, w, tabHeight - 4); fakeTab->SetFont(m_tabFont); SETUP_PANEL(fakeTab); fakeTab->Repaint(); surface()->SolveTraverse(fakeTab->GetVPanel(), true); surface()->PaintTraverse(fakeTab->GetVPanel()); delete fakeTab; } //----------------------------------------------------------------------------- // Purpose: // Input : state - //----------------------------------------------------------------------------- void PropertySheet::SetKBNavigationEnabled(bool state) { m_bKBNavigationEnabled = state; } //----------------------------------------------------------------------------- // Purpose: // Input : - // Output : Returns true on success, false on failure. //----------------------------------------------------------------------------- bool PropertySheet::IsKBNavigationEnabled() const { return m_bKBNavigationEnabled; }<|fim▁end|>
{
<|file_name|>LongAssert_usingDefaultComparator_Test.java<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2020 the original author or authors. */ package org.assertj.core.api.long_; import static org.assertj.core.api.Assertions.assertThat;<|fim▁hole|>import org.assertj.core.api.LongAssert; import org.assertj.core.api.LongAssertBaseTest; import org.assertj.core.internal.Longs; import org.assertj.core.internal.Objects; import org.junit.jupiter.api.DisplayName; /** * Tests for <code>{@link LongAssert#usingDefaultComparator()}</code>. * * @author Joel Costigliola */ @DisplayName("LongAssert usingDefaultComparator") class LongAssert_usingDefaultComparator_Test extends LongAssertBaseTest { @Override protected LongAssert invoke_api_method() { return assertions.usingComparator(alwaysEqual()).usingDefaultComparator(); } @Override protected void verify_internal_effects() { assertThat(getObjects(assertions)).isSameAs(Objects.instance()); assertThat(getLongs(assertions)).isSameAs(Longs.instance()); } }<|fim▁end|>
import static org.assertj.core.test.AlwaysEqualComparator.alwaysEqual;
<|file_name|>outputs.py<|end_file_name|><|fim▁begin|>"""
The outputs.py module represents all forms of output from the Automater
program, including all variations of output files. Any addition to the
Automater that brings any other output requirement should be programmed
in this module.

Class(es):
SiteDetailOutput -- Wrapper class around all functions that print output
from Automater, to include standard output and file system output.

Function(s):
No global exportable functions are defined.

Exception(s):
No exceptions exported.
"""

import csv
import socket
import re
from datetime import datetime
from operator import attrgetter

class SiteDetailOutput(object):
    """
    SiteDetailOutput provides the capability to output information
    to the screen, a text file, a comma-separated value file, or
    a file formatted with html markup (readable by web browsers).

    Public Method(s):
    createOutputInfo

    Instance variable(s):
    _listofsites - list storing the site results.
    """

    def __init__(self,sitelist):
        """
        Class constructor. Stores the incoming list of sites in the _listofsites list.

        Argument(s):
        sitelist -- list containing site result information to be printed.

        Return value(s):
        Nothing is returned from this Method.
        """
        self._listofsites = []
        self._listofsites = sitelist

    @property
    def ListOfSites(self):
        """
        Checks instance variable _listofsites for content.
        Returns _listofsites if it has content or None if it does not.

        Argument(s):
        No arguments are required.

        Return value(s):
        _listofsites -- list containing list of site results if variable contains data.
        None -- if _listofsites is empty or not assigned.

        Restriction(s):
        This Method is tagged as a Property.
        """
        if self._listofsites is None or len(self._listofsites) == 0:
            return None
        return self._listofsites

    def createOutputInfo(self,parser):
        """
        Checks parser information and calls the correct print methods
        based on parser requirements. Returns nothing.

        Argument(s):
        parser -- Parser object storing program input parameters used
        when program was run.

        Return value(s):
        Nothing is returned from this Method.

        Restriction(s):
        The Method has no restrictions.
        """
        self.PrintToScreen(parser.hasBotOut())
        if parser.hasCEFOutFile():
            self.PrintToCEFFile(parser.CEFOutFile)
        if parser.hasTextOutFile():
            self.PrintToTextFile(parser.TextOutFile)
        if parser.hasHTMLOutFile():
            self.PrintToHTMLFile(parser.HTMLOutFile)
        if parser.hasCSVOutSet():
            self.PrintToCSVFile(parser.CSVOutFile)

    def PrintToScreen(self, printinbotformat):
        """
        Calls the correct function to ensure site information is printed
        to the user's standard output correctly. Returns nothing.

        Argument(s):
        printinbotformat -- True or False argument representing minimized output.
        True if minimized requested.

        Return value(s):
        Nothing is returned from this Method.

        Restriction(s):
        The Method has no restrictions.
        """
        if printinbotformat:
            self.PrintToScreenBot()
        else:
            self.PrintToScreenNormal()

    def PrintToScreenBot(self):
        """
        Formats site information minimized and prints it to the user's
        standard output. Returns nothing.

        Argument(s):
        No arguments are required.<|fim▁hole|>
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" if sites is not None: for site in sites: if not isinstance(site._regex,basestring): # this is a multisite for index in range(len(site.RegEx)): # the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: print "\n**_ Results found for: " + site.Target + " _**" target = site.Target # Check for them ALL to be None or 0 length sourceurlhasnoreturn = True for answer in siteimpprop: if answer is not None: if len(answer) > 0: sourceurlhasnoreturn = False if sourceurlhasnoreturn: print '[+] ' + site.SourceURL + ' No results found' break else: if siteimpprop is None or len(siteimpprop) == 0: print "No results in the " + site.FriendlyName[index] + " category" else: if siteimpprop[index] is None or len(siteimpprop[index]) == 0: print site.ReportStringForResult[index] + ' No results found' else: laststring = "" # if it's just a string we don't want it output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: print "" + site.ReportStringForResult[index] + " " + str(siteimpprop).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) # must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: print "" + site.ReportStringForResult[index] + " " + str(siteresult).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult[index] + " " + str(siteresult) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: print "\n**_ Results found for: " + site.Target + " _**" target = site.Target if siteimpprop is None or len(siteimpprop)==0: print '[+] ' + site.FriendlyName + ' No results found' else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: print "" + site.ReportStringForResult + " " + str(siteimpprop).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: print "" + site.ReportStringForResult + " " + str(siteresult).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult + " " + str(siteresult) else: pass def PrintToScreenNormal(self): """ Formats site information correctly and prints it to the user's standard output. Returns nothing. Argument(s): No arguments are required. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" if sites is not None: for site in sites: if not isinstance(site._regex, basestring): # this is a multisite for index in range(len(site.RegEx)): # the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop) == 0: print "No results in the " + site.FriendlyName[index] + " category" else: if siteimpprop[index] is None or len(siteimpprop[index]) == 0: print site.ReportStringForResult[index] + ' No results found' else: laststring = "" # if it's just a string we don't want it output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: print "" + site.ReportStringForResult[index] + " " + str(siteimpprop).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) # must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: print "" + site.ReportStringForResult[index] + " " + str(siteresult).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult[index] + " " + str(siteresult) else: # this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop) == 0: print "No results found in the " + site.FriendlyName else: laststring = "" # if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: print "" + site.ReportStringForResult + " " + str(siteimpprop).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) # must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: print "" + site.ReportStringForResult + " " + str(siteresult).replace('www.', 'www[.]').replace('http', 'hxxp') laststring = "" + site.ReportStringForResult + " " + str(siteresult) else: pass def PrintToCEFFile(self,cefoutfile): """ Formats site information correctly and prints it to an output file in CEF format. CEF format specification from http://mita-tac.wikispaces.com/file/view/CEF+White+Paper+071709.pdf "Jan 18 11:07:53 host message" where message: "CEF:Version|Device Vendor|Device Product|Device Version|Signature ID|Name|Severity|Extension" Returns nothing. Argument(s): cefoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) curr_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') hostname = socket.gethostname() prefix = ' '.join([curr_date,hostname]) cef_version = "CEF:Version1.1" cef_deviceVendor = "TekDefense" cef_deviceProduct = "Automater" cef_deviceVersion = "2.1" cef_SignatureID = "0" cef_Severity = "2" cef_Extension = " " cef_fields = [cef_version,cef_deviceVendor,cef_deviceProduct,cef_deviceVersion, \ cef_SignatureID, cef_Severity, cef_Extension] pattern = "^\[\+\]\s+" target = "" print '\n[+] Generating CEF output: ' + cefoutfile f = open(cefoutfile, "wb") csv.register_dialect('escaped', delimiter='|', escapechar='\\', doublequote=False, quoting=csv.QUOTE_NONE) cefRW = csv.writer(f, 'escaped') # cefRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): # this is a multisite: for index in range(len(site.RegEx)): # the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" # if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult[index])+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res # must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: cefRW.writerow([prefix] + cef_fields[:5] + ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+str(res)])+"] " + re.sub(pattern, "", site.ReportStringForResult[index]) + str(siteresult)] + [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + str(res) else: # this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" # if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult)+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res else: 
                            laststring = ""
                            for siteresult in siteimpprop:
                                tgt = site.Target
                                typ = site.TargetType
                                source = site.FriendlyName
                                res = siteresult
                                if "" + tgt + typ + source + str(res) != laststring:
                                    cefRW.writerow([prefix] + cef_fields[:5] + \
                                                   ["[" + ",".join(["tgt=" + tgt, "typ=" + typ, "src=" + source, "res=" + str(res)]) + "] " + \
                                                    re.sub(pattern, "", site.ReportStringForResult) + str(siteresult)] + \
                                                   [cef_Severity] + [tgt])
                                    laststring = "" + tgt + typ + source + str(res)

        f.flush()
        f.close()
        print "" + cefoutfile + " Generated"

    def PrintToTextFile(self, textoutfile):
        """
        Formats site information correctly and prints it to an output file
        in text format. Returns nothing.

        Argument(s):
        textoutfile -- A string representation of a file that will store the output.

        Return value(s):
        Nothing is returned from this Method.

        Restriction(s):
        The Method has no restrictions.
        """
        sites = sorted(self.ListOfSites, key=attrgetter('Target'))
        target = ""
        print "\n[+] Generating text output: " + textoutfile
        f = open(textoutfile, "w")
        if sites is not None:
            for site in sites:
                if not isinstance(site._regex, basestring):  # this is a multisite
                    for index in range(len(site.RegEx)):  # the regexs will ensure we have the exact number of lookups
                        siteimpprop = site.getImportantProperty(index)
                        if target != site.Target:
                            f.write("\n____________________ Results found for: " + site.Target + " ____________________")
                            target = site.Target
                        if siteimpprop is None or len(siteimpprop) == 0:
                            f.write("\nNo results in the " + site.FriendlyName[index] + " category")
                        else:
                            if siteimpprop[index] is None or len(siteimpprop[index]) == 0:
                                f.write('\n' + site.ReportStringForResult[index] + ' No results found')
                            else:
                                laststring = ""
                                # if it's just a string we don't want it to output like a list
                                if isinstance(siteimpprop[index], basestring):
                                    if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring:
                                        f.write("\n" + site.ReportStringForResult[index] + " " + str(siteimpprop))
                                        laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop)
                                # must be a list since it failed the isinstance check on string
                                else:
                                    laststring = ""
                                    for siteresult in siteimpprop[index]:
                                        if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring:
                                            f.write("\n" + site.ReportStringForResult[index] + " " + str(siteresult))
                                            laststring = "" + site.ReportStringForResult[index] + " " + str(siteresult)
                else:  # this is a singlesite
                    siteimpprop = site.getImportantProperty(0)
                    if target != site.Target:
                        f.write("\n____________________ Results found for: " + site.Target + " ____________________")
                        target = site.Target
                    if siteimpprop is None or len(siteimpprop) == 0:
                        f.write("\nNo results found in the " + site.FriendlyName)
                    else:
                        laststring = ""
                        # if it's just a string we don't want it output like a list
                        if isinstance(siteimpprop, basestring):
                            if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring:
                                f.write("\n" + site.ReportStringForResult + " " + str(siteimpprop))
                                laststring = "" + site.ReportStringForResult + " " + str(siteimpprop)
                        else:
                            laststring = ""
                            for siteresult in siteimpprop:
                                if "" + site.ReportStringForResult + " " + str(siteresult) != laststring:
                                    f.write("\n" + site.ReportStringForResult + " " + str(siteresult))
                                    laststring = "" + site.ReportStringForResult + " " + str(siteresult)
        f.flush()
        f.close()
        print "" + textoutfile + " Generated"

    def PrintToCSVFile(self, csvoutfile):
        """
        Formats site information correctly and prints it to an output file
        with comma-separators. Returns nothing.
Argument(s): csvoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" print '\n[+] Generating CSV output: ' + csvoutfile f = open(csvoutfile, "wb") csvRW = csv.writer(f, quoting=csv.QUOTE_ALL) csvRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res else: laststring = "" for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) f.flush() f.close() print "" + csvoutfile + " Generated" def PrintToHTMLFile(self, htmloutfile): """ Formats site information correctly and prints it to an output file using HTML markup. Returns nothing. Argument(s): htmloutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" print '\n[+] Generating HTML output: ' + htmloutfile f = open(htmloutfile, "w") f.write(self.getHTMLOpening()) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: # if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: # this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: # if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) f.write(self.getHTMLClosing()) f.flush() f.close() print "" + htmloutfile + " Generated" @classmethod def PrintStandardOutput(cls, strout, *args, **kwargs): if 'verbose' in kwargs.keys(): if kwargs['verbose'] is True: print strout else: return else: print strout def getHTMLOpening(self): """ Creates HTML markup to provide correct formatting for initial HTML file requirements. Returns string that contains opening HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. 
""" return '''<style type="text/css"> #table-3 { border: 1px solid #DFDFDF; background-color: #F9F9F9; width: 100%; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; font-family: Arial,"Bitstream Vera Sans",Helvetica,Verdana,sans-serif; color: #333; } #table-3 td, #table-3 th { border-top-color: white; border-bottom: 1px solid #DFDFDF; color: #555; } #table-3 th { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 14px; } #table-3 td { font-size: 12px; padding: 4px 7px 2px; vertical-align: top; }res h1 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: Center; line-height: 1.3em; font-size: 40px; } h2 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 16px; } h4 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 10px; } </style> <html> <body> <title> Automater Results </title> <h1> Automater Results </h1> <table id="table-3"> <tr> <th>Target</th> <th>Type</th> <th>Source</th> <th>Result</th> </tr> ''' def getHTMLClosing(self): """ Creates HTML markup to provide correct formatting for closing HTML file requirements. Returns string that contains closing HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. """ return ''' </table> <br> <br> <p>Created using Automater.py by @TekDefense <a href="http://www.tekdefense.com">http://www.tekdefense.com</a>; <a href="https://github.com/1aN0rmus/TekDefense">https://github.com/1aN0rmus/TekDefense</a></p> </body> </html> '''<|fim▁end|>
Return value(s): Nothing is returned from this Method.
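A note on the outputs.py example above: PrintToCEFFile emits one pipe-delimited record per result, in the "CEF:Version|Device Vendor|Device Product|..." shape its docstring cites. A minimal, self-contained sketch of one such record, mirroring the constants in that method (the tgt/typ/src/res values below are invented placeholders, not real Automater output):

# Sketch only: reproduces the field layout written by PrintToCEFFile above.
from datetime import datetime
import socket

prefix = ' '.join([datetime.now().strftime('%Y-%m-%d %H:%M:%S'), socket.gethostname()])
extension = '[tgt=203.0.113.7,typ=ip,src=ExampleSource,res=example result] '
record = '|'.join([prefix, 'CEF:Version1.1', 'TekDefense', 'Automater', '2.1', '0',
                   extension, '2', '203.0.113.7'])
print(record)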
<|file_name|>web.dom.iterable.js<|end_file_name|><|fim▁begin|>var $iterators = require('./es6.array.iterator') , redefine = require('./_redefine') , global = require('./_global') , hide = require('./_hide') , Iterators = require('./_iterators') , wks = require('./_wks') , CORRECT_SYMBOL = require('./_correct-symbol') , ITERATOR = wks('iterator') , TO_STRING_TAG = wks('toStringTag') , ArrayValues = Iterators.Array; require('./_').each.call(( 'CSSRuleList,CSSStyleDeclaration,DOMStringList,DOMTokenList,FileList,HTMLCollection,MediaList,' + 'MimeTypeArray,NamedNodeMap,NodeList,NodeListOf,Plugin,PluginArray,StyleSheetList,TouchList'<|fim▁hole|> var Collection = global[NAME] , proto = Collection && Collection.prototype , key; if(proto){ if(!proto[ITERATOR])hide(proto, ITERATOR, ArrayValues); if(!proto[TO_STRING_TAG])hide(proto, TO_STRING_TAG, NAME); Iterators[NAME] = ArrayValues; for(key in $iterators){ if(!CORRECT_SYMBOL || !proto[key])redefine(proto, key, $iterators[key], true); } } });<|fim▁end|>
).split(','), function(NAME){
<|file_name|>validate.py<|end_file_name|><|fim▁begin|># # Validator for "idlebg" Test # from pscheduler import json_validate MAX_SCHEMA = 1 def spec_is_valid(json): schema = { "type": "object", "properties": { "schema": { "$ref": "#/pScheduler/Cardinal" }, "duration": { "$ref": "#/pScheduler/Duration" }, "host": { "$ref": "#/pScheduler/Host" }, "host-node": { "$ref": "#/pScheduler/URLHostPort" }, "interval": { "$ref": "#/pScheduler/Duration" }, "parting-comment": { "$ref": "#/pScheduler/String" }, "starting-comment": { "$ref": "#/pScheduler/String" }, }, "required": [ "duration" ] } return json_validate(json, schema, max_schema=MAX_SCHEMA) def result_is_valid(json): schema = { "type": "object", "properties": { "schema": { "$ref": "#/pScheduler/Cardinal" },<|fim▁hole|> "succeeded": { "$ref": "#/pScheduler/Boolean" }, "error": { "$ref": "#/pScheduler/String" }, "diags": { "$ref": "#/pScheduler/String" }, "time-slept": { "$ref": "#/pScheduler/Duration" }, }, "required": [ "succeeded", "time-slept", ] } return json_validate(json, schema) def limit_is_valid(json): schema = { "type": "object", "properties": { "schema": { "$ref": "#/pScheduler/Cardinal" }, "duration": { "$ref": "#/pScheduler/Limit/Duration" }, "starting-comment": { "$ref": "#/pScheduler/Limit/String" }, "parting-comment": { "$ref": "#/pScheduler/Limit/String" } }, "additionalProperties": False } return json_validate(json, schema)<|fim▁end|>
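For the validate.py row above, a quick usage sketch of the spec validator; this assumes pscheduler's json_validate returns a (valid, error-message) pair, and 'PT30S' is just an illustrative ISO 8601 duration value:

# Hypothetical usage of spec_is_valid; only "duration" is required by the schema.
ok, err = spec_is_valid({"schema": 1, "duration": "PT30S"})  # expected to validate
bad, err = spec_is_valid({"schema": 1})                      # expected to fail: no "duration"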
<|file_name|>validation-renderer-custom-attribute.js<|end_file_name|><|fim▁begin|>'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.ValidationRendererCustomAttribute = undefined; var _validationController = require('./validation-controller'); var ValidationRendererCustomAttribute = exports.ValidationRendererCustomAttribute = function () { function ValidationRendererCustomAttribute() { } ValidationRendererCustomAttribute.prototype.created = function created(view) { this.container = view.container; }; <|fim▁hole|> this.controller = this.container.get(_validationController.ValidationController); this.renderer = this.container.get(this.value); this.controller.addRenderer(this.renderer); }; ValidationRendererCustomAttribute.prototype.unbind = function unbind() { this.controller.removeRenderer(this.renderer); this.controller = null; this.renderer = null; }; return ValidationRendererCustomAttribute; }();<|fim▁end|>
ValidationRendererCustomAttribute.prototype.bind = function bind() {
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// main.rs // include external crates extern crate color; extern crate time; extern crate cmq; // use these functions use color::{Rgb, ToHsv};<|fim▁hole|> println!("Converting RGB to HSV!"); let red = Rgb::new(255u8, 0, 0); println!("HSV: {:?}", red.to_hsv::<f32>()); let m = "2"; cmq::pretty_print(m); }<|fim▁end|>
// main fn main() {
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- import sys,os from PyQt5 import QtCore, QtGui, QtWidgets, uic from model import MainModel from view import MainView class App(QtWidgets.QApplication):<|fim▁hole|> self.model = MainModel() self.main_view = MainView(self.model, scriptpath) self.main_view.show() #Maximized() self.model.gridChanged.emit() if __name__ == '__main__': scriptpath = os.path.dirname(os.path.abspath(sys.argv[0])) app = App(scriptpath, sys.argv) sys.exit(app.exec_())<|fim▁end|>
def __init__(self, scriptpath, sys_argv): super(App, self).__init__(sys_argv)
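The main.py example above assumes a MainModel exposing a gridChanged signal that the view reacts to. Since model.py is not part of this row, a hypothetical minimal sketch of that contract would be:

# Hypothetical sketch of the model contract implied by App above.
from PyQt5.QtCore import QObject, pyqtSignal

class MainModel(QObject):
    gridChanged = pyqtSignal()  # emitted so bound views repaint the grid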
<|file_name|>docker_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package api import ( "fmt" "os" "strconv" "testing" "time" info "github.com/google/cadvisor/info/v1" "github.com/google/cadvisor/info/v2" "github.com/google/cadvisor/integration/framework" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) // Sanity check the container by: // - Checking that the specified alias is a valid one for this container. // - Verifying that stats are not empty. func sanityCheck(alias string, containerInfo info.ContainerInfo, t *testing.T) { assert.Contains(t, containerInfo.Aliases, alias, "Alias %q should be in list of aliases %v", alias, containerInfo.Aliases) assert.NotEmpty(t, containerInfo.Stats, "Expected container to have stats") } // Sanity check the container by: // - Checking that the specified alias is a valid one for this container. // - Verifying that stats are not empty. func sanityCheckV2(alias string, info v2.ContainerInfo, t *testing.T) { assert.Contains(t, info.Spec.Aliases, alias, "Alias %q should be in list of aliases %v", alias, info.Spec.Aliases) assert.NotEmpty(t, info.Stats, "Expected container to have stats") } // Waits up to 5s for a container with the specified alias to appear. func waitForContainer(alias string, fm framework.Framework) { err := framework.RetryForDuration(func() error { ret, err := fm.Cadvisor().Client().DockerContainer(alias, &info.ContainerInfoRequest{ NumStats: 1, }) if err != nil { return err } if len(ret.Stats) != 1 { return fmt.Errorf("no stats returned for container %q", alias) } return nil }, 5*time.Second) require.NoError(fm.T(), err, "Timed out waiting for container %q to be available in cAdvisor: %v", alias, err) } func getDockerMinorVersion(fm framework.Framework) int { val, err := strconv.Atoi(fm.Docker().Version()[1]) assert.Nil(fm.T(), err) return val } // A Docker container in /docker/<ID> func TestDockerContainerById(t *testing.T) { fm := framework.New(t) defer fm.Cleanup() containerId := fm.Docker().RunPause() // Wait for the container to show up. waitForContainer(containerId, fm) request := &info.ContainerInfoRequest{ NumStats: 1, } containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request) require.NoError(t, err) sanityCheck(containerId, containerInfo, t) } // A Docker container in /docker/<name> func TestDockerContainerByName(t *testing.T) { fm := framework.New(t) defer fm.Cleanup() containerName := fmt.Sprintf("test-docker-container-by-name-%d", os.Getpid()) fm.Docker().Run(framework.DockerRunArgs{ Image: "kubernetes/pause", Args: []string{"--name", containerName}, }) // Wait for the container to show up. 
	waitForContainer(containerName, fm)

	request := &info.ContainerInfoRequest{
		NumStats: 1,
	}
	containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerName, request)
	require.NoError(t, err)

	sanityCheck(containerName, containerInfo, t)
}

// Find the first container with the specified alias in containers.
func findContainer(alias string, containers []info.ContainerInfo, t *testing.T) info.ContainerInfo {
	for _, cont := range containers {
		for _, a := range cont.Aliases {
			if alias == a {
				return cont
			}
		}
	}
	t.Fatalf("Failed to find container %q in %+v", alias, containers)
	return info.ContainerInfo{}
}

// All Docker containers through /docker
func TestGetAllDockerContainers(t *testing.T) {
	fm := framework.New(t)
	defer fm.Cleanup()

	// Wait for the containers to show up.
	containerId1 := fm.Docker().RunPause()
	containerId2 := fm.Docker().RunPause()
	waitForContainer(containerId1, fm)
	waitForContainer(containerId2, fm)

	request := &info.ContainerInfoRequest{
		NumStats: 1,
	}
	containersInfo, err := fm.Cadvisor().Client().AllDockerContainers(request)
	require.NoError(t, err)

	if len(containersInfo) < 2 {
		t.Fatalf("At least 2 Docker containers should exist, received %d: %+v", len(containersInfo), containersInfo)
	}
	sanityCheck(containerId1, findContainer(containerId1, containersInfo, t), t)
	sanityCheck(containerId2, findContainer(containerId2, containersInfo, t), t)
}

// Check expected properties of a Docker container.
func TestBasicDockerContainer(t *testing.T) {
	fm := framework.New(t)
	defer fm.Cleanup()

	containerName := fmt.Sprintf("test-basic-docker-container-%d", os.Getpid())
	containerId := fm.Docker().Run(framework.DockerRunArgs{
		Image: "kubernetes/pause",
		Args: []string{
			"--name", containerName,
		},
	})

	// Wait for the container to show up.
	waitForContainer(containerId, fm)

	request := &info.ContainerInfoRequest{
		NumStats: 1,
	}
	containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
	require.NoError(t, err)

	// Check that the container is known by both its name and ID.
	sanityCheck(containerId, containerInfo, t)
	sanityCheck(containerName, containerInfo, t)

	assert.Empty(t, containerInfo.Subcontainers, "Should not have subcontainers")
	assert.Len(t, containerInfo.Stats, 1, "Should have exactly one stat")
}

// TODO(vmarmol): Handle if CPU or memory is not isolated on this system.
// Check the ContainerSpec.
func TestDockerContainerSpec(t *testing.T) {
	fm := framework.New(t)
	defer fm.Cleanup()

	var (
		cpuShares   = uint64(2048)
		cpuMask     = "0"
		memoryLimit = uint64(1 << 30) // 1GB
		image       = "kubernetes/pause"
		env         = map[string]string{"test_var": "FOO"}
		labels      = map[string]string{"bar": "baz"}
	)

	cpusetArg := "--cpuset"
	if getDockerMinorVersion(fm) >= 10 {
		cpusetArg = "--cpuset-cpus"
	}
	containerId := fm.Docker().Run(framework.DockerRunArgs{
		Image: image,
		Args: []string{
			"--cpu-shares", strconv.FormatUint(cpuShares, 10),
			cpusetArg, cpuMask,
			"--memory", strconv.FormatUint(memoryLimit, 10),
			"--env", "TEST_VAR=FOO",
			"--label", "bar=baz",
		},
	})

	// Wait for the container to show up.
waitForContainer(containerId, fm) request := &info.ContainerInfoRequest{ NumStats: 1, } containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request) require.NoError(t, err) sanityCheck(containerId, containerInfo, t) assert := assert.New(t) assert.True(containerInfo.Spec.HasCpu, "CPU should be isolated") assert.Equal(cpuShares, containerInfo.Spec.Cpu.Limit, "Container should have %d shares, has %d", cpuShares, containerInfo.Spec.Cpu.Limit) assert.Equal(cpuMask, containerInfo.Spec.Cpu.Mask, "Cpu mask should be %q, but is %q", cpuMask, containerInfo.Spec.Cpu.Mask) assert.True(containerInfo.Spec.HasMemory, "Memory should be isolated") assert.Equal(memoryLimit, containerInfo.Spec.Memory.Limit, "Container should have memory limit of %d, has %d", memoryLimit, containerInfo.Spec.Memory.Limit) assert.True(containerInfo.Spec.HasNetwork, "Network should be isolated") assert.True(containerInfo.Spec.HasDiskIo, "Blkio should be isolated") assert.Equal(image, containerInfo.Spec.Image, "Spec should include container image") assert.Equal(env, containerInfo.Spec.Envs, "Spec should include environment variables") assert.Equal(labels, containerInfo.Spec.Labels, "Spec should include labels") } // Check the CPU ContainerStats. func TestDockerContainerCpuStats(t *testing.T) { fm := framework.New(t) defer fm.Cleanup() // Wait for the container to show up. containerId := fm.Docker().RunBusybox("ping", "www.google.com") waitForContainer(containerId, fm) request := &info.ContainerInfoRequest{ NumStats: 1, } containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request) if err != nil { t.Fatal(err) } sanityCheck(containerId, containerInfo, t) // Checks for CpuStats. checkCpuStats(t, containerInfo.Stats[0].Cpu) } // Check the memory ContainerStats. func TestDockerContainerMemoryStats(t *testing.T) { fm := framework.New(t) defer fm.Cleanup() // Wait for the container to show up. containerId := fm.Docker().RunBusybox("ping", "www.google.com") waitForContainer(containerId, fm) request := &info.ContainerInfoRequest{ NumStats: 1, } containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request) require.NoError(t, err) sanityCheck(containerId, containerInfo, t) // Checks for MemoryStats. checkMemoryStats(t, containerInfo.Stats[0].Memory) } // Check the network ContainerStats. func TestDockerContainerNetworkStats(t *testing.T) { fm := framework.New(t) defer fm.Cleanup() // Wait for the container to show up. containerId := fm.Docker().RunBusybox("watch", "-n1", "wget", "http://www.google.com/") waitForContainer(containerId, fm) time.Sleep(10 * time.Second) request := &info.ContainerInfoRequest{ NumStats: 1, } containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request) require.NoError(t, err) sanityCheck(containerId, containerInfo, t) // Checks for NetworkStats. 
stat := containerInfo.Stats[0] assert := assert.New(t) assert.NotEqual(0, stat.Network.TxBytes, "Network tx bytes should not be zero") assert.NotEqual(0, stat.Network.TxPackets, "Network tx packets should not be zero") assert.NotEqual(0, stat.Network.RxBytes, "Network rx bytes should not be zero") assert.NotEqual(0, stat.Network.RxPackets, "Network rx packets should not be zero") assert.NotEqual(stat.Network.RxBytes, stat.Network.TxBytes, "Network tx and rx bytes should not be equal") assert.NotEqual(stat.Network.RxPackets, stat.Network.TxPackets, "Network tx and rx packets should not be equal") } func TestDockerFilesystemStats(t *testing.T) { fm := framework.New(t)<|fim▁hole|> storageDriver := fm.Docker().StorageDriver() if storageDriver == framework.DeviceMapper { // Filesystem stats not supported with devicemapper, yet return } const ( ddUsage = uint64(1 << 3) // 1 KB sleepDuration = 10 * time.Second ) // Wait for the container to show up. // FIXME: Tests should be bundled and run on the remote host instead of being run over ssh. // Escaping bash over ssh is ugly. // Once github issue 1130 is fixed, this logic can be removed. dockerCmd := fmt.Sprintf("dd if=/dev/zero of=/file count=2 bs=%d & ping google.com", ddUsage) if fm.Hostname().Host != "localhost" { dockerCmd = fmt.Sprintf("'%s'", dockerCmd) } containerId := fm.Docker().RunBusybox("/bin/sh", "-c", dockerCmd) waitForContainer(containerId, fm) request := &v2.RequestOptions{ IdType: v2.TypeDocker, Count: 1, } needsBaseUsageCheck := false switch storageDriver { case framework.Aufs, framework.Overlay: needsBaseUsageCheck = true } pass := false // We need to wait for the `dd` operation to complete. for i := 0; i < 10; i++ { containerInfo, err := fm.Cadvisor().ClientV2().Stats(containerId, request) if err != nil { t.Logf("%v stats unavailable - %v", time.Now().String(), err) t.Logf("retrying after %s...", sleepDuration.String()) time.Sleep(sleepDuration) continue } require.Equal(t, len(containerInfo), 1) var info v2.ContainerInfo // There is only one container in containerInfo. Since it is a map with unknown key, // use the value blindly. for _, cInfo := range containerInfo { info = cInfo } sanityCheckV2(containerId, info, t) require.NotNil(t, info.Stats[0], "got info: %+v", info) require.NotNil(t, info.Stats[0].Filesystem, "got info: %+v", info) require.NotNil(t, info.Stats[0].Filesystem.TotalUsageBytes, "got info: %+v", info.Stats[0].Filesystem) if *info.Stats[0].Filesystem.TotalUsageBytes >= ddUsage { if !needsBaseUsageCheck { pass = true break } require.NotNil(t, info.Stats[0].Filesystem.BaseUsageBytes) if *info.Stats[0].Filesystem.BaseUsageBytes >= ddUsage { pass = true break } } t.Logf("expected total usage %d bytes to be greater than %d bytes", *info.Stats[0].Filesystem.TotalUsageBytes, ddUsage) if needsBaseUsageCheck { t.Logf("expected base %d bytes to be greater than %d bytes", *info.Stats[0].Filesystem.BaseUsageBytes, ddUsage) } t.Logf("retrying after %s...", sleepDuration.String()) time.Sleep(sleepDuration) } if !pass { t.Fail() } }<|fim▁end|>
defer fm.Cleanup()
<|file_name|>test_pivocram.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from tests import base
from app import pivocram


class PivocramConnectTest(base.TestCase):
    def setUp(self):
        self.connect = pivocram.Connect('PIVOTAL_TEST_TOKEN')

    def test_should_have_the_pivotal_api_url(self):
        self.connect.PIVOTAL_URL.should.be.equal('https://www.pivotaltracker.com/services/v5')

    def test_should_have_header_with_token(self):
        self.connect.headers.should.be.equal({'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})

    def test_should_have_projects_url_for_list(self):
        self.connect.projects_url().should.be.equal('https://www.pivotaltracker.com/services/v5/projects')

    def test_should_have_projects_url_for_item(self):
        self.connect.projects_url(123).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123')

    def test_should_have_account_member_url(self):
        self.connect.account_member_url(123, 333).should.be.equal('https://www.pivotaltracker.com/services/v5/accounts/123/memberships/333')

    def test_should_have_iterations_url(self):
        self.connect.iterations_url(123, 1).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/iterations/1')

    def test_should_have_project_story_url(self):
        self.connect.project_story_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234')

    def test_should_have_project_story_tasks_url(self):
        self.connect.project_story_tasks_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks')

    def test_should_have_project_story_task_url(self):
        self.connect.project_story_task_url(123, 1234, 12345).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks/12345')

    @base.TestCase.mock.patch('app.pivocram.requests')
    def test_should_make_get(self, req_mock):
        response = self.mock.MagicMock()
        response.json.return_value = 'req-response'
        req_mock.get.return_value = response
        self.connect.get('url').should.be.equal('req-response')
        req_mock.get.assert_called_with('url', headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})

    @base.TestCase.mock.patch('app.pivocram.requests')
    def test_should_make_put(self, req_mock):
        response = self.mock.MagicMock()
        response.json.return_value = 'req-response'
        req_mock.put.return_value = response
        self.connect.put('url', {'data': 'value'}).should.be.equal('req-response')
        req_mock.put.assert_called_with('url', {'data': 'value'}, headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})

    def test_should_get_projects_list(self):
        self.connect.get = self.mock.MagicMock(return_value='req-response')
        self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
        self.connect.get_projects().should.be.equal('req-response')
        self.connect.get.assert_called_with('url-projects')

    def test_should_get_project(self):
        self.connect.get = self.mock.MagicMock(return_value='req-response')
        self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
        self.connect.get_project(123).should.be.equal('req-response')
        self.connect.get.assert_called_with('url-projects')
        self.connect.projects_url.assert_called_with(123)

    def test_should_get_project_member(self):
        self.connect.get = self.mock.MagicMock(return_value='req-response')
        self.connect.account_member_url = self.mock.MagicMock(return_value='url-project-member')
        self.connect.get_account_member(123, 333).should.be.equal('req-response')
        self.connect.get.assert_called_with('url-project-member')
        self.connect.account_member_url.assert_called_with(123, 333)

    def test_should_get_project_story_tasks(self):<|fim▁hole|>
        self.connect.get.assert_called_with('url-tasks')
        self.connect.project_story_tasks_url.assert_called_with(123, 1234)

    def test_should_get_iteration_stories(self):
        self.connect.get = self.mock.MagicMock(return_value='req-response')
        self.connect.iterations_url = self.mock.MagicMock(return_value='url-iterations')
        self.connect.get_current_iteration(123, 1).should.be.equal('req-response')
        self.connect.get.assert_called_with('url-iterations')
        self.connect.iterations_url.assert_called_with(123, 1)

    def test_should_update_story(self):
        self.connect.put = self.mock.MagicMock(return_value='req-response')
        self.connect.project_story_url = self.mock.MagicMock(return_value='url-stories')
        self.connect.update_story(123, 1234, {'data': 'value'}).should.be.equal('req-response')
        self.connect.put.assert_called_with('url-stories', {'data': 'value'})
        self.connect.project_story_url.assert_called_with(123, 1234)

    def test_should_update_story_task(self):
        self.connect.put = self.mock.MagicMock(return_value='req-response')
        self.connect.project_story_task_url = self.mock.MagicMock(return_value='url-stories')
        self.connect.update_story_task(123, 1234, 12345, {'data': 'value'}).should.be.equal('req-response')
        self.connect.put.assert_called_with('url-stories', {'data': 'value'})
        self.connect.project_story_task_url.assert_called_with(123, 1234, 12345)


class PivocramClientTest(base.TestCase):
    project_mock = {"current_iteration_number": 1}

    def setUp(self):
        user = self.mock.MagicMock()
        user.pivotal_token = 'PIVOTAL_TEST_TOKEN'
        self.client = pivocram.Client(user, project_id='PROJECT-ID')

    def test_should_have_connect_attribute(self):
        self.assertTrue(isinstance(self.client.connect, pivocram.Connect))

    def test_should_be_create_with_project_id(self):
        self.client.project_id.should.be.equal('PROJECT-ID')

    def test_should_have_property_list_stories(self):
        self.client._current_iteration = 'CURRENT'
        self.client.current_iteration.should.be.equal('CURRENT')

    def test_should_have_method_to_get_story(self):
        self.client.get_story('STORY-ID').should.be.equal(None)

    def test_should_have_method_to_list_story_tasks(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.get_project_story_tasks.return_value = [1, 2, 3]
        self.client.get_story_tasks('STORY-ID').should.be.equal([1, 2, 3])
        self.client.connect.get_project_story_tasks.assert_called_with('PROJECT-ID', 'STORY-ID')

    def test_should_have_method_to_get_story_task(self):
        self.client.get_story_task('STORY-ID', 'TASKS-ID').should.be.equal(None)

    def test_should_get_projects(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.get_projects.return_value = [1, 2, 3]
        self.client.get_projects().should.be.equal([1, 2, 3])

    def test_should_get_empty_if_no_projects(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.get_projects.return_value = []
        self.client.get_projects().should.be.equal([])

    def test_should_set_current_iteration(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.get_project.return_value = self.project_mock
        self.client._current_iteration_number = None
        self.client.current_iteration_number.should.be.equal(1)
        self.client.connect.get_project.assert_called_with('PROJECT-ID')

    def test_should_get_current_stories(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.get_current_iteration.return_value = {'stories': [1, 2, 3]}
        self.client.current_iteration.should.be.equal({'stories': [1, 2, 3]})

    def test_should_update_story(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.update_story.return_value = {'id': 1234}
        self.client.update_story(1234, {'data': 'value'}).should.be.equal({'id': 1234})

    def test_should_complete_story_task(self):
        self.client.connect = self.mock.MagicMock()
        self.client.connect.update_story_task.return_value = {'id': 1234}
        self.client.complete_story_task(1234, 12345, {'data': 'value'}).should.be.equal({'id': 1234})<|fim▁end|>
self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.project_story_tasks_url = self.mock.MagicMock(return_value='url-tasks') self.connect.get_project_story_tasks(123, 1234).should.be.equal('req-response')
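The URL assertions in the test_pivocram.py row above fully pin down the builder behaviour of pivocram.Connect; a hypothetical reconstruction of one builder, consistent with the asserted strings (the real implementation is not shown in this row), might look like:

# Hypothetical reconstruction of Connect.projects_url from the asserted URLs.
def projects_url(self, project_id=None):
    url = '{0}/projects'.format(self.PIVOTAL_URL)
    if project_id is not None:
        url = '{0}/{1}'.format(url, project_id)
    return url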
<|file_name|>sequence_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 The trfl Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for multistep_ops.py."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Dependency imports
import numpy as np
from six.moves import xrange  # pylint: disable=redefined-builtin
import tensorflow.compat.v1 as tf

from trfl import sequence_ops


def get_n_step_backup(rewards, pcontinues, state_values, start, n):
  """Evaluates a single n-step backup (return) starting at position start.

  http://incompleteideas.net/sutton/book/ebook/node73.html (Eq. 7.1)

  Args:
    rewards: a list containing a sequence of rewards.
    pcontinues: a list containing a sequence of discounts.
    state_values: a list containing a sequence of state-values.
    start: position at which the n-Step return has to be evaluated.
    n: number of steps over which rewards are summed before adding the
        respective bootstrapped state-value.

  Returns:
    Sum of discounted rewards plus discounted bootstrapped value.
  """
  accumulator = 0.0
  k = 1.0
  for i in xrange(start, start + n):
    accumulator += k * rewards[i]
    k *= pcontinues[i]
  accumulator += k * state_values[start + n - 1]
  return accumulator


def get_complex_n_step_backup(rewards, pcontinues, state_values, start, n,
                              lambda_):
  """Evaluates a complex n-step backup (sum of lambda-weighted n-step backups).

  http://incompleteideas.net/sutton/book/ebook/node74.html (Eq. 7.3)

  Args:
    rewards: a list containing rewards.
    pcontinues: a list containing discounts.
    state_values: a list containing bootstrapped state values.
    start: position at which the n-Step return has to be evaluated.
    n: number of steps over which rewards are summed before adding the
        respective bootstrapped state values.
    lambda_: mixing parameter lambda.

  Returns:
    A single complex backup.
  """
  accumulator = 0.0
  for t in xrange(1, n):
    value = get_n_step_backup(rewards, pcontinues, state_values, start, t)
    weight = (1 - lambda_) * (lambda_ ** (t - 1))
    accumulator += value * weight
  value = get_n_step_backup(rewards, pcontinues, state_values, start, n)
  weight = lambda_ ** (n - 1)
  accumulator += value * weight
  return accumulator


def get_complex_n_step_backup_at_all_times(rewards, pcontinues, state_values,
                                           lambda_):
  """Evaluates complex n-step backups at all time-points.

  Args:
    rewards: a list containing rewards.
    pcontinues: a list containing discounts.
    state_values: a list containing bootstrapped state values.
    lambda_: mixing parameter lambda.

  Returns:
    A list containing complex backups at all times.
  """
  res = []
  length = len(rewards)
  for i in xrange(0, length):
    res.append(get_complex_n_step_backup(rewards, pcontinues, state_values, i,
                                         length - i, lambda_))
  return res


class ScanDiscountedSumTest(tf.test.TestCase):

  def testScanSumShapeInference(self):
    """scan_discounted_sum should support static shape inference."""
    # No session needed since we're not evaluating any ops.
sequence_in = tf.placeholder(tf.float32, shape=[1647, 2001]) decays_in = tf.placeholder(tf.float32, shape=[1647, 2001]) bootstrap = tf.placeholder(tf.float32, shape=[2001]) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=False) self.assertAllEqual(result.get_shape(), [1647, 2001]) # Let's do it again with higher-dimensional inputs. sequence_in = tf.placeholder(tf.float32, shape=[4, 8, 15, 16, 23, 42]) decays_in = tf.placeholder(tf.float32, shape=[4, 8, 15, 16, 23, 42]) bootstrap = tf.placeholder(tf.float32, shape=[8, 15, 16, 23, 42]) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=False) self.assertAllEqual(result.get_shape(), [4, 8, 15, 16, 23, 42]) def testScanSumShapeInferenceWithSeqLen(self): """scan_discounted_sum should support static shape inference.""" # No session needed since we're not evaluating any ops. sequence_in = tf.placeholder(tf.float32, shape=[1647, 2001]) decays_in = tf.placeholder(tf.float32, shape=[1647, 2001]) bootstrap = tf.placeholder(tf.float32, shape=[2001]) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=False) self.assertAllEqual(result.get_shape(), [1647, 2001]) # Let's do it again with higher-dimensional inputs. sequence_in = tf.placeholder(tf.float32, shape=[4, 8, 15, 16, 23, 42]) decays_in = tf.placeholder(tf.float32, shape=[4, 8, 15, 16, 23, 42]) bootstrap = tf.placeholder(tf.float32, shape=[8, 15, 16, 23, 42]) sequence_lengths = tf.placeholder(tf.float32, shape=[8]) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=False, sequence_lengths=sequence_lengths) self.assertAllEqual(result.get_shape(), [4, 8, 15, 16, 23, 42]) def testScanSumWithDecays(self): with self.test_session() as sess: sequence = [[3, 1, 5, 2, 1], [-1.7, 1.2, 2.3, 0, 1]] decays = [[0.5, 0.9, 1.0, 0.1, 0.5], [0.9, 0.5, 0.0, 2, 0.8]] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32)) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32)) bootstrap = tf.constant([0, 1.5], dtype=tf.float32) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=False) expected_result = tf.constant( [[3, 3 * 0.9 + 1, (3 * 0.9 + 1) * 1.0 + 5, ((3 * 0.9 + 1) * 1.0 + 5) * 0.1 + 2, (((3 * 0.9 + 1) * 1.0 + 5) * 0.1 + 2) * 0.5 + 1], [-1.7 + 1.5 * 0.9, (-1.7 + 1.5 * 0.9) * 0.5 + 1.2, ((-1.7 + 1.5 * 0.9) * 0.5 + 1.2) * 0.0 + 2.3, (((-1.7 + 1.5 * 0.9) * 0.5 + 1.2) * 0.0 + 2.3) * 2 + 0, ((((-1.7 + 1.5 * 0.9) * 0.5 + 1.2) * 0.0 + 2.3) * 2 + 0) * 0.8 + 1, ]], dtype=tf.float32) self.assertAllClose(sess.run(result), sess.run(tf.transpose(expected_result))) def testScanSumWithDecaysWithSeqLen(self): with self.test_session() as sess: sequence = [[3, 1, 5, 2, 1], [-1.7, 1.2, 2.3, 0, 1]] decays = [[0.5, 0.9, 1.0, 0.1, 0.5], [0.9, 0.5, 0.0, 2, 0.8]] sequence_lengths = [0, 2] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. 
sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32)) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32)) bootstrap = tf.constant([0, 1.5], dtype=tf.float32) result = sequence_ops.scan_discounted_sum( sequence_in, decays_in, bootstrap, reverse=False, sequence_lengths=sequence_lengths) expected_result = tf.constant( [[0, 0, 0, 0, 0], [-1.7 + 1.5 * 0.9, (-1.7 + 1.5 * 0.9) * 0.5 + 1.2, 0, 0, 0]], dtype=tf.float32) self.assertAllClose(sess.run(result), sess.run(tf.transpose(expected_result))) def testScanSumEquivalenceWithSeqLen(self): with self.test_session() as sess: sequence_lengths = [0, 2] bootstrap = tf.constant([0.5, 1.5], dtype=tf.float32) sequence = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]] decays = [[.1, .2, .3, .4, .5], [.6, .7, .8, .9, .10]] eq_sequence = [[0, 0, 0, 0, 0], [6, 7, 0, 0, 0]] eq_decays = [[0, 0, 0, 0, 0], [.6, .7, 0, 0, 0]] eq_reverse_sequence = [[0, 0, 0, 0, 0], [7, 6, 0, 0, 0]] eq_reverse_decays = [[0, 0, 0, 0, 0], [.7, .6, 0, 0, 0]] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32)) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32)) eq_sequence_in = tf.transpose(tf.constant(eq_sequence, dtype=tf.float32)) eq_decays_in = tf.transpose(tf.constant(eq_decays, dtype=tf.float32)) eq_reverse_sequence_in = tf.transpose( tf.constant(eq_reverse_sequence, dtype=tf.float32)) eq_reverse_decays_in = tf.transpose( tf.constant(eq_reverse_decays, dtype=tf.float32)) eq_result = sequence_ops.scan_discounted_sum( sequence_in, decays_in, bootstrap, reverse=False, sequence_lengths=sequence_lengths) exp_eq_result = sequence_ops.scan_discounted_sum( eq_sequence_in, eq_decays_in, bootstrap) eq_reverse_result = sequence_ops.scan_discounted_sum( sequence_in, decays_in, bootstrap, reverse=True, sequence_lengths=sequence_lengths) exp_eq_reverse_result = sequence_ops.scan_discounted_sum( eq_reverse_sequence_in, eq_reverse_decays_in, bootstrap) exp_eq_reverse_result = tf.reverse_sequence( exp_eq_reverse_result, sequence_lengths, seq_axis=0, batch_axis=1) self.assertAllClose(sess.run(eq_result), sess.run(exp_eq_result)) self.assertAllClose(sess.run(eq_reverse_result), sess.run(exp_eq_reverse_result)) def testScanSumWithDecaysReverse(self): with self.test_session() as sess: sequence = [[3, 1, 5], [-1.7, 1.2, 2.3]] decays = [[0.5, 0.9, 1.0], [0.9, 0.5, 0.3]] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32)) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32)) bootstrap = tf.constant([0, 1.5], dtype=tf.float32) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=True) expected_result = tf.constant( [[(5 * 0.9 + 1) * 0.5 + 3, 5 * 0.9 + 1, 5], [((2.3 + 0.3 * 1.5) * 0.5 + 1.2) * 0.9 - 1.7, (2.3 + 0.3 * 1.5) * 0.5 + 1.2, 2.3 + 0.3 * 1.5, ]], dtype=tf.float32) self.assertAllClose(sess.run(result), sess.run(tf.transpose(expected_result))) def testScanSumWithDecaysReverseWithSeqLen(self): with self.test_session() as sess: sequence = [[3, 1, 5], [-1.7, 1.2, 2.3]] decays = [[0.5, 0.9, 1.0], [0.9, 0.5, 0.3]] sequence_lengths = [2, 0] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. 
sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32)) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32)) bootstrap = tf.constant([2.5, 1.5], dtype=tf.float32) result = sequence_ops.scan_discounted_sum( sequence_in, decays_in, bootstrap, reverse=True, sequence_lengths=sequence_lengths) expected_result = tf.constant( [[(0.9 * 2.5 + 1) * 0.5 + 3, (0.9 * 2.5 + 1), 0], [0, 0, 0]], dtype=tf.float32) self.assertAllClose(sess.run(result), sess.run(tf.transpose(expected_result))) def testScanSumWithDecaysReverse3D(self): """scan_discounted_sum vs. higher-dimensional arguments.""" with self.test_session() as sess: sequence = [[[3, 33], [1, 11], [5, 55]], [[-1.7, -17], [1.2, 12], [2.3, 23]]] decays = [[[0.5, 5], [0.9, 9], [1.0, 10]], [[0.9, 9], [0.5, 5], [0.3, 3]]]<|fim▁hole|> # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32), perm=[1, 0, 2]) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32), perm=[1, 0, 2]) bootstrap = tf.constant([[0, 0], [1.5, 15]], dtype=tf.float32) result = sequence_ops.scan_discounted_sum(sequence_in, decays_in, bootstrap, reverse=True) expected_result = tf.constant( [[[(5 * 0.9 + 1) * 0.5 + 3, (55 * 9 + 11) * 5 + 33], [5 * 0.9 + 1, 55 * 9 + 11], [5, 55]], [[((2.3 + 0.3 * 1.5) * 0.5 + 1.2) * 0.9 - 1.7, ((23 + 3 * 15) * 5 + 12) * 9 - 17], [(2.3 + 0.3 * 1.5) * 0.5 + 1.2, (23 + 3 * 15) * 5 + 12], [2.3 + 0.3 * 1.5, 23 + 3 * 15]]], dtype=tf.float32) self.assertAllClose(sess.run(result), sess.run(tf.transpose(expected_result, perm=[1, 0, 2]))) def testScanSumWithDecaysReverse3DWithSeqLen(self): """scan_discounted_sum vs. higher-dimensional arguments.""" with self.test_session() as sess: sequence = [[[3, 33], [1, 11], [5, 55]], [[-1.7, -17], [1.2, 12], [2.3, 23]]] decays = [[[0.5, 5], [0.9, 9], [1.0, 10]], [[0.9, 9], [0.5, 5], [0.3, 3]]] sequence_lengths = [2, 0] # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. sequence_in = tf.transpose(tf.constant(sequence, dtype=tf.float32), perm=[1, 0, 2]) decays_in = tf.transpose(tf.constant(decays, dtype=tf.float32), perm=[1, 0, 2]) bootstrap = tf.constant([[0, 0], [1.5, 15]], dtype=tf.float32) result = sequence_ops.scan_discounted_sum( sequence_in, decays_in, bootstrap, reverse=True, sequence_lengths=sequence_lengths) expected_result = np.asarray( [[[1 * 0.5 + 3, 11 * 5 + 33], [1, 11], [0, 0]], [[0, 0], [0, 0], [0, 0]]], dtype=np.float32) self.assertAllClose(sess.run(result), np.transpose(expected_result, axes=[1, 0, 2])) class MultistepForwardViewTest(tf.test.TestCase): def testMultistepForwardView(self): with self.test_session() as sess: # Define input data. rewards = [[1, 0, -1, 0, 1], [0.5, 0.8, -0.7, 0.0, 2.1]] pcontinues = [[0.5, 0.9, 1.0, 0.5, 0.8], [0.9, 0.5, 0.3, 0.8, 0.7]] state_values = [[3, 1, 5, -5, 3], [-1.7, 1.2, 2.3, 2.2, 2.7]] lambda_ = 0.75 # Evaluate expected complex backups at all time-steps for both batches. expected_result = [] for b in xrange(0, 2): expected_result.append( get_complex_n_step_backup_at_all_times(rewards[b], pcontinues[b], state_values[b], lambda_)) # Only partially-specify the input shapes - verifies that the # dynamically sized Tensors are handled correctly. 
state_values_pl = tf.placeholder(tf.float32, shape=[None, None]) rewards_pl = tf.placeholder(tf.float32, shape=[None, None]) pcontinues_pl = tf.placeholder(tf.float32, shape=[None, None]) # We use transpose because it is easier to define the input data in # BxT (batch x time) form, while scan_discounted_sum assumes TxB form. state_values_in = tf.transpose(state_values_pl) rewards_in = tf.transpose(rewards_pl) pcontinues_in = tf.transpose(pcontinues_pl) expected = tf.transpose(tf.constant(expected_result, dtype=tf.float32)) # Evaluate complex backups. result = sequence_ops.multistep_forward_view(rewards_in, pcontinues_in, state_values_in, lambda_) feed_dict = {state_values_pl: state_values, rewards_pl: rewards, pcontinues_pl: pcontinues} self.assertAllClose(sess.run(result, feed_dict=feed_dict), sess.run(expected)) if __name__ == "__main__": tf.test.main()<|fim▁end|>
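The expected values in the scan tests above are all hand-derived from one recurrence: result[t] = sequence[t] + decays[t] * result[t-1], seeded with bootstrap at the open end (the far end when reverse=True). A minimal NumPy sketch of that recurrence — inferred from the expected values in these tests, not taken from the library's actual implementation — reproduces the testScanSumWithDecaysReverse numbers:

import numpy as np

def scan_discounted_sum_ref(sequence, decays, bootstrap, reverse=False):
    # sequence, decays: arrays of shape [T, B]; bootstrap: shape [B].
    # Computes result[t] = sequence[t] + decays[t] * result[t - 1],
    # scanning backwards over time when reverse=True.
    seq = sequence[::-1] if reverse else sequence
    dec = decays[::-1] if reverse else decays
    out = np.zeros_like(seq, dtype=np.float64)
    acc = np.asarray(bootstrap, dtype=np.float64)
    for t in range(seq.shape[0]):
        acc = seq[t] + dec[t] * acc
        out[t] = acc
    return out[::-1] if reverse else out

seq = np.transpose(np.array([[3, 1, 5], [-1.7, 1.2, 2.3]]))
dec = np.transpose(np.array([[0.5, 0.9, 1.0], [0.9, 0.5, 0.3]]))
res = scan_discounted_sum_ref(seq, dec, [0, 1.5], reverse=True)
# Batch 0 matches the expected_result row above:
# [(5 * 0.9 + 1) * 0.5 + 3, 5 * 0.9 + 1, 5] == [5.75, 5.5, 5.0]
print(res[:, 0])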
<|file_name|>de.js<|end_file_name|><|fim▁begin|>// moment.js locale configuration // locale : german (de) // author : lluchs : https://github.com/lluchs // author: Menelion Elensúle: https://github.com/Oire (function (factory) { if (typeof define === 'function' && define.amd) { define(['moment'], factory); // AMD } else if (typeof exports === 'object') { module.exports = factory(require('../moment')); // Node } else { factory(window.moment); // Browser global } }(function (moment) { function processRelativeTime(number, withoutSuffix, key, isFuture) { var format = { 'm': ['eine Minute', 'einer Minute'], 'h': ['eine Stunde', 'einer Stunde'], 'd': ['ein Tag', 'einem Tag'], 'dd': [number + ' Tage', number + ' Tagen'], 'M': ['ein Monat', 'einem Monat'], 'MM': [number + ' Monate', number + ' Monaten'], 'y': ['ein Jahr', 'einem Jahr'], 'yy': [number + ' Jahre', number + ' Jahren'] }; return withoutSuffix ? format[key][0] : format[key][1]; } return moment.defineLocale('de', { months : 'Januar_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'), monthsShort : 'Jan._Febr._Mrz._Apr._Mai_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'), weekdays : 'Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag'.split('_'), weekdaysShort : 'So._Mo._Di._Mi._Do._Fr._Sa.'.split('_'), weekdaysMin : 'So_Mo_Di_Mi_Do_Fr_Sa'.split('_'), longDateFormat : { LT: 'HH:mm [Uhr]', L : 'DD.MM.YYYY', LL : 'D. MMMM YYYY', LLL : 'D. MMMM YYYY LT', LLLL : 'dddd, D. MMMM YYYY LT' }, calendar : { sameDay: '[Heute um] LT', sameElse: 'L', nextDay: '[Morgen um] LT', nextWeek: 'dddd [um] LT', lastDay: '[Gestern um] LT', lastWeek: '[letzten] dddd [um] LT' }, relativeTime : { future : 'in %s', past : 'vor %s', s : 'ein paar Sekunden', m : processRelativeTime, mm : '%d Minuten', h : processRelativeTime, hh : '%d Stunden', d : processRelativeTime, dd : processRelativeTime, M : processRelativeTime, MM : processRelativeTime, y : processRelativeTime, yy : processRelativeTime }, ordinalParse: /\d{1,2}\./, ordinal : '%d.', week : { dow : 1, // Monday is the first day of the week. doy : 4 // The week that contains Jan 4th is the first week of the year.<|fim▁hole|><|fim▁end|>
} }); }));
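The processRelativeTime helper in the locale above exists because German declines these time expressions: the standalone (nominative) form is "ein Tag", while after "in"/"vor" the dative "einem Tag" is required, and the withoutSuffix flag selects between the two stored forms. A toy Python rendering of the same table lookup (hypothetical helper, for illustration only):

GERMAN_FORMS = {
    "d": ("ein Tag", "einem Tag"),    # nominative vs. dative
    "dd": ("%d Tage", "%d Tagen"),
    "M": ("ein Monat", "einem Monat"),
}

def relative_time(number, without_suffix, key):
    # without_suffix=True -> standalone nominative form;
    # False -> dative form used after "in"/"vor".
    form = GERMAN_FORMS[key][0 if without_suffix else 1]
    return form % number if "%d" in form else form

print("vor " + relative_time(3, False, "dd"))  # "vor 3 Tagen"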
<|file_name|>test_antelope_v1_client.py<|end_file_name|><|fim▁begin|>import unittest from antelope_catalog.data_sources.local import TEST_ROOT from antelope_catalog import LcCatalog from lcatools.interfaces import IndexRequired cat = LcCatalog(TEST_ROOT) ref = 'calrecycle.antelope' cat.new_resource(ref, 'http://www.antelope-lca.net/uo-lca/api/', 'AntelopeV1Client', store=False, interfaces=['index', 'inventory', 'quantity'], quiet=True) ar = cat.get_archive(ref) class AntelopeV1Client(unittest.TestCase): def test_stages(self): self.assertEqual(len(ar.get_endpoint('stages')), 87) def test_stagename(self): inv = ar.make_interface('inventory') self.assertEqual(inv.get_stage_name('42'), 'Natural Gas') self.assertEqual(inv.get_stage_name('47'), 'Natural Gas Supply') self.assertEqual(inv.get_stage_name('81'), 'WWTP') def test_impactcategory(self): self.assertEqual(ar._get_impact_category(6), 'Cancer human health effects') with self.assertRaises(ValueError): ar._get_impact_category(5) def test_nonimpl(self): with self.assertRaises(IndexRequired): next(cat.query(ref).terminate('flows/87')) def test_traversal(self): ffs = cat.query(ref).get('fragments/47').traverse() self.assertEqual(len(ffs), 14) self.assertSetEqual({-0.5, -0.01163, -0.0102, 0.0, 0.5}, set(round(x.node_weight, 5) for x in ffs)) def test_lcia(self): lcia = cat.query(ref).get('fragments/19').fragment_lcia('lciamethods/4') self.assertSetEqual(set(x.external_ref for x in lcia.component_entities()), {'Crude Oil', 'Electricity', 'Natural Gas', 'Refinery'}) self.assertSetEqual(set(round(x.cumulative_result, 10) for x in lcia.components()), {0.0004522897, 0.0000733389, 0.0000419222, 0.0001582613}) self.assertAlmostEqual(lcia.total(), 0.0007258121306, places=12) <|fim▁hole|><|fim▁end|>
if __name__ == '__main__': unittest.main()
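The LCIA assertions in the row above are internally consistent: the four per-component cumulative results sum to the asserted total, which is the relationship implied between fragment_lcia's components() and total(). A quick standalone check:

components = [0.0004522897, 0.0000733389, 0.0000419222, 0.0001582613]
# total() should equal the sum over the per-component results, within rounding.
assert abs(sum(components) - 0.0007258121306) < 1e-9
print(sum(components))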
<|file_name|>controller_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2020 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package mttrigger import ( "fmt" "testing" "k8s.io/apimachinery/pkg/labels" v1 "knative.dev/eventing/pkg/apis/eventing/v1" v1lister "knative.dev/eventing/pkg/client/listers/eventing/v1" "k8s.io/apimachinery/pkg/runtime" testingv1 "knative.dev/eventing/pkg/reconciler/testing/v1" "knative.dev/pkg/configmap" logtesting "knative.dev/pkg/logging/testing" . "knative.dev/pkg/reconciler/testing" // Fake injection informers _ "knative.dev/eventing/pkg/client/injection/informers/eventing/v1/broker/fake" _ "knative.dev/eventing/pkg/client/injection/informers/eventing/v1/trigger/fake" _ "knative.dev/eventing/pkg/client/injection/informers/messaging/v1/subscription/fake" _ "knative.dev/pkg/client/injection/ducks/duck/v1/source/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" ) func TestNew(t *testing.T) { ctx, _ := SetupFakeContext(t) c := NewController(ctx, configmap.NewStaticWatcher()) if c == nil { t.Fatal("Expected NewController to return a non-nil value") } } func TestGetTriggersForBroker(t *testing.T) { for _, tt := range []struct { name string in []runtime.Object out []string }{{ name: "Empty", }, { name: "single matching", in: []runtime.Object{testingv1.NewTrigger("match", testNS, brokerName)}, out: []string{"match"}, }, { name: "two, only one matching", in: []runtime.Object{testingv1.NewTrigger("match", testNS, brokerName), testingv1.NewTrigger("nomatch", testNS, "anotherbroker")}, out: []string{"match"}, }, { name: "two, both matching", in: []runtime.Object{testingv1.NewTrigger("match", testNS, brokerName), testingv1.NewTrigger("match2", testNS, brokerName)}, out: []string{"match", "match2"}, }} { t.Run(tt.name, func(t *testing.T) { ls := testingv1.NewListers(tt.in) logger := logtesting.TestLogger(t) triggerLister := ls.GetTriggerLister() triggers := getTriggersForBroker(logger, triggerLister, ReadyBroker()) var found []string for _, want := range tt.out { for _, got := range triggers { if got.Name == want { found = append(found, got.Name) } } } if len(found) != len(tt.out) { t.Fatalf("Did not find all the triggers, wanted %+v found %+v", tt.out, found) } }) } } type TriggerListerFailer struct{} func (failer *TriggerListerFailer) List(selector labels.Selector) (ret []*v1.Trigger, err error) { return nil, nil } func (failer *TriggerListerFailer) Triggers(namespace string) v1lister.TriggerNamespaceLister { return &TriggerNamespaceListerFailer{} } 
<|fim▁hole|>
 // List lists all Triggers in the indexer. // Objects returned here must be treated as read-only. func (failer *TriggerNamespaceListerFailer) List(selector labels.Selector) (ret []*v1.Trigger, err error) { return nil, fmt.Errorf("Inducing test failure for List") } // Triggers returns an object that can list and get Triggers. 
func (failer *TriggerNamespaceListerFailer) Get(name string) (*v1.Trigger, error) { return nil, nil } func TestListFailure(t *testing.T) { logger := logtesting.TestLogger(t) triggerListerFailer := &TriggerListerFailer{} if len(getTriggersForBroker(logger, triggerListerFailer, ReadyBroker())) != 0 { t.Fatalf("Got back triggers when not expecting any") } }<|fim▁end|>
type TriggerNamespaceListerFailer struct{}
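getTriggersForBroker in the row above reduces to "list everything, keep the triggers whose broker matches, return nothing when List fails" — the two Failer types exist only to drive that error path in TestListFailure. A loose Python analogue of the filter plus a failing fake (names invented for illustration; this is not the Knative API):

class FakeLister(object):
    def __init__(self, triggers):
        self._triggers = triggers

    def list(self):
        return self._triggers

class FailingLister(object):
    def list(self):
        raise RuntimeError("Inducing test failure for List")

def get_triggers_for_broker(lister, broker_name):
    # On a lister failure, return an empty result, mirroring the Go
    # test's expectation that no triggers come back.
    try:
        candidates = lister.list()
    except RuntimeError:
        return []
    return [t for t in candidates if t["broker"] == broker_name]

lister = FakeLister([{"name": "match", "broker": "b"},
                     {"name": "nomatch", "broker": "anotherbroker"}])
assert [t["name"] for t in get_triggers_for_broker(lister, "b")] == ["match"]
assert get_triggers_for_broker(FailingLister(), "b") == []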
<|file_name|>api.rs<|end_file_name|><|fim▁begin|>//! The Api system is responsible for talking to our Turtl server, and manages //! our user authentication. use ::std::io::Read; use ::std::time::Duration; use ::config; use ::reqwest::{Method, blocking::RequestBuilder, blocking::Client, Url, Proxy}; use ::reqwest::header::{HeaderMap, HeaderValue}; pub use ::reqwest::StatusCode; use ::jedi::{self, Value, DeserializeOwned}; use ::error::{MResult, MError}; use ::crypto; /// Holds our Api configuration. This consists of any mutable fields the Api /// needs to build URLs or make decisions. struct ApiConfig { auth: Option<String>, } impl ApiConfig { /// Create a new, blank config fn new() -> ApiConfig { ApiConfig { auth: None, } } } /// A struct used for building API requests pub struct ApiReq { headers: HeaderMap, timeout: Duration, data: Value, } impl ApiReq { /// Create a new builder<|fim▁hole|> data: Value::Null, } } /// Set a header #[allow(dead_code)] pub fn header<'a>(mut self, name: &'static str, val: &String) -> Self { self.headers.insert(name, HeaderValue::from_str(val.as_str()).expect("ApiReq.header() -- bad header value given")); self } /// Set (override) the timeout for this request pub fn timeout<'a>(mut self, secs: u64) -> Self { self.timeout = Duration::new(secs, 0); self } /// Set this request's data #[allow(dead_code)] pub fn data<'a>(mut self, data: Value) -> Self { self.data = data; self } } /// Used to store some info we want when we send a response to call_end() pub struct CallInfo { method: Method, resource: String, } impl CallInfo { /// Create a new call info object fn new(method: Method, resource: String) -> Self { Self { method: method, resource: resource, } } } /// Our Api object. Responsible for making outbound calls to our Turtl server. 
pub struct Api { config: ApiConfig, } impl Api { /// Create an Api pub fn new() -> Api { Api { config: ApiConfig::new(), } } /// Set the API's authentication pub fn set_auth(&mut self, auth: String) -> MResult<()> { let auth_str = String::from("user:") + &auth; let base_auth = crypto::to_base64(&Vec::from(auth_str.as_bytes()))?; self.config.auth = Some(String::from("Basic ") + &base_auth); Ok(()) } /// Grab the auth our of the API object pub fn get_auth(&self) -> Option<String> { self.config.auth.as_ref().map(|x| x.clone()) } /// Write our auth headers into a header collection pub fn set_auth_headers(&self, req: RequestBuilder) -> RequestBuilder { match self.config.auth.as_ref() { Some(x) => req.header("Authorization", x.clone()), None => req, } } /// Set our standard auth header into a Headers set fn set_standard_headers(&self, req: RequestBuilder) -> RequestBuilder { self.set_auth_headers(req) .header("Content-Type", "application/json") } /// Build a full URL given a resource fn build_url(&self, resource: &str) -> MResult<String> { let endpoint = config::get::<String>(&["api", "v6", "endpoint"])?; let mut url = String::with_capacity(endpoint.len() + resource.len()); url.push_str(endpoint.trim_end_matches('/')); url.push_str(resource); Ok(url) } /// Send out an API request pub fn call<T: DeserializeOwned>(&self, method: Method, resource: &str, builder: ApiReq) -> MResult<T> { debug!("api::call() -- req: {} {}", method, resource); let ApiReq {headers, timeout, data} = builder; let url = self.build_url(resource)?; let mut client_builder = Client::builder() .timeout(timeout); match config::get::<Option<String>>(&["api", "proxy"]) { Ok(x) => { if let Some(proxy_cfg) = x { client_builder = client_builder.proxy(Proxy::http(format!("http://{}", proxy_cfg).as_str())?); } } Err(_) => {} } let client = client_builder.build()?; let req = client.request(method, Url::parse(url.as_str())?); let req = self.set_standard_headers(req) .headers(headers) .json(&data) .build()?; let callinfo = CallInfo::new(req.method().clone(), String::from(req.url().as_str())); let res = client.execute(req); res .map_err(|e| { tomerr!(e) }) .and_then(|mut res| { let mut out = String::new(); let str_res = res.read_to_string(&mut out) .map_err(|e| tomerr!(e)) .and_then(move |_| Ok(out)); if !res.status().is_success() { let errstr = match str_res { Ok(x) => x, Err(e) => { error!("api::call() -- problem grabbing error message: {}", e); String::from("<unknown>") } }; return Err(MError::Api(res.status(), errstr)); } str_res.map(move |x| (x, res)) }) .map(|(out, res)| { info!("api::call() -- res({}): {:?} {} {}", out.len(), res.status().as_u16(), &callinfo.method, &callinfo.resource); trace!(" api::call() -- body: {}", out); out }) .and_then(|out| jedi::parse(&out).map_err(|e| tomerr!(e))) } /// Convenience function for api.call(GET) pub fn get<T: DeserializeOwned>(&self, resource: &str, builder: ApiReq) -> MResult<T> { self.call(Method::GET, resource, builder) } /// Convenience function for api.call(POST) pub fn post<T: DeserializeOwned>(&self, resource: &str, builder: ApiReq) -> MResult<T> { self.call(Method::POST, resource, builder) } /// Convenience function for api.call(PUT) #[allow(dead_code)] pub fn put<T: DeserializeOwned>(&self, resource: &str, builder: ApiReq) -> MResult<T> { self.call(Method::PUT, resource, builder) } /// Convenience function for api.call(DELETE) #[allow(dead_code)] pub fn delete<T: DeserializeOwned>(&self, resource: &str, builder: ApiReq) -> MResult<T> { self.call(Method::DELETE, resource, builder) } 
}<|fim▁end|>
pub fn new() -> Self { ApiReq { headers: HeaderMap::new(), timeout: Duration::new(10, 0),
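set_auth in the row above builds a standard HTTP Basic credential: the fixed user name "user", a colon, the auth token, base64-encoded and prefixed with "Basic ". The same value in a few lines of Python (an illustrative mirror of the Rust, not part of the crate):

import base64

def basic_auth_header(token):
    # Mirrors Api::set_auth: "user:" + token -> base64 -> "Basic <b64>".
    raw = ("user:" + token).encode("utf-8")
    return "Basic " + base64.b64encode(raw).decode("ascii")

print(basic_auth_header("s3cret"))  # Basic dXNlcjpzM2NyZXQ=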
<|file_name|>service.py<|end_file_name|><|fim▁begin|>import settings import mysql.connector from domain.domain import Article from domain.domain import Project 
<|fim▁hole|>import service.database as db # Article management class ArticleService: # Query the most recently published articles def query_most_published_article(self): conn = db.get_connection() sql = "".join(["select a.id as id,a.author_id as author_id,", "u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,", "a.publish_time as publish_time,a.last_update_time as last_update_time", " from article as a left join user as u on a.author_id=u.id", " order by a.publish_time desc limit 0,%(page_size)s"]) cursor = conn.cursor() cursor.execute(sql, {"page_size": settings.app_settings["page_size"]}) articles = None for (id, author_id, author_name, title, content, create_time, publish_time, last_update_time) in cursor: if (not articles): articles = [] article = Article() articles.append(article) article.id = id if (author_id): u = User() article.author = u u.id = author_id u.name = author_name article.title = title article.content = content article.create_time = create_time article.publish_time = publish_time article.last_update_time = last_update_time cursor.close() conn.close() return articles # Query the article list by tag def query_article_by_tag(self, tag_id): if (not tag_id): return None _tag_id = None try: _tag_id = int(tag_id) except ValueError: return None sql = "".join(["select a.id as id,a.author_id as author_id,u.name as author_name", ",a.title as title,a.create_time as create_time,a.publish_time as publish_time", ",a.last_update_time as last_update_time", " from article as a left join user as u on a.author_id=u.id", " where a.publish_time is not null and a.id in (select article_id from article_tag where tag_id=%(tag_id)s)"]) conn = db.get_connection() cursor = conn.cursor() cursor.execute(sql, {"tag_id": _tag_id}) articles = None for (id, author_id, author_name, title, create_time, publish_time, last_update_time) in cursor: if (not articles): articles = [] a = Article() articles.append(a) a.id = id a.title = title a.create_time = create_time a.publish_time = publish_time a.last_update_time = last_update_time if (author_id): u = User() a.author = u u.id = author_id u.name = author_name cursor.close() conn.close() return articles # Find an article by its ID def find(self, article_id): conn = db.get_connection() sql = "".join(["select a.id as id,a.author_id as author_id,", "u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,", "a.publish_time as publish_time,a.last_update_time as last_update_time", " from article as a left join user as u on a.author_id=u.id", " where a.id=%(article_id)s"]) cursor = conn.cursor() cursor.execute(sql, {"article_id": article_id}) article = None for (id, author_id, author_name, title, content, create_time, publish_time, last_update_time) in cursor: if (not article): article = Article() article.id = id article.title = title article.content = content article.create_time = create_time article.publish_time = publish_time article.last_update_time = last_update_time if (author_id): u = User() article.author = u u.id = author_id u.name = author_name cursor.close() conn.close() return article # Add a new article def add(self, article): # implement return 1 # Tag management class TagService: def list_all(self): conn = db.get_connection() if (not conn): return None sql = "".join(["select t.id as id, t.name as name, t.author_id as author_id, u.name as author_name", ",t.create_time as create_time,t.last_update_time as last_update_time", " 
from tag as t left join user as u on t.author_id=u.id order by t.create_time desc"]) cursor = conn.cursor() cursor.execute(sql) tags = None for (id, name, author_id, author_name, create_time, last_update_time) in cursor: if (not tags): tags = [] t = Tag() tags.append(t) t.id = id t.name = name t.create_time = create_time t.last_update_time = last_update_time if (author_id): u = User() t.author = u u.id = author_id u.name = author_name cursor.close() conn.close() return tags article_service = ArticleService() tag_service = TagService()<|fim▁end|>
from domain.domain import User from domain.domain import Tag
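Every query method in the row above repeats one pattern: a parameterized LEFT JOIN against user, then row-by-row mapping into domain objects, attaching an author only when author_id came back non-NULL. The core of that mapping, isolated (dicts stand in for the Article/User domain classes):

def map_articles(rows):
    # rows: (id, author_id, author_name, title) tuples from the LEFT JOIN;
    # the author columns are None when no user row matched.
    articles = []
    for article_id, author_id, author_name, title in rows:
        article = {"id": article_id, "title": title, "author": None}
        if author_id:  # same truthiness test as `if (author_id):` above
            article["author"] = {"id": author_id, "name": author_name}
        articles.append(article)
    return articles

print(map_articles([(1, 7, "alice", "Hello"), (2, None, None, "No author")]))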
<|file_name|>server_list.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Matthew Collins // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fs; use std::thread; use std::sync::mpsc; use std::rc::Rc; use std::cell::RefCell; use ui; use render; use format; use format::{Component, TextComponent}; use protocol; use serde_json; use time; use image; use rustc_serialize::base64::FromBase64; use rand; use rand::Rng; pub struct ServerList { elements: Option<UIElements>, disconnect_reason: Option<Component>, needs_reload: Rc<RefCell<bool>>, } struct UIElements { logo: ui::logo::Logo, elements: ui::Collection, servers: Vec<Server>, } struct Server { collection: ui::Collection, back: ui::ElementRef<ui::Image>, offset: f64, y: f64, motd: ui::ElementRef<ui::Formatted>, ping: ui::ElementRef<ui::Image>, players: ui::ElementRef<ui::Text>, version: ui::ElementRef<ui::Formatted>, icon: ui::ElementRef<ui::Image>, icon_texture: Option<String>, done_ping: bool, recv: mpsc::Receiver<PingInfo>, } struct PingInfo { motd: format::Component,<|fim▁hole|> online: i32, max: i32, protocol_version: i32, protocol_name: String, favicon: Option<image::DynamicImage>, } impl Server { fn update_position(&mut self) { if self.offset < 0.0 { self.y = self.offset * 200.0; } else { self.y = self.offset * 100.0; } } } impl ServerList { pub fn new(disconnect_reason: Option<Component>) -> ServerList { ServerList { elements: None, disconnect_reason: disconnect_reason, needs_reload: Rc::new(RefCell::new(false)), } } fn reload_server_list(&mut self, renderer: &mut render::Renderer, ui_container: &mut ui::Container) { let elements = self.elements.as_mut().unwrap(); *self.needs_reload.borrow_mut() = false; { // Clean up previous list entries and icons. 
let mut tex = renderer.get_textures_ref().write().unwrap(); for server in &mut elements.servers { server.collection.remove_all(ui_container); if let Some(ref icon) = server.icon_texture { tex.remove_dynamic(&icon); } } } elements.servers.clear(); let file = match fs::File::open("servers.json") { Ok(val) => val, Err(_) => return, }; let servers_info: serde_json::Value = serde_json::from_reader(file).unwrap(); let servers = servers_info.find("servers").unwrap().as_array().unwrap(); let mut offset = 0.0; // Default icon whilst we ping the servers or if the server doesn't provide one let default_icon = render::Renderer::get_texture(renderer.get_textures_ref(), "misc/unknown_server"); // General gui icons let icons = render::Renderer::get_texture(renderer.get_textures_ref(), "gui/icons"); for (index, svr) in servers.iter().enumerate() { let name = svr.find("name").unwrap().as_string().unwrap().to_owned(); let address = svr.find("address").unwrap().as_string().unwrap().to_owned(); let solid = render::Renderer::get_texture(renderer.get_textures_ref(), "steven:solid"); // Everything is attached to this let mut back = ui::Image::new(solid, 0.0, offset * 100.0, 700.0, 100.0, 0.0, 0.0, 1.0, 1.0, 0, 0, 0); back.set_a(100); back.set_v_attach(ui::VAttach::Middle); back.set_h_attach(ui::HAttach::Center); let (send, recv) = mpsc::channel::<PingInfo>(); let mut server = Server { collection: ui::Collection::new(), back: ui_container.add(back), offset: offset, y: 0.0, done_ping: false, recv: recv, motd: Default::default(), ping: Default::default(), players: Default::default(), version: Default::default(), icon: Default::default(), icon_texture: None, }; server.collection.add(server.back.clone()); server.update_position(); // Make whole entry interactable { let back = ui_container.get_mut(&server.back); let back_ref = server.back.clone(); let address = address.clone(); back.add_hover_func(move |over, _, ui_container| { let back = ui_container.get_mut(&back_ref); back.set_a(if over { 200 } else { 100 }); }); back.add_click_func(move |game, _| { game.screen_sys.replace_screen(Box::new(super::connecting::Connecting::new(&address))); game.connect_to(&address); }); } // Server name let mut text = ui::Text::new(renderer, &name, 100.0, 5.0, 255, 255, 255); text.set_parent(&server.back); server.collection.add(ui_container.add(text)); // Server icon let mut icon = ui::Image::new( default_icon.clone(), 5.0, 5.0, 90.0, 90.0, 0.0, 0.0, 1.0, 1.0, 255, 255, 255 ); icon.set_parent(&server.back); server.icon = server.collection.add(ui_container.add(icon)); // Ping indicator let mut ping = ui::Image::new( icons.clone(), 5.0, 5.0, 20.0, 16.0, 0.0, 56.0 / 256.0, 10.0 / 256.0, 8.0 / 256.0, 255, 255, 255 ); ping.set_h_attach(ui::HAttach::Right); ping.set_parent(&server.back); server.ping = server.collection.add(ui_container.add(ping)); // Player count let mut players = ui::Text::new(renderer, "???", 30.0, 5.0, 255, 255, 255); players.set_h_attach(ui::HAttach::Right); players.set_parent(&server.back); server.players = server.collection.add(ui_container.add(players)); // Server's message of the day let mut motd = ui::Formatted::with_width_limit(renderer, Component::Text(TextComponent::new("Connecting...")), 100.0, 23.0, 700.0 - (90.0 + 10.0 + 5.0)); motd.set_parent(&server.back); server.motd = server.collection.add(ui_container.add(motd)); // Version information let mut version = ui::Formatted::with_width_limit(renderer, Component::Text(TextComponent::new("")), 100.0, 5.0, 700.0 - (90.0 + 10.0 + 5.0)); 
version.set_v_attach(ui::VAttach::Bottom); version.set_parent(&server.back); server.version = server.collection.add(ui_container.add(version)); // Delete entry button let (mut del, mut txt) = super::new_button_text(renderer, "X", 0.0, 0.0, 25.0, 25.0); del.set_v_attach(ui::VAttach::Bottom); del.set_h_attach(ui::HAttach::Right); del.set_parent(&server.back); let re = ui_container.add(del); txt.set_parent(&re); let tre = ui_container.add(txt); super::button_action(ui_container, re.clone(), Some(tre.clone()), |_,_| {}); // TODO: delete entry server.collection.add(re); server.collection.add(tre); // Edit entry button let (mut edit, mut txt) = super::new_button_text(renderer, "E", 25.0, 0.0, 25.0, 25.0); edit.set_v_attach(ui::VAttach::Bottom); edit.set_h_attach(ui::HAttach::Right); edit.set_parent(&server.back); let re = ui_container.add(edit); txt.set_parent(&re); let tre = ui_container.add(txt); let index = index; let sname = name.clone(); let saddr = address.clone(); super::button_action(ui_container, re.clone(), Some(tre.clone()), move |game,_|{ let sname = sname.clone(); let saddr = saddr.clone(); game.screen_sys.replace_screen(Box::new(super::edit_server::EditServerEntry::new( Some((index, sname, saddr)) ))); }); server.collection.add(re); server.collection.add(tre); elements.servers.push(server); offset += 1.0; // Don't block the main thread whilst pinging the server thread::spawn(move || { match protocol::Conn::new(&address).and_then(|conn| conn.do_status()) { Ok(res) => { let mut desc = res.0.description; format::convert_legacy(&mut desc); let favicon = if let Some(icon) = res.0.favicon { let data = icon["data:image/png;base64,".len()..] .from_base64() .unwrap(); Some(image::load_from_memory(&data).unwrap()) } else { None }; drop(send.send(PingInfo { motd: desc, ping: res.1, exists: true, online: res.0.players.online, max: res.0.players.max, protocol_version: res.0.version.protocol, protocol_name: res.0.version.name, favicon: favicon, })); } Err(err) => { let e = format!("{}", err); let mut msg = TextComponent::new(&e); msg.modifier.color = Some(format::Color::Red); drop(send.send(PingInfo { motd: Component::Text(msg), ping: time::Duration::seconds(99999), exists: false, online: 0, max: 0, protocol_version: 0, protocol_name: "".to_owned(), favicon: None, })); } } }); } } } impl super::Screen for ServerList { fn on_active(&mut self, renderer: &mut render::Renderer, ui_container: &mut ui::Container) { let logo = ui::logo::Logo::new(renderer.resources.clone(), renderer, ui_container); let mut elements = ui::Collection::new(); // Refresh the server list let (mut refresh, mut txt) = super::new_button_text(renderer, "Refresh", 300.0, -50.0 - 15.0, 100.0, 30.0); refresh.set_v_attach(ui::VAttach::Middle); refresh.set_h_attach(ui::HAttach::Center); let re = ui_container.add(refresh); txt.set_parent(&re); let tre = ui_container.add(txt); let nr = self.needs_reload.clone(); super::button_action(ui_container, re.clone(), Some(tre.clone()), move |_, _| { *nr.borrow_mut() = true; }); elements.add(re); elements.add(tre); // Add a new server to the list let (mut add, mut txt) = super::new_button_text( renderer, "Add", 200.0, -50.0 - 15.0, 100.0, 30.0 ); add.set_v_attach(ui::VAttach::Middle); add.set_h_attach(ui::HAttach::Center); let re = ui_container.add(add); txt.set_parent(&re); let tre = ui_container.add(txt); super::button_action(ui_container, re.clone(), Some(tre.clone()), |game, _|{ game.screen_sys.replace_screen(Box::new(super::edit_server::EditServerEntry::new( None ))); }); elements.add(re); 
elements.add(tre); // Options menu let mut options = ui::Button::new(5.0, 25.0, 40.0, 40.0); options.set_v_attach(ui::VAttach::Bottom); options.set_h_attach(ui::HAttach::Right); let re = ui_container.add(options); let mut cog = ui::Image::new(render::Renderer::get_texture(renderer.get_textures_ref(), "steven:gui/cog"), 0.0, 0.0, 40.0, 40.0, 0.0, 0.0, 1.0, 1.0, 255, 255, 255); cog.set_parent(&re); cog.set_v_attach(ui::VAttach::Middle); cog.set_h_attach(ui::HAttach::Center); super::button_action(ui_container, re.clone(), None, | game, _ | { game.screen_sys.add_screen(Box::new(super::SettingsMenu::new(game.console.clone(), false))); }); elements.add(re); elements.add(ui_container.add(cog)); // Disclaimer let mut warn = ui::Text::new(renderer, "Not affiliated with Mojang/Minecraft", 5.0, 5.0, 255, 200, 200); warn.set_v_attach(ui::VAttach::Bottom); warn.set_h_attach(ui::HAttach::Right); elements.add(ui_container.add(warn)); // If we are kicked from a server display the reason if let Some(ref disconnect_reason) = self.disconnect_reason { let mut dis_msg = ui::Text::new(renderer, "Disconnected", 0.0, 32.0, 255, 0, 0); dis_msg.set_h_attach(ui::HAttach::Center); let mut dis = ui::Formatted::with_width_limit(renderer, disconnect_reason.clone(), 0.0, 48.0, 600.0); dis.set_h_attach(ui::HAttach::Center); let mut back = ui::Image::new(render::Renderer::get_texture(renderer.get_textures_ref(), "steven:solid"), 0.0, 30.0, dis.get_width().max(dis_msg.get_width()) + 4.0, dis.get_height() + 4.0 + 16.0, 0.0, 0.0, 1.0, 1.0, 0, 0, 0); back.set_a(100); back.set_h_attach(ui::HAttach::Center); elements.add(ui_container.add(back)); elements.add(ui_container.add(dis)); elements.add(ui_container.add(dis_msg)); } self.elements = Some(UIElements { logo: logo, elements: elements, servers: Vec::new(), }); self.reload_server_list(renderer, ui_container); } fn on_deactive(&mut self, renderer: &mut render::Renderer, ui_container: &mut ui::Container) { // Clean up { let elements = self.elements.as_mut().unwrap(); elements.logo.remove(ui_container); elements.elements.remove_all(ui_container); let mut tex = renderer.get_textures_ref().write().unwrap(); for server in &mut elements.servers { if let Some(ref icon) = server.icon_texture { tex.remove_dynamic(&icon); } server.collection.remove_all(ui_container); } elements.servers.clear(); } self.elements = None } fn tick(&mut self, delta: f64, renderer: &mut render::Renderer, ui_container: &mut ui::Container) -> Option<Box<super::Screen>> { if *self.needs_reload.borrow() { self.reload_server_list(renderer, ui_container); } let elements = self.elements.as_mut().unwrap(); elements.logo.tick(renderer, ui_container); for s in &mut elements.servers { // Animate the entries { let back = ui_container.get_mut(&s.back); let dy = s.y - back.get_y(); if dy * dy > 1.0 { let y = back.get_y(); back.set_y(y + delta * dy * 0.1); } else { back.set_y(s.y); } } // Keep checking to see if the server has finished being // pinged if !s.done_ping { match s.recv.try_recv() { Ok(res) => { s.done_ping = true; { let motd = ui_container.get_mut(&s.motd); motd.set_component(renderer, res.motd); } { let ping = ui_container.get_mut(&s.ping); // Selects the icon for the given ping range let y = match res.ping.num_milliseconds() { _x @ 0 ... 75 => 16.0 / 256.0, _x @ 76 ... 150 => 24.0 / 256.0, _x @ 151 ... 225 => 32.0 / 256.0, _x @ 226 ... 350 => 40.0 / 256.0, _x @ 351 ... 
999 => 48.0 / 256.0, _ => 56.0 / 256.0, }; ping.set_t_y(y); } if res.exists { { let players = ui_container.get_mut(&s.players); let txt = if res.protocol_version == protocol::SUPPORTED_PROTOCOL { players.set_g(255); players.set_b(255); format!("{}/{}", res.online, res.max) } else { players.set_g(85); players.set_b(85); format!("Out of date {}/{}", res.online, res.max) }; players.set_text(renderer, &txt); } { let version = ui_container.get_mut(&s.version); let mut txt = TextComponent::new(&res.protocol_name); txt.modifier.color = Some(format::Color::Yellow); let mut msg = Component::Text(txt); format::convert_legacy(&mut msg); version.set_component(renderer, msg); } } if let Some(favicon) = res.favicon { let name: String = rand::thread_rng() .gen_ascii_chars() .take(30) .collect(); let tex = renderer.get_textures_ref(); s.icon_texture = Some(name.clone()); let icon_tex = tex.write() .unwrap() .put_dynamic(&name, favicon); let icon = ui_container.get_mut(&s.icon); icon.set_texture(icon_tex); } } Err(mpsc::TryRecvError::Disconnected) => { s.done_ping = true; let motd = ui_container.get_mut(&s.motd); let mut txt = TextComponent::new("Channel dropped"); txt.modifier.color = Some(format::Color::Red); motd.set_component(renderer, Component::Text(txt)); } _ => {} } } } None } fn on_scroll(&mut self, _: f64, y: f64) { let elements = self.elements.as_mut().unwrap(); if elements.servers.is_empty() { return; } let mut diff = y / 1.0; { let last = elements.servers.last().unwrap(); if last.offset + diff <= 2.0 { diff = 2.0 - last.offset; } let first = elements.servers.first().unwrap(); if first.offset + diff >= 0.0 { diff = -first.offset; } } for s in &mut elements.servers { s.offset += diff; s.update_position(); } } }<|fim▁end|>
ping: time::Duration, exists: bool,
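The match on res.ping.num_milliseconds() in the tick handler above buckets round-trip latency into one of six 8-pixel sprites in gui/icons, keyed by texture y-offset. The same bucketing written out in Python (thresholds copied from the match arms; a sketch, not the game's code):

def ping_icon_t_y(ping_ms):
    # Latency buckets -> y offset (out of a 256-texel atlas) into gui/icons.
    for upper, y in [(75, 16.0), (150, 24.0), (225, 32.0), (350, 40.0), (999, 48.0)]:
        if 0 <= ping_ms <= upper:
            return y / 256.0
    return 56.0 / 256.0  # out of range / timed out

assert ping_icon_t_y(40) == 16.0 / 256.0
assert ping_icon_t_y(200) == 32.0 / 256.0
assert ping_icon_t_y(99999) == 56.0 / 256.0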