<|file_name|>jscsspFontFaceRule.js<|end_file_name|><|fim▁begin|>/* kJscsspFONT_FACE_RULE */ function jscsspFontFaceRule() { this.type = kJscsspFONT_FACE_RULE; this.parsedCssText = null; this.descriptors = []; this.parentStyleSheet = null; this.parentRule = null; } jscsspFontFaceRule.prototype = { cssText: function() { var rv = gTABS + "@font-face {\n"; var preservedGTABS = gTABS; gTABS += " "; for (var i = 0; i < this.descriptors.length; i++) rv += gTABS + this.descriptors[i].cssText() + "\n"; gTABS = preservedGTABS; return rv + gTABS + "}"; },<|fim▁hole|> setCssText: function(val) { var sheet = {cssRules: []}; var parser = new CSSParser(val); var token = parser.getToken(true, true); if (token.isAtRule("@font-face")) { if (parser.parseFontFaceRule(token, sheet)) { var newRule = sheet.cssRules[0]; this.descriptors = newRule.descriptors; this.parsedCssText = newRule.parsedCssText; return; } } throw DOMException.SYNTAX_ERR; } };<|fim▁end|>
<|file_name|>MarketResourceFormatter.java<|end_file_name|><|fim▁begin|>package cm.aptoide.pt.util; import android.content.Context; public class MarketResourceFormatter { private String marketName; <|fim▁hole|> } public String formatString(Context context, int id, String... optParameters) { return context.getString(id); } }<|fim▁end|>
public MarketResourceFormatter(String marketName) { this.marketName = marketName;
<|file_name|>dnn.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Deep Neural Network estimators.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib import layers from tensorflow.contrib.learn.python.learn.estimators import _sklearn from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined from tensorflow.contrib.learn.python.learn.estimators.base import DeprecatedMixin from tensorflow.python.ops import nn class DNNClassifier(dnn_linear_combined.DNNLinearCombinedClassifier): """A classifier for TensorFlow DNN models. Example: ``` installed_app_id = sparse_column_with_hash_bucket("installed_id", 1e6) impression_app_id = sparse_column_with_hash_bucket("impression_id", 1e6) installed_emb = embedding_column(installed_app_id, dimension=16, combiner="sum") impression_emb = embedding_column(impression_app_id, dimension=16, combiner="sum") estimator = DNNClassifier( feature_columns=[installed_emb, impression_emb], hidden_units=[1024, 512, 256]) # Input builders def input_fn_train(): # returns x, Y pass estimator.fit(input_fn=input_fn_train) def input_fn_eval(): # returns x, Y pass estimator.evaluate(input_fn=input_fn_eval) estimator.predict(x=x) ``` Input of `fit` and `evaluate` should have the following features, otherwise there will be a `KeyError`: if `weight_column_name` is not `None`, a feature with `key=weight_column_name` whose value is a `Tensor`. for each `column` in `feature_columns`: - if `column` is a `SparseColumn`, a feature with `key=column.name` whose `value` is a `SparseTensor`. - if `column` is a `RealValuedColumn`, a feature with `key=column.name` whose `value` is a `Tensor`. - if `feature_columns` is None, then `input` must contain only real valued `Tensor`. Parameters: hidden_units: List of hidden units per layer. All layers are fully connected. Ex. [64, 32] means first layer has 64 nodes and second one has 32. feature_columns: An iterable containing all the feature columns used by the model. All items in the set should be instances of classes derived from `FeatureColumn`. model_dir: Directory to save model parameters, graph, etc. n_classes: Number of target classes. Default is binary classification. It must be greater than 1. weight_column_name: A string defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. optimizer: An instance of `tf.Optimizer` used to train the model. If `None`, will use an Adagrad optimizer. activation_fn: Activation function applied to each layer. If `None`, will use `tf.nn.relu`. dropout: When not None, the probability we will drop out a given coordinate.
""" def __init__(self, hidden_units, feature_columns=None, model_dir=None, n_classes=2, weight_column_name=None, optimizer=None, activation_fn=nn.relu, dropout=None, config=None): super(DNNClassifier, self).__init__(model_dir=model_dir, n_classes=n_classes, weight_column_name=weight_column_name, dnn_feature_columns=feature_columns, dnn_optimizer=optimizer, dnn_hidden_units=hidden_units, dnn_activation_fn=activation_fn, dnn_dropout=dropout, config=config) def _get_train_ops(self, features, targets): """See base class.""" if self._dnn_feature_columns is None: self._dnn_feature_columns = layers.infer_real_valued_columns(features) return super(DNNClassifier, self)._get_train_ops(features, targets) @property def weights_(self): return self.dnn_weights_ @property def bias_(self): return self.dnn_bias_ class DNNRegressor(dnn_linear_combined.DNNLinearCombinedRegressor): """A regressor for TensorFlow DNN models. Example: ``` installed_app_id = sparse_column_with_hash_bucket("installed_id", 1e6) impression_app_id = sparse_column_with_hash_bucket("impression_id", 1e6) installed_emb = embedding_column(installed_app_id, dimension=16,<|fim▁hole|> combiner="sum") estimator = DNNRegressor( feature_columns=[installed_emb, impression_emb], hidden_units=[1024, 512, 256]) # Input builders def input_fn_train: # returns x, Y pass estimator.fit(input_fn=input_fn_train) def input_fn_eval: # returns x, Y pass estimator.evaluate(input_fn=input_fn_eval) estimator.predict(x=x) ``` Input of `fit` and `evaluate` should have following features, otherwise there will be a `KeyError`: if `weight_column_name` is not `None`, a feature with `key=weight_column_name` whose value is a `Tensor`. for each `column` in `feature_columns`: - if `column` is a `SparseColumn`, a feature with `key=column.name` whose `value` is a `SparseTensor`. - if `column` is a `RealValuedColumn, a feature with `key=column.name` whose `value` is a `Tensor`. - if `feauture_columns` is None, then `input` must contains only real valued `Tensor`. Parameters: hidden_units: List of hidden units per layer. All layers are fully connected. Ex. [64, 32] means first layer has 64 nodes and second one has 32. feature_columns: An iterable containing all the feature columns used by the model. All items in the set should be instances of classes derived from `FeatureColumn`. model_dir: Directory to save model parameters, graph and etc. weight_column_name: A string defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. optimizer: An instance of `tf.Optimizer` used to train the model. If `None`, will use an Adagrad optimizer. activation_fn: Activation function applied to each layer. If `None`, will use `tf.nn.relu`. dropout: When not None, the probability we will drop out a given coordinate. 
""" def __init__(self, hidden_units, feature_columns=None, model_dir=None, weight_column_name=None, optimizer=None, activation_fn=nn.relu, dropout=None, config=None): super(DNNRegressor, self).__init__(model_dir=model_dir, weight_column_name=weight_column_name, dnn_feature_columns=feature_columns, dnn_optimizer=optimizer, dnn_hidden_units=hidden_units, dnn_activation_fn=activation_fn, dnn_dropout=dropout, config=config) def _get_train_ops(self, features, targets): """See base class.""" if self._dnn_feature_columns is None: self._dnn_feature_columns = layers.infer_real_valued_columns(features) return super(DNNRegressor, self)._get_train_ops(features, targets) @property def weights_(self): return self.dnn_weights_ @property def bias_(self): return self.dnn_bias_ # TensorFlowDNNClassifier and TensorFlowDNNRegressor are deprecated. class TensorFlowDNNClassifier(DeprecatedMixin, DNNClassifier, _sklearn.ClassifierMixin): pass class TensorFlowDNNRegressor(DeprecatedMixin, DNNRegressor, _sklearn.RegressorMixin): pass<|fim▁end|>
combiner="sum") impression_emb = embedding_column(impression_app_id, dimension=16,
<|file_name|>utf8.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 Luis Cáceres // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use { rust_icu_common as common, rust_icu_sys as sys, rust_icu_sys::versioned_function, rust_icu_sys::*, std::ops::Range, std::os::raw, }; use super::{FeedConverterRaw, FeedResult, UConverter}; /// This is a convenience type that provides conversion functions directly to/from UTF-8. /// /// This type wraps around `ucnv_convertEx`. It keeps two converters for the specified encoding and for /// UTF-8, as well as the UTF-16 pivot buffers used by `ucnv_convertEx`. /// /// Its interface is analogous to the interface of [UConverter], so for examples and more detailed /// information on its use, refer to the documentation page of [UConverter]. /// /// For convenience, the single-string conversion functions take a `&str` for UTF-8 input and /// give a `String` for UTF-8 output. #[derive(Debug)] pub struct Converter { utf8: UConverter, converter: UConverter, pivot_buffer: Box<[sys::UChar]>, pivot_to: Range<*mut sys::UChar>, pivot_to_source: *mut sys::UChar, pivot_to_target: *mut sys::UChar, pivot_from: Range<*mut sys::UChar>, pivot_from_source: *mut sys::UChar, pivot_from_target: *mut sys::UChar, } unsafe impl Send for Converter {} impl Converter { pub fn open(name: &str) -> Result<Self, common::Error> { let converter = UConverter::open(name)?; let utf8 = UConverter::open("UTF-8")?; let mut pivot_buffer = vec![0u16; 2 * 8192].into_boxed_slice(); let (pivot_to, pivot_from) = pivot_buffer.split_at_mut(8192); let (pivot_to, pivot_from) = (pivot_to.as_mut_ptr_range(), pivot_from.as_mut_ptr_range()); Ok(Self { utf8, converter, pivot_to_source: pivot_to.start, pivot_to_target: pivot_to.start, pivot_to, pivot_from_source: pivot_from.start, pivot_from_target: pivot_from.start, pivot_from, pivot_buffer, }) } pub fn try_clone(&self) -> Result<Self, common::Error> { let utf8 = self.utf8.try_clone()?; let converter = self.converter.try_clone()?; let mut pivot_buffer = self.pivot_buffer.clone(); let (pivot_to, pivot_from) = pivot_buffer.split_at_mut(8192); let (pivot_to, pivot_from) = (pivot_to.as_mut_ptr_range(), pivot_from.as_mut_ptr_range()); // shift the pivot_{to,from}_{source,target} pointers to point to the newly-created buffer let pivot_to_source = unsafe { pivot_to .start .offset(self.pivot_to_source.offset_from(self.pivot_to.start)) }; let pivot_to_target = unsafe { pivot_to .start .offset(self.pivot_to_target.offset_from(self.pivot_to.start)) }; let pivot_from_source = unsafe { pivot_from .start .offset(self.pivot_from_source.offset_from(self.pivot_from.start)) }; let pivot_from_target = unsafe { pivot_from .start .offset(self.pivot_from_target.offset_from(self.pivot_from.start)) }; Ok(Self { utf8, converter, pivot_buffer, pivot_to, pivot_to_source, pivot_to_target, pivot_from, pivot_from_source, pivot_from_target, }) } #[inline(always)] pub fn has_ambiguous_mappings(&self) -> bool { self.converter.has_ambiguous_mappings() } #[inline(always)] pub fn name(&self) -> Result<&str, 
common::Error> { self.converter.name() } pub fn reset(&mut self) { self.reset_to_utf8(); self.reset_from_utf8(); } pub fn reset_to_utf8(&mut self) { self.converter.reset_to_uchars(); self.utf8.reset_from_uchars(); self.pivot_to_source = self.pivot_to.start; self.pivot_to_target = self.pivot_to.start; } pub fn reset_from_utf8(&mut self) { self.utf8.reset_to_uchars(); self.converter.reset_from_uchars(); self.pivot_from_source = self.pivot_from.start; self.pivot_from_target = self.pivot_from.start; } pub fn feed_to_utf8(&mut self, dst: &mut [u8], src: &[u8]) -> FeedResult { self.feed_to(dst, src) } pub fn feed_from_utf8(&mut self, dst: &mut [u8], src: &[u8]) -> FeedResult { self.feed_from(dst, src) } pub fn convert_to_utf8(&mut self, src: &[u8]) -> Result<String, common::Error> { self.reset_to_utf8(); self.convert_to(src) .map(|v| String::from_utf8(v).expect("should be valid UTF-8")) } pub fn convert_from_utf8(&mut self, src: &str) -> Result<Vec<u8>, common::Error> { self.reset_from_utf8(); self.convert_from(src.as_bytes()) } } impl FeedConverterRaw for Converter { // for utf8 type ToUnit = u8; // for other encoding type FromUnit = u8; unsafe fn feed_to_raw( &mut self, dst: &mut Range<*mut Self::ToUnit>, src: &mut Range<*const Self::FromUnit>, should_flush: bool, ) -> sys::UErrorCode { let mut dst_raw = Range { start: dst.start as *mut raw::c_char, end: dst.end as *mut raw::c_char, }; let mut src_raw = Range { start: src.start as *const raw::c_char, end: src.end as *const raw::c_char, }; // ucnv_convertEx documentation indicates it appends a 0-terminator at the end of the // converted output if possible. This does not advance the dst pointer so we don't need to // do anything about it. let mut status = sys::UErrorCode::U_ZERO_ERROR; versioned_function!(ucnv_convertEx)( self.utf8.0.as_ptr(), self.converter.0.as_ptr(), &mut dst_raw.start, dst_raw.end, &mut src_raw.start, src_raw.end, self.pivot_to.start, &mut self.pivot_to_source, &mut self.pivot_to_target, self.pivot_to.end, false.into(), should_flush.into(), &mut status, ); dst.start = dst_raw.start as *mut u8; src.start = src_raw.start as *const u8; status } unsafe fn feed_from_raw( &mut self, dst: &mut Range<*mut Self::FromUnit>, src: &mut Range<*const Self::ToUnit>, should_flush: bool, ) -> sys::UErrorCode { let mut dst_raw = Range { start: dst.start as *mut raw::c_char, end: dst.end as *mut raw::c_char, }; let mut src_raw = Range { start: src.start as *const raw::c_char, end: src.end as *const raw::c_char, }; // ucnv_convertEx documentation indicates it appends a 0-terminator at the end of the // converted output if possible. This does not advance the dst pointer so we don't need to // do anything about it.
let mut status = sys::UErrorCode::U_ZERO_ERROR; versioned_function!(ucnv_convertEx)( self.converter.0.as_ptr(), self.utf8.0.as_ptr(), &mut dst_raw.start, dst_raw.end, &mut src_raw.start, src_raw.end, self.pivot_from.start, &mut self.pivot_from_source, &mut self.pivot_from_target, self.pivot_from.end, false.into(), should_flush.into(), &mut status, ); dst.start = dst_raw.start as *mut u8; src.start = src_raw.start as *const u8; status } } #[cfg(test)] mod tests { use rust_icu_common as common; use rust_icu_sys as sys; use super::Converter; #[test] fn test_shiftjis_utf8_conversion() { const SHIFT_JIS_STRING: [u8; 8] = [0x83, 0x58, 0x81, 0x5B, 0x83, 0x70, 0x81, 0x5B]; const UTF8_STRING: &str = "スーパー"; let mut converter = Converter::open("SHIFT-JIS").unwrap(); assert_eq!( UTF8_STRING, converter .convert_to_utf8(&SHIFT_JIS_STRING) .unwrap() .as_str() ); assert_eq!( SHIFT_JIS_STRING.iter().copied().collect::<Vec<u8>>(), converter.convert_from_utf8(UTF8_STRING).unwrap() ); } #[test] fn test_shiftjis_utf8_feeding() { const UTF8_STRING: &str = "Shift_JIS(シフトジス)は、コンピュータ上で日本語を含む文字列を表現するために\ 用いられる文字コードの一つ。シフトJIS(シフトジス)と表記されることもある。"; let mut converter = Converter::open("SHIFT_JIS").unwrap(); let mut dst_buffer: Vec<u8> = Vec::new(); dst_buffer.resize(1024, 0); let mut dst_chunks = dst_buffer.chunks_mut(8); let mut get_dst_chunk = move || dst_chunks.next(); let mut src_chunks = UTF8_STRING.as_bytes().chunks(6); let mut get_src_chunk = move || src_chunks.next(); let mut dst: &mut [u8] = get_dst_chunk().unwrap(); let mut src: &[u8] = get_src_chunk().unwrap(); loop { let res = converter.feed_from_utf8(dst, src); match res.result { Ok(_) | Err(common::Error::Sys(sys::UErrorCode::U_BUFFER_OVERFLOW_ERROR)) => { dst = dst.split_at_mut(res.dst_consumed).1;<|fim▁hole|> } if dst.is_empty() { dst = get_dst_chunk().unwrap(); } if src.is_empty() { src = match get_src_chunk() { None => break, Some(src) => src, }; } } } }<|fim▁end|>
src = src.split_at(res.src_consumed).1; } _ => panic!("conversion error"),
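The chunked feed loop in the test module above is a general incremental-conversion pattern: feed arbitrarily split input, let the converter buffer incomplete sequences, flush at the end. For comparison, a minimal Python sketch of the same idea using the standard library's incremental codecs (no ICU involved):

```
import codecs

SHIFT_JIS_BYTES = b"\x83\x58\x81\x5b\x83\x70\x81\x5b"  # "スーパー" in Shift-JIS

# Deliberately misaligned 3-byte chunks split multi-byte characters; the
# incremental decoder buffers the partial sequence, which is the kind of
# state the Rust Converter keeps across feed_* calls.
decoder = codecs.getincrementaldecoder("shift_jis")()
out = []
for i in range(0, len(SHIFT_JIS_BYTES), 3):
    out.append(decoder.decode(SHIFT_JIS_BYTES[i:i + 3]))
out.append(decoder.decode(b"", final=True))  # flush, like should_flush = true
assert "".join(out) == "スーパー"
```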
<|file_name|>MZ.rs<|end_file_name|><|fim▁begin|>// fn main() { // let nums = vec![0,1,0,3,12]; // let mut result:Vec<i32> = Vec::new(); // let mut zeros = 0; // for i in &nums { // if *i != 0 { // result.push(*i); // } else { // zeros += 1; // } // } // for _ in 0..zeros { // result.push(0) // } <|fim▁hole|>// } pub fn move_zeroes(nums: &mut Vec<i32>) { let mut result: Vec<i32> = Vec::new(); let mut zeros = 0; for i in nums.clone() { if i != 0 { result.push(i); } else { zeros += 1; } } for _ in 0..zeros { result.push(0) } *nums = result; } pub fn move_zeroes_new(nums: &mut Vec<i32>) { let mut result: Vec<i32> = Vec::new(); let mut zeros = 0; let len = nums.len(); for i in 0..len { if nums[i] != 0 { result.push(nums[i]); } else { zeros += 1; } } for _ in 0..zeros { result.push(0) } *nums = result; } fn main() { let mut testcase = vec![0, 1, 0, 3, 12]; move_zeroes(&mut testcase); println!("{:?}", testcase); }<|fim▁end|>
// println!("{:?}", nums); // println!("{:?}", result);
<|file_name|>sales-monthly-report-router.js<|end_file_name|><|fim▁begin|>var Router = require('restify-router').Router; var db = require("../../../../db"); var ProductionOrderManager = require("dl-module").managers.sales.ProductionOrderManager; var resultFormatter = require("../../../../result-formatter"); var passport = require('../../../../passports/jwt-passport'); const apiVersion = '1.0.0'; function getRouter() { var router = new Router(); router.get("/", passport, function (request, response, next) { db.get().then(db => { var manager = new ProductionOrderManager(db, request.user); var query = request.queryInfo; query.accept =request.headers.accept; manager.getSalesMonthlyReport(query) .then(docs => { var dateFormat = "DD MMM YYYY"; var locale = 'id'; var moment = require('moment'); moment.locale(locale); if ((request.headers.accept || '').toString().indexOf("application/xls") < 0) { for (var a in docs.data) { docs.data[a]._createdDate = moment(new Date(docs.data[a]._createdDate)).format(dateFormat); docs.data[a].deliveryDate = moment(new Date(docs.data[a].deliveryDate)).format(dateFormat); } var result = resultFormatter.ok(apiVersion, 200, docs.data); delete docs.data; result.info = docs; response.send(200, result); } else { var index = 0; var data = []; for (var order of docs.data) { index++; var item = {}; item["No"] = index; item["Sales"] = order._id.sales; item["Januari"] = order.jan.toFixed(2); item["Februari"] = order.feb.toFixed(2); item["Maret"] = order.mar.toFixed(2); item["April"] = order.apr.toFixed(2); item["Mei"] = order.mei.toFixed(2); item["Juni"] = order.jun.toFixed(2); item["Juli"] = order.jul.toFixed(2); item["Agustus"] = order.agu.toFixed(2); item["September"] = order.sep.toFixed(2); item["Oktober"] = order.okt.toFixed(2); item["November"] = order.nov.toFixed(2); item["Desember"] = order.des.toFixed(2); item["Total"] = order.totalOrder.toFixed(2); data.push(item); } var options = { "No": "number", "Sales": "string",<|fim▁hole|> "Januari": "string", "Februari": "string", "Maret": "string", "April": "string", "Mei": "string", "Juni": "string", "Juli": "string", "Agustus": "string", "September": "string", "Oktober": "string", "November": "string", "Desember": "string", "Total": "string", }; response.xls(`Sales Monthly Report.xlsx`, data, options); } }) .catch(e => { response.send(500, "gagal ambil data"); }); }) .catch(e => { var error = resultFormatter.fail(apiVersion, 400, e); response.send(400, error); }); }); return router; } module.exports = getRouter; /* SUKSES var Router = require('restify-router').Router; var db = require("../../../../db"); var ProductionOrderManager = require("dl-module").managers.sales.ProductionOrderManager; var resultFormatter = require("../../../../result-formatter"); var passport = require('../../../../passports/jwt-passport'); const apiVersion = '1.0.0'; function getRouter() { var router = new Router(); router.get("/", passport, function (request, response, next) { db.get().then(db => { var manager = new ProductionOrderManager(db, request.user); var query = request.queryInfo; query.accept =request.headers.accept; if(!query.page){ query.page=1; }if(!query.size){ query.size=20; } manager.getSalesMonthlyReport(query) .then(docs => { var dateFormat = "DD MMM YYYY"; var locale = 'id'; var moment = require('moment'); moment.locale(locale); if ((request.headers.accept || '').toString().indexOf("application/xls") < 0) { for (var a in docs.data) { docs.data[a]._createdDate = moment(new Date(docs.data[a]._createdDate)).format(dateFormat); 
docs.data[a].deliveryDate = moment(new Date(docs.data[a].deliveryDate)).format(dateFormat); } var result = resultFormatter.ok(apiVersion, 200, docs.data); delete docs.data; result.info = docs; response.send(200, result); } else { var index = 0; var data = []; for (var order of docs.data) { index++; var item = {}; var firstname = ""; var lastname = ""; if (order.firstname) firstname = order.firstname; if (order.lastname) lastname = order.lastname; item["No"] = index; item["Nomor Sales Contract"] = order.salesContractNo; item["Tanggal Surat Order Produksi"] = moment(new Date(order._createdDate)).format(dateFormat); item["Nomor Surat Order Produksi"] = order.orderNo; item["Jenis Order"] = order.orderType; item["Jenis Proses"] = order.processType; item["Buyer"] = order.buyer; item["Tipe Buyer"] = order.buyerType; item["Jumlah Order"] = order.orderQuantity; item["Satuan"] = order.uom; item["Acuan Warna / Desain"] = order.colorTemplate; item["Warna Yang Diminta"] = order.colorRequest; item["Jenis Warna"] = order.colorType; item["Jumlah"] = order.quantity; item["Satuan Detail"] = order.uomDetail; item["Tanggal Delivery"] = moment(new Date(order.deliveryDate)).format(dateFormat); item["Staff Penjualan"] = `${firstname} ${lastname}`; item["Status"] = order.status; item["Detail"] = order.detail; data.push(item); } var options = { "No": "number", "Nomor Sales Contract": "string", "Tanggal Surat Order Produksi": "string", "Nomor Surat Order Produksi": "string", "Jenis Order": "string", "Jenis Proses": "string", "Buyer": "string", "Tipe Buyer": "string", "Jumlah Order": "number", "Satuan": "string", "Acuan Warna / Desain": "string", "Warna Yang Diminta": "string", "Jenis Warna": "string", "Jumlah": "number", "Satuan Detail": "string", "Tanggal Delivery": "string", "Staff Penjualan": "string", "Status": "string", "Detail": "string" }; response.xls(`Sales Monthly Report.xlsx`, data, options); // } }) .catch(e => { response.send(500, "gagal ambil data"); }); }) .catch(e => { var error = resultFormatter.fail(apiVersion, 400, e); response.send(400, error); }); }); return router; } module.exports = getRouter; */<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render from django.utils.translation import activate def index(request): # latest_question_list = Question.objects.order_by('-pub_date')[:5] # context = {'latest_question_list': latest_question_list} # activate('pt-br') # print(request.LANGUAGE_CODE) context = {}<|fim▁hole|> return render(request, 'index.html', context)<|fim▁end|>
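The commented-out `activate('pt-br')` above hints at manual locale switching. A short sketch of how `django.utils.translation.activate` is typically used around a render, assuming the usual Django i18n setup (`USE_I18N = True` and a configured `LANGUAGES` list); the view name here is hypothetical:

```
from django.shortcuts import render
from django.utils import translation

def index_localized(request, lang='pt-br'):
    # Force a specific locale for this response, then restore the previous one.
    previous = translation.get_language()
    translation.activate(lang)
    try:
        return render(request, 'index.html', {})
    finally:
        translation.activate(previous)
```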
<|file_name|>builder.py<|end_file_name|><|fim▁begin|>from scapy.all import * from scapy.layers import dhcp6 from time import time def duid(ll_addr): return DUID_LLT(lladdr=ll_addr, timeval=time()) def ias(requested, iface, T1=None, T2=None): return map(lambda r: __build_ia(r, iface, T1, T2), requested) def options(requested): return map(__build_option_by_code, requested) def __build_ia(request, iface, T1=None, T2=None): ia = request.__class__(iaid=request.iaid, T1=(T1 == None and request.T1 or T1), T2=(T2 == None and request.T2 or T2)) ia.ianaopts.append(DHCP6OptIAAddress(addr=str(iface.global_ip()), preflft=300, validlft=300)) return ia def __build_option_by_code(code): opt = __option_klass_by_code(code)() if isinstance(opt, DHCP6OptClientFQDN): opt.fqdn = 'testhost.local.'<|fim▁hole|> elif isinstance(opt, DHCP6OptSNTPServers): opt.sntpservers.append('2001:500:88:200::10') return opt def __option_klass_by_code(code): return getattr(dhcp6, dhcp6.dhcp6opts_by_code[code])<|fim▁end|>
elif isinstance(opt, DHCP6OptDNSDomains): pass elif isinstance(opt, DHCP6OptDNSServers): opt.dnsservers.append('2001:500:88:200::10')
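A short usage sketch for the builder module above. The option codes are the standard DHCPv6 assignments (23 = DNS servers, 24 = domain search list, 31 = SNTP servers, 39 = client FQDN), which should line up with scapy's `dhcp6opts_by_code` table used by `__option_klass_by_code`; treat the exact codes and the module name `builder` as assumptions:

```
# Hypothetical driver script; assumes scapy is installed and the file above
# is importable as `builder`.
from builder import duid, options

client_duid = duid('00:11:22:33:44:55')  # DUID-LLT derived from a MAC address
for opt in options([23, 24, 31, 39]):    # DNS, domain list, SNTP, client FQDN
    opt.show()                           # scapy's human-readable packet dump
```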
<|file_name|>64.py<|end_file_name|><|fim▁begin|><|fim▁hole|>p = 'noobie' if p == 'hacking': print('Hack the planet!') else: print('False') ''' The condition returned False because the value assigned to p is not equal to 'hacking' '''<|fim▁end|>
<|file_name|>NotificationProviderException.java<|end_file_name|><|fim▁begin|>/* * Funambol is a mobile platform developed by Funambol, Inc. * Copyright (C) 2007 Funambol, Inc. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by * the Free Software Foundation with the addition of the following permission * added to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED * WORK IN WHICH THE COPYRIGHT IS OWNED BY FUNAMBOL, FUNAMBOL DISCLAIMS THE * WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301 USA. * * You can contact Funambol, Inc. headquarters at 643 Bair Island Road, Suite * 305, Redwood City, CA 94063, USA, or at email address [email protected]. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License * version 3, these Appropriate Legal Notices must retain the display of the * "Powered by Funambol" logo. If the display of the logo is not reasonably * feasible for technical reasons, the Appropriate Legal Notices must display * the words "Powered by Funambol". */ package com.funambol.ctp.server.notification; /** * * @version $Id: NotificationProviderException.java,v 1.2 2007-11-28 11:26:16 nichele Exp $ */ public class NotificationProviderException extends Exception { /** * Creates a new instance of <code>NotificationProviderException</code> without * detail message. */ public NotificationProviderException() { super(); } /** * Constructs an instance of <code>NotificationProviderException</code> with the * specified detail message. * * @param message the detail message. */ public NotificationProviderException(String message) { super(message); } /** * Constructs an instance of <code>NotificationProviderException</code> with the * specified detail message and the given cause. * * @param message the detail message. * @param cause the cause. */<|fim▁hole|> public NotificationProviderException(String message, Throwable cause) { super(message, cause); } /** * Constructs an instance of <code>NotificationProviderException</code> with the * specified cause. * * @param cause the cause. */ public NotificationProviderException(Throwable cause) { super(cause); } }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import signal as signals import messages from config import config from curio import SignalQueue, TaskGroup, run, spawn, tcp_server # Queue, CancelledError from feeds import ClientStreamFeed from logs import setup_logging from tasks import ControlTask, HeartbeatTask, StatusTask logger = setup_logging(__name__) # TODO: convert all this into a class that is configurable # Make the messaging layer agnostic? # feed = ClientStreamFeed() out_feed = ClientStreamFeed() async def connection_handler(client, addr): logger.info('connection from %s', addr) async with client: client_stream = client.as_stream() async with TaskGroup(wait=any) as workers: # connect client stream to main feed<|fim▁hole|> # May not need this await feed.publish(messages.EXIT) logger.info('connection lost %s', addr) rov_tasks = [] def add_task(Task): task = Task.run() rov_tasks.append(task) add_task(HeartbeatTask) add_task(StatusTask) add_task(ControlTask) async def server(host, port): # Server task groups async with TaskGroup() as group: await group.spawn(feed.dispatcher) await group.spawn(out_feed.dispatcher) await group.spawn(tcp_server, host, port, connection_handler) for rov_task in rov_tasks: await group.spawn(rov_task, out_feed) async def main(host, port): async with SignalQueue(signals.SIGHUP, signals.SIGTERM) as close_signals: logger.info('Starting the server') serv_task = await spawn(server, host, port) # Stop here and wait for any of the close signals signal = await close_signals.get() logger.info('Server shutting down: %s', signal) # cancel all server tasks await serv_task.cancel() if __name__ == '__main__': # from curio.debug import schedtrace # run(main('', 9000), with_monitor=True, debug=schedtrace) try: run(main('', config.get('port'))) except KeyboardInterrupt: pass<|fim▁end|>
await workers.spawn(out_feed.outgoing, client_stream) await workers.spawn(feed.incoming, client_stream) # TODO: incoming feed message parsing
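`ClientStreamFeed` is imported from a `feeds` module that is not shown here. As an assumption about its interface (publish/dispatcher/incoming/outgoing), a minimal fan-out feed can be sketched with curio primitives; this is illustrative, not the project's actual implementation:

```
from curio import Queue

class SimpleFeed:
    """Fan-out feed: every subscriber receives each published message."""

    def __init__(self):
        self._incoming = Queue()
        self._subscribers = set()

    async def publish(self, msg):
        await self._incoming.put(msg)

    async def dispatcher(self):
        # Forward every incoming message to all subscriber queues.
        while True:
            msg = await self._incoming.get()
            for q in self._subscribers:
                await q.put(msg)

    async def outgoing(self, stream):
        # Copy feed messages out to one client's stream until cancelled.
        q = Queue()
        self._subscribers.add(q)
        try:
            while True:
                await stream.write(await q.get())
        finally:
            self._subscribers.discard(q)
```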
<|file_name|>sha1.rs<|end_file_name|><|fim▁begin|>use byteorder::{ ReadBytesExt, WriteBytesExt, BigEndian }; use digest::Digest; use utils::buffer::{ FixedBuffer, FixedBuffer64, StandardPadding }; struct SHA1State { h0: u32, h1: u32, h2: u32, h3: u32, h4: u32 } impl SHA1State { fn new() -> Self { SHA1State { h0: 0x67452301, h1: 0xefcdab89, h2: 0x98badcfe, h3: 0x10325476, h4: 0xc3d2e1f0 } } fn process_block(&mut self, mut data: &[u8]) { assert_eq!(data.len(), 64); let mut words = [0u32; 80]; fn ff(b: u32, c: u32, d: u32) -> u32 { d ^ (b & (c ^ d)) } fn gg(b: u32, c: u32, d: u32) -> u32 { b ^ c ^ d } fn hh(b: u32, c: u32, d: u32) -> u32 { (b & c) | (d & (b | c)) } fn ii(b: u32, c: u32, d: u32) -> u32 { b ^ c ^ d } for i in 0..16 { words[i] = data.read_u32::<BigEndian>().unwrap(); } for i in 16..80 { words[i] = (words[i - 3] ^ words[i - 8] ^ words[i - 14] ^ words[i - 16]).rotate_left(1); } let (mut a, mut b, mut c, mut d, mut e) = (self.h0, self.h1, self.h2, self.h3, self.h4); for (i, &word) in words.iter().enumerate() { let (f, k) = match i { 0 ... 19 => (ff(b, c, d), 0x5a827999), 20 ... 39 => (gg(b, c, d), 0x6ed9eba1), 40 ... 59 => (hh(b, c, d), 0x8f1bbcdc), 60 ... 79 => (ii(b, c, d), 0xca62c1d6), _ => unreachable!(), }; let tmp = a.rotate_left(5) .wrapping_add(f) .wrapping_add(e) .wrapping_add(k) .wrapping_add(word); e = d; d = c; c = b.rotate_left(30); b = a; a = tmp; } self.h0 = self.h0.wrapping_add(a); self.h1 = self.h1.wrapping_add(b); self.h2 = self.h2.wrapping_add(c); self.h3 = self.h3.wrapping_add(d); self.h4 = self.h4.wrapping_add(e); } } pub struct SHA1 { state: SHA1State, buffer: FixedBuffer64,<|fim▁hole|>} impl Default for SHA1 { fn default() -> Self { SHA1 { state: SHA1State::new(), buffer: FixedBuffer64::new(), length: 0 } } } impl Digest for SHA1 { fn update<T: AsRef<[u8]>>(&mut self, data: T) { let data = data.as_ref(); self.length += data.len() as u64; let state = &mut self.state; self.buffer.input(data, |d| state.process_block(d)); } fn output_bits() -> usize { 160 } fn block_size() -> usize { 64 } fn result<T: AsMut<[u8]>>(mut self, mut out: T) { let state = &mut self.state; self.buffer.standard_padding(8, |d| state.process_block(d)); self.buffer.next(8).write_u64::<BigEndian>(self.length * 8).unwrap(); state.process_block(self.buffer.full_buffer()); let mut out = out.as_mut(); assert!(out.len() >= Self::output_bytes()); out.write_u32::<BigEndian>(state.h0).unwrap(); out.write_u32::<BigEndian>(state.h1).unwrap(); out.write_u32::<BigEndian>(state.h2).unwrap(); out.write_u32::<BigEndian>(state.h3).unwrap(); out.write_u32::<BigEndian>(state.h4).unwrap(); } } #[cfg(test)] mod tests { use digest::Digest; use digest::test::Test; use super::SHA1; const TESTS: [Test<'static>; 7] = [ Test { input: "", output: "da39a3ee5e6b4b0d3255bfef95601890afd80709" }, Test { input: "a", output: "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8" }, Test { input: "abc", output: "a9993e364706816aba3e25717850c26c9cd0d89d" }, Test { input: "message digest", output: "c12252ceda8be8994d5fa0290a47231c1d16aae3" }, Test { input: "abcdefghijklmnopqrstuvwxyz", output: "32d10c7b8cf96570ca04ce37f2a19d84240d3a89" }, Test { input: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", output: "761c457bf73b14d27e9e9265c46f4b4dda11f940" }, Test { input: "12345678901234567890123456789012345678901234567890123456789012345678901234567890", output: "50abf5706a150990a08b2c5ea40fa0e585554732" }, ]; #[test] fn test_sha1() { for test in &TESTS { test.test(SHA1::new()); } } }<|fim▁end|>
length: u64
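The test vectors in the Rust module above are the classic SHA-1 reference digests and can be cross-checked against any known-good implementation, for example Python's hashlib:

```
import hashlib

VECTORS = {
    b"": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
    b"abc": "a9993e364706816aba3e25717850c26c9cd0d89d",
    b"message digest": "c12252ceda8be8994d5fa0290a47231c1d16aae3",
}
for msg, expected in VECTORS.items():
    assert hashlib.sha1(msg).hexdigest() == expected
```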
<|file_name|>base.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Django settings for bluebottle project. import os, datetime # Import global settings for overriding without throwing away defaults from django.conf import global_settings from django.utils.translation import ugettext as _ from admin_dashboard import * from .payments import * # Set PROJECT_ROOT to the dir of the current file # Find the project's containing directory and normalize it to refer to # the project's root more easily PROJECT_ROOT = os.path.dirname(os.path.normpath(os.path.join(__file__, '..', '..'))) # DJANGO_PROJECT: the short project name # (defaults to the basename of PROJECT_ROOT) DJANGO_PROJECT = os.path.basename(PROJECT_ROOT.rstrip('/')) DEBUG = True TEST_MEMCACHE = False TEMPLATE_DEBUG = True COMPRESS_TEMPLATES = False ADMINS = ( ('Team Error', '[email protected]'), ) CONTACT_EMAIL = '[email protected]' MANAGERS = ADMINS # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['.onepercentclub.com', '.1procentclub.nl', 'localhost'] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'Europe/Amsterdam' # Available user interface translations # Ref: https://docs.djangoproject.com/en/1.4/ref/settings/#languages # # Default language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en' # This is defined here as a do-nothing function because we can't import # django.utils.translation -- that module depends on the settings. gettext_noop = lambda s: s LANGUAGES = ( ('nl', gettext_noop('Dutch')), ('en', gettext_noop('English')) ) SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # First one is for apps the second for the main templates LOCALE_PATHS = ('../locale', 'locale') # If you set this to False, Django will not use timezone-aware datetimes. # pytz is in requirements.txt because it's "highly recommended" when using # timezone support. # https://docs.djangoproject.com/en/1.4/topics/i18n/timezones/ USE_TZ = True # Static Files and Media # ====================== # # For staticfiles and media, the following convention is used: # # * '/static/media/': Application media default path # * '/static/global/': Global static media # * '/static/assets/<app_name>/': Static assets after running `collectstatic` # # The respective URL's (available only when `DEBUG=True`) are in `urls.py`. # # More information: # https://docs.djangoproject.com/en/1.4/ref/contrib/staticfiles/ # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'static', 'media') # Absolute filesystem path to the directory that will hold PRIVATE user-uploaded files. PRIVATE_MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'private', 'media') # URL that handles the media served from MEDIA_ROOT. 
Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '/static/media/' PRIVATE_MEDIA_URL = '/private/media/' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static', 'assets') # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/assets/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. # You can also name this tuple like: ('css', '/path/to/css') (os.path.join(PROJECT_ROOT, 'static', 'global')), ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ] TEMPLATE_LOADERS = [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', 'apptemplates.Loader', # extend AND override templates ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } # These are basically the default values from the Django configuration, written # as a list for easy manipulation. This way one can: # # 1. Easily add, remove or replace elements in the list, ie. overriding. # 2. Know what the defaults are, if you want to change them right here. This # way you won't have to look them up every time you want to change. # # Note: The first three middleware classes need to be in this order: Session, Locale, Common # http://stackoverflow.com/questions/8092695/404-on-requests-without-trailing-slash-to-i18n-urls MIDDLEWARE_CLASSES = [ 'bluebottle.auth.middleware.UserJwtTokenMiddleware', 'apps.redirects.middleware.RedirectHashCompatMiddleware', 'bluebottle.auth.middleware.AdminOnlyCsrf', # Have a middleware to make sure old cookies still work after we switch to domain-wide cookies. 'bluebottle.utils.middleware.SubDomainSessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'bluebottle.auth.middleware.AdminOnlySessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'bluebottle.auth.middleware.AdminOnlyAuthenticationMiddleware', 'bluebottle.bb_accounts.middleware.LocaleMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # https://docs.djangoproject.com/en/1.4/ref/clickjacking/ 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.transaction.TransactionMiddleware', 'apps.redirects.middleware.RedirectFallbackMiddleware', 'apps.crawlable.middleware.HashbangMiddleware', 'django_tools.middlewares.ThreadLocal.ThreadLocalMiddleware', 'bluebottle.auth.middleware.SlidingJwtTokenMiddleware' ] # Browsers will block our pages from loading in an iframe no matter which site # made the request. This setting can be overridden on a per response or a per # view basis with the @xframe decorators. X_FRAME_OPTIONS = 'DENY' TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + ( # Makes the 'request' variable (the current HttpRequest) available in templates. 
'django.core.context_processors.request',<|fim▁hole|> 'bluebottle.utils.context_processors.git_commit', 'bluebottle.utils.context_processors.conf_settings', 'bluebottle.utils.context_processors.google_maps_api_key', 'bluebottle.utils.context_processors.google_analytics_code', 'bluebottle.utils.context_processors.sentry_dsn', 'bluebottle.utils.context_processors.facebook_auth_settings', 'bluebottle.utils.context_processors.mixpanel_settings', 'social.apps.django_app.context_processors.backends', 'social.apps.django_app.context_processors.login_redirect', ) ROOT_URLCONF = 'onepercentclub.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'onepercentclub.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. os.path.join(PROJECT_ROOT, 'templates') ) INSTALLED_APPS = ( # Django apps 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # 3rd party apps 'django_extensions', 'django_extensions.tests', 'raven.contrib.django.raven_compat', 'djcelery', 'south', # 'django_nose', 'compressor', 'sorl.thumbnail', 'taggit', 'taggit_autocomplete_modified', 'micawber.contrib.mcdjango', # Embedding videos 'templatetag_handlebars', 'rest_framework', 'rest_framework.authtoken', 'polymorphic', 'registration', 'filetransfers', 'loginas', #'social_auth', 'social.apps.django_app.default', # Onepercent app to send POST requests to AFOM 'onepercent_afom', #Widget 'bluebottle.widget', # CMS page contents 'fluent_contents', 'fluent_contents.plugins.text', 'fluent_contents.plugins.oembeditem', 'fluent_contents.plugins.rawhtml', 'django_wysiwyg', 'tinymce', 'statici18n', 'django.contrib.humanize', 'django_tools', # FB Auth 'bluebottle.auth', # Password auth from old PHP site. 
'legacyauth', # Plain Bluebottle apps 'bluebottle.wallposts', 'bluebottle.utils', 'bluebottle.common', 'bluebottle.contentplugins', 'bluebottle.contact', 'bluebottle.geo', 'bluebottle.pages', 'bluebottle.news', 'bluebottle.slides', 'bluebottle.quotes', 'bluebottle.payments', 'bluebottle.payments_docdata', 'bluebottle.payments_logger', 'bluebottle.payments_voucher', 'bluebottle.redirects', # Apps extending Bluebottle base models # These should be before there Bb parents so the templates are overridden 'apps.members', 'apps.tasks', 'apps.projects', 'apps.organizations', 'apps.payouts', # apps overriding bluebottle functionality should come before the bluebottle entries # (template loaders pick the first template they find) 'apps.core', 'apps.bluebottle_salesforce', 'apps.bluebottle_dashboard', 'apps.contentplugins', 'apps.campaigns', 'apps.hbtemplates', 'apps.statistics', 'apps.homepage', 'apps.partners', 'apps.crawlable', 'apps.mchanga', 'apps.recurring_donations', # Bluebottle apps with abstract models 'bluebottle.bb_accounts', 'bluebottle.bb_organizations', 'bluebottle.bb_projects', 'bluebottle.bb_tasks', 'bluebottle.bb_fundraisers', 'bluebottle.bb_donations', 'bluebottle.bb_orders', 'bluebottle.bb_payouts', # Basic Bb implementations 'bluebottle.fundraisers', 'bluebottle.donations', 'bluebottle.orders', # FIXME: Keep these just for migrations 'apps.fund', 'apps.cowry', 'apps.cowry_docdata', # FIXME: Reimplement these apps 'apps.vouchers', # 'apps.sepa', # 'apps.csvimport', # 'apps.accounting', # Custom dashboard 'fluent_dashboard', 'admin_tools', 'admin_tools.theming', 'admin_tools.menu', 'admin_tools.dashboard', 'django.contrib.admin', 'django.contrib.admindocs', ) # Custom User model AUTH_USER_MODEL = 'members.Member' PROJECTS_PROJECT_MODEL = 'projects.Project' PROJECTS_PHASELOG_MODEL = 'projects.ProjectPhaseLog' FUNDRAISERS_FUNDRAISER_MODEL = 'fundraisers.FundRaiser' TASKS_TASK_MODEL = 'tasks.Task' TASKS_SKILL_MODEL = 'tasks.Skill' TASKS_TASKMEMBER_MODEL = 'tasks.TaskMember' TASKS_TASKFILE_MODEL = 'tasks.TaskFile' ORGANIZATIONS_ORGANIZATION_MODEL = 'organizations.Organization' ORGANIZATIONS_DOCUMENT_MODEL = 'organizations.OrganizationDocument' ORGANIZATIONS_MEMBER_MODEL = 'organizations.OrganizationMember' ORDERS_ORDER_MODEL = 'orders.Order' DONATIONS_DONATION_MODEL = 'donations.Donation' PAYOUTS_PROJECTPAYOUT_MODEL = 'payouts.ProjectPayout' PAYOUTS_ORGANIZATIONPAYOUT_MODEL = 'payouts.OrganizationPayout' SOCIAL_AUTH_USER_MODEL = 'members.Member' SOCIAL_AUTH_FACEBOOK_SCOPE = ['email', 'user_friends', 'public_profile', 'user_birthday'] SOCIAL_AUTH_FACEBOOK_EXTRA_DATA = [('birthday', 'birthday')] # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. 
LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'standard': { 'format': "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", 'datefmt': "%d/%b/%Y %H:%M:%S" }, }, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' }, 'payment_logs': { 'level': 'INFO', 'class': 'bluebottle.payments_logger.handlers.PaymentLogHandler', } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, 'bluebottle.salesforce': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, 'payments.payment': { 'handlers': ['mail_admins', 'payment_logs'], 'level': 'INFO', 'propagate': True, }, } } # log errors & warnings import logging logging.basicConfig(level=logging.WARNING, format='[%(asctime)s] %(levelname)-8s %(message)s', datefmt="%d/%b/%Y %H:%M:%S") # Django Celery - asynchronous task server import djcelery djcelery.setup_loader() SOCIAL_AUTH_STRATEGY = 'social.strategies.django_strategy.DjangoStrategy' SOCIAL_AUTH_STORAGE = 'social.apps.django_app.default.models.DjangoStorage' AUTHENTICATION_BACKENDS = ( 'social.backends.facebook.FacebookAppOAuth2', 'social.backends.facebook.FacebookOAuth2', 'django.contrib.auth.backends.ModelBackend', ) # We're using nose because it limits the tests to our apps (i.e. no Django and # 3rd party app tests). We need this because tests in contrib.auth.user are # failing in Django 1.4.1. Here's the ticket for the failing test: # https://code.djangoproject.com/ticket/17966 # The new test runner in Django 1.5 will be more flexible: #https://code.djangoproject.com/ticket/17365 TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = [ '--detailed-errors', '--nologcapture', ] SKIP_BB_FUNCTIONAL_TESTS = True SOUTH_TESTS_MIGRATE = False # Make south shut up during tests # django-compressor http://pypi.python.org/pypi/django_compressor # Compressor is enabled whenever DEBUG is False. STATICFILES_FINDERS += [ # django-compressor staticfiles 'compressor.finders.CompressorFinder', ] # TODO Enable compass here. COMPRESS_OUTPUT_DIR = 'compressed' COMPRESS_CSS_FILTERS = [ 'compressor.filters.css_default.CssAbsoluteFilter', #'compressor.filters.datauri.DataUriFilter', 'compressor.filters.cssmin.CSSMinFilter' ] # Automagic CSS precompilation #COMPRESS_PRECOMPILERS = ( # ('text/coffeescript', 'coffee --compile --stdio'), # ('text/less', 'lessc {infile} {outfile}'), # ('text/x-sass', 'sass {infile} {outfile}'), # ('text/x-scss', 'sass --scss {infile} {outfile}'), #) # The default URL to send users to after login. This will be used when the # 'next' URL parameter hasn't been set. LOGIN_REDIRECT_URL = '/' # Blog/news content configuration FLUENT_CONTENTS_CACHE_OUTPUT = True FLUENT_TEXT_CLEAN_HTML = True FLUENT_TEXT_SANITIZE_HTML = True DJANGO_WYSIWYG_FLAVOR = 'tinymce_advanced' # Required for handlebars_template to work properly USE_EMBER_STYLE_ATTRS = True # Sorl Thumbnail settings # http://sorl-thumbnail.readthedocs.org/en/latest/reference/settings.html THUMBNAIL_QUALITY = 85 # TODO: Configure Sorl with Redis. 
REST_FRAMEWORK = { 'FILTER_BACKEND': 'rest_framework.filters.DjangoFilterBackend', 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', 'rest_framework.authentication.SessionAuthentication', 'rest_framework.authentication.TokenAuthentication', ) } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(days=7), 'JWT_LEEWAY': 0, 'JWT_VERIFY': True, 'JWT_VERIFY_EXPIRATION': True, 'JWT_ALLOW_TOKEN_RENEWAL': True, # After the renewal limit it isn't possible to request a token refresh # => time token first created + renewal limit. 'JWT_TOKEN_RENEWAL_LIMIT': datetime.timedelta(days=90), } # Time between attempts to refresh the jwt token automatically on standard request # TODO: move this setting into the JWT_AUTH settings. JWT_TOKEN_RENEWAL_DELTA = datetime.timedelta(minutes=30) COWRY_RETURN_URL_BASE = 'http://127.0.0.1:8000' COWRY_PAYMENT_METHODS = { 'dd-webmenu': { 'profile': 'webmenu', 'name': 'DocData Web Menu', 'supports_recurring': False, 'supports_single': True, }, 'dd-webdirect': { 'profile': 'webdirect', 'name': 'DocData WebDirect Direct Debit', 'restricted_countries': ('NL',), 'supports_recurring': True, 'supports_single': False, }, } # Default VAT percentage as string (used in payouts) VAT_RATE = '0.21' # Settings for organization bank account. Please set this in secrets.py # SEPA = { # 'iban': '', # 'bic': '', # 'name': '', # 'id': '' # } # Salesforce app settings SALESFORCE_QUERY_TIMEOUT = 3 DATABASE_ROUTERS = [ "salesforce.router.ModelRouter" ] # E-mail settings DEFAULT_FROM_EMAIL = '<[email protected]> 1%Club' # Django-registration settings ACCOUNT_ACTIVATION_DAYS = 4 HTML_ACTIVATION_EMAIL = True # Note this setting is from our forked version. # Functional testing # Selenium and Splinter settings SELENIUM_TESTS = True SELENIUM_WEBDRIVER = 'phantomjs' # Can be any of chrome, firefox, phantomjs FIXTURE_DIRS = [ os.path.join(DJANGO_PROJECT, 'fixtures') ] # PhantomJS for flat page generation. # NOTE: This has nothing to do with testing against phantomjs. CRAWLABLE_PHANTOMJS_DEDICATED_MODE = True # If dedicated mode is enabled, configure the port: CRAWLABLE_PHANTOMJS_DEDICATED_PORT = 8910 # If dedicated mode is disabled, you can specify arguments to start phantomjs. CRAWLABLE_PHANTOMJS_ARGS = [] # Use HTTPS for PhantomJS requests. 
CRAWLABLE_FORCE_HTTPS = True # Send email to console by default EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' STATICI18N_ROOT = os.path.join(PROJECT_ROOT, 'static', 'global') SESSION_COOKIE_NAME = 'bb-session-id' # Support legacy passwords PASSWORD_HASHERS = global_settings.PASSWORD_HASHERS + ( 'legacyauth.hashers.LegacyPasswordHasher', ) # Twitter handles, per language TWITTER_HANDLES = { 'nl': '1procentclub', 'en': '1percentclub', } DEFAULT_TWITTER_HANDLE = TWITTER_HANDLES['nl'] MINIMAL_PAYOUT_AMOUNT = 21.00 SOCIAL_AUTH_PIPELINE = ( 'social.pipeline.social_auth.social_details', 'social.pipeline.social_auth.social_uid', 'social.pipeline.social_auth.auth_allowed', 'social.pipeline.social_auth.social_user', 'social.pipeline.user.get_username', 'social.pipeline.social_auth.associate_by_email', 'social.pipeline.user.create_user', 'social.pipeline.social_auth.associate_user', 'social.pipeline.social_auth.load_extra_data', 'social.pipeline.user.user_details', 'bluebottle.auth.utils.save_profile_picture', 'bluebottle.auth.utils.get_extra_facebook_data', 'bluebottle.auth.utils.send_welcome_mail_pipe' ) AFOM_ENABLED = False SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email', 'first_name', 'last_name', ] SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True SEND_WELCOME_MAIL = True<|fim▁end|>
'django.core.context_processors.i18n', 'bluebottle.utils.context_processors.installed_apps_context_processor',
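A `settings/base.py` like the one above is meant to be extended per environment. A typical override module, sketched under the assumption that environment-specific settings live next to it (the filename `local.py` and the values are illustrative):

```
# settings/local.py -- hypothetical development overrides
from .base import *  # noqa: F401,F403

DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        # PROJECT_ROOT and os come in via the star import from base.
        'NAME': os.path.join(PROJECT_ROOT, 'dev.sqlite3'),
    }
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
```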
<|file_name|>ident.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Algebra Developers. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Identities for binary operators. use ops::{Op, Additive, Multiplicative}; /// A type that is equipped with identity. pub trait Identity<O: Op> { /// The identity element. fn id() -> Self; } /// The identity element. pub fn id<T: Identity<O>, O: Op>(_: O) -> T { Identity::id() } impl_ident!(Additive; 0; u8, u16, u32, u64, i8, i16, i32, i64);<|fim▁hole|><|fim▁end|>
impl_ident!(Additive; 0.; f32, f64); impl_ident!(Multiplicative; 1; u8, u16, u32, u64, i8, i16, i32, i64); impl_ident!(Multiplicative; 1.; f32, f64);
<|file_name|>TDBinaryExpr.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2015-2016 Didier Villevalois. * * This file is part of JLaTo. * * JLaTo is free software: you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the * Free Software Foundation, either version 3 of the License, or (at your * option) any later version. * * JLaTo is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with JLaTo. If not, see <http://www.gnu.org/licenses/>. */ package org.jlato.internal.td.expr; import org.jlato.internal.bu.expr.SBinaryExpr; import org.jlato.internal.bu.expr.SExpr; import org.jlato.internal.td.TDLocation; import org.jlato.internal.td.TDTree; import org.jlato.tree.Kind; import org.jlato.tree.expr.BinaryExpr; import org.jlato.tree.expr.BinaryOp; import org.jlato.tree.expr.Expr; import org.jlato.util.Mutation; /** * A binary expression. */<|fim▁hole|> * Returns the kind of this binary expression. * * @return the kind of this binary expression. */ public Kind kind() { return Kind.BinaryExpr; } /** * Creates a binary expression for the specified tree location. * * @param location the tree location. */ public TDBinaryExpr(TDLocation<SBinaryExpr> location) { super(location); } /** * Creates a binary expression with the specified child trees. * * @param left the left child tree. * @param op the op child tree. * @param right the right child tree. */ public TDBinaryExpr(Expr left, BinaryOp op, Expr right) { super(new TDLocation<SBinaryExpr>(SBinaryExpr.make(TDTree.<SExpr>treeOf(left), op, TDTree.<SExpr>treeOf(right)))); } /** * Returns the left of this binary expression. * * @return the left of this binary expression. */ public Expr left() { return location.safeTraversal(SBinaryExpr.LEFT); } /** * Replaces the left of this binary expression. * * @param left the replacement for the left of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withLeft(Expr left) { return location.safeTraversalReplace(SBinaryExpr.LEFT, left); } /** * Mutates the left of this binary expression. * * @param mutation the mutation to apply to the left of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withLeft(Mutation<Expr> mutation) { return location.safeTraversalMutate(SBinaryExpr.LEFT, mutation); } /** * Returns the op of this binary expression. * * @return the op of this binary expression. */ public BinaryOp op() { return location.safeProperty(SBinaryExpr.OP); } /** * Replaces the op of this binary expression. * * @param op the replacement for the op of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withOp(BinaryOp op) { return location.safePropertyReplace(SBinaryExpr.OP, op); } /** * Mutates the op of this binary expression. * * @param mutation the mutation to apply to the op of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withOp(Mutation<BinaryOp> mutation) { return location.safePropertyMutate(SBinaryExpr.OP, mutation); } /** * Returns the right of this binary expression. * * @return the right of this binary expression. 
*/ public Expr right() { return location.safeTraversal(SBinaryExpr.RIGHT); } /** * Replaces the right of this binary expression. * * @param right the replacement for the right of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withRight(Expr right) { return location.safeTraversalReplace(SBinaryExpr.RIGHT, right); } /** * Mutates the right of this binary expression. * * @param mutation the mutation to apply to the right of this binary expression. * @return the resulting mutated binary expression. */ public BinaryExpr withRight(Mutation<Expr> mutation) { return location.safeTraversalMutate(SBinaryExpr.RIGHT, mutation); } }<|fim▁end|>
public class TDBinaryExpr extends TDTree<SBinaryExpr, Expr, BinaryExpr> implements BinaryExpr { /**
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>''' Full setup, used to distribute the debugger backend to PyPi. Note that this is mostly so that users can do: pip install pydevd in a machine for doing remote-debugging, as a local installation with the IDE should have everything already distributed.<|fim▁hole|> Reference on wheels: https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/ http://lucumr.pocoo.org/2014/1/27/python-on-wheels/ Another (no wheels): https://jamie.curle.io/blog/my-first-experience-adding-package-pypi/ New version: change version and then: rm dist/pydevd* C:\tools\Miniconda32\Scripts\activate py27_32 python setup.py sdist bdist_wheel deactivate C:\tools\Miniconda32\Scripts\activate py34_32 python setup.py sdist bdist_wheel deactivate C:\tools\Miniconda32\Scripts\activate py35_32 python setup.py sdist bdist_wheel deactivate C:\tools\Miniconda\Scripts\activate py27_64 python setup.py sdist bdist_wheel deactivate C:\tools\Miniconda\Scripts\activate py34_64 python setup.py sdist bdist_wheel deactivate C:\tools\Miniconda\Scripts\activate py35_64 python setup.py sdist bdist_wheel deactivate twine upload dist/pydevd* ''' from setuptools import setup from setuptools.dist import Distribution from distutils.extension import Extension import os class BinaryDistribution(Distribution): def is_pure(self): return False data_files = [] def accept_file(f): f = f.lower() for ext in '.py .dll .so .dylib .txt .cpp .h .bat .c .sh .md .txt'.split(): if f.endswith(ext): return True return f in ['readme', 'makefile'] data_files.append(('pydevd_attach_to_process', [os.path.join('pydevd_attach_to_process', f) for f in os.listdir('pydevd_attach_to_process') if accept_file(f)])) for root, dirs, files in os.walk("pydevd_attach_to_process"): for d in dirs: data_files.append((os.path.join(root, d), [os.path.join(root, d, f) for f in os.listdir(os.path.join(root, d)) if accept_file(f)])) import pydevd version = pydevd.__version__ args = dict( name='pydevd', version=version, description = 'PyDev.Debugger (used in PyDev and PyCharm)', author='Fabio Zadrozny and others', url='https://github.com/fabioz/PyDev.Debugger/', license='EPL (Eclipse Public License)', packages=[ '_pydev_bundle', '_pydev_imps', '_pydev_runfiles', '_pydevd_bundle', 'pydev_ipython', # 'pydev_sitecustomize', -- Not actually a package (not added) # 'pydevd_attach_to_process', -- Not actually a package (included in MANIFEST.in) 'pydevd_concurrency_analyser', 'pydevd_plugins', ], py_modules=[ # 'interpreterInfo', -- Not needed for debugger # 'pycompletionserver', -- Not needed for debugger 'pydev_app_engine_debug_startup', # 'pydev_coverage', -- Not needed for debugger # 'pydev_pysrc', -- Not needed for debugger 'pydev_run_in_console', 'pydevconsole', 'pydevd_file_utils', 'pydevd', 'pydevd_tracing', # 'runfiles', -- Not needed for debugger # 'setup_cython', -- Should not be included as a module # 'setup', -- Should not be included as a module ], classifiers=[ 'Development Status :: 6 - Mature', 'Environment :: Console', 'Intended Audience :: Developers', # It seems that the license is not recognized by Pypi, so, not categorizing it for now. 
# https://bitbucket.org/pypa/pypi/issues/369/the-eclipse-public-license-superseeded # 'License :: OSI Approved :: Eclipse Public License', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Debuggers', ], data_files=data_files, keywords=['pydev', 'pydevd', 'pydev.debugger'], include_package_data=True, zip_safe=False, ) import sys try: args_with_binaries = args.copy() args_with_binaries.update(dict( distclass=BinaryDistribution, ext_modules=[ # In this setup, don't even try to compile with cython, just go with the .c file which should've # been properly generated from a tested version. Extension('_pydevd_bundle.pydevd_cython', ["_pydevd_bundle/pydevd_cython.c",]) ] )) setup(**args_with_binaries) except: # Compile failed: just setup without compiling cython deps. setup(**args) sys.stdout.write('Plain-python version of pydevd installed (cython speedups not available).\n')<|fim▁end|>
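A note on the pattern in setup.py above: the try/except around setup() first attempts a binary build — with Distribution.is_pure() overridden to return False so the built wheel gets a platform tag — and falls back to a pure-Python install when compiling the pre-generated C file fails. A minimal sketch of that fallback idea; the package, module, and extension names here are illustrative, not from pydevd:

from setuptools import setup
from setuptools.dist import Distribution
from distutils.extension import Extension

class BinaryDistribution(Distribution):
    # Mark the distribution as non-pure so built wheels are tagged per platform.
    def is_pure(self):
        return False

base_args = dict(name='example_pkg', version='0.1', py_modules=['example_mod'])

try:
    # Preferred path: compile the pre-generated C speedups module.
    setup(distclass=BinaryDistribution,
          ext_modules=[Extension('example_speedups', ['example_speedups.c'])],
          **base_args)
except Exception:
    # Fallback: install without compiled speedups, as plain Python.
    setup(**base_args)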
<|file_name|>main.cc<|end_file_name|><|fim▁begin|>#include "assert.hh" #include "buffer.hh" #include "backtrace.hh" #include "buffer_manager.hh" #include "buffer_utils.hh" #include "client_manager.hh" #include "command_manager.hh" #include "commands.hh" #include "containers.hh" #include "context.hh" #include "event_manager.hh" #include "face_registry.hh" #include "file.hh" #include "highlighters.hh" #include "insert_completer.hh" #include "shared_string.hh" #include "ncurses_ui.hh" #include "json_ui.hh" #include "parameters_parser.hh" #include "register_manager.hh" #include "remote.hh" #include "regex.hh" #include "scope.hh" #include "shell_manager.hh" #include "string.hh" #include "unit_tests.hh" #include "window.hh" #include <fcntl.h> #include <locale> #include <sys/stat.h> #include <sys/types.h> #include <unistd.h> #include <pwd.h> using namespace Kakoune; struct startup_error : Kakoune::runtime_error { using Kakoune::runtime_error::runtime_error; }; inline void write_stdout(StringView str) { write(1, str); } inline void write_stderr(StringView str) { write(2, str); } String runtime_directory() { char relpath[PATH_MAX+1]; format_to(relpath, "{}../share/kak", split_path(get_kak_binary_path()).first); struct stat st; if (stat(relpath, &st) == 0 and S_ISDIR(st.st_mode)) return real_path(relpath); return "/usr/share/kak"; } void register_env_vars() { static const struct { const char* name; bool prefix; String (*func)(StringView, const Context&); } env_vars[] = { { "bufname", false, [](StringView name, const Context& context) -> String { return context.buffer().display_name(); } }, { "buffile", false, [](StringView name, const Context& context) -> String { return context.buffer().name(); } }, { "buflist", false, [](StringView name, const Context& context) { return join(BufferManager::instance() | transform(std::mem_fn(&Buffer::display_name)), ':'); } }, { "timestamp", false, [](StringView name, const Context& context) -> String { return to_string(context.buffer().timestamp()); } }, { "selection", false, [](StringView name, const Context& context) { const Selection& sel = context.selections().main(); return content(context.buffer(), sel); } }, { "selections", false, [](StringView name, const Context& context) { return join(context.selections_content(), ':'); } }, { "runtime", false, [](StringView name, const Context& context) { return runtime_directory(); } }, { "opt_", true, [](StringView name, const Context& context) { return context.options()[name.substr(4_byte)].get_as_string(); } }, { "reg_", true, [](StringView name, const Context& context) { return context.main_sel_register_value(name.substr(4_byte)).str(); } }, { "client_env_", true, [](StringView name, const Context& context) { return context.client().get_env_var(name.substr(11_byte)).str(); } }, { "session", false, [](StringView name, const Context& context) -> String { return Server::instance().session(); } }, { "client", false, [](StringView name, const Context& context) -> String { return context.name(); } }, {<|fim▁hole|> [](StringView name, const Context& context) -> String { return to_string(context.selections().main().cursor().line + 1); } }, { "cursor_column", false, [](StringView name, const Context& context) -> String { return to_string(context.selections().main().cursor().column + 1); } }, { "cursor_char_column", false, [](StringView name, const Context& context) -> String { auto coord = context.selections().main().cursor(); return to_string(context.buffer()[coord.line].char_count_to(coord.column) + 1); } }, { 
"cursor_byte_offset", false, [](StringView name, const Context& context) -> String { auto cursor = context.selections().main().cursor(); return to_string(context.buffer().distance({0,0}, cursor)); } }, { "selection_desc", false, [](StringView name, const Context& context) { return selection_to_string(context.selections().main()); } }, { "selections_desc", false, [](StringView name, const Context& context) { return selection_list_to_string(context.selections()); } }, { "window_width", false, [](StringView name, const Context& context) -> String { return to_string(context.window().dimensions().column); } }, { "window_height", false, [](StringView name, const Context& context) -> String { return to_string(context.window().dimensions().line); } } }; ShellManager& shell_manager = ShellManager::instance(); for (auto& env_var : env_vars) shell_manager.register_env_var(env_var.name, env_var.prefix, env_var.func); } void register_registers() { RegisterManager& register_manager = RegisterManager::instance(); for (auto c : "abcdefghijklmnopqrstuvwxyz/\"|^@") register_manager.add_register(c, make_unique<StaticRegister>()); using StringList = Vector<String, MemoryDomain::Registers>; register_manager.add_register('%', make_dyn_reg( [](const Context& context) { return StringList{{context.buffer().display_name()}}; })); register_manager.add_register('.', make_dyn_reg( [](const Context& context) { auto content = context.selections_content(); return StringList{content.begin(), content.end()}; })); register_manager.add_register('#', make_dyn_reg( [](const Context& context) { StringList res; for (size_t i = 1; i < context.selections().size()+1; ++i) res.push_back(to_string((int)i)); return res; })); for (size_t i = 0; i < 10; ++i) { register_manager.add_register('0'+i, make_dyn_reg( [i](const Context& context) { StringList result; for (auto& sel : context.selections()) result.emplace_back(i < sel.captures().size() ? 
sel.captures()[i] : ""); return result; })); } register_manager.add_register('_', make_unique<NullRegister>()); } static void check_tabstop(const int& val) { if (val < 1) throw runtime_error{"tabstop should be strictly positive"}; } static void check_indentwidth(const int& val) { if (val < 0) throw runtime_error{"indentwidth should be positive or zero"}; } static void check_scrolloff(const DisplayCoord& so) { if (so.line < 0 or so.column < 0) throw runtime_error{"scroll offset must be positive or zero"}; } static void check_timeout(const int& timeout) { if (timeout < 50) throw runtime_error{"the minimum acceptable timeout is 50 milliseconds"}; } void register_options() { OptionsRegistry& reg = GlobalScope::instance().option_registry(); reg.declare_option<int, check_tabstop>("tabstop", "size of a tab character", 8); reg.declare_option<int, check_indentwidth>("indentwidth", "indentation width", 4); reg.declare_option<DisplayCoord, check_scrolloff>( "scrolloff", "number of lines and columns to keep visible main cursor when scrolling", {0,0}); reg.declare_option("eolformat", "end of line format", EolFormat::Lf); reg.declare_option("BOM", "byte order mark to use when writing buffer", ByteOrderMark::None); reg.declare_option("incsearch", "incrementaly apply search/select/split regex", true); reg.declare_option("autoinfo", "automatically display contextual help", AutoInfo::Command | AutoInfo::OnKey); reg.declare_option("autoshowcompl", "automatically display possible completions for prompts", true); reg.declare_option("aligntab", "use tab characters when possible for alignement", false); reg.declare_option("ignored_files", "patterns to ignore when completing filenames", Regex{R"(^(\..*|.*\.(o|so|a))$)"}); reg.declare_option("disabled_hooks", "patterns to disable hooks whose group is matched", Regex{}); reg.declare_option("filetype", "buffer filetype", ""_str); reg.declare_option("path", "path to consider when trying to find a file", Vector<String, MemoryDomain::Options>({ "./", "/usr/include" })); reg.declare_option("completers", "insert mode completers to execute.", InsertCompleterDescList({ InsertCompleterDesc{ InsertCompleterDesc::Filename }, InsertCompleterDesc{ InsertCompleterDesc::Word, "all"_str } }), OptionFlags::None); reg.declare_option("static_words", "list of words to always consider for insert word completion", Vector<String, MemoryDomain::Options>{}); reg.declare_option("autoreload", "autoreload buffer when a filesystem modification is detected", Autoreload::Ask); reg.declare_option<int, check_timeout>( "idle_timeout", "timeout, in milliseconds, before idle hooks are triggered", 50); reg.declare_option<int, check_timeout>( "fs_check_timeout", "timeout, in milliseconds, between file system buffer modification checks", 500); reg.declare_option("ui_options", "colon separated list of <key>=<value> options that are " "passed to and interpreted by the user interface\n" "\n" "The ncurses ui supports the following options:\n" "<key>: <value>:\n" " ncurses_assistant clippy|cat|none|off\n" " ncurses_status_on_top bool\n" " ncurses_set_title bool\n" " ncurses_enable_mouse bool\n" " ncurses_wheel_up_button int\n" " ncurses_wheel_down_button int\n" " ncurses_buffer_padding_str str\n" " ncurses_buffer_padding_type fill|single|off\n", UserInterface::Options{}); reg.declare_option("modelinefmt", "format string used to generate the modeline", "%val{bufname} %val{cursor_line}:%val{cursor_char_column} "_str); reg.declare_option("debug", "various debug flags", DebugFlags::None); 
reg.declare_option("readonly", "prevent buffers from being modified", false); reg.declare_option("completion_extra_word_char", "Additional characters to be considered as words for insert completion", ""_str); } struct convert_to_client_mode { String session; String buffer_name; String selections; }; enum class UIType { NCurses, Json, Dummy, }; static Client* local_client = nullptr; static UserInterface* local_ui = nullptr; static bool convert_to_client_pending = false; pid_t fork_server_to_background() { if (pid_t pid = fork()) return pid; if (fork()) // double fork to orphan the server exit(0); write_stderr(format("Kakoune forked server to background ({}), for session '{}'\n", getpid(), Server::instance().session())); return 0; } std::unique_ptr<UserInterface> make_ui(UIType ui_type) { struct DummyUI : UserInterface { DummyUI() { set_signal_handler(SIGINT, SIG_DFL); } void menu_show(ConstArrayView<DisplayLine>, DisplayCoord, Face, Face, MenuStyle) override {} void menu_select(int) override {} void menu_hide() override {} void info_show(StringView, StringView, DisplayCoord, Face, InfoStyle) override {} void info_hide() override {} void draw(const DisplayBuffer&, const Face&, const Face&) override {} void draw_status(const DisplayLine&, const DisplayLine&, const Face&) override {} DisplayCoord dimensions() override { return {24,80}; } void refresh(bool) override {} void set_on_key(OnKeyCallback callback) override {} void set_ui_options(const Options&) override {} }; switch (ui_type) { case UIType::NCurses: return make_unique<NCursesUI>(); case UIType::Json: return make_unique<JsonUI>(); case UIType::Dummy: return make_unique<DummyUI>(); } throw logic_error{}; } std::unique_ptr<UserInterface> create_local_ui(UIType ui_type) { if (ui_type != UIType::NCurses) return make_ui(ui_type); struct LocalUI : NCursesUI { LocalUI() { kak_assert(not local_ui); local_ui = this; m_old_sighup = set_signal_handler(SIGHUP, [](int) { ClientManager::instance().remove_client(*local_client, false); static_cast<LocalUI*>(local_ui)->on_sighup(); }); m_old_sigtstp = set_signal_handler(SIGTSTP, [](int) { if (ClientManager::instance().count() == 1 and *ClientManager::instance().begin() == local_client) { // Suspend normally if we are the only client auto current = set_signal_handler(SIGTSTP, static_cast<LocalUI*>(local_ui)->m_old_sigtstp); sigset_t unblock_sigtstp, old_mask; sigemptyset(&unblock_sigtstp); sigaddset(&unblock_sigtstp, SIGTSTP); sigprocmask(SIG_UNBLOCK, &unblock_sigtstp, &old_mask); raise(SIGTSTP); set_signal_handler(SIGTSTP, current); sigprocmask(SIG_SETMASK, &old_mask, nullptr); } else convert_to_client_pending = true; }); } ~LocalUI() { set_signal_handler(SIGHUP, m_old_sighup); set_signal_handler(SIGTSTP, m_old_sigtstp); local_client = nullptr; local_ui = nullptr; if (not convert_to_client_pending and not ClientManager::instance().empty()) { if (fork_server_to_background()) { this->NCursesUI::~NCursesUI(); exit(0); } } } private: using SigHandler = void (*)(int); SigHandler m_old_sighup; SigHandler m_old_sigtstp; }; if (not isatty(1)) throw startup_error("stdout is not a tty"); if (not isatty(0)) { // move stdin to another fd, and restore tty as stdin int fd = dup(0); int tty = open("/dev/tty", O_RDONLY); dup2(tty, 0); close(tty); create_fifo_buffer("*stdin*", fd); } return make_unique<LocalUI>(); } void signal_handler(int signal) { NCursesUI::abort(); const char* text = nullptr; switch (signal) { case SIGSEGV: text = "SIGSEGV"; break; case SIGFPE: text = "SIGFPE"; break; case SIGQUIT: text = 
"SIGQUIT"; break; case SIGTERM: text = "SIGTERM"; break; case SIGPIPE: text = "SIGPIPE"; break; } if (signal != SIGTERM) { auto msg = format("Received {}, exiting.\nPid: {}\nCallstack:\n{}", text, getpid(), Backtrace{}.desc()); write_stderr(msg); notify_fatal_error(msg); } if (Server::has_instance()) Server::instance().close_session(); if (BufferManager::has_instance()) BufferManager::instance().backup_modified_buffers(); if (signal == SIGTERM) exit(-1); else abort(); } int run_client(StringView session, StringView init_cmds, UIType ui_type) { try { EventManager event_manager; RemoteClient client{session, make_ui(ui_type), get_env_vars(), init_cmds}; while (true) event_manager.handle_next_events(EventMode::Normal); } catch (remote_error& e) { write_stderr(format("{}\ndisconnecting\n", e.what())); return -1; } return 0; } int run_server(StringView session, StringView init_cmds, BufferCoord init_coord, bool ignore_kakrc, bool daemon, bool readonly, UIType ui_type, ConstArrayView<StringView> files) { static bool terminate = false; if (daemon) { if (session.empty()) { write_stderr("-d needs a session name to be specified with -s\n"); return -1; } if (pid_t child = fork()) { write_stderr(format("Kakoune forked to background, for session '{}'\n" "send SIGTERM to process {} for closing the session\n", session, child)); exit(0); } set_signal_handler(SIGTERM, [](int) { terminate = true; }); } StringRegistry string_registry; EventManager event_manager; GlobalScope global_scope; ShellManager shell_manager; CommandManager command_manager; RegisterManager register_manager; HighlighterRegistry highlighter_registry; DefinedHighlighters defined_highlighters; FaceRegistry face_registry; ClientManager client_manager; BufferManager buffer_manager; register_options(); register_env_vars(); register_registers(); register_commands(); register_highlighters(); UnitTest::run_all_tests(); write_to_debug_buffer("*** This is the debug buffer, where debug info will be written ***"); GlobalScope::instance().options().get_local_option("readonly").set(readonly); Server server(session.empty() ? to_string(getpid()) : session.str()); bool startup_error = false; if (not ignore_kakrc) try { Context initialisation_context{Context::EmptyContextFlag{}}; command_manager.execute(format("source {}/kakrc", runtime_directory()), initialisation_context); } catch (Kakoune::runtime_error& error) { startup_error = true; write_to_debug_buffer(format("error while parsing kakrc:\n" " {}", error.what())); } { Context empty_context{Context::EmptyContextFlag{}}; global_scope.hooks().run_hook("KakBegin", "", empty_context); } if (not files.empty()) try { // create buffers in reverse order so that the first given buffer // is the most recently created one. 
for (auto& file : files | reverse()) { try { Buffer *buffer = open_or_create_file_buffer(file); if (readonly) buffer->flags() |= Buffer::Flags::ReadOnly; } catch (Kakoune::runtime_error& error) { startup_error = true; write_to_debug_buffer(format("error while opening file '{}':\n" " {}", file, error.what())); } } } catch (Kakoune::runtime_error& error) { write_to_debug_buffer(format("error while opening command line files: {}", error.what())); } try { if (not daemon) local_client = client_manager.create_client( create_local_ui(ui_type), get_env_vars(), init_cmds, init_coord); if (local_client and startup_error) local_client->print_status({ "error during startup, see *debug* buffer for details", get_face("Error") }); while (not terminate and (not client_manager.empty() or daemon)) { client_manager.redraw_clients(); event_manager.handle_next_events(EventMode::Normal); client_manager.process_pending_inputs(); client_manager.clear_client_trash(); client_manager.clear_window_trash(); buffer_manager.clear_buffer_trash(); string_registry.purge_unused(); if (convert_to_client_pending) { String buffer_name = local_client->context().buffer().name(); String selections = selection_list_to_string(local_client->context().selections()); ClientManager::instance().remove_client(*local_client, true); client_manager.clear_client_trash(); convert_to_client_pending = false; if (fork_server_to_background()) { String session = server.session(); server.close_session(false); throw convert_to_client_mode{ std::move(session), std::move(buffer_name), std::move(selections) }; } } } } catch (const kill_session&) {} { Context empty_context{Context::EmptyContextFlag{}}; global_scope.hooks().run_hook("KakEnd", "", empty_context); } return 0; } int run_filter(StringView keystr, StringView commands, ConstArrayView<StringView> files, bool quiet) { StringRegistry string_registry; GlobalScope global_scope; EventManager event_manager; ShellManager shell_manager; CommandManager command_manager; RegisterManager register_manager; ClientManager client_manager; BufferManager buffer_manager; register_options(); register_env_vars(); register_registers(); register_commands(); try { auto keys = parse_keys(keystr); auto apply_to_buffer = [&](Buffer& buffer) { try { InputHandler input_handler{ { buffer, Selection{{0,0}, buffer.back_coord()} }, Context::Flags::Transient }; if (not commands.empty()) command_manager.execute(commands, input_handler.context(), ShellContext{}); for (auto& key : keys) input_handler.handle_key(key); } catch (Kakoune::runtime_error& err) { if (not quiet) write_stderr(format("error while applying keys to buffer '{}': {}\n", buffer.display_name(), err.what())); } }; for (auto& file : files) { Buffer* buffer = open_file_buffer(file); write_buffer_to_file(*buffer, file + ".kak-bak"); apply_to_buffer(*buffer); write_buffer_to_file(*buffer, file); buffer_manager.delete_buffer(*buffer); } if (not isatty(0)) { Buffer& buffer = *buffer_manager.create_buffer( "*stdin*", Buffer::Flags::None, read_fd(0), InvalidTime); apply_to_buffer(buffer); write_buffer_to_fd(buffer, 1); buffer_manager.delete_buffer(buffer); } } catch (Kakoune::runtime_error& err) { write_stderr(format("error: {}\n", err.what())); } buffer_manager.clear_buffer_trash(); return 0; } int run_pipe(StringView session) { char buf[512]; String command; while (ssize_t count = read(0, buf, 512)) { if (count < 0) { write_stderr("error while reading stdin\n"); return -1; } command += StringView{buf, buf + count}; } try { send_command(session, command); } catch 
(remote_error& e) { write_stderr(format("{}\ndisconnecting\n", e.what())); return -1; } return 0; } UIType parse_ui_type(StringView ui_name) { if (ui_name == "ncurses") return UIType::NCurses; if (ui_name == "json") return UIType::Json; if (ui_name == "dummy") return UIType::Dummy; throw parameter_error(format("error: unknown ui type: '{}'", ui_name)); } int main(int argc, char* argv[]) { setlocale(LC_ALL, ""); set_signal_handler(SIGSEGV, signal_handler); set_signal_handler(SIGFPE, signal_handler); set_signal_handler(SIGQUIT, signal_handler); set_signal_handler(SIGTERM, signal_handler); set_signal_handler(SIGPIPE, SIG_IGN); set_signal_handler(SIGINT, [](int){}); set_signal_handler(SIGCHLD, [](int){}); Vector<String> params; for (size_t i = 1; i < argc; ++i) params.push_back(argv[i]); const ParameterDesc param_desc{ SwitchMap{ { "c", { true, "connect to given session" } }, { "e", { true, "execute argument on initialisation" } }, { "n", { false, "do not source kakrc files on startup" } }, { "s", { true, "set session name" } }, { "d", { false, "run as a headless session (requires -s)" } }, { "p", { true, "just send stdin as commands to the given session" } }, { "f", { true, "act as a filter, executing given keys on given files" } }, { "q", { false, "in filter mode, be quiet about errors applying keys" } }, { "ui", { true, "set the type of user interface to use (ncurses, dummy, or json)" } }, { "l", { false, "list existing sessions" } }, { "clear", { false, "clear dead sessions" } }, { "ro", { false, "readonly mode" } } } }; try { std::sort(keymap.begin(), keymap.end(), [](const NormalCmdDesc& lhs, const NormalCmdDesc& rhs) { return lhs.key < rhs.key; }); ParametersParser parser(params, param_desc); const bool list_sessions = (bool)parser.get_switch("l"); const bool clear_sessions = (bool)parser.get_switch("clear"); if (list_sessions or clear_sessions) { StringView username = getpwuid(geteuid())->pw_name; for (auto& session : list_files(format("/tmp/kakoune/{}/", username))) { const bool valid = check_session(session); if (list_sessions) write_stdout(format("{}{}\n", session, valid ? 
"" : " (dead)")); if (not valid and clear_sessions) { char socket_file[128]; format_to(socket_file, "/tmp/kakoune/{}/{}", username, session); unlink(socket_file); } } return 0; } if (auto session = parser.get_switch("p")) { for (auto opt : { "c", "n", "s", "d", "e", "ro" }) { if (parser.get_switch(opt)) { write_stderr(format("error: -{} is incompatible with -p\n", opt)); return -1; } } return run_pipe(*session); } auto init_cmds = parser.get_switch("e").value_or(StringView{}); const UIType ui_type = parse_ui_type(parser.get_switch("ui").value_or("ncurses")); if (auto keys = parser.get_switch("f")) { if (parser.get_switch("ro")) { write_stderr("error: -ro is incompatible with -f\n"); return -1; } Vector<StringView> files; for (size_t i = 0; i < parser.positional_count(); ++i) files.emplace_back(parser[i]); return run_filter(*keys, init_cmds, files, (bool)parser.get_switch("q")); } if (auto server_session = parser.get_switch("c")) { for (auto opt : { "n", "s", "d", "ro" }) { if (parser.get_switch(opt)) { write_stderr(format("error: -{} is incompatible with -c\n", opt)); return -1; } } String new_files; for (auto name : parser) new_files += format("edit '{}';", escape(real_path(name), "'", '\\')); return run_client(*server_session, new_files + init_cmds, ui_type); } else { BufferCoord init_coord; Vector<StringView> files; for (auto& name : parser) { if (not name.empty() and name[0_byte] == '+') { auto colon = find(name, ':'); if (auto line = str_to_int_ifp({name.begin()+1, colon})) { init_coord.line = *line - 1; if (colon != name.end()) init_coord.column = str_to_int_ifp({colon+1, name.end()}).value_or(1) - 1; continue; } } files.emplace_back(name); } StringView session = parser.get_switch("s").value_or(StringView{}); try { return run_server(session, init_cmds, init_coord, (bool)parser.get_switch("n"), (bool)parser.get_switch("d"), (bool)parser.get_switch("ro"), ui_type, files); } catch (convert_to_client_mode& convert) { raise(SIGTSTP); return run_client(convert.session, format("try %^buffer '{}'; select '{}'^; echo converted to client only mode", escape(convert.buffer_name, "'^", '\\'), convert.selections), ui_type); } } } catch (Kakoune::parameter_error& error) { write_stderr(format("Error while parsing parameters: {}\n" "Valid switches:\n" "{}", error.what(), generate_switches_doc(param_desc.switches))); return -1; } catch (startup_error& error) { write_stderr(format("Could not start kakoune: {}\n", error.what())); return -1; } catch (Kakoune::exception& error) { write_stderr(format("uncaught exception ({}):\n{}", typeid(error).name(), error.what())); return -1; } catch (std::exception& error) { write_stderr(format("uncaught exception ({}):\n{}", typeid(error).name(), error.what())); return -1; } catch (...) { write_stderr("uncaught exception"); return -1; } return 0; }<|fim▁end|>
"cursor_line", false,
<|file_name|>GeneratorCodonsTest.py<|end_file_name|><|fim▁begin|>import unittest
import string
from pprint import pprint

import XGram, XGram.Parser, XGram.Exceptions
from XGram.Generator.Prebuilt import Codons
from XGram.Model import Annotation

import Bio.Data.CodonTable

class GeneratorCodonsTest(unittest.TestCase):
    """
    A test class for testing a grammar
    """

    def setUp(self):
        """
        set up data used in the tests.
        setUp is called before each test function execution.
        """
        self.mInputFile= XGram.PATH_DATA+"/dpse_dmel.stk"

        self.mXgram = XGram.XGram()
        self.mXgram.setDebug()

    def tearDown(self):
        """
        tear down any data used in tests
        tearDown is called after each test function execution.
        """
        pass

    def testModelF3X4Two(self):
        """test f3x4-two model.
        """
        self.buildAndCheckModel( "f3x4-two" )

    def testModelF3X4Four(self):
        """test f3x4-four model.
        """
        self.buildAndCheckModel( "f3x4-four" )

    def testModelF3X4FourProducts(self):
        """test f3x4-fourproducts model.
        """
        self.buildAndCheckModel( "f3x4-fourproducts" )

    def testModelCodonsTwo(self):
        """test codons-two model
        """
        self.buildAndCheckModel( "codons-two" )

        codons = Bio.Data.CodonTable.standard_dna_table.forward_table
        codon_frequencies = {}
        n = 1
        f = 61 * 62 / 2
        for codon in Bio.Data.CodonTable.standard_dna_table.forward_table:
            codon_frequencies[codon] = float(n)/f
            n += 1
        self.buildAndCheckModel( "codons-four",
                                 codon_frequencies = codon_frequencies )

    def testModelCodonsFour(self):
        """test codons-four model
        """
        <|fim▁hole|>
        n = 1
        f = 61 * 62 / 2
        for codon in Bio.Data.CodonTable.standard_dna_table.forward_table:
            codon_frequencies[codon] = float(n)/f
            n += 1
        self.buildAndCheckModel( "codons-four",
                                 codon_frequencies = codon_frequencies )

    def buildAndCheckModel(self, codon_model, **kwargs):
        """build various models checking parameter settings."""
        model = Codons.buildCodonML(codon_model = codon_model, **kwargs )
        self.checkModel( model )

        model = Codons.buildCodonML(codon_model = codon_model, fix_kappa = True, **kwargs )
        self.checkModel( model )

        model = Codons.buildCodonML(codon_model = codon_model, fix_omega = True, **kwargs )
        self.checkModel( model )

        model = Codons.buildCodonML(codon_model = codon_model, fix_omega = True, fix_kappa = True, **kwargs )
        self.checkModel( model )

        model = Codons.buildCodonML( codon_model, num_blocks=2,
                                     grammar_type="linear-blocks",
                                     shared_frequencies = False,
                                     shared_rates = False,
                                     **kwargs )
        self.checkModel(model)

        num_blocks = 2
        model = Codons.buildCodonML( codon_model, num_blocks=num_blocks,
                                     grammar_type="linear-blocks",
                                     shared_frequencies = True,
                                     shared_rates = False,
                                     **kwargs)
        self.checkModel(model)

        num_blocks = 2
        model = Codons.buildCodonML( codon_model, num_blocks=num_blocks,
                                     grammar_type="linear-blocks",
                                     shared_frequencies = False,
                                     shared_rates = True,
                                     **kwargs)
        self.checkModel(model)

        num_blocks = 2
        model = Codons.buildCodonML( codon_model, num_blocks=num_blocks,
                                     grammar_type="linear-blocks",
                                     shared_frequencies = True,
                                     shared_rates = True,
                                     **kwargs)
        self.checkModel(model)

        ## test model with annotations
        ## build annotation
        labels = string.letters.upper()
        annotate_terminals = {}
        for x in range(num_blocks):
            annotations = []
            key = []
            for c in range( 0,3 ):
                t = "B%i_COD%i" % (x, c)
                key.append(t)
                annotations.append( Annotation( row = "STATE",
                                                column = t,
                                                label = labels[x % len(labels)] ))
            annotate_terminals[ tuple(key) ] = annotations

        model = Codons.buildCodonML( codon_model,
                                     num_blocks=2,
                                     grammar_type="linear-blocks",
                                     shared_frequencies = True,
                                     annotate_terminals = annotate_terminals,
                                     **kwargs )
        # print model.getGrammar()
        self.checkModel(model)

    def 
checkModel(self, model ): """check a model.""" model.getGrammar() frequencies = model.evaluateTerminalFrequencies() matrix = model.evaluateRateMatrix() if __name__ == '__main__': unittest.main()<|fim▁end|>
self.buildAndCheckModel( "codons-four" ) codons = Bio.Data.CodonTable.standard_dna_table.forward_table codon_frequencies = {}
<|file_name|>Mat4f.ts<|end_file_name|><|fim▁begin|>import {Vec3f} from "./Vec3f";
import {Quaternion} from "./Quaternion";
import {MathUtils} from "./MathUtils";
import {Vec4f} from "./Vec4f";
/**
 * Created by r3f on 7/1/2016.
 */
export class Mat4f{
    // Matrix row & column values as in i,j
    m:Float32Array;

    /*
     * Constructor Mat4f()
     * @info: Rows and columns are 0 by default
     */
    constructor(m?:Float32Array) {
        this.m = m ? m : new Float32Array(16);
    }

    identity(overwrite:boolean=false):Mat4f {
        if (!overwrite) {
            var mat = new Mat4f();
            mat.m[0] = 1; mat.m[1] = 0; mat.m[2] = 0; mat.m[3] = 0;
            mat.m[4] = 0; mat.m[5] = 1; mat.m[6] = 0; mat.m[7] = 0;
            mat.m[8] = 0; mat.m[9] = 0; mat.m[10] = 1; mat.m[11] = 0;
            mat.m[12] = 0; mat.m[13] = 0; mat.m[14] = 0; mat.m[15] = 1;
            return mat;
        }
        this.m[0] = 1; this.m[1] = 0; this.m[2] = 0; this.m[3] = 0;
        this.m[4] = 0; this.m[5] = 1; this.m[6] = 0; this.m[7] = 0;
        this.m[8] = 0; this.m[9] = 0; this.m[10] = 1; this.m[11] = 0;
        this.m[12] = 0; this.m[13] = 0; this.m[14] = 0; this.m[15] = 1;
        return this;
    }

    transpose(overwrite:boolean=false):Mat4f {
        if (!overwrite) {
            // Return a transposed copy, reading every element from this.m.
            var mat = new Mat4f();
            mat.m[0] = this.m[0];  mat.m[1] = this.m[4];  mat.m[2] = this.m[8];   mat.m[3] = this.m[12];
            mat.m[4] = this.m[1];  mat.m[5] = this.m[5];  mat.m[6] = this.m[9];   mat.m[7] = this.m[13];
            mat.m[8] = this.m[2];  mat.m[9] = this.m[6];  mat.m[10] = this.m[10]; mat.m[11] = this.m[14];
            mat.m[12] = this.m[3]; mat.m[13] = this.m[7]; mat.m[14] = this.m[11]; mat.m[15] = this.m[15];
            return mat;
        }
        // In-place transpose: cache the upper triangle before it is overwritten.
        var a01 = this.m[1], a02 = this.m[2], a03 = this.m[3],
            a12 = this.m[6], a13 = this.m[7], a23 = this.m[11];
        this.m[1] = this.m[4]; this.m[2] = this.m[8];  this.m[3] = this.m[12];
        this.m[4] = a01;       this.m[6] = this.m[9];  this.m[7] = this.m[13];
        this.m[8] = a02;       this.m[9] = a12;        this.m[11] = this.m[14];
        this.m[12] = a03;      this.m[13] = a13;       this.m[14] = a23;
        return this;
    }

    determinant():number {
        // Cache the matrix values (makes for huge speed increases!)
        var a00 = this.m[0],  a01 = this.m[1],  a02 = this.m[2],  a03 = this.m[3],
            a10 = this.m[4],  a11 = this.m[5],  a12 = this.m[6],  a13 = this.m[7],
            a20 = this.m[8],  a21 = this.m[9],  a22 = this.m[10], a23 = this.m[11],
            a30 = this.m[12], a31 = this.m[13], a32 = this.m[14], a33 = this.m[15];

        return (a30 * a21 * a12 * a03 - a20 * a31 * a12 * a03 - a30 * a11 * a22 * a03 + a10 * a31 * a22 * a03 +
                a20 * a11 * a32 * a03 - a10 * a21 * a32 * a03 - a30 * a21 * a02 * a13 + a20 * a31 * a02 * a13 +
                a30 * a01 * a22 * a13 - a00 * a31 * a22 * a13 - a20 * a01 * a32 * a13 + a00 * a21 * a32 * a13 +
                a30 * a11 * a02 * a23 - a10 * a31 * a02 * a23 - a30 * a01 * a12 * a23 + a00 * a31 * a12 * a23 +
                a10 * a01 * a32 * a23 - a00 * a11 * a32 * a23 - a20 * a11 * a02 * a33 + a10 * a21 * a02 * a33 +
                a20 * a01 * a12 * a33 - a00 * a21 * a12 * a33 - a10 * a01 * a22 * a33 + a00 * a11 * a22 * a33);
    }

    inverse(overwrite:boolean=false) {
        var mat = this.m;
        var dest:Mat4f = overwrite ? this : new Mat4f();
        // Cache the matrix values (makes for huge speed increases!)
var a00 = mat[0], a01 = mat[1], a02 = mat[2], a03 = mat[3], a10 = mat[4], a11 = mat[5], a12 = mat[6], a13 = mat[7], a20 = mat[8], a21 = mat[9], a22 = mat[10], a23 = mat[11], a30 = mat[12], a31 = mat[13], a32 = mat[14], a33 = mat[15], b00 = a00 * a11 - a01 * a10, b01 = a00 * a12 - a02 * a10, b02 = a00 * a13 - a03 * a10, b03 = a01 * a12 - a02 * a11, b04 = a01 * a13 - a03 * a11, b05 = a02 * a13 - a03 * a12, b06 = a20 * a31 - a21 * a30, b07 = a20 * a32 - a22 * a30, b08 = a20 * a33 - a23 * a30, b09 = a21 * a32 - a22 * a31, b10 = a21 * a33 - a23 * a31, b11 = a22 * a33 - a23 * a32, d = (b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06), invDet; // Calculate the determinant if (!d) { return null; } invDet = 1 / d; dest.m[0] = (a11 * b11 - a12 * b10 + a13 * b09) * invDet; dest.m[1] = (-a01 * b11 + a02 * b10 - a03 * b09) * invDet; dest.m[2] = (a31 * b05 - a32 * b04 + a33 * b03) * invDet; dest.m[3] = (-a21 * b05 + a22 * b04 - a23 * b03) * invDet; dest.m[4] = (-a10 * b11 + a12 * b08 - a13 * b07) * invDet; dest.m[5] = (a00 * b11 - a02 * b08 + a03 * b07) * invDet; dest.m[6] = (-a30 * b05 + a32 * b02 - a33 * b01) * invDet; dest.m[7] = (a20 * b05 - a22 * b02 + a23 * b01) * invDet; dest.m[8] = (a10 * b10 - a11 * b08 + a13 * b06) * invDet; dest.m[9] = (-a00 * b10 + a01 * b08 - a03 * b06) * invDet; dest.m[10] = (a30 * b04 - a31 * b02 + a33 * b00) * invDet; dest.m[11] = (-a20 * b04 + a21 * b02 - a23 * b00) * invDet; dest.m[12] = (-a10 * b09 + a11 * b07 - a12 * b06) * invDet; dest.m[13] = (a00 * b09 - a01 * b07 + a02 * b06) * invDet; dest.m[14] = (-a30 * b03 + a31 * b01 - a32 * b00) * invDet; dest.m[15] = (a20 * b03 - a21 * b01 + a22 * b00) * invDet; return dest; } toRotationMat() { var dest:Mat4f = new Mat4f(); dest.m[0] = this.m[0]; dest.m[1] = this.m[1]; dest.m[2] = this.m[2]; dest.m[3] = this.m[3]; dest.m[4] = this.m[4]; dest.m[5] = this.m[5]; dest.m[6] = this.m[6]; dest.m[7] = this.m[7]; dest.m[8] = this.m[8]; dest.m[9] = this.m[9]; dest.m[10] = this.m[10]; dest.m[11] = this.m[11]; dest.m[12] = 0; dest.m[13] = 0; dest.m[14] = 0; dest.m[15] = 1; return dest; } frustum(left, right, bottom, top, near, far, overwrite:boolean=false) { if(overwrite){ var dest:Mat4f = this; }else{ dest = new Mat4f(); } var rl = (right - left), tb = (top - bottom), fn = (far - near); dest.m[0] = (near * 2) / rl; dest.m[1] = 0; dest.m[2] = 0; dest.m[3] = 0; dest.m[4] = 0; dest.m[5] = (near * 2) / tb; dest.m[6] = 0; dest.m[7] = 0; dest.m[8] = (right + left) / rl; dest.m[9] = (top + bottom) / tb; dest.m[10] = -(far + near) / fn; dest.m[11] = -1; dest.m[12] = 0; dest.m[13] = 0; dest.m[14] = -(far * near * 2) / fn; dest.m[15] = 0; return dest; } perspective(fovy, aspect, near, far, overwrite:boolean=false) { var top = near * Math.tan(fovy * Math.PI / 360.0), right = top * aspect; return this.frustum(-right, right, -top, top, near, far, overwrite); } lookAt(eye, center, up) { var x0, x1, x2, y0, y1, y2, z0, z1, z2, len, eyex = eye[0], eyey = eye[1], eyez = eye[2], upx = up[0], upy = up[1], upz = up[2], centerx = center[0], centery = center[1], centerz = center[2]; if (eyex === centerx && eyey === centery && eyez === centerz) { return this.identity(true); } //vec3.direction(eye, center, z); z0 = eyex - centerx; z1 = eyey - centery; z2 = eyez - centerz; // normalize (no check needed for 0 because of early return) len = 1 / Math.sqrt(z0 * z0 + z1 * z1 + z2 * z2); z0 *= len; z1 *= len; z2 *= len; //vec3.normalize(vec3.cross(up, z, x)); x0 = upy * z2 - upz * z1; x1 = upz * z0 - upx * z2; x2 = upx * z1 - upy * z0; len = 
Math.sqrt(x0 * x0 + x1 * x1 + x2 * x2); if (!len) { x0 = 0; x1 = 0; x2 = 0; } else { len = 1 / len; x0 *= len; x1 *= len; x2 *= len; } //vec3.normalize(vec3.cross(z, x, y)); y0 = z1 * x2 - z2 * x1; y1 = z2 * x0 - z0 * x2; y2 = z0 * x1 - z1 * x0; len = Math.sqrt(y0 * y0 + y1 * y1 + y2 * y2); if (!len) { y0 = 0; y1 = 0; y2 = 0; } else { len = 1 / len; y0 *= len; y1 *= len; y2 *= len; } this.m[0] = x0; this.m[1] = y0; this.m[2] = z0; this.m[3] = 0; this.m[4] = x1; this.m[5] = y1; this.m[6] = z1; this.m[7] = 0; this.m[8] = x2; this.m[9] = y2; this.m[10] = z2; this.m[11] = 0; this.m[12] = -(x0 * eyex + x1 * eyey + x2 * eyez); this.m[13] = -(y0 * eyex + y1 * eyey + y2 * eyez); this.m[14] = -(z0 * eyex + z1 * eyey + z2 * eyez); this.m[15] = 1; return this; } multiply(mat2) { var dest = new Mat4f(); var mat = this.m; // Cache the matrix values (makes for huge speed increases!) var a00 = mat[ 0], a01 = mat[ 1], a02 = mat[ 2], a03 = mat[3];<|fim▁hole|> // Cache only the current line of the second matrix var b0 = mat2.m[0], b1 = mat2.m[1], b2 = mat2.m[2], b3 = mat2.m[3]; dest.m[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30; dest.m[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31; dest.m[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32; dest.m[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33; b0 = mat2.m[4]; b1 = mat2.m[5]; b2 = mat2.m[6]; b3 = mat2.m[7]; dest.m[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30; dest.m[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31; dest.m[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32; dest.m[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33; b0 = mat2.m[8]; b1 = mat2.m[9]; b2 = mat2.m[10]; b3 = mat2.m[11]; dest.m[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30; dest.m[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31; dest.m[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32; dest.m[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33; b0 = mat2.m[12]; b1 = mat2.m[13]; b2 = mat2.m[14]; b3 = mat2.m[15]; dest.m[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30; dest.m[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31; dest.m[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32; dest.m[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33; return dest; } multiplyVec4(vec) { var dest = new Vec4f(); var x = vec[0], y = vec[1], z = vec[2], w = vec[3]; dest.x = this.m[0] * x + this.m[4] * y + this.m[8] * z + this.m[12] * w; dest.y = this.m[1] * x + this.m[5] * y + this.m[9] * z + this.m[13] * w; dest.z = this.m[2] * x + this.m[6] * y + this.m[10] * z + this.m[14] * w; dest.w = this.m[3] * x + this.m[7] * y + this.m[11] * z + this.m[15] * w; return dest; } }<|fim▁end|>
var a10 = mat[ 4], a11 = mat[ 5], a12 = mat[ 6], a13 = mat[7]; var a20 = mat[ 8], a21 = mat[ 9], a22 = mat[10], a23 = mat[11]; var a30 = mat[12], a31 = mat[13], a32 = mat[14], a33 = mat[15];
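Mat4f.ts unrolls determinant() and inverse() by hand from the 2x2 sub-determinants (the b00..b11 terms). When porting or debugging code like this, the quickest sanity check is to compare against a reference implementation — a sketch with numpy, assuming it is available; the sample matrix is arbitrary:

import numpy as np

m = np.array([[1., 0., 0., 0.],
              [0., 2., 0., 0.],
              [0., 0., 3., 0.],
              [4., 5., 6., 1.]], dtype=np.float32)

det = np.linalg.det(m)   # lower-triangular here, so det = 1*2*3*1 = 6
inv = np.linalg.inv(m)   # compare element-wise against Mat4f.inverse()
assert abs(det - 6.0) < 1e-5
assert np.allclose(m @ inv, np.eye(4), atol=1e-5)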
<|file_name|>one.js<|end_file_name|><|fim▁begin|>var one = {<|fim▁hole|><|fim▁end|>
name: 'one' };
<|file_name|>checkChecker.js<|end_file_name|><|fim▁begin|>/*
    Check for Checks
    Jesse Allison 2015
*/

var checkDict = new Dict('recentChecks');
var debugDict = new Dict('debugChecks'); // used by checkAmounts() below; the dict name is assumed
var ajaxreq;

function bang() {
    var all_the_checks = checkDict.get('body');<|fim▁hole|>
post(all_the_checks.length);
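checkChecker.js polls a Socrata open-data endpoint from inside Max's js object. For reference, the same fetch-and-iterate step outside Max, sketched with only the Python standard library; the gross_pay field name comes from the code above, and the guard is an assumption since not every record need carry it:

import json
import urllib.request

url = 'https://data.brla.gov/resource/g5c2-myyj.json'
with urllib.request.urlopen(url) as resp:
    checks = json.load(resp)

for i, check in enumerate(checks):
    payment = check.get('gross_pay')  # may be absent on some records
    if payment is not None:
        print(i, payment)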
<|file_name|>test_select.py<|end_file_name|><|fim▁begin|>from .. import config from .. import fixtures from ..assertions import eq_ from ..assertions import in_ from ..schema import Column from ..schema import Table from ... import bindparam from ... import case from ... import Computed from ... import exists from ... import false from ... import func from ... import Integer from ... import literal from ... import literal_column from ... import null from ... import select from ... import String from ... import testing from ... import text from ... import true from ... import tuple_ from ... import union from ... import util class CollateTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("data", String(100)), ) <|fim▁hole|> @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "data": "collate data1"}, {"id": 2, "data": "collate data2"}, ], ) def _assert_result(self, select, result): eq_(config.db.execute(select).fetchall(), result) @testing.requires.order_by_collation def test_collate_order_by(self): collation = testing.requires.get_order_by_collation(testing.config) self._assert_result( select([self.tables.some_table]).order_by( self.tables.some_table.c.data.collate(collation).asc() ), [(1, "collate data1"), (2, "collate data2")], ) class OrderByLabelTest(fixtures.TablesTest): """Test the dialect sends appropriate ORDER BY expressions when labels are used. This essentially exercises the "supports_simple_order_by_label" setting. """ __backend__ = True @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("y", Integer), Column("q", String(50)), Column("p", String(50)), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "x": 1, "y": 2, "q": "q1", "p": "p3"}, {"id": 2, "x": 2, "y": 3, "q": "q2", "p": "p2"}, {"id": 3, "x": 3, "y": 4, "q": "q3", "p": "p1"}, ], ) def _assert_result(self, select, result): eq_(config.db.execute(select).fetchall(), result) def test_plain(self): table = self.tables.some_table lx = table.c.x.label("lx") self._assert_result(select([lx]).order_by(lx), [(1,), (2,), (3,)]) def test_composed_int(self): table = self.tables.some_table lx = (table.c.x + table.c.y).label("lx") self._assert_result(select([lx]).order_by(lx), [(3,), (5,), (7,)]) def test_composed_multiple(self): table = self.tables.some_table lx = (table.c.x + table.c.y).label("lx") ly = (func.lower(table.c.q) + table.c.p).label("ly") self._assert_result( select([lx, ly]).order_by(lx, ly.desc()), [(3, util.u("q1p3")), (5, util.u("q2p2")), (7, util.u("q3p1"))], ) def test_plain_desc(self): table = self.tables.some_table lx = table.c.x.label("lx") self._assert_result( select([lx]).order_by(lx.desc()), [(3,), (2,), (1,)] ) def test_composed_int_desc(self): table = self.tables.some_table lx = (table.c.x + table.c.y).label("lx") self._assert_result( select([lx]).order_by(lx.desc()), [(7,), (5,), (3,)] ) @testing.requires.group_by_complex_expression def test_group_by_composed(self): table = self.tables.some_table expr = (table.c.x + table.c.y).label("lx") stmt = ( select([func.count(table.c.id), expr]) .group_by(expr) .order_by(expr) ) self._assert_result(stmt, [(1, 3), (1, 5), (1, 7)]) class LimitOffsetTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, 
metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("y", Integer), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "x": 1, "y": 2}, {"id": 2, "x": 2, "y": 3}, {"id": 3, "x": 3, "y": 4}, {"id": 4, "x": 4, "y": 5}, ], ) def _assert_result(self, select, result, params=()): eq_(config.db.execute(select, params).fetchall(), result) def test_simple_limit(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).limit(2), [(1, 1, 2), (2, 2, 3)], ) @testing.requires.offset def test_simple_offset(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).offset(2), [(3, 3, 4), (4, 4, 5)], ) @testing.requires.offset def test_simple_limit_offset(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).limit(2).offset(1), [(2, 2, 3), (3, 3, 4)], ) @testing.requires.offset def test_limit_offset_nobinds(self): """test that 'literal binds' mode works - no bound params.""" table = self.tables.some_table stmt = select([table]).order_by(table.c.id).limit(2).offset(1) sql = stmt.compile( dialect=config.db.dialect, compile_kwargs={"literal_binds": True} ) sql = str(sql) self._assert_result(sql, [(2, 2, 3), (3, 3, 4)]) @testing.requires.bound_limit_offset def test_bound_limit(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).limit(bindparam("l")), [(1, 1, 2), (2, 2, 3)], params={"l": 2}, ) @testing.requires.bound_limit_offset def test_bound_offset(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).offset(bindparam("o")), [(3, 3, 4), (4, 4, 5)], params={"o": 2}, ) @testing.requires.bound_limit_offset def test_bound_limit_offset(self): table = self.tables.some_table self._assert_result( select([table]) .order_by(table.c.id) .limit(bindparam("l")) .offset(bindparam("o")), [(2, 2, 3), (3, 3, 4)], params={"l": 2, "o": 1}, ) class CompoundSelectTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("y", Integer), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "x": 1, "y": 2}, {"id": 2, "x": 2, "y": 3}, {"id": 3, "x": 3, "y": 4}, {"id": 4, "x": 4, "y": 5}, ], ) def _assert_result(self, select, result, params=()): eq_(config.db.execute(select, params).fetchall(), result) def test_plain_union(self): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2) s2 = select([table]).where(table.c.id == 3) u1 = union(s1, s2) self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) def test_select_from_plain_union(self): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2) s2 = select([table]).where(table.c.id == 3) u1 = union(s1, s2).alias().select() self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) @testing.requires.order_by_col_from_union @testing.requires.parens_in_union_contained_select_w_limit_offset def test_limit_offset_selectable_in_unions(self): table = self.tables.some_table s1 = ( select([table]) .where(table.c.id == 2) .limit(1) .order_by(table.c.id) ) s2 = ( select([table]) .where(table.c.id == 3) .limit(1) .order_by(table.c.id) ) u1 = union(s1, s2).limit(2) self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) 
@testing.requires.parens_in_union_contained_select_wo_limit_offset def test_order_by_selectable_in_unions(self): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2).order_by(table.c.id) s2 = select([table]).where(table.c.id == 3).order_by(table.c.id) u1 = union(s1, s2).limit(2) self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) def test_distinct_selectable_in_unions(self): table = self.tables.some_table s1 = select([table]).where(table.c.id == 2).distinct() s2 = select([table]).where(table.c.id == 3).distinct() u1 = union(s1, s2).limit(2) self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) @testing.requires.parens_in_union_contained_select_w_limit_offset def test_limit_offset_in_unions_from_alias(self): table = self.tables.some_table s1 = ( select([table]) .where(table.c.id == 2) .limit(1) .order_by(table.c.id) ) s2 = ( select([table]) .where(table.c.id == 3) .limit(1) .order_by(table.c.id) ) # this necessarily has double parens u1 = union(s1, s2).alias() self._assert_result( u1.select().limit(2).order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)] ) def test_limit_offset_aliased_selectable_in_unions(self): table = self.tables.some_table s1 = ( select([table]) .where(table.c.id == 2) .limit(1) .order_by(table.c.id) .alias() .select() ) s2 = ( select([table]) .where(table.c.id == 3) .limit(1) .order_by(table.c.id) .alias() .select() ) u1 = union(s1, s2).limit(2) self._assert_result(u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) class ExpandingBoundInTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("x", Integer), Column("y", Integer), Column("z", String(50)), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "x": 1, "y": 2, "z": "z1"}, {"id": 2, "x": 2, "y": 3, "z": "z2"}, {"id": 3, "x": 3, "y": 4, "z": "z3"}, {"id": 4, "x": 4, "y": 5, "z": "z4"}, ], ) def _assert_result(self, select, result, params=()): eq_(config.db.execute(select, params).fetchall(), result) def test_multiple_empty_sets(self): # test that any anonymous aliasing used by the dialect # is fine with duplicates table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.x.in_(bindparam("q", expanding=True))) .where(table.c.y.in_(bindparam("p", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [], params={"q": [], "p": []}) @testing.requires.tuple_in def test_empty_heterogeneous_tuples(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where( tuple_(table.c.x, table.c.z).in_( bindparam("q", expanding=True) ) ) .order_by(table.c.id) ) self._assert_result(stmt, [], params={"q": []}) @testing.requires.tuple_in def test_empty_homogeneous_tuples(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where( tuple_(table.c.x, table.c.y).in_( bindparam("q", expanding=True) ) ) .order_by(table.c.id) ) self._assert_result(stmt, [], params={"q": []}) def test_bound_in_scalar(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.x.in_(bindparam("q", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [(2,), (3,), (4,)], params={"q": [2, 3, 4]}) @testing.requires.tuple_in def test_bound_in_two_tuple(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where( tuple_(table.c.x, table.c.y).in_( bindparam("q", expanding=True) ) ) .order_by(table.c.id) ) self._assert_result( stmt, [(2,), (3,), 
(4,)], params={"q": [(2, 3), (3, 4), (4, 5)]} ) @testing.requires.tuple_in def test_bound_in_heterogeneous_two_tuple(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where( tuple_(table.c.x, table.c.z).in_( bindparam("q", expanding=True) ) ) .order_by(table.c.id) ) self._assert_result( stmt, [(2,), (3,), (4,)], params={"q": [(2, "z2"), (3, "z3"), (4, "z4")]}, ) def test_empty_set_against_integer(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.x.in_(bindparam("q", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [], params={"q": []}) def test_empty_set_against_integer_negation(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.x.notin_(bindparam("q", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [(1,), (2,), (3,), (4,)], params={"q": []}) def test_empty_set_against_string(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.z.in_(bindparam("q", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [], params={"q": []}) def test_empty_set_against_string_negation(self): table = self.tables.some_table stmt = ( select([table.c.id]) .where(table.c.z.notin_(bindparam("q", expanding=True))) .order_by(table.c.id) ) self._assert_result(stmt, [(1,), (2,), (3,), (4,)], params={"q": []}) def test_null_in_empty_set_is_false(self): stmt = select( [ case( [ ( null().in_( bindparam("foo", value=(), expanding=True) ), true(), ) ], else_=false(), ) ] ) in_(config.db.execute(stmt).fetchone()[0], (False, 0)) class LikeFunctionsTest(fixtures.TablesTest): __backend__ = True run_inserts = "once" run_deletes = None @classmethod def define_tables(cls, metadata): Table( "some_table", metadata, Column("id", Integer, primary_key=True), Column("data", String(50)), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.some_table.insert(), [ {"id": 1, "data": "abcdefg"}, {"id": 2, "data": "ab/cdefg"}, {"id": 3, "data": "ab%cdefg"}, {"id": 4, "data": "ab_cdefg"}, {"id": 5, "data": "abcde/fg"}, {"id": 6, "data": "abcde%fg"}, {"id": 7, "data": "ab#cdefg"}, {"id": 8, "data": "ab9cdefg"}, {"id": 9, "data": "abcde#fg"}, {"id": 10, "data": "abcd9fg"}, ], ) def _test(self, expr, expected): some_table = self.tables.some_table with config.db.connect() as conn: rows = { value for value, in conn.execute( select([some_table.c.id]).where(expr) ) } eq_(rows, expected) def test_startswith_unescaped(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c"), {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}) def test_startswith_autoescape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c", autoescape=True), {3}) def test_startswith_sqlexpr(self): col = self.tables.some_table.c.data self._test( col.startswith(literal_column("'ab%c'")), {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, ) def test_startswith_escape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab##c", escape="#"), {7}) def test_startswith_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.startswith("ab%c", autoescape=True, escape="#"), {3}) self._test(col.startswith("ab#c", autoescape=True, escape="#"), {7}) def test_endswith_unescaped(self): col = self.tables.some_table.c.data self._test(col.endswith("e%fg"), {1, 2, 3, 4, 5, 6, 7, 8, 9}) def test_endswith_sqlexpr(self): col = self.tables.some_table.c.data self._test( col.endswith(literal_column("'e%fg'")), {1, 2, 3, 4, 5, 6, 7, 8, 9} ) def test_endswith_autoescape(self): 
col = self.tables.some_table.c.data self._test(col.endswith("e%fg", autoescape=True), {6}) def test_endswith_escape(self): col = self.tables.some_table.c.data self._test(col.endswith("e##fg", escape="#"), {9}) def test_endswith_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.endswith("e%fg", autoescape=True, escape="#"), {6}) self._test(col.endswith("e#fg", autoescape=True, escape="#"), {9}) def test_contains_unescaped(self): col = self.tables.some_table.c.data self._test(col.contains("b%cde"), {1, 2, 3, 4, 5, 6, 7, 8, 9}) def test_contains_autoescape(self): col = self.tables.some_table.c.data self._test(col.contains("b%cde", autoescape=True), {3}) def test_contains_escape(self): col = self.tables.some_table.c.data self._test(col.contains("b##cde", escape="#"), {7}) def test_contains_autoescape_escape(self): col = self.tables.some_table.c.data self._test(col.contains("b%cd", autoescape=True, escape="#"), {3}) self._test(col.contains("b#cd", autoescape=True, escape="#"), {7}) class ComputedColumnTest(fixtures.TablesTest): __backend__ = True __requires__ = ("computed_columns",) @classmethod def define_tables(cls, metadata): Table( "square", metadata, Column("id", Integer, primary_key=True), Column("side", Integer), Column("area", Integer, Computed("side * side")), Column("perimeter", Integer, Computed("4 * side")), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.square.insert(), [{"id": 1, "side": 10}, {"id": 10, "side": 42}], ) def test_select_all(self): with config.db.connect() as conn: res = conn.execute( select([text("*")]) .select_from(self.tables.square) .order_by(self.tables.square.c.id) ).fetchall() eq_(res, [(1, 10, 100, 40), (10, 42, 1764, 168)]) def test_select_columns(self): with config.db.connect() as conn: res = conn.execute( select( [self.tables.square.c.area, self.tables.square.c.perimeter] ) .select_from(self.tables.square) .order_by(self.tables.square.c.id) ).fetchall() eq_(res, [(100, 40), (1764, 168)]) class ExistsTest(fixtures.TablesTest): __backend__ = True @classmethod def define_tables(cls, metadata): Table( "stuff", metadata, Column("id", Integer, primary_key=True), Column("data", String(50)), ) @classmethod def insert_data(cls, connection): connection.execute( cls.tables.stuff.insert(), [ {"id": 1, "data": "some data"}, {"id": 2, "data": "some data"}, {"id": 3, "data": "some data"}, {"id": 4, "data": "some other data"}, ], ) def test_select_exists(self, connection): stuff = self.tables.stuff eq_( connection.execute( select([literal(1)]).where( exists().where(stuff.c.data == "some data") ) ).fetchall(), [(1,)], ) def test_select_exists_false(self, connection): stuff = self.tables.stuff eq_( connection.execute( select([literal(1)]).where( exists().where(stuff.c.data == "no data") ) ).fetchall(), [], ) class IsOrIsNotDistinctFromTest(fixtures.TablesTest): __backend__ = True __requires__ = ("supports_is_distinct_from",) @classmethod def define_tables(cls, metadata): Table( "is_distinct_test", metadata, Column("id", Integer, primary_key=True), Column("col_a", Integer, nullable=True), Column("col_b", Integer, nullable=True), ) @testing.combinations( ("both_int_different", 0, 1, 1), ("both_int_same", 1, 1, 0), ("one_null_first", None, 1, 1), ("one_null_second", 0, None, 1), ("both_null", None, None, 0), id_="iaaa", argnames="col_a_value, col_b_value, expected_row_count_for_is", ) def test_is_or_isnot_distinct_from( self, col_a_value, col_b_value, expected_row_count_for_is, connection ): tbl = 
self.tables.is_distinct_test connection.execute( tbl.insert(), [{"id": 1, "col_a": col_a_value, "col_b": col_b_value}], ) result = connection.execute( tbl.select(tbl.c.col_a.is_distinct_from(tbl.c.col_b)) ).fetchall() eq_( len(result), expected_row_count_for_is, ) expected_row_count_for_isnot = ( 1 if expected_row_count_for_is == 0 else 0 ) result = connection.execute( tbl.select(tbl.c.col_a.isnot_distinct_from(tbl.c.col_b)) ).fetchall() eq_( len(result), expected_row_count_for_isnot, )<|fim▁end|>
<|file_name|>pid.py<|end_file_name|><|fim▁begin|>import os<|fim▁hole|>print(os.path.dirname(os.path.abspath(__file__)))<|fim▁end|>
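
A minimal sketch of the idiom in the pid.py row above: the script's own directory versus the process working directory. The variable names are illustrative only:

import os

# Directory containing the script file itself; stable no matter where
# the interpreter was launched from.
script_dir = os.path.dirname(os.path.abspath(__file__))

# Directory the process was started in; depends on the caller's cwd.
launch_dir = os.getcwd()

print(script_dir)
print(launch_dir)
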
<|file_name|>ajax_filtered_fields.js<|end_file_name|><|fim▁begin|>$(function() { $(".ajax_filter_choice").click(function() { $(this).parent().siblings().css("font-weight", "normal"); $(this).parent().css("font-weight","bold"); }) }); ajax_filtered_fields = { request_url: "/ajax_filtered_fields/json_index/", data_loaded: "data_loaded", _appendOption: function(obj, selector) { // append a json data row as an option to the selector var option = $('<option>' + obj[1] + '</option>'); option.attr({value: obj[0]}); option.appendTo(selector); return option; }, _removeOptions: function(selector) { // remove all options from selector selector.children("option").each(function(i) { $(this).remove(); });<|fim▁hole|> getManyToManyJSON: function(element_id, app_label, object_name, lookup_string, select_related) { // manage the ManyToMany ajax request var selector_from = $("#" + element_id + "_from"); var selector_to = $("#" + element_id + "_to"); $("#" + element_id + "_input").val(""); selector_from.attr("disabled", true); selector_to.attr("disabled", true); this._removeOptions(selector_from); $.getJSON(this.request_url, { app_label: app_label, object_name: object_name, lookup_string: lookup_string, select_related: select_related}, function(data){ $.each(data, function(i, obj){ var option_is_selected = selector_to.children("option[value='" + obj[0] + "']").length; if (!option_is_selected) { ajax_filtered_fields._appendOption(obj, selector_from); }; }); SelectBox.init(element_id + "_from"); selector_from.attr("disabled", false); selector_to.attr("disabled", false); selector_from.trigger(ajax_filtered_fields.data_loaded); }); }, getForeignKeyJSON: function(element_id, app_label, object_name, lookup_string, select_related) { // manage the ForeignKey ajax request var selector = $("#" + element_id); var hidden = $("#hidden-" + element_id); $("#" + element_id + "_input").val(""); selector.attr("disabled", true); this._removeOptions(selector); $.getJSON(this.request_url, { app_label: app_label, object_name: object_name, lookup_string: lookup_string, select_related: select_related}, function(data){ var selection = hidden.val(); ajax_filtered_fields._appendOption(new Array("", "---------"), selector); $.each(data, function(i, obj){ ajax_filtered_fields._appendOption(obj, selector); }); selector.children("option[value='" + selection + "']").attr("selected", "selected"); selector.attr("disabled", false); SelectBox.init(element_id); ajax_filtered_fields.bindForeignKeyOptions(element_id); selector.trigger(ajax_filtered_fields.data_loaded); }); }, bindForeignKeyOptions: function(element_id) { // bind the dummy options to the hidden field that do the work var selector = $("#" + element_id); var hidden = $("#hidden-" + element_id); selector.change(function(e) { hidden.val($(this).val()); }); } };<|fim▁end|>
},
<|file_name|>KonamiCodeManager.js<|end_file_name|><|fim▁begin|>class KonamiCodeManager { constructor() { this._pattern = "38384040373937396665";<|fim▁hole|> attach(root, callback) { if (root instanceof Element) { root.removeEventListener('keydown', this._boundCheckKeyCodePattern); root.addEventListener('keydown', this._boundCheckKeyCodePattern); this._callback = callback; } } _checkKeyCodePattern(e) { if (e) { this._keyCodeCache += e.keyCode; if (this._keyCodeCache.length === this._pattern.length) { if (this._keyCodeCache === this._pattern) { console.log('KonamiCode passed, let\'s show some easter eggs :)'); this._callback(); } this._keyCodeCache = ''; } else if (!this._pattern.match(this._keyCodeCache)) { this._keyCodeCache = ''; } } } } module.exports = new KonamiCodeManager();<|fim▁end|>
this._keyCodeCache = ''; this._callback = () => {}; this._boundCheckKeyCodePattern = this._checkKeyCodePattern.bind(this); }
<|file_name|>TargetInfoVisitor.java<|end_file_name|><|fim▁begin|>/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.attribute.annotation.target.visitor; import proguard.classfile.*; import proguard.classfile.attribute.CodeAttribute; import proguard.classfile.attribute.annotation.*; import proguard.classfile.attribute.annotation.target.*; /** * This interface specifies the methods for a visitor of <code>TargetInfo</code> * objects. * * @author Eric Lafortune */ public interface TargetInfoVisitor { public void visitTypeParameterTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo); public void visitTypeParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo); public void visitSuperTypeTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, SuperTypeTargetInfo superTypeTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, Field field, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitEmptyTargetInfo( Clazz clazz, Field field, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo); public void visitEmptyTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo); public void visitFormalParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, FormalParameterTargetInfo formalParameterTargetInfo); public void visitThrowsTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, ThrowsTargetInfo throwsTargetInfo); public void visitLocalVariableTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo); public void visitCatchTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, CatchTargetInfo catchTargetInfo);<|fim▁hole|><|fim▁end|>
public void visitOffsetTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, OffsetTargetInfo offsetTargetInfo); public void visitTypeArgumentTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, TypeArgumentTargetInfo typeArgumentTargetInfo); }
<|file_name|>BlogPostsFetcher.java<|end_file_name|><|fim▁begin|>package com.jvm_bloggers.core.data_fetching.blog_posts; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.routing.RoundRobinPool; import com.jvm_bloggers.core.data_fetching.blogs.PreventConcurrentExecutionSafeguard; import com.jvm_bloggers.core.rss.SyndFeedProducer; import com.jvm_bloggers.entities.blog.Blog; import com.jvm_bloggers.entities.blog.BlogRepository; import com.jvm_bloggers.entities.metadata.Metadata; import com.jvm_bloggers.entities.metadata.MetadataKeys; import com.jvm_bloggers.entities.metadata.MetadataRepository; import com.jvm_bloggers.utils.NowProvider; import lombok.NoArgsConstructor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Component; @Component @NoArgsConstructor public class BlogPostsFetcher { private BlogRepository blogRepository; private ActorRef rssCheckingActor; private MetadataRepository metadataRepository; private NowProvider nowProvider; private PreventConcurrentExecutionSafeguard concurrentExecutionSafeguard = new PreventConcurrentExecutionSafeguard(); @Autowired public BlogPostsFetcher(ActorSystem actorSystem, BlogRepository blogRepository, BlogPostService blogPostService, SyndFeedProducer syndFeedFactory, MetadataRepository metadataRepository, NowProvider nowProvider) {<|fim▁hole|> final ActorRef blogPostStoringActor = actorSystem .actorOf(NewBlogPostStoringActor.props(blogPostService)); rssCheckingActor = actorSystem.actorOf(new RoundRobinPool(10) .props(RssCheckingActor.props(blogPostStoringActor, syndFeedFactory)), "rss-checkers"); this.metadataRepository = metadataRepository; this.nowProvider = nowProvider; } void refreshPosts() { concurrentExecutionSafeguard.preventConcurrentExecution(this::startFetchingProcess); } @Async("singleThreadExecutor") public void refreshPostsAsynchronously() { refreshPosts(); } private Void startFetchingProcess() { blogRepository.findAllActiveBlogs() .filter(Blog::isActive) .forEach(person -> rssCheckingActor.tell(new RssLink(person), ActorRef.noSender())); final Metadata dateOfLastFetch = metadataRepository .findByName(MetadataKeys.DATE_OF_LAST_FETCHING_BLOG_POSTS); dateOfLastFetch.setValue(nowProvider.now().toString()); metadataRepository.save(dateOfLastFetch); return null; } public boolean isFetchingProcessInProgress() { return concurrentExecutionSafeguard.isExecuting(); } }<|fim▁end|>
this.blogRepository = blogRepository;
<|file_name|>city-mentions.py<|end_file_name|><|fim▁begin|>import json import os from commonfunctions import commonfunctions as cf dir = cf.working_directory transcripts = [] for x in os.listdir(dir): with open(os.path.join(dir, x)) as f: transcripts.append(json.load(f)) with open('worldcitiesout.json', 'r') as f: city_dicts = json.load(f) city_dicts = [x for x in city_dicts if x['Country'] == 'us'] results = [] for transcript in transcripts: description = transcript['description'] date = transcript['date'] for text in transcript['text_by_speakers']: for city_dict in city_dicts: index = text['text'].lower().find(" " + city_dict['City'] + " ") if index > 0: if index < 90: from_index = 0 else: from_index = index - 100 if len(text['text']) < index + 90: to_index = len(text['text']) else: to_index = index + 100 result = ({'speaker': text['speaker'], 'city': city_dict,<|fim▁hole|> print result results.append(result) with open('city-mentions.json', 'w') as f: json.dump(results, f)<|fim▁end|>
'debate': {'description': description, 'date': date, 'context': text['text'][from_index:to_index]}})
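
A self-contained sketch of the context-window arithmetic in the city-mentions.py row above; the clipping bounds follow the original (roughly 100 characters either side of the match) and the sample text is made up:

def context_window(text, index, radius=100):
    # Clip the window to the ends of the string, mirroring the
    # from_index/to_index logic in city-mentions.py.
    start = max(0, index - radius)
    end = min(len(text), index + radius)
    return text[start:end]

sample = "tonight the candidates meet in springfield for the third debate"
print(context_window(sample, sample.find(" springfield ")))
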
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ''' Sphinx setting. ''' import os.path import sys sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) extensions = [ 'mamchecker.inl', 'sphinx.ext.mathjax', 'sphinxcontrib.tikz', 'sphinxcontrib.texfigure'] <|fim▁hole|># i.e. same as conf.py and with page.html containing only {{body}} templates_path = ['.'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. source_encoding = 'utf-8' default_role = 'math' # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' tikz_proc_suite = 'ImageMagick' tikz_tikzlibraries = 'arrows,snakes,backgrounds,patterns,matrix,shapes,fit,calc,shadows,plotmarks' latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'a4paper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', 'preamble': '''\\usepackage{amsfonts}\\usepackage{amssymb}\\usepackage{amsmath}\\usepackage{siunitx}\\usepackage{tikz}''' + ''' \\usetikzlibrary{''' + tikz_tikzlibraries + '''}''' } # latex # sphinx-build[2] -b latex -c . -D master_doc=<rst-file> -D project=<rst-file> <src-dir> <build-dir> # sphinx-build2 -b latex -c . -D master_doc=vector -D project=vector r/b _build # html # sphinx-build[2] -b html -c . -D master_doc=<rst-file> -D project=<rst-file> <src-dir> <build-dir> # sphinx-build2 -c . -D master_doc=vector -D project=vector r/b _build<|fim▁end|>
<|file_name|>named lists.spec.ts<|end_file_name|><|fim▁begin|>import * as jssm from '../jssm'; // TODO these tests only assert non-crashing // That amn't very good, sah test.todo('Assert better than non-crashing in named lists.spec.ts'); describe('named lists', () => { test('alone', () => expect(() => { jssm.parse('&b: [a c e];'); }).not.toThrow() ); test('before trans', () => expect(() => { jssm.parse('&b: [a c e]; a->c;'); }).not.toThrow() ); test('after trans', () => expect(() => { jssm.parse('a->c; &b: [a c e];'); }).not.toThrow() );<|fim▁hole|><|fim▁end|>
});
<|file_name|>volread.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2012, Almar Klein # # Visvis is distributed under the terms of the (new) BSD License. # The full license can be found in 'license.txt'. import visvis as vv import numpy as np import os # Try importing imageio imageio = None try: import imageio except ImportError: pass def volread(filename): """ volread(filename) Read volume from a file. If filename is 'stent', read a dedicated test dataset. For reading any other kind of volume, the imageio package is required. """ if filename == 'stent': # Get full filename path = vv.misc.getResourceDir() filename2 = os.path.join(path, 'stent_vol.ssdf') if os.path.isfile(filename2):<|fim▁hole|> else: raise IOError("File '%s' does not exist." % filename) # Load s = vv.ssdf.load(filename) return s.vol.astype('int16') * s.colorscale elif imageio is not None: return imageio.volread(filename) else: raise RuntimeError("visvis.volread needs the imageio package to read arbitrary files.") if __name__ == '__main__': vol = vv.volread('stent') t = vv.volshow(vol) t.renderStyle = 'mip' # maximum intensity projection (is the default)<|fim▁end|>
filename = filename2
<|file_name|>cEngine.__pluginTemplate__.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ 'use strict'; /*global cEngine */ /*eslint no-console:0*/ (function (cEngine) { cEngine.extend('__name__', { create: function create(config) { config = config || {}; var __name__ = { cEnginePlugin: { name: '__name__', version: '0.0.1' }, init: function init(engine) { console.log('init', engine); }, start: function start() { console.log('start'); }, stop: function stop() { console.log('stop'); }, preStep: function preStep(context, width, height, dt) { console.log('preStep', context, width, height, dt); }, postStep: function postStep(context, width, height, dt) { console.log('postStep', context, width, height, dt); }, <|fim▁hole|> } }; return __name__; } }); })(cEngine); },{}]},{},[1])<|fim▁end|>
destroy: function destroy() { console.log('destroy');
<|file_name|>test_metrics.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Red Hat, Inc. # # This software is licensed to you under the GNU General Public # License as published by the Free Software Foundation; either version # 2 of the License (GPLv2) or (at your option) any later version. # There is NO WARRANTY for this software, express or implied, # including the implied warranties of MERCHANTABILITY, # NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should # have received a copy of GPLv2 along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. from unittest import TestCase from datetime import datetime from mock import patch from gofer.metrics import Timer, timestamp class TestUtils(TestCase): @patch('gofer.metrics.datetime') def test_timestamp(self, dt): dt.utcnow.return_value = datetime(2014, 12, 25, 9, 30, 0) ts = timestamp() self.assertEqual(ts, '2014-12-25T09:30:00Z') class TestTimer(TestCase): def test_init(self): t = Timer() self.assertEqual(t.started, 0) self.assertEqual(t.stopped, 0) @patch('time.time') def test_start(self, _time): _time.return_value = 10.0 t = Timer() t.start() self.assertEqual(t.started, 10.0) self.assertEqual(t.stopped, 0) @patch('time.time') def test_stop(self, _time): _time.return_value = 20.0 t = Timer() t.started = 10.0 t.stop() self.assertEqual(t.started, 10.0) self.assertEqual(t.stopped, 20.0) def duration(self): t = Timer() t.started = 10.0 t.stopped = 100.0 self.assertEqual(t.duration(), 90.0) def test_unicode(self): t = Timer() # not started self.assertEqual(unicode(t), 'not-running') # started but not stopped t.started = 1 self.assertEqual(unicode(t), 'started: %d (running)' % t.started) # milliseconds t.started = 0.10 t.stopped = 0.25<|fim▁hole|> # seconds t.started = 10.0 t.stopped = 25.0 self.assertEqual(unicode(t), '15.000 (seconds)') # minutes t.started = 10.0 t.stopped = 100.0 self.assertEqual(unicode(t), '1.500 (minutes)') def test_str(self): t = Timer() # not started self.assertEqual(str(t), 'not-running') # started but not stopped t.started = 1 self.assertEqual(str(t), 'started: %d (running)' % t.started) # milliseconds t.started = 0.10 t.stopped = 0.25 self.assertEqual(str(t), '150 (ms)') # seconds t.started = 10.0 t.stopped = 25.0 self.assertEqual(str(t), '15.000 (seconds)') # minutes t.started = 10.0 t.stopped = 100.0 self.assertEqual(str(t), '1.500 (minutes)')<|fim▁end|>
self.assertEqual(unicode(t), '150 (ms)')
<|file_name|>rebuild_index.py<|end_file_name|><|fim▁begin|># encoding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals from django.core.management import call_command from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Completely rebuilds the search index by removing the old data and then updating." def add_arguments(self, parser): parser.add_argument( '--noinput', action='store_false', dest='interactive', default=True, help='If provided, no prompts will be issued to the user and the data will be wiped out.' ) parser.add_argument( '-u', '--using', action='append', default=[], help='Update only the named backend (can be used multiple times). ' 'By default all backends will be updated.' ) parser.add_argument( '-k', '--workers', default=0, type=int, help='Allows for the use multiple workers to parallelize indexing. Requires multiprocessing.' ) parser.add_argument( '--nocommit', action='store_false', dest='commit', default=True, help='Will pass commit=False to the backend.'<|fim▁hole|> call_command('clear_index', **options) call_command('update_index', **options)<|fim▁end|>
) def handle(self, **options):
<|file_name|>app.js<|end_file_name|><|fim▁begin|>//var application = require("application"); //application.mainModule = "main-page"; //application.cssFile = "./app.css"; var map = new Map(); map.set("a", "b"); log(map); application.start();<|fim▁hole|>//application.Run(new System.Windows.Window());<|fim▁end|>
//var application = new System.Windows.Application();
<|file_name|>comp_table.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 """ COSMO TECHNICAL TESTSUITE General purpose script to compare two files containing tables Only lines with given table pattern are considered """ # built-in modules import os, sys, string # information __author__ = "Xavier Lapillonne" __maintainer__ = "[email protected]" def cmp_table(file1,file2,colpattern,minval,threshold,verbose=1,maxcompline=-1): # General purpose script to compare two files containing tables # Only lines with given table column pattern. Column to be compared are marked with c # column to discard with x #init ncomp=0 nerror=0 lerror=False epsilon=1e-16 #used to avoid division by zero in case minval is zero # check file existence if not(os.path.exists(file1)): print('File %s does not exist' %(file1)) return -1 elif not(os.path.exists(file2)): print('File %s does not exist' %(file2)) print('File '+file2+' does not exist') return -1 # convert input colpattern=[x=='c' for x in list(colpattern)] threshold=float(threshold) minval=float(minval) # open file data1=open(file1).readlines() data2=open(file2).readlines() # get max record nd1=len(data1) nd2=len(data2) # check that files are not empty if nd1==0: print('file %s is empty!' %(file1)) return -1 if nd2==0: print('file %s is empty!' %(file2)) return -1 if nd1!=nd2 and verbose>1: print('Warning: %s and %s have different size, comparing commun set only \n' %(file1,file2)) ncdata=min(nd1,nd2) if (maxcompline>0): ncdata=min(ncdata,maxcompline) # Iterates through the lines for il in range(ncdata): l1=data1[il].split() l2=data2[il].split() l1match=matchColPattern(l1,colpattern) l2match=matchColPattern(l2,colpattern) # compare values if both lines are compatible if l1match and l2match: for ic in range(len(colpattern)): if colpattern[ic]: v1=float(l1[ic]) v2=float(l2[ic]) val_abs_max=max(abs(v1),abs(v2)) if val_abs_max > minval: ncomp+=1 diff=abs(v1-v2)/(val_abs_max+epsilon) if diff>threshold: nerror+=1 # Print error if verbose>1: print('Error %2.2e above %2.2e thresold at line %i, col %i' %(diff,threshold,il+1,ic+1)) print('> %s' %(file1)) print(data1[il]) print('< %s' %(file2)) print(data2[il]) #save line for first error if not lerror: differ=diff linerr=il+1 colerr=ic+1 linerr1=data1[il] linerr2=data2[il] lerror=True if ncomp==0: print('Warning :no line to compare') nerror=-2 if lerror and verbose>0: print('Compared values: %i, errors above threshold: %i ; %i %% ' %(ncomp,nerror,nerror*100./ncomp)) if verbose==1: print('First error %2.2e above %2.2e thresold at line %i, col %i' %(differ,threshold,linerr,colerr)) print('> %s' %(file1)) print(linerr1) print('< %s' %(file2)) print(linerr2) return nerror #---------------------------------------------------------------------------- # Local functions def matchColPattern(line,colpattern): if len(line)!=len(colpattern): return False try: for i in range(len(colpattern)): if colpattern[i]: f=float(line[i]) except ValueError: return False return True #----------------------------------- #execute as a script if __name__ == "__main__": if len(sys.argv)==6: cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \ sys.argv[5]) elif len(sys.argv)==7: cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \ sys.argv[5],sys.argv[6]) elif len(sys.argv)==8: cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \ sys.argv[5],sys.argv[6],sys.argv[7]) <|fim▁hole|> General purpose script to compare two files containing tables Only lines with given table column pattern. 
Column to be compared must be numbers are marked with c column to discard with x colpattern c for compare or x for ignore, ex: xccx discard first and last column of a 4 column table ''')<|fim▁end|>
else: print('''USAGE : ./comp_table file1 file2 colpattern minval threshold [verbose maxcompline]
<|file_name|>base.js<|end_file_name|><|fim▁begin|><|fim▁hole|>*/ import Ember from 'ember'; /** Class implementing base stylization for markers. @class BaseMarkerStyle */ export default Ember.Object.extend({ /** Gets default style settings. @method getDefaultStyleSettings @return {Object} Hash containing default style settings. */ getDefaultStyleSettings() { return null; }, /** Applies layer-style to the specified leaflet layer. @method renderOnLeafletMarker @param {Object} options Method options. @param {<a =ref="http://leafletjs.com/reference-1.2.0.html#marker">L.Marker</a>} options.marker Leaflet marker to which marker-style must be applied. @param {Object} options.style Hash containing style settings. */ renderOnLeafletMarker({ marker, style }) { throw `Method 'renderOnLeafletMarker' isn't implemented in 'base' marker-style`; }, /** Renderes layer-style preview on the specified canvas element. @method renderOnCanvas @param {Object} options Method options. @param {<a =ref="https://developer.mozilla.org/ru/docs/Web/HTML/Element/canvas">Canvas</a>} options.canvas Canvas element on which marker-style preview must be rendered. @param {Object} options.style Hash containing style settings. @param {Object} [options.target = 'preview'] Render target ('preview' or 'legend'). */ renderOnCanvas({ canvas, style, target }) { throw `Method 'renderOnCanvas' isn't implemented in 'base' marker-style`; } });<|fim▁end|>
/** @module ember-flexberry-gis
<|file_name|>sixth.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # bigcinemas <|fim▁hole|>class InvalidAge(Exception): def __init__(self,age): self.age = age def validate_age(age): if age < 18: raise InvalidAge(age) else: return "Welcome to the movies!!" age = int(raw_input("please enter your age:")) #print validate_age(age) try: validate_age(age) # except Exception as e: except InvalidAge as e: print "Buddy!! you are very young at {}!! Grow up a bit.".format(e.age) else: print validate_age(age)<|fim▁end|>
<|file_name|>parameters.py<|end_file_name|><|fim▁begin|>from property import * # Neuron common parameters iaf_neuronparams = {'E_L': -70., 'V_th': -50., 'V_reset': -67., 'C_m': 2., 't_ref': 2., 'V_m': -60., 'tau_syn_ex': 1., 'tau_syn_in': 1.33} # Synapse common parameters STDP_synapseparams = { 'model': 'stdp_synapse', 'tau_m': {'distribution': 'uniform', 'low': 15., 'high': 25.}, 'alpha': {'distribution': 'normal_clipped', 'low': 0.5, 'mu': 5.0, 'sigma': 1.0}, 'delay': {'distribution': 'uniform', 'low': 0.8, 'high': 2.5}, 'lambda': 0.5 } # Glutamate synapse STDP_synparams_Glu = dict({'delay': {'distribution': 'uniform', 'low': 1, 'high': 1.3}, 'weight': w_Glu, 'Wmax': 70.}, **STDP_synapseparams) # GABA synapse STDP_synparams_GABA = dict({'delay': {'distribution': 'uniform', 'low': 1., 'high': 1.3}, 'weight': w_GABA, 'Wmax': -60.}, **STDP_synapseparams) # Acetylcholine synapse STDP_synparams_ACh = dict({'delay': {'distribution': 'uniform', 'low': 1, 'high': 1.3}, 'weight': w_ACh, 'Wmax': 70.}, **STDP_synapseparams) # Dopamine synapse common parameter NORA_synparams = {'delay': 1.} # Dopamine exhibitory synapse NORA_synparams_ex = dict({'weight': w_NR_ex, 'Wmax': 100.,<|fim▁hole|> 'Wmin': 85.}, **NORA_synparams) # Dopamine inhibitory synapse NORA_synparams_in = dict({'weight': w_NR_in, 'Wmax': -100., 'Wmin': -85.}, **NORA_synparams) # Create volume transmitters # Dictionary of synapses with keys and their parameters types = {GABA: (STDP_synparams_GABA, w_GABA, 'GABA'), ACh: (STDP_synparams_ACh, w_ACh, 'Ach'), Glu: (STDP_synparams_Glu, w_Glu, 'Glu'), DA_ex: (NORA_synparams_ex, w_NR_ex, 'DA_ex', nora_model_ex), DA_in: (NORA_synparams_in, w_NR_in, 'DA_in', nora_model_in)} # Parameters for generator links static_syn = { 'model': 'static_synapse', 'weight': w_Glu * 5, 'delay': pg_delay } # Connection parameters conn_dict = {'rule': 'all_to_all', 'multapses': True} # Device parameters multimeter_param = {'to_memory': True, 'to_file': False, 'withtime': True, 'interval': 0.1, 'record_from': ['V_m'], 'withgid': True} detector_param = {'label': 'spikes', 'withtime': True, 'withgid': True, 'to_file': False, 'to_memory': True, 'scientific': True}<|fim▁end|>
<|file_name|>graphviz.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The nom-lua project developers<|fim▁hole|>// option. This file may not be copied, modified, or distributed // except according to those terms. extern crate nom_lua; pub fn main() { use std::fs::File; let mut f = File::create("example1.dot").unwrap(); let ast = nom_lua::parse_string("10 / 20 * 30".as_bytes()).unwrap(); println!("{}", ast); ast.graphviz_render(&mut f); }<|fim▁end|>
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
<|file_name|>settestpath.py<|end_file_name|><|fim▁begin|># yum-rhn-plugin - RHN support for yum # # Copyright (C) 2006 Red Hat, Inc.<|fim▁hole|># # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA import sys # Adjust path so we can see the src modules running from branch as well # as test dir: sys.path.insert(0, './') sys.path.insert(0, '../') sys.path.insert(0, '../../')<|fim▁end|>
<|file_name|>aqpkg.ts<|end_file_name|><|fim▁begin|><!DOCTYPE TS><TS> <context> <name>CategoryFilterImpl</name> <message> <source>Category Filter</source> <translation>Filtro de Categorías</translation> </message> <message> <source>Select one or more groups</source> <translation>Seleccione uno o más grupos</translation> </message> </context> <context> <name>DataManager</name> <message> <source>Reading configuration...</source> <translation>Leyendo configuración...</translation> </message> </context> <context> <name>InputDialog</name> <message> <source>&amp;OK</source> <translation>&amp;Ok</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Cancelar</translation> </message> </context> <context> <name>InstallDlgImpl</name> <message> <source>Close</source> <translation>Cerrar</translation> </message> <message> <source>Remove </source> <translation>Borrar </translation> </message> <message> <source>Install </source> <translation>Instalar</translation> </message> <message> <source>Upgrade </source> <translation>Renovar</translation> </message> <message> <source>(ReInstall)</source> <translation>(Reinstalar)</translation> </message> <message> <source>(Upgrade)</source> <translation>(Renovar)</translation> </message> <message> <source>Destination</source> <translation>Destino</translation> </message> <message> <source>Space Avail</source> <translation>Espacio disponible</translation> </message> <message> <source>Output</source> <translation>Salida</translation> </message> <message> <source>Start</source> <translation>Empezar</translation> </message> <message> <source>Options</source> <translation>Opciones</translation> </message> <message> <source>All</source> <translation>Todo</translation> </message> <message> <source>Text</source> <translation>Texto</translation> </message> <message> <source>Abort</source> <translation>Abortar</translation> </message> <message> <source> **** User Clicked ABORT ***</source> <translation> **** Usuario pulsó ABORTAR ****</translation> </message> <message> <source>**** Process Aborted ****</source> <translation>**** Proceso abortado ****</translation> </message> <message> <source>Save output</source> <translation>Guardar salida</translation> </message> <message> <source>Unknown</source> <translation>Desconocido</translation> </message> <message> <source>%1 Kb</source> <translation>%1 Kb</translation> </message> </context> <context> <name>InstallOptionsDlgImpl</name> <message> <source>Options</source> <translation>Opciones</translation> </message> <message> <source>Force Depends</source> <translation>Forzar dependencias</translation> </message> <message> <source>Force Reinstall</source> <translation>Forzar reinstalar</translation> </message> <message> <source>Force Remove</source> <translation>Forzar borrar</translation> </message> <message> <source>Force Overwrite</source> <translation>Forzar sobreescribir</translation> </message> <message> <source>Information Level</source> <translation>Nivel de información</translation> </message> <message> <source>Errors only</source> <translation>Sólo errores</translation> </message> <message> <source>Normal messages</source> <translation>Mensajes normales</translation> </message> <message> <source>Informative messages</source> <translation>Mensajes informativos</translation> </message> <message> <source>Troubleshooting output</source> <translation>Salida de soluciones</translation> </message> </context> <context> <name>Ipkg</name> <message> <source>Dealing with package %1</source> 
<translation>Tratando con el paquete %1</translation> </message> <message> <source>Removing symbolic links... </source> <translation>Eliminando enlaces simbólicos... </translation> </message> <message> <source>Creating symbolic links for %1.</source> <translation>Creando enlaces simbólicos para %1.</translation> </message> <message> <source>Creating symbolic links for %1</source> <translation>Creando enlaces simbólicos para %1</translation> </message> <message> <source>Finished</source> <translation>Finalizado</translation> </message> <message> <source>Removing status entry...</source> <translation>Eliminando entrada de estado...</translation> </message> <message> <source>status file - </source> <translation>fichero de estado - </translation> </message> <message> <source>package - </source> <translation>paquete - </translation> </message> <message> <source>Couldn&apos;t open status file - </source> <translation>No pude abrir fichero de estado - </translation> </message> <message> <source>Couldn&apos;t create tempory status file - </source> <translation type="obsolete">No pude crear fichero temporal de estado - </translation> </message> <message> <source>Couldn&apos;t start ipkg process</source> <translation>No pude iniciar proceso ipkg</translation> </message> <message> <source>Couldn&apos;t start ipkg-link process</source> <translation>Error al iniciar el proceso ipkg-link</translation> </message> <message> <source>Symbolic linking failed! </source> <translation type="unfinished">Error al crear enlace simbólico </translation> </message> <message> <source>Symbolic linking succeeded. </source> <translation type="unfinished">Enlace simbólico hecho con éxitoto </translation> </message> <message> <source>Couldn&apos;t create temporary status file - </source> <translation type="unfinished"></translation> </message> <message> <source>Couldn&apos;t rename temporary status file - </source> <translation type="unfinished"></translation> </message> <message> <source>to status file - </source> <translation type="unfinished"></translation> </message> </context> <context> <name>MainWindow</name> <message> <source>AQPkg - Package Manager</source> <translation>AQPkg - Gestor de Paquetes</translation> </message> <message> <source>Type the text to search for here.</source> <translation>Escriba aquí el texto a buscar.</translation> </message> <message> <source>Click here to hide the Quick Jump toolbar.</source> <translation>Pulse aquí para esconder la barra Salto Rápido.</translation> </message> <message> <source>Update lists</source> <translation>Actualizar listas</translation> </message> <message> <source>Click here to update package lists from servers.</source> <translation>Pulse aquí para actualizar las lista de paquetes de los servidores.</translation> </message> <message> <source>Upgrade</source> <translation>Renovar</translation> </message> <message> <source>Click here to upgrade all installed packages if a newer version is available.</source> <translation>Pulse aquí para actualizar todos los paquetes instalados si se dispone de una nueva versión.</translation> </message> <message> <source>Download</source> <translation>Descargar</translation> </message> <message> <source>Click here to download the currently selected package(s).</source> <translation>Pulse aquí para descargar los paquetes seleccionados actualmente.</translation> </message> <message> <source>Apply changes</source> <translation>Aplicar cambios</translation> </message> <message> <source>Click here to install, remove or upgrade currently 
selected package(s).</source> <translation>Pulse aquí para instalar, borrar o renovar los paquetes instalados actualmente.</translation> </message> <message> <source>Actions</source> <translation>Acciones</translation> </message> <message> <source>Show packages not installed</source> <translation>Mostrar paquetes no instalados</translation> </message> <message> <source>Click here to show packages available which have not been installed.</source> <translation>Pulse aquí para mostrar los paquetes disponibles que no han sido instalados.</translation> </message> <message> <source>Show installed packages</source> <translation>Mostrar paquetes instalados</translation> </message><|fim▁hole|> <message> <source>Click here to show packages currently installed on this device.</source> <translation>Pulse aquí para mostrar los paquetes actualmente instalados en este dispositivo.</translation> </message> <message> <source>Show updated packages</source> <translation>Mostrar paquetes actualizados</translation> </message> <message> <source>Click here to show packages currently installed on this device which have a newer version available.</source> <translation>Pulse aquí para mostrar los paquetes actualmente instalados en este dispositivo que tienen una versión más nueva disponible.</translation> </message> <message> <source>Filter by category</source> <translation>Filtrar por categoría</translation> </message> <message> <source>Click here to list packages belonging to one category.</source> <translation>Pulse aquí para listar paquetes que pertenecen a una categoría.</translation> </message> <message> <source>Set filter category</source> <translation>Fijar categoría del filtro</translation> </message> <message> <source>Click here to change package category to used filter.</source> <translation>Pulse aquí para cambiar la categoría de paquetes que serán filtrados.</translation> </message> <message> <source>Find</source> <translation>Buscar</translation> </message> <message> <source>Click here to search for text in package names.</source> <translation>Pulse aquí para buscar texto en el nombre de los paquetes.</translation> </message> <message> <source>Find next</source> <translation>Buscar siguiente</translation> </message> <message> <source>Click here to find the next package name containing the text you are searching for.</source> <translation>Pulse aquí para buscar el siguiente nombre de paquete que contenga el texto que está buscando.</translation> </message> <message> <source>Quick Jump keypad</source> <translation>Teclas de Salto Rápido</translation> </message> <message> <source>Click here to display/hide keypad to allow quick movement through the package list.</source> <translation>Pulse aquí para mostrar/esconder el teclado que permite moverse rápidamente a través de la lista de paquetes.</translation> </message> <message> <source>View</source> <translation>Ver</translation> </message> <message> <source>Configure</source> <translation>Configurar</translation> </message> <message> <source>Click here to configure this application.</source> <translation>Pulse aquí para configurar esta aplicación.</translation> </message> <message> <source>Click here to hide the find toolbar.</source> <translation>Pulse aquí para esconder la barra de búsqueda.</translation> </message> <message> <source>Servers:</source> <translation>Servidores:</translation> </message> <message> <source>Click here to select a package feed.</source> <translation>Pulse aquí para seleccionar una fuente de paquetes.</translation> </message> 
<message> <source>Packages</source> <translation>Paquetes</translation> </message> <message> <source>This is a listing of all packages for the server feed selected above. A blue dot next to the package name indicates that the package is currently installed. A blue dot with a star indicates that a newer version of the package is available from the server feed. Click inside the box at the left to select a package.</source> <translation>Ésta es una lista de todos los paquetes del servidor selecionado arriba. Un punto azul junto al nombre del paquete indica que está instalado actualmente. Un punto azul con una estrella indica que hay una versión más moderna disponible. Pulse en el recuadro de la izquierda para seleccionar un paquete.</translation> </message> <message> <source>Remove</source> <translation>Borrar</translation> </message> <message> <source>Click here to uninstall the currently selected package(s).</source> <translation>Pulse aquí para desinstalar los paquetes seleccionados.</translation> </message> <message> <source>Building server list: <byte value="x9"/>%1</source> <translation>Construyendo lista del servidor: <byte value="x9"/>%1</translation> </message> <message> <source>Building package list for: <byte value="x9"/>%1</source> <translation>Construyendo lista de paquetes para: <byte value="x9"/>%1</translation> </message> <message> <source>Refreshing server package lists</source> <translation>Refrescando listas de paquetes del servidor</translation> </message> <message> <source>WARNING: Upgrading while Opie/Qtopia is running is NOT recommended! Are you sure? </source> <translation>ATENCIÓN: ¡No se recomienda actualizar mientras Opie/Qtopia está corriendo! ¿Está seguro?</translation> </message> <message> <source>Warning</source> <translation>Aviso</translation> </message> <message> <source>Upgrading installed packages</source> <translation>Renovando paquetes instalados</translation> </message> <message> <source>Are you sure you wish to delete %1?</source> <translation>¿Está seguro que desea borrar %1?</translation> </message> <message> <source>Are you sure?</source> <translation>¿Está seguro?</translation> </message> <message> <source>No</source> <translation>No</translation> </message> <message> <source>Yes</source> <translation>Sí</translation> </message> <message> <source>Download to where</source> <translation>Descargar a dónde</translation> </message> <message> <source>Enter path to download to</source> <translation>Introduzca carpeta donde descargar</translation> </message> <message> <source>Install Remote Package</source> <translation>Instalar paquete remoto</translation> </message> <message> <source>Enter package location</source> <translation>Introduzca localización del paquete</translation> </message> <message> <source>Nothing to do</source> <translation>No hay nada que hacer</translation> </message> <message> <source>No packages selected</source> <translation>No hay paquetes seleccionados</translation> </message> <message> <source>OK</source> <translation>Ok</translation> </message> <message> <source>Do you wish to remove or reinstall %1?</source> <translation>¿Desea borrar o reinstalar %1?</translation> </message> <message> <source>Remove or ReInstall</source> <translation>Borrar o reinstalar</translation> </message> <message> <source>ReInstall</source> <translation>Reinstalar</translation> </message> <message> <source>Do you wish to remove or upgrade %1?</source> <translation>¿Desea borrar o actualizar %1?</translation> </message> <message> <source>Remove or 
Upgrade</source> <translation>Borrar o actualizar</translation> </message> <message> <source>Updating Launcher...</source> <translation>Actualizando lanzador...</translation> </message> </context> <context> <name>PackageWindow</name> <message> <source>&lt;b&gt;Description&lt;/b&gt; - </source> <translation>&lt;b&gt;Descripción&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Installed To&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Instalado en&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Size&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Tamaño&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Section&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Selección&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Filename&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Nombre de fichero&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Version Installed&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Versión instalada&lt;/b&gt; - </translation> </message> <message> <source>&lt;p&gt;&lt;b&gt;Version Available&lt;/b&gt; - </source> <translation>&lt;p&gt;&lt;b&gt;Versión disponible&lt;/b&gt; - </translation> </message> <message> <source>Package Information</source> <translation>Información del paquete</translation> </message> <message> <source>Package information is unavailable</source> <translation>No dispongo de información del paquete</translation> </message> <message> <source>Close</source> <translation>Cerrar</translation> </message> </context> <context> <name>QObject</name> <message> <source>Installed packages</source> <translation>Paquetes instalados</translation> </message> <message> <source>Local packages</source> <translation>Paquetes locales</translation> </message> <message> <source>N/A</source> <translation>N/D</translation> </message> <message> <source>Package - %1 version - %2</source> <translation>Paquete - %1 versión - %2</translation> </message> <message> <source> inst version - %1</source> <translation> versión inst - %1</translation> </message> <message> <source>Version string is empty.</source> <translation>Cadena de versión vacía.</translation> </message> <message> <source>Epoch in version is not number.</source> <translation>Época en versión no numérica.</translation> </message> <message> <source>Nothing after colon in version number.</source> <translation>Nada después del punto en el número de verisión.</translation> </message> </context> <context> <name>QuestionDlg</name> <message> <source>Remove</source> <translation>Borrar</translation> </message> </context> <context> <name>SettingsImpl</name> <message> <source>Configuration</source> <translation>Configuración</translation> </message> <message> <source>Servers</source> <translation>Servidores</translation> </message> <message> <source>Destinations</source> <translation>Destinos</translation> </message> <message> <source>Proxies</source> <translation>Proxies</translation> </message> <message> <source>New</source> <translation>Nuevo</translation> </message> <message> <source>Delete</source> <translation>Borrar</translation> </message> <message> <source>Server</source> <translation>Servidor</translation> </message> <message> <source>Name:</source> <translation>Nombre:</translation> </message> <message> <source>Address:</source> <translation>Dirección:</translation> </message> <message> <source>Active Server</source> <translation>Servidor activo</translation> </message> 
<message> <source>Update</source> <translation>Actualizar</translation> </message> <message> <source>Destination</source> <translation>Destino</translation> </message> <message> <source>Location:</source> <translation>Posición:</translation> </message> <message> <source>Link to root</source> <translation>Enlazar a root</translation> </message> <message> <source>HTTP Proxy</source> <translation>Proxy HTTP</translation> </message> <message> <source>Enabled</source> <translation>Habilitada</translation> </message> <message> <source>FTP Proxy</source> <translation>Proxy FTP</translation> </message> <message> <source>Username:</source> <translation>Nombre usuario:</translation> </message> <message> <source>Password:</source> <translation>Clave:</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>local-drop-glue.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-tidy-linelength // We specify -Z incremental here because we want to test the partitioning for // incremental compilation // compile-flags:-Zprint-mono-items=lazy -Zincremental=tmp/partitioning-tests/local-drop-glue // compile-flags:-Zinline-in-all-cgus #![allow(dead_code)] #![crate_type="rlib"] //~ MONO_ITEM fn core::ptr[0]::real_drop_in_place[0]<local_drop_glue::Struct[0]> @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal] struct Struct { _a: u32 } impl Drop for Struct { //~ MONO_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[External] fn drop(&mut self) {} } //~ MONO_ITEM fn core::ptr[0]::real_drop_in_place[0]<local_drop_glue::Outer[0]> @@ local_drop_glue[Internal] struct Outer { _a: Struct } //~ MONO_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[External] pub fn user() { let _ = Outer { _a: Struct { _a: 0 } }; } pub mod mod1 { use super::Struct; //~ MONO_ITEM fn core::ptr[0]::real_drop_in_place[0]<local_drop_glue::mod1[0]::Struct2[0]> @@ local_drop_glue-mod1[Internal] struct Struct2 { _a: Struct, //~ MONO_ITEM fn core::ptr[0]::real_drop_in_place[0]<(u32, local_drop_glue::Struct[0])> @@ local_drop_glue-mod1[Internal] _b: (u32, Struct),<|fim▁hole|> //~ MONO_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[External] pub fn user() { let _ = Struct2 { _a: Struct { _a: 0 }, _b: (0, Struct { _a: 0 }), }; } }<|fim▁end|>
}
<|file_name|>common.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Mon Apr 21 10:34:18 2014

@author: eegroopm
"""
import os, sys
import pandas as pd
import numpy as np

class common:
    def __init__(self):
        self.path = os.path.expanduser('~')

        #\u0305 is unicode overline character
        #self._overline_strings = [u'1\u0305', u'2\u0305' ,u'3\u0305', u'4\u0305', u'5\u0305', u'6\u0305', u'7\u0305',u'8\u0305',u'9\u0305']
        #use matplotlib's mathtex rendering for overline strings
        self._overline_strings = [r'\\bar{1}',r'\\bar{2}',r'\\bar{3}',
                                  r'\\bar{4}',r'\\bar{5}',r'\\bar{6}',
                                  r'\\bar{7}',r'\\bar{8}',r'\\bar{9}']

        self.DSpaces = pd.DataFrame(columns = ['d-space','h','k','l'])
        #Msum is sum of absolute miller indices, needed for plotting pattern
        self.Forbidden = pd.DataFrame(columns = ['d-space','h','k','l'])
        self.u = 0
        self.v = 0
        self.w = 1
        self.ZoneAxis = np.array([self.u,self.v,self.w])
        self.beamenergy = 200 #keV
        self.camlength = 100 #cm
        self.camconst = 1.0
        self.wavelength = self.Wavelength(self.beamenergy) #angstroms

        self._x2 = False

        self.a = 1
        self.b = 1
        self.c = 1
        self.astar = 1
        self.bstar = 1
        self.cstar = 1
        self.alpha = 90 #degrees
        self.beta = 90
        self.gamma = 90
        self.alphastar = 90
        self.betastar = 90
        self.gammastar = 90

        #SpaceGroup data
        #DataFrame in the form SG Number, Patterson symbol, Geometry,Unit Cell Type, Unit Cell Conditions , Spacegroup conditions
        #e.g.
        #sg.loc[218] yields:
        #Patterson P-43n
        #Conditions (h==k and l == 2*n) or (h == 2*n and k==0 and ...
        #Name: 218, dtype: object
        if sys.version_info[0] == 3:
            #python3 and python2 pickle h5 files differently. GAH!!
            self.sg = pd.read_hdf('resources/SpaceGroups.h5','table')
            self.sghex = pd.read_hdf('resources/SpaceGroupsHex.h5','table') #for trigonal crystals with rhombohedral or hexagonal centering
            self.mineraldb = pd.read_hdf('resources/MineralDatabase.h5','table')
        elif sys.version_info[0] == 2:
            self.sg = pd.read_hdf('resources/SpaceGroups_py2.h5','table')
            self.sghex = pd.read_hdf('resources/SpaceGroupsHex_py2.h5','table')
            self.mineraldb = pd.read_hdf('resources/MineralDatabase_py2.h5','table')<|fim▁hole|>
    def Wavelength(self,E):
        hbar = 6.626E-34 #m^2 kg/s (value of Planck's constant h, despite the name)
        me = 9.109E-31 #kg
        c = 3E8 #m/s
        e = 1.602E-19 #Coulombs
        E = E*1000 #turn to eV
        wavelength = hbar/np.sqrt(2*me*e*E)/np.sqrt(1 + (e*E)/(2*me*c**2))*(10**10) #angstroms. relativistic formula
        return(wavelength)<|fim▁end|>
self.manualConds = [] #empty list of strings for manual conditions
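
A quick numerical check of the relativistic electron wavelength used in common.py above (same constants; as noted there, 6.626e-34 is Planck's constant h even though the source names it hbar). At 200 keV the result is close to 0.0251 angstroms:

import numpy as np

def wavelength_angstrom(E_keV):
    h = 6.626E-34     # Planck constant, m^2 kg / s
    me = 9.109E-31    # electron rest mass, kg
    c = 3E8           # speed of light, m/s
    e = 1.602E-19     # elementary charge, C
    E = E_keV * 1000  # beam energy in eV
    return h / np.sqrt(2*me*e*E) / np.sqrt(1 + (e*E)/(2*me*c**2)) * 1e10

print(round(wavelength_angstrom(200), 4))  # ~0.0251
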
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Qnnpack(CMakePackage): """QNNPACK (Quantized Neural Networks PACKage) is a mobile-optimized library for low-precision high-performance neural network inference. QNNPACK provides implementation of common neural network operators on quantized 8-bit tensors.""" homepage = "https://github.com/pytorch/QNNPACK" git = "https://github.com/pytorch/QNNPACK.git" version('master', branch='master') version('2019-08-28', commit='7d2a4e9931a82adc3814275b6219a03e24e36b4c') # [email protected]:1.9 version('2018-12-27', commit='6c62fddc6d15602be27e9e4cbb9e985151d2fa82') # [email protected] version('2018-12-04', commit='ef05e87cef6b8e719989ce875b5e1c9fdb304c05') # [email protected]:1.1 depends_on('[email protected]:', type='build') depends_on('ninja', type='build') depends_on('python', type='build') resource(<|fim▁hole|> git='https://github.com/Maratyszcza/cpuinfo.git', destination='deps', placement='cpuinfo' ) resource( name='fp16', git='https://github.com/Maratyszcza/FP16.git', destination='deps', placement='fp16' ) resource( name='fxdiv', git='https://github.com/Maratyszcza/FXdiv.git', destination='deps', placement='fxdiv' ) resource( name='googlebenchmark', url='https://github.com/google/benchmark/archive/v1.4.1.zip', sha256='61ae07eb5d4a0b02753419eb17a82b7d322786bb36ab62bd3df331a4d47c00a7', destination='deps', placement='googlebenchmark', ) resource( name='googletest', url='https://github.com/google/googletest/archive/release-1.8.0.zip', sha256='f3ed3b58511efd272eb074a3a6d6fb79d7c2e6a0e374323d1e6bcbcc1ef141bf', destination='deps', placement='googletest', ) resource( name='psimd', git='https://github.com/Maratyszcza/psimd.git', destination='deps', placement='psimd' ) resource( name='pthreadpool', git='https://github.com/Maratyszcza/pthreadpool.git', destination='deps', placement='pthreadpool' ) generator = 'Ninja' def cmake_args(self): return [ self.define('CPUINFO_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'cpuinfo')), self.define('FP16_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'fp16')), self.define('FXDIV_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'fxdiv')), self.define('PSIMD_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'psimd')), self.define('PTHREADPOOL_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'pthreadpool')), self.define('GOOGLEBENCHMARK_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'googlebenchmark')), self.define('GOOGLETEST_SOURCE_DIR', join_path(self.stage.source_path, 'deps', 'googletest')), ]<|fim▁end|>
name='cpuinfo',
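Each self.define(name, value) call in the cmake_args above renders one -D flag for CMake. A simplified stand-in for that translation (Spack's real helper also maps booleans to BOOL with ON/OFF; the path below is made up):

def define(cmake_var, value):
    # Simplified: treat every value as a CMake STRING.
    return "-D{0}:STRING={1}".format(cmake_var, value)

print(define("CPUINFO_SOURCE_DIR", "/tmp/stage/deps/cpuinfo"))
# -DCPUINFO_SOURCE_DIR:STRING=/tmp/stage/deps/cpuinfo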
<|file_name|>history.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; import { HistoryService } from './history.service'; <|fim▁hole|> moduleId: module.id, selector: 'sr-history', templateUrl: 'history.component.html', styleUrls: ['history.component.css'], }) export class HistoryComponent implements OnInit { public radioStations: any[]; public songs: any[]; constructor(private _historyService: HistoryService) {} ngOnInit() { this.radioStations = this._historyService.getRadioHistory(); this.songs = this._historyService.getSongHistory(); } }<|fim▁end|>
@Component({
<|file_name|>solver.js<|end_file_name|><|fim▁begin|>import * as utils from '../../utils/utils' import * as math from '../../math/math' import QR from '../../math/qr' import LMOptimizer from '../../math/lm' import {ConstantWrapper, EqualsTo} from './constraints' import {dog_leg} from '../../math/optim' /** @constructor */ function Param(id, value, readOnly) { this.reset(value); } Param.prototype.reset = function(value) { this.set(value); this.j = -1; }; Param.prototype.set = function(value) { this.value = value; }; Param.prototype.get = function() { return this.value; }; Param.prototype.nop = function() {}; /** @constructor */ function System(constraints) { this.constraints = constraints; this.params = []; for (var ci = 0; ci < constraints.length; ++ci) { var c = constraints[ci]; for (var pi = 0; pi < c.params.length; ++pi) { var p = c.params[pi]; if (p.j == -1) { p.j = this.params.length; this.params.push(p); } } } } System.prototype.makeJacobian = function() { var jacobi = []; var i; var j; for (i=0; i < this.constraints.length; i++) { jacobi[i] = []; for (j=0; j < this.params.length; j++) { jacobi[i][j] = 0; } } for (i=0; i < this.constraints.length; i++) { var c = this.constraints[i]; var cParams = c.params; var grad = []; utils.fillArray(grad, 0, cParams.length, 0); c.gradient(grad); for (var p = 0; p < cParams.length; p++) { var param = cParams[p]; j = param.j; jacobi[i][j] = grad[p]; } } return jacobi; }; System.prototype.fillJacobian = function(jacobi) { for (var i=0; i < this.constraints.length; i++) { var c = this.constraints[i]; var cParams = c.params; var grad = []; utils.fillArray(grad, 0, cParams.length, 0); c.gradient(grad); for (var p = 0; p < cParams.length; p++) { var param = cParams[p]; var j = param.j; jacobi[i][j] = grad[p]; } } return jacobi; }; System.prototype.calcResidual = function(r) { var i=0; var err = 0.; for (i=0; i < this.constraints.length; i++) { var c = this.constraints[i]; r[i] = c.error(); err += r[i]*r[i]; } err *= 0.5; return err; }; System.prototype.calcGrad_ = function(out) { var i; for (i = 0; i < out.length || i < this.params.length; ++i) { out[i][0] = 0; } for (i=0; i < this.constraints.length; i++) { var c = this.constraints[i]; var cParams = c.params; var grad = []; utils.fillArray(grad, 0, cParams.length, 0); c.gradient(grad); for (var p = 0; p < cParams.length; p++) { var param = cParams[p]; var j = param.j; out[j][0] += this.constraints[i].error() * grad[p]; // (10.4) } } }; System.prototype.calcGrad = function(out) { var i; for (i = 0; i < out.length || i < this.params.length; ++i) { out[i] = 0; } for (i=0; i < this.constraints.length; i++) { var c = this.constraints[i]; var cParams = c.params; var grad = []; utils.fillArray(grad, 0, cParams.length, 0); c.gradient(grad); for (var p = 0; p < cParams.length; p++) { var param = cParams[p]; var j = param.j; out[j] += this.constraints[i].error() * grad[p]; // (10.4) } } }; System.prototype.fillParams = function(out) { for (var p = 0; p < this.params.length; p++) { out[p] = this.params[p].get(); } }; System.prototype.getParams = function() { var out = []; this.fillParams(out); return out; }; System.prototype.setParams = function(point) { for (var p = 0; p < this.params.length; p++) { this.params[p].set(point[p]); } }; System.prototype.error = function() { var error = 0; for (var i=0; i < this.constraints.length; i++) { error += Math.abs(this.constraints[i].error()); } return error; }; System.prototype.errorSquare = function() { var error = 0; for (var i=0; i < this.constraints.length; i++) { var t = 
this.constraints[i].error(); error += t * t; } return error * 0.5; }; System.prototype.getValues = function() { var values = []; for (var i=0; i < this.constraints.length; i++) { values[i] = this.constraints[i].error(); } return values; }; var wrapAux = function(constrs, locked) { var i, lockedSet = {}; for (i = 0; i < locked.length; i++) { lockedSet[locked[i].j] = true; } for (i = 0; i < constrs.length; i++) { var c = constrs[i]; var mask = []; var needWrap = false; for (var j = 0; j < c.params.length; j++) { var param = c.params[j]; mask[j] = lockedSet[param.j] === true; needWrap = needWrap || mask[j]; } if (needWrap) { var wrapper = new ConstantWrapper(c, mask); constrs[i] = wrapper; } } }; var lock2Equals2 = function(constrs, locked) { var _locked = []; for (var i = 0; i < locked.length; ++i) { _locked.push(new EqualsTo([locked[i]], locked[i].get())); } return _locked; }; var diagnose = function(sys) { if (sys.constraints.length == 0 || sys.params.length == 0) { return { conflict : false, dof : 0 } } var jacobian = sys.makeJacobian(); var qr = new QR(jacobian); return { conflict : sys.constraints.length > qr.rank, dof : sys.params.length - qr.rank } }; var prepare = function(constrs, locked, aux, alg) { var simpleMode = true; if (!simpleMode) { var lockingConstrs = lock2Equals2(constrs, locked); Array.prototype.push.apply( constrs, lockingConstrs ); } var sys = new System(constrs); wrapAux(constrs, aux); var model = function(point) { sys.setParams(point);<|fim▁hole|> sys.setParams(point); return sys.makeJacobian(); }; var nullResult = { evalCount : 0, error : 0, returnCode : 1 }; function solve(rough, alg) { //if (simpleMode) return nullResult; if (constrs.length == 0) return nullResult; if (sys.params.length == 0) return nullResult; switch (alg) { case 2: return solve_lm(sys, model, jacobian, rough); case 1: default: return dog_leg(sys, rough); } } var systemSolver = { diagnose : function() {return diagnose(sys)}, error : function() {return sys.error()}, solveSystem : solve, system : sys, updateLock : function(values) { for (var i = 0; i < values.length; ++i) { if (simpleMode) { locked[i].set(values[i]); } else { lockingConstrs[i].value = values[i]; } } } }; return systemSolver; }; var solve_lm = function(sys, model, jacobian, rough) { var opt = new LMOptimizer(sys.getParams(), math.vec(sys.constraints.length), model, jacobian); opt.evalMaximalCount = 100 * sys.params.length; var eps = rough ? 0.001 : 0.00000001; opt.init0(eps, eps, eps); var returnCode = 1; try { var res = opt.doOptimize(); } catch (e) { returnCode = 2; } sys.setParams(res[0]); return { evalCount : opt.evalCount, error : sys.error(), returnCode : returnCode }; }; export {Param, prepare}<|fim▁end|>
return sys.getValues(); }; var jacobian = function(point) {
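The solver row above leans on the standard least-squares identities: the objective is E = 0.5 * sum(c_i^2) over the constraint residuals, and its gradient is J^T c, which is what calcGrad_ and calcGrad accumulate entry by entry (the `(10.4)` comments). A small numeric sketch of that identity, assuming only numpy (the two residuals are invented for illustration):

import numpy as np

# Residuals over parameters p = (x, y): c1 = x + y - 2, c2 = x - y
def residuals(p):
    x, y = p
    return np.array([x + y - 2.0, x - y])

def jacobian(p):
    # d(c_i)/d(p_j); constant here because both residuals are linear
    return np.array([[1.0,  1.0],
                     [1.0, -1.0]])

p = np.array([3.0, 0.5])
c = residuals(p)
grad = jacobian(p).T @ c   # gradient of 0.5 * sum(c_i**2)
print(grad)                # [ 4. -1.]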
<|file_name|>theMovieDb.py<|end_file_name|><|fim▁begin|>from app.config.cplog import CPLog from app.lib.provider.movie.base import movieBase from imdb import IMDb from urllib import quote_plus from urllib2 import URLError import cherrypy import os import urllib2 log = CPLog(__name__) class theMovieDb(movieBase): """Api for theMovieDb""" apiUrl = 'http://api.themoviedb.org/2.1' imageUrl = 'http://hwcdn.themoviedb.org' def __init__(self, config): log.info('Using TheMovieDb provider.') self.config = config def conf(self, option): return self.config.get('TheMovieDB', option) def find(self, q, limit = 8, alternative = True): ''' Find movie by name ''' if self.isDisabled(): return False log.debug('TheMovieDB - Searching for movie: %s' % q) url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.search', self.conf('key'), quote_plus(self.toSearchString(q))) try: log.info('Searching: %s' % url) data = urllib2.urlopen(url, timeout = self.timeout) return self.parseXML(data, limit, alternative = alternative) except: return [] def findById(self, id): ''' Find movie by TheMovieDB ID ''' if self.isDisabled(): return False xml = self.getXML(id) if xml: results = self.parseXML(xml, limit = 8) return results.pop(0) else: return False def findByImdbId(self, id): ''' Find movie by IMDB ID ''' if self.isDisabled(): return False url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.imdbLookup', self.conf('key'), id) try: data = urllib2.urlopen(url, timeout = self.timeout) except (IOError, URLError): log.error('Failed to open %s.' % url) return [] results = self.parseXML(data, limit = 8, alternative = False) if results: return results.pop(0) else: return [] def parseXML(self, data, limit, alternative = True): if data: log.debug('TheMovieDB - Parsing RSS') try: xml = self.getItems(data, 'movies/movie') results = [] nr = 0 for movie in xml: id = int(self.gettextelement(movie, "id")) name = self.gettextelement(movie, "name") imdb = self.gettextelement(movie, "imdb_id") year = str(self.gettextelement(movie, "released"))[:4] # 1900 is the same as None if year == '1900': year = 'None' # do some IMDB searching if needed if year == 'None': i = IMDb('mobile') if imdb: log.info('Found movie, but with no date, getting data from %s.' 
% imdb) r = i.get_movie(imdb.replace('tt', '')) year = r.get('year', None) else: log.info('Found movie, but with no date, searching IMDB.') r = i.search_movie(name) if len(r) > 0: imdb = 'tt' + r[0].movieID year = r[0].get('year', None) results.append(self.fillFeedItem(id, name, imdb, year)) alternativeName = self.gettextelement(movie, "alternative_name") if alternativeName and alternative: if alternativeName.lower() != name.lower() and alternativeName.lower() != 'none' and alternativeName != None: results.append(self.fillFeedItem(id, alternativeName, imdb, year)) nr += 1 if nr == limit: break log.info('TheMovieDB - Found: %s' % results) return results except SyntaxError: log.error('TheMovieDB - Failed to parse XML response from TheMovieDb') return False def getXML(self, id): if self.isDisabled(): return False try: url = "%s/%s/en/xml/%s/%s" % (self.apiUrl, 'Movie.getInfo', self.conf('key'), id) data = urllib2.urlopen(url, timeout = self.timeout) except: data = False return data def saveImage(self, url, destination): if url[:7] != 'http://': url = self.imageUrl + url # Make dir imageCache = os.path.join(cherrypy.config.get('cachePath'), 'images') if not os.path.isdir(imageCache): os.mkdir(imageCache) # Return old imageFile = os.path.join(imageCache, destination) if not os.path.isfile(imageFile): try: data = urllib2.urlopen(url, timeout = 10) # Write file with open(imageFile, 'wb') as f: f.write(data.read()) except (IOError, URLError): log.error('Failed to get thumb %s.' % url) return False return 'cache/images/' + destination def fillFeedItem(self, id, name, imdb, year): item = self.feedItem() item.id = id<|fim▁hole|> item.imdb = imdb item.year = year return item def isDisabled(self): if self.conf('key') == '': log.error('TheMovieDB - No API key provided for TheMovieDB') return True else: return False def findReleaseDate(self, movie): pass<|fim▁end|>
item.name = self.toSaveString(name)
<|file_name|>task_5.py<|end_file_name|><|fim▁begin|>#task_5 def y1(x): return 1 + x * x def y2(x): return -2 - x * x x, y = tuple(map(float, input().split()))<|fim▁hole|>else: print('NO')<|fim▁end|>
if y1(x) < y or y2(x) > y: print('YES')
<|file_name|>229MajorityElementII.py<|end_file_name|><|fim▁begin|>class Solution: def majorityElement(self, nums):<|fim▁hole|> :rtype: List[int] """ num1, cnt1 = 0, 0 num2, cnt2 = 1, 0 for num in nums: if num == num1: cnt1 += 1 elif num == num2: cnt2 += 1 else: if cnt1 == 0: num1, cnt1 = num, 1 elif cnt2 == 0: num2, cnt2 = num, 1 else: cnt1, cnt2 = cnt1 - 1, cnt2 - 1 return [num for num in (num1, num2) if nums.count(num) > len(nums) // 3]<|fim▁end|>
""" :type nums: List[int]
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># created by Chirath R, [email protected] from django.conf.urls import url from django.contrib.auth.decorators import login_required from django.views.generic import TemplateView from workshop.views import WorkshopRegistrationListView, WorkshopDetailView, WorkshopRegistrationUpdateView, \ WorkshopRegisterFormView, WorkshopListView, WorkshopFeedbackCreateView, WorkshopGalleryCreateView, \ WorkshopGalleryListView, WorkshopGalleryDeleteView, WorkshopCreateView, WorkshopUpdateView, WorkshopDeleteView urlpatterns = [ url(r'^$', WorkshopListView.as_view(), name='workshop_list'), url(r'^create/$', login_required(WorkshopCreateView.as_view()), name='workshop_create'), url(r'^(?P<workshop_id>[0-9]+)/$', WorkshopDetailView.as_view(), name='workshop_detail'), # TODO(2) Fix update and uncomment # url(r'^(?P<pk>[0-9]+)/update/$', login_required(WorkshopUpdateView.as_view()), name='workshopdetail_update'), url(r'^(?P<pk>[0-9]+)/delete/$', login_required(WorkshopDeleteView.as_view()), name='workshop_delete'), url(r'^(?P<workshop_id>[0-9]+)/register/$', WorkshopRegisterFormView.as_view(), name='workshop_register'), url(r'^(?P<workshop_id>[0-9]+)/register/list/$', login_required(WorkshopRegistrationListView.as_view()), name='workshop_registration_list'), url(r'^(?P<workshop_id>[0-9]+)/register/update/$', login_required(WorkshopRegistrationUpdateView.as_view()), name='workshop_update'), url(r'^success/$', TemplateView.as_view(template_name='workshop/success.html'), name='workshop_registration_success'), url(r'^(?P<workshop_id>[0-9]+)/feedback/$', WorkshopFeedbackCreateView.as_view(), name='workshop_feedback'), url(r'^feedback/success/$', TemplateView.as_view(template_name='workshop/success_feedback.html'), name='feedback_success'), url(r'^(?P<pk>[0-9]+)/add-image/$', login_required(WorkshopGalleryCreateView.as_view()), name='image_create'),<|fim▁hole|> url(r'^image/(?P<pk>[0-9]+)/delete/$', login_required(WorkshopGalleryDeleteView.as_view()), name='image_delete'), ]<|fim▁end|>
url(r'^(?P<pk>[0-9]+)/gallery/$', WorkshopGalleryListView.as_view(), name='image_list'),
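Because every route above carries a name=, templates and views can resolve URLs with reverse() instead of hard-coding paths. A small sketch, assuming Django 1.10+ with settings configured and these urlpatterns mounted at the project root (the IDs are made up):

from django.urls import reverse

reverse('workshop_detail', kwargs={'workshop_id': 7})  # '/7/'
reverse('image_delete', kwargs={'pk': 3})              # '/image/3/delete/'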
<|file_name|>twisted_test.py<|end_file_name|><|fim▁begin|># Author: Ovidiu Predescu # Date: July 2011 # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unittest for the twisted-style reactor. """ from __future__ import absolute_import, division, print_function, with_statement import logging import os import shutil import signal import sys import tempfile import threading import warnings try: import fcntl from twisted.internet.defer import Deferred, inlineCallbacks, returnValue from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor from twisted.internet.protocol import Protocol from twisted.python import log from tornado.platform.twisted import TornadoReactor, TwistedIOLoop from zope.interface import implementer have_twisted = True except ImportError: have_twisted = False # The core of Twisted 12.3.0 is available on python 3, but twisted.web is not # so test for it separately. try: from twisted.web.client import Agent, readBody from twisted.web.resource import Resource from twisted.web.server import Site # As of Twisted 15.0.0, twisted.web is present but fails our # tests due to internal str/bytes errors. have_twisted_web = sys.version_info < (3,) except ImportError: have_twisted_web = False try: import thread # py2 except ImportError: import _thread as thread # py3 from tornado.escape import utf8 from tornado import gen from tornado.httpclient import AsyncHTTPClient from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from tornado.platform.auto import set_close_exec from tornado.platform.select import SelectIOLoop from tornado.testing import bind_unused_port from tornado.test.util import unittest from tornado.util import import_object from tornado.web import RequestHandler, Application skipIfNoTwisted = unittest.skipUnless(have_twisted, "twisted module not present") skipIfPy26 = unittest.skipIf(sys.version_info < (2, 7), "twisted incompatible with singledispatch in py26") def save_signal_handlers(): saved = {} for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]: saved[sig] = signal.getsignal(sig) if "twisted" in repr(saved): if not issubclass(IOLoop.configured_class(), TwistedIOLoop): # when the global ioloop is twisted, we expect the signal # handlers to be installed. Otherwise, it means we're not # cleaning up after twisted properly. 
raise Exception("twisted signal handlers already installed") return saved def restore_signal_handlers(saved): for sig, handler in saved.items(): signal.signal(sig, handler) class ReactorTestCase(unittest.TestCase): def setUp(self): self._saved_signals = save_signal_handlers() self._io_loop = IOLoop() self._reactor = TornadoReactor(self._io_loop) def tearDown(self): self._io_loop.close(all_fds=True) restore_signal_handlers(self._saved_signals) @skipIfNoTwisted class ReactorWhenRunningTest(ReactorTestCase): def test_whenRunning(self): self._whenRunningCalled = False self._anotherWhenRunningCalled = False self._reactor.callWhenRunning(self.whenRunningCallback) self._reactor.run() self.assertTrue(self._whenRunningCalled) self.assertTrue(self._anotherWhenRunningCalled) def whenRunningCallback(self): self._whenRunningCalled = True self._reactor.callWhenRunning(self.anotherWhenRunningCallback) self._reactor.stop() def anotherWhenRunningCallback(self): self._anotherWhenRunningCalled = True @skipIfNoTwisted class ReactorCallLaterTest(ReactorTestCase): def test_callLater(self): self._laterCalled = False self._now = self._reactor.seconds() self._timeout = 0.001 dc = self._reactor.callLater(self._timeout, self.callLaterCallback) self.assertEqual(self._reactor.getDelayedCalls(), [dc]) self._reactor.run() self.assertTrue(self._laterCalled) self.assertTrue(self._called - self._now > self._timeout) self.assertEqual(self._reactor.getDelayedCalls(), []) def callLaterCallback(self): self._laterCalled = True self._called = self._reactor.seconds() self._reactor.stop() @skipIfNoTwisted class ReactorTwoCallLaterTest(ReactorTestCase): def test_callLater(self): self._later1Called = False self._later2Called = False self._now = self._reactor.seconds() self._timeout1 = 0.0005 dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1) self._timeout2 = 0.001 dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2) self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or self._reactor.getDelayedCalls() == [dc2, dc1]) self._reactor.run() self.assertTrue(self._later1Called) self.assertTrue(self._later2Called) self.assertTrue(self._called1 - self._now > self._timeout1) self.assertTrue(self._called2 - self._now > self._timeout2) self.assertEqual(self._reactor.getDelayedCalls(), []) def callLaterCallback1(self): self._later1Called = True self._called1 = self._reactor.seconds() def callLaterCallback2(self): self._later2Called = True self._called2 = self._reactor.seconds() self._reactor.stop() @skipIfNoTwisted class ReactorCallFromThreadTest(ReactorTestCase): def setUp(self): super(ReactorCallFromThreadTest, self).setUp() self._mainThread = thread.get_ident() def tearDown(self): self._thread.join() super(ReactorCallFromThreadTest, self).tearDown() def _newThreadRun(self): self.assertNotEqual(self._mainThread, thread.get_ident()) if hasattr(self._thread, 'ident'): # new in python 2.6 self.assertEqual(self._thread.ident, thread.get_ident()) self._reactor.callFromThread(self._fnCalledFromThread) def _fnCalledFromThread(self): self.assertEqual(self._mainThread, thread.get_ident()) self._reactor.stop() def _whenRunningCallback(self): self._thread = threading.Thread(target=self._newThreadRun) self._thread.start() def testCallFromThread(self): self._reactor.callWhenRunning(self._whenRunningCallback) self._reactor.run() @skipIfNoTwisted class ReactorCallInThread(ReactorTestCase): def setUp(self): super(ReactorCallInThread, self).setUp() self._mainThread = thread.get_ident() def _fnCalledInThread(self, 
*args, **kwargs): self.assertNotEqual(thread.get_ident(), self._mainThread) self._reactor.callFromThread(lambda: self._reactor.stop()) def _whenRunningCallback(self): self._reactor.callInThread(self._fnCalledInThread) def testCallInThread(self): self._reactor.callWhenRunning(self._whenRunningCallback) self._reactor.run() class Reader(object): def __init__(self, fd, callback): self._fd = fd self._callback = callback def logPrefix(self): return "Reader" def close(self): self._fd.close() def fileno(self): return self._fd.fileno() def readConnectionLost(self, reason): self.close() def connectionLost(self, reason): self.close() def doRead(self): self._callback(self._fd) if have_twisted: Reader = implementer(IReadDescriptor)(Reader) class Writer(object): def __init__(self, fd, callback): self._fd = fd self._callback = callback def logPrefix(self): return "Writer" def close(self): self._fd.close() def fileno(self): return self._fd.fileno() def connectionLost(self, reason): self.close() def doWrite(self): self._callback(self._fd) if have_twisted: Writer = implementer(IWriteDescriptor)(Writer) @skipIfNoTwisted class ReactorReaderWriterTest(ReactorTestCase): def _set_nonblocking(self, fd): flags = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) def setUp(self): super(ReactorReaderWriterTest, self).setUp() r, w = os.pipe() self._set_nonblocking(r) self._set_nonblocking(w) set_close_exec(r) set_close_exec(w) self._p1 = os.fdopen(r, "rb", 0) self._p2 = os.fdopen(w, "wb", 0) def tearDown(self): super(ReactorReaderWriterTest, self).tearDown() self._p1.close() self._p2.close() def _testReadWrite(self): """ In this test the writer writes an 'x' to its fd. The reader reads it, check the value and ends the test. """ self.shouldWrite = True def checkReadInput(fd): self.assertEquals(fd.read(1), b'x') self._reactor.stop() def writeOnce(fd): if self.shouldWrite: self.shouldWrite = False fd.write(b'x') self._reader = Reader(self._p1, checkReadInput) self._writer = Writer(self._p2, writeOnce) self._reactor.addWriter(self._writer) # Test that adding the reader twice adds it only once to # IOLoop. self._reactor.addReader(self._reader) self._reactor.addReader(self._reader) def testReadWrite(self): self._reactor.callWhenRunning(self._testReadWrite) self._reactor.run() def _testNoWriter(self): """ In this test we have no writer. Make sure the reader doesn't read anything. """ def checkReadInput(fd): self.fail("Must not be called.") def stopTest(): # Close the writer here since the IOLoop doesn't know # about it. self._writer.close() self._reactor.stop() self._reader = Reader(self._p1, checkReadInput) # We create a writer, but it should never be invoked. self._writer = Writer(self._p2, lambda fd: fd.write('x')) # Test that adding and removing the writer leaves us with no writer. self._reactor.addWriter(self._writer) self._reactor.removeWriter(self._writer) # Test that adding and removing the reader doesn't cause # unintended effects. self._reactor.addReader(self._reader) # Wake up after a moment and stop the test self._reactor.callLater(0.001, stopTest) def testNoWriter(self): self._reactor.callWhenRunning(self._testNoWriter) self._reactor.run() # Test various combinations of twisted and tornado http servers, # http clients, and event loop interfaces. 
@skipIfNoTwisted @unittest.skipIf(not have_twisted_web, 'twisted web not present') class CompatibilityTests(unittest.TestCase): def setUp(self): self.saved_signals = save_signal_handlers() self.io_loop = IOLoop() self.io_loop.make_current() self.reactor = TornadoReactor(self.io_loop) def tearDown(self): self.reactor.disconnectAll() self.io_loop.clear_current() self.io_loop.close(all_fds=True) restore_signal_handlers(self.saved_signals) def start_twisted_server(self): class HelloResource(Resource): isLeaf = True def render_GET(self, request): return "Hello from twisted!" site = Site(HelloResource()) port = self.reactor.listenTCP(0, site, interface='127.0.0.1') self.twisted_port = port.getHost().port def start_tornado_server(self): class HelloHandler(RequestHandler): def get(self): self.write("Hello from tornado!") app = Application([('/', HelloHandler)], log_function=lambda x: None) server = HTTPServer(app, io_loop=self.io_loop) sock, self.tornado_port = bind_unused_port() server.add_sockets([sock]) def run_ioloop(self): self.stop_loop = self.io_loop.stop self.io_loop.start() self.reactor.fireSystemEvent('shutdown') def run_reactor(self): self.stop_loop = self.reactor.stop self.stop = self.reactor.stop self.reactor.run() def tornado_fetch(self, url, runner): responses = [] client = AsyncHTTPClient(self.io_loop) def callback(response): responses.append(response) self.stop_loop() client.fetch(url, callback=callback) runner() self.assertEqual(len(responses), 1) responses[0].rethrow() return responses[0] def twisted_fetch(self, url, runner): # http://twistedmatrix.com/documents/current/web/howto/client.html chunks = [] client = Agent(self.reactor) d = client.request(b'GET', utf8(url)) class Accumulator(Protocol): def __init__(self, finished): self.finished = finished def dataReceived(self, data): chunks.append(data) def connectionLost(self, reason): self.finished.callback(None) def callback(response): finished = Deferred() response.deliverBody(Accumulator(finished)) return finished d.addCallback(callback) def shutdown(failure): if hasattr(self, 'stop_loop'): self.stop_loop() elif failure is not None: # loop hasn't been initialized yet; try our best to # get an error message out. (the runner() interaction # should probably be refactored). try: failure.raiseException() except: logging.error('exception before starting loop', exc_info=True) d.addBoth(shutdown) runner() self.assertTrue(chunks) return ''.join(chunks) def twisted_coroutine_fetch(self, url, runner): body = [None] @gen.coroutine def f(): # This is simpler than the non-coroutine version, but it cheats # by reading the body in one blob instead of streaming it with # a Protocol. 
client = Agent(self.reactor) response = yield client.request(b'GET', utf8(url)) with warnings.catch_warnings(): # readBody has a buggy DeprecationWarning in Twisted 15.0: # https://twistedmatrix.com/trac/changeset/43379 warnings.simplefilter('ignore', category=DeprecationWarning) body[0] = yield readBody(response) self.stop_loop() self.io_loop.add_callback(f) runner() return body[0] def testTwistedServerTornadoClientIOLoop(self): self.start_twisted_server() response = self.tornado_fetch( 'http://127.0.0.1:%d' % self.twisted_port, self.run_ioloop) self.assertEqual(response.body, 'Hello from twisted!') def testTwistedServerTornadoClientReactor(self): self.start_twisted_server() response = self.tornado_fetch( 'http://127.0.0.1:%d' % self.twisted_port, self.run_reactor) self.assertEqual(response.body, 'Hello from twisted!') def testTornadoServerTwistedClientIOLoop(self): self.start_tornado_server() response = self.twisted_fetch( 'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop) self.assertEqual(response, 'Hello from tornado!') def testTornadoServerTwistedClientReactor(self): self.start_tornado_server() response = self.twisted_fetch( 'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor) self.assertEqual(response, 'Hello from tornado!') @skipIfPy26 def testTornadoServerTwistedCoroutineClientIOLoop(self): self.start_tornado_server() response = self.twisted_coroutine_fetch( 'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop) self.assertEqual(response, 'Hello from tornado!') @skipIfNoTwisted @skipIfPy26 class ConvertDeferredTest(unittest.TestCase): def test_success(self): @inlineCallbacks def fn(): if False: # inlineCallbacks doesn't work with regular functions; # must have a yield even if it's unreachable. yield returnValue(42) f = gen.convert_yielded(fn()) self.assertEqual(f.result(), 42) def test_failure(self): @inlineCallbacks def fn(): if False: yield 1 / 0 f = gen.convert_yielded(fn()) with self.assertRaises(ZeroDivisionError): f.result() if have_twisted: # Import and run as much of twisted's test suite as possible. # This is unfortunately rather dependent on implementation details, # but there doesn't appear to be a clean all-in-one conformance test # suite for reactors. # # This is a list of all test suites using the ReactorBuilder # available in Twisted 11.0.0 and 11.1.0 (and a blacklist of # specific test methods to be disabled). twisted_tests = { 'twisted.internet.test.test_core.ObjectModelIntegrationTest': [], 'twisted.internet.test.test_core.SystemEventTestsBuilder': [ 'test_iterate', # deliberately not supported # Fails on TwistedIOLoop and AsyncIOLoop. 'test_runAfterCrash', ], 'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [ "test_lostFileDescriptor", # incompatible with epoll and kqueue ], 'twisted.internet.test.test_process.ProcessTestsBuilder': [ # Only work as root. Twisted's "skip" functionality works # with py27+, but not unittest2 on py26. 'test_changeGID', 'test_changeUID', # This test sometimes fails with EPIPE on a call to # kqueue.control. Happens consistently for me with # trollius but not asyncio or other IOLoops. 
'test_childConnectionLost', ], # Process tests appear to work on OSX 10.7, but not 10.6 # 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [ # 'test_systemCallUninterruptedByChildExit', # ], 'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [ 'test_badContext', # ssl-related; see also SSLClientTestsMixin ], 'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [ # These use link-local addresses and cause firewall prompts on mac 'test_buildProtocolIPv6AddressScopeID', 'test_portGetHostOnIPv6ScopeID', 'test_serverGetHostOnIPv6ScopeID', 'test_serverGetPeerOnIPv6ScopeID', ], 'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [], 'twisted.internet.test.test_tcp.WriteSequenceTests': [], 'twisted.internet.test.test_tcp.AbortConnectionTestCase': [], 'twisted.internet.test.test_threads.ThreadTestsBuilder': [], 'twisted.internet.test.test_time.TimeTestsBuilder': [], # Extra third-party dependencies (pyOpenSSL) # 'twisted.internet.test.test_tls.SSLClientTestsMixin': [], 'twisted.internet.test.test_udp.UDPServerTestsBuilder': [], 'twisted.internet.test.test_unix.UNIXTestsBuilder': [ # Platform-specific. These tests would be skipped automatically # if we were running twisted's own test runner. 'test_connectToLinuxAbstractNamespace', 'test_listenOnLinuxAbstractNamespace', # These tests use twisted's sendmsg.c extension and sometimes # fail with what looks like uninitialized memory errors # (more common on pypy than cpython, but I've seen it on both) 'test_sendFileDescriptor', 'test_sendFileDescriptorTriggersPauseProducing', 'test_descriptorDeliveredBeforeBytes', 'test_avoidLeakingFileDescriptors', ], 'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [ 'test_listenOnLinuxAbstractNamespace', ], 'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [], } if sys.version_info >= (3,): # In Twisted 15.2.0 on Python 3.4, the process tests will try to run # but fail, due in part to interactions between Tornado's strict # warnings-as-errors policy and Twisted's own warning handling # (it was not obvious how to configure the warnings module to # reconcile the two), and partly due to what looks like a packaging # error (process_cli.py missing). For now, just skip it. del twisted_tests['twisted.internet.test.test_process.ProcessTestsBuilder'] for test_name, blacklist in twisted_tests.items(): try: test_class = import_object(test_name) except (ImportError, AttributeError): continue for test_func in blacklist: if hasattr(test_class, test_func): # The test_func may be defined in a mixin, so clobber # it instead of delattr() setattr(test_class, test_func, lambda self: None) def make_test_subclass(test_class): class TornadoTest(test_class): _reactors = ["tornado.platform.twisted._TestReactor"] def setUp(self): # Twisted's tests expect to be run from a temporary # directory; they create files in their working directory # and don't always clean up after themselves. self.__curdir = os.getcwd() self.__tempdir = tempfile.mkdtemp() os.chdir(self.__tempdir) super(TornadoTest, self).setUp() def tearDown(self): super(TornadoTest, self).tearDown() os.chdir(self.__curdir) shutil.rmtree(self.__tempdir) def flushWarnings(self, *args, **kwargs): # This is a hack because Twisted and Tornado have # differing approaches to warnings in tests. # Tornado sets up a global set of warnings filters # in runtests.py, while Twisted patches the filter # list in each test. 
The net effect is that # Twisted's tests run with Tornado's increased # strictness (BytesWarning and ResourceWarning are # enabled) but without our filter rules to ignore those # warnings from Twisted code. filtered = [] for w in super(TornadoTest, self).flushWarnings( *args, **kwargs): if w['category'] in (BytesWarning, ResourceWarning): continue filtered.append(w) return filtered def buildReactor(self):<|fim▁hole|> def unbuildReactor(self, reactor): test_class.unbuildReactor(self, reactor) # Clean up file descriptors (especially epoll/kqueue # objects) eagerly instead of leaving them for the # GC. Unfortunately we can't do this in reactor.stop # since twisted expects to be able to unregister # connections in a post-shutdown hook. reactor._io_loop.close(all_fds=True) restore_signal_handlers(self.__saved_signals) TornadoTest.__name__ = test_class.__name__ return TornadoTest test_subclass = make_test_subclass(test_class) globals().update(test_subclass.makeTestCaseClasses()) # Since we're not using twisted's test runner, it's tricky to get # logging set up well. Most of the time it's easiest to just # leave it turned off, but while working on these tests you may want # to uncomment one of the other lines instead. log.defaultObserver.stop() # import sys; log.startLogging(sys.stderr, setStdout=0) # log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0) # import logging; logging.getLogger('twisted').setLevel(logging.WARNING) # Twisted recently introduced a new logger; disable that one too. try: from twisted.logger import globalLogBeginner except ImportError: pass else: globalLogBeginner.beginLoggingTo([]) if have_twisted: class LayeredTwistedIOLoop(TwistedIOLoop): """Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop. This is of course silly, but is useful for testing purposes to make sure we're implementing both sides of the various interfaces correctly. In some tests another TornadoReactor is layered on top of the whole stack. """ def initialize(self, **kwargs): # When configured to use LayeredTwistedIOLoop we can't easily # get the next-best IOLoop implementation, so use the lowest common # denominator. self.real_io_loop = SelectIOLoop(make_current=False) reactor = TornadoReactor(io_loop=self.real_io_loop) super(LayeredTwistedIOLoop, self).initialize(reactor=reactor, **kwargs) self.add_callback(self.make_current) def close(self, all_fds=False): super(LayeredTwistedIOLoop, self).close(all_fds=all_fds) # HACK: This is the same thing that test_class.unbuildReactor does. for reader in self.reactor._internalReaders: self.reactor.removeReader(reader) reader.connectionLost(None) self.real_io_loop.close(all_fds=all_fds) def stop(self): # One of twisted's tests fails if I don't delay crash() # until the reactor has started, but if I move this to # TwistedIOLoop then the tests fail when I'm *not* running # tornado-on-twisted-on-tornado. I'm clearly missing something # about the startup/crash semantics, but since stop and crash # are really only used in tests it doesn't really matter. def f(): self.reactor.crash() # Become current again on restart. This is needed to # override real_io_loop's claim to being the current loop. self.add_callback(self.make_current) self.reactor.callWhenRunning(f) if __name__ == "__main__": unittest.main()<|fim▁end|>
self.__saved_signals = save_signal_handlers() return test_class.buildReactor(self)
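The reactor tests above all share one shape: build a TornadoReactor over a private IOLoop, schedule work, run, and stop from inside a callback. Stripped of assertions, the skeleton looks roughly like this (a sketch reusing only APIs exercised in the row; it needs twisted and tornado installed):

from tornado.ioloop import IOLoop
from tornado.platform.twisted import TornadoReactor

io_loop = IOLoop()
reactor = TornadoReactor(io_loop)

def on_running():
    # Schedule a delayed call that stops the reactor from inside the loop.
    reactor.callLater(0.001, reactor.stop)

reactor.callWhenRunning(on_running)
reactor.run()
io_loop.close(all_fds=True)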
<|file_name|>InjectorSubscriptionMap.java<|end_file_name|><|fim▁begin|>/* * This file is part of LuckPerms, licensed under the MIT License. * * Copyright (c) lucko (Luck) <[email protected]> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.lucko.luckperms.nukkit.inject.server; import me.lucko.luckperms.nukkit.LPNukkitPlugin; import cn.nukkit.Server; import cn.nukkit.permission.Permissible; import cn.nukkit.plugin.PluginManager; import java.lang.reflect.Field; import java.util.Map; import java.util.Objects; import java.util.Set; /**<|fim▁hole|> * Injects a {@link LuckPermsSubscriptionMap} into the {@link PluginManager}. */ public class InjectorSubscriptionMap implements Runnable { private static final Field PERM_SUBS_FIELD; static { Field permSubsField = null; try { permSubsField = PluginManager.class.getDeclaredField("permSubs"); permSubsField.setAccessible(true); } catch (Exception e) { // ignore } PERM_SUBS_FIELD = permSubsField; } private final LPNukkitPlugin plugin; public InjectorSubscriptionMap(LPNukkitPlugin plugin) { this.plugin = plugin; } @Override public void run() { try { LuckPermsSubscriptionMap subscriptionMap = inject(); if (subscriptionMap != null) { this.plugin.setSubscriptionMap(subscriptionMap); } } catch (Exception e) { this.plugin.getLogger().severe("Exception occurred whilst injecting LuckPerms Permission Subscription map.", e); } } private LuckPermsSubscriptionMap inject() throws Exception { Objects.requireNonNull(PERM_SUBS_FIELD, "PERM_SUBS_FIELD"); PluginManager pluginManager = this.plugin.getBootstrap().getServer().getPluginManager(); Object map = PERM_SUBS_FIELD.get(pluginManager); if (map instanceof LuckPermsSubscriptionMap) { if (((LuckPermsSubscriptionMap) map).plugin == this.plugin) { return null; } map = ((LuckPermsSubscriptionMap) map).detach(); } //noinspection unchecked Map<String, Set<Permissible>> castedMap = (Map<String, Set<Permissible>>) map; // make a new subscription map & inject it LuckPermsSubscriptionMap newMap = new LuckPermsSubscriptionMap(this.plugin, castedMap); PERM_SUBS_FIELD.set(pluginManager, newMap); return newMap; } public static void uninject() { try { Objects.requireNonNull(PERM_SUBS_FIELD, "PERM_SUBS_FIELD"); PluginManager pluginManager = Server.getInstance().getPluginManager(); Object map = PERM_SUBS_FIELD.get(pluginManager); if (map instanceof LuckPermsSubscriptionMap) { LuckPermsSubscriptionMap lpMap = (LuckPermsSubscriptionMap) map; PERM_SUBS_FIELD.set(pluginManager, lpMap.detach()); } } catch 
(Exception e) { e.printStackTrace(); } } }<|fim▁end|>
<|file_name|>range.rs<|end_file_name|><|fim▁begin|>use std::fmt::Display; #[cfg(feature = "inclusive_range")]<|fim▁hole|>pub fn range<T: 'static + PartialOrd + Display + Clone>(a: T, b: T) -> Box<Fn(&T) -> ::ValidatorResult> { Box::new(move |s: &T| { if *s >= a && *s <= b { Ok(()) } else { Err(::Invalid { msg: "Must be in the range %1..%2.".to_string(), args: vec![a.to_string(), b.to_string()], human_readable: format!("Must be between {} and {}", a, b) }) } }) } #[cfg(feature = "inclusive_range")] pub fn range<T: 'static + PartialOrd + Display + Clone>(range: RangeInclusive<T>) -> Box<Fn(&T) -> ::ValidatorResult> { // do bounds checking here so we can panic early if needed if range.end() <= range.start() { panic!("Invalid range!"); // TODO: Bad way to do this. } Box::new(move |s: &T| { let start = range.start(); let end = range.end(); if *s >= *start && *s <= *end { Ok(()) } else { Err(::Invalid { msg: "Must be in the range %1..%2.".to_string(), args: vec![start.to_string(), end.to_string()], human_readable: format!("Must be between {} and {}", start, end) }) } }) } #[cfg(test)] #[cfg(not(feature = "inclusive_range"))] mod tests { use super::*; // range #[test] pub fn range_valid() { assert!(range(1, 100)(&1).is_ok()); assert!(range(1, 100)(&50).is_ok()); assert!(range(1, 100)(&100).is_ok()); } #[test] pub fn range_invalid() { assert!(range(1, 100)(&0).is_err()); assert!(range(1, 100)(&101).is_err()); } } #[cfg(test)] #[cfg(feature = "inclusive_range")] mod tests { use super::*; // range #[test] pub fn range_valid() { assert!(range(1..=100)(&1).is_ok()); assert!(range(1..=100)(&50).is_ok()); assert!(range(1..=100)(&100).is_ok()); } #[test] pub fn range_invalid() { assert!(range(1..=100)(&0).is_err()); assert!(range(1..=100)(&101).is_err()); } }<|fim▁end|>
use std::ops::RangeInclusive; #[cfg(not(feature = "inclusive_range"))]
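The Rust validators above are factory functions returning closures that capture their bounds and yield Ok/Err per value. The same pattern translated to Python for comparison (an illustrative sketch, not part of the crate):

def range_validator(lo, hi):
    """Return a validator closed over its bounds, mirroring range(a, b) above."""
    def validate(value):
        if lo <= value <= hi:
            return None                                 # Ok(())
        return "Must be between %s and %s" % (lo, hi)   # Err(Invalid {...})
    return validate

check = range_validator(1, 100)
print(check(50))   # None
print(check(101))  # Must be between 1 and 100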
<|file_name|>store.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at <http://mozilla.org/MPL/2.0/>. */ // @flow const { applyMiddleware, createStore, compose } = require("redux"); const { thunk } = require("../shared/redux/middleware/thunk"); const { waitUntilService } = require("../shared/redux/middleware/waitUntilService"); const reducer = require("./reducer"); import type { Props, State } from "./types"; function createInitialState(overrides: Object): State { return { actors: new Set(), expandedPaths: new Set(), focusedItem: null, loadedProperties: new Map(), forceUpdated: false, ...overrides }; } function enableStateReinitializer(props) { return next => (innerReducer, initialState, enhancer) => { function reinitializerEnhancer(state, action) { if (action.type !== "ROOTS_CHANGED") { return innerReducer(state, action);<|fim▁hole|> } return { ...action.data, actors: new Set(), expandedPaths: new Set(), loadedProperties: new Map(), // Indicates to the component that we do want to render on the next // render cycle. forceUpdate: true }; } return next(reinitializerEnhancer, initialState, enhancer); }; } module.exports = (props: Props) => { const middlewares = [thunk]; if (props.injectWaitService) { middlewares.push(waitUntilService); } return createStore( reducer, createInitialState(props), compose( applyMiddleware(...middlewares), enableStateReinitializer(props) ) ); };<|fim▁end|>
} if (props.releaseActor && initialState.actors) { initialState.actors.forEach(props.releaseActor);
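enableStateReinitializer above is a store enhancer: it wraps the reducer so that a ROOTS_CHANGED action rebuilds fresh state instead of reducing the old one. The wrapping idea reduced to its core, sketched outside Redux (action shapes are invented):

def with_reset(reducer, make_initial_state):
    """Wrap a reducer so a 'ROOTS_CHANGED' action resets state."""
    def wrapped(state, action):
        if action["type"] == "ROOTS_CHANGED":
            return make_initial_state(action)
        return reducer(state, action)
    return wrapped

tick = with_reset(lambda s, a: s + 1 if a["type"] == "TICK" else s,
                  lambda a: 0)
state = tick(5, {"type": "TICK"})               # 6
state = tick(state, {"type": "ROOTS_CHANGED"})  # 0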
<|file_name|>upstream.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # File created on 20 Feb 2013 from __future__ import division __author__ = "Greg Caporaso" __copyright__ = "Copyright 2011, The QIIME project" __credits__ = ["Greg Caporaso", "Kyle Bittinger", "Justin Kuczynski", "Jai Ram Rideout"] __license__ = "GPL" __version__ = "1.9.1-dev" __maintainer__ = "Greg Caporaso" __email__ = "[email protected]" from os.path import split, splitext, abspath from qiime.util import create_dir from qiime.workflow.util import (print_to_stdout, generate_log_fp, WorkflowLogger, log_input_md5s, get_params_str) def run_pick_de_novo_otus(input_fp, output_dir, command_handler, params, qiime_config, parallel=False, logger=None, suppress_md5=False, status_update_callback=print_to_stdout): """ Run the data preparation steps of Qiime The steps performed by this function are: 1) Pick OTUs; 2) Pick a representative set; 3) Align the representative set; 4) Assign taxonomy; 5) Filter the alignment prior to tree building - remove positions which are all gaps, and specified as 0 in the lanemask 6) Build a phylogenetic tree; 7) Build an OTU table. """ # Prepare some variables for the later steps input_dir, input_filename = split(input_fp) input_basename, input_ext = splitext(input_filename) create_dir(output_dir) commands = [] cluster_failures = False if logger is None: logger = WorkflowLogger(generate_log_fp(output_dir), params=params, qiime_config=qiime_config) close_logger_on_success = True else: close_logger_on_success = False if not suppress_md5: log_input_md5s(logger, [input_fp]) # Prep the OTU picking command try: otu_picking_method = params['pick_otus']['otu_picking_method'] except KeyError: otu_picking_method = 'uclust' pick_otu_dir = '%s/%s_picked_otus' % (output_dir, otu_picking_method) otu_fp = '%s/%s_otus.txt' % (pick_otu_dir, input_basename) if parallel and (otu_picking_method == 'blast' or otu_picking_method == 'uclust_ref'): # Grab the parallel-specific parameters try: params_str = get_params_str(params['parallel']) except KeyError: params_str = '' # Grab the OTU picker parameters try: # Want to find a cleaner strategy for this: the parallel script # is method-specific, so doesn't take a --otu_picking_method # option. This works for now though. 
d = params['pick_otus'].copy() del d['otu_picking_method'] except KeyError: pass if otu_picking_method == 'uclust_ref': try: suppress_new_clusters = d['suppress_new_clusters'] del d['suppress_new_clusters'] cluster_failures = False except KeyError: cluster_failures = True failure_otu_picking_method = 'uclust' params_str += ' %s' % get_params_str(d) otu_picking_script = 'parallel_pick_otus_%s.py' % otu_picking_method # Build the OTU picking command pick_otus_cmd = '%s -i %s -o %s -T %s' % (otu_picking_script, input_fp, pick_otu_dir, params_str) else: try: params_str = get_params_str(params['pick_otus']) except KeyError: params_str = '' # Build the OTU picking command pick_otus_cmd = 'pick_otus.py -i %s -o %s %s' %\ (input_fp, pick_otu_dir, params_str) commands.append([('Pick OTUs', pick_otus_cmd)]) if cluster_failures: reference_otu_fp = otu_fp clustered_failures_dir = '%s/failure_otus/' % pick_otu_dir try: d = params['pick_otus'].copy() del d['otu_picking_method'] except KeyError: pass if 'denovo_otu_id_prefix' not in d: d['denovo_otu_id_prefix'] = 'DeNovoOTU' params_str = ' %s' % get_params_str(d) failures_list_fp = '%s/%s_failures.txt' % \ (pick_otu_dir, input_basename) failures_fasta_fp = '%s/%s_failures.fasta' % \ (pick_otu_dir, input_basename) filter_fasta_cmd = 'filter_fasta.py -f %s -s %s -o %s' %\ (input_fp, failures_list_fp, failures_fasta_fp) commands.append([('Generate failures fasta file', filter_fasta_cmd)]) # Prep the OTU picking command for failure_otu_fp = '%s/%s_failures_otus.txt' % (clustered_failures_dir, input_basename) # Build the OTU picking command pick_otus_cmd = 'pick_otus.py -i %s -o %s -m %s %s' %\ (failures_fasta_fp, clustered_failures_dir, failure_otu_picking_method, params_str) commands.append( [('Pick de novo OTUs for new clusters', pick_otus_cmd)]) merged_otu_map_fp = '%s/merged_otu_map.txt' % clustered_failures_dir cat_otu_tables_cmd = 'cat %s %s >> %s' %\ (reference_otu_fp, failure_otu_fp, merged_otu_map_fp) commands.append([('Merge OTU maps', cat_otu_tables_cmd)]) otu_fp = merged_otu_map_fp # Prep the representative set picking command rep_set_dir = '%s/rep_set/' % output_dir create_dir(rep_set_dir) rep_set_fp = '%s/%s_rep_set.fasta' % (rep_set_dir, input_basename) rep_set_log_fp = '%s/%s_rep_set.log' % (rep_set_dir, input_basename) try: params_str = get_params_str(params['pick_rep_set']) except KeyError: params_str = '' # Build the representative set picking command pick_rep_set_cmd = 'pick_rep_set.py -i %s -f %s -l %s -o %s %s' %\ (otu_fp, input_fp, rep_set_log_fp, rep_set_fp, params_str) commands.append([('Pick representative set', pick_rep_set_cmd)]) # Prep the taxonomy assignment command try: assignment_method = params['assign_taxonomy']['assignment_method'] except KeyError: assignment_method = 'uclust' assign_taxonomy_dir = '%s/%s_assigned_taxonomy' %\ (output_dir, assignment_method) taxonomy_fp = '%s/%s_rep_set_tax_assignments.txt' % \ (assign_taxonomy_dir, input_basename) if parallel and (assignment_method == 'rdp' or assignment_method == 'blast' or assignment_method == 'uclust'): # Grab the parallel-specific parameters try: params_str = get_params_str(params['parallel']) except KeyError: params_str = '' # Grab the taxonomy assignment parameters try: # Want to find a cleaner strategy for this: the parallel script # is method-specific, so doesn't take a --assignment_method # option. This works for now though. 
d = params['assign_taxonomy'].copy() if 'assignment_method' in d: del d['assignment_method'] params_str += ' %s' % get_params_str(d) except KeyError: pass # Build the parallel taxonomy assignment command assign_taxonomy_cmd = \ 'parallel_assign_taxonomy_%s.py -i %s -o %s -T %s' %\ (assignment_method, rep_set_fp, assign_taxonomy_dir, params_str) else: try: params_str = get_params_str(params['assign_taxonomy']) except KeyError: params_str = '' # Build the taxonomy assignment command assign_taxonomy_cmd = 'assign_taxonomy.py -o %s -i %s %s' %\ (assign_taxonomy_dir, rep_set_fp, params_str) commands.append([('Assign taxonomy', assign_taxonomy_cmd)]) # Prep the OTU table building command otu_table_fp = '%s/otu_table.biom' % output_dir try: params_str = get_params_str(params['make_otu_table']) except KeyError: params_str = '' # Build the OTU table building command make_otu_table_cmd = 'make_otu_table.py -i %s -t %s -o %s %s' %\ (otu_fp, taxonomy_fp, otu_table_fp, params_str) commands.append([('Make OTU table', make_otu_table_cmd)]) if cluster_failures: reference_otu_table_fp = '%s/reference_only_otu_table.biom' % output_dir # Build the OTU table building command make_otu_table_cmd = 'make_otu_table.py -i %s -t %s -o %s %s' %\ (reference_otu_fp, taxonomy_fp, reference_otu_table_fp, params_str) commands.append( [('Make reference-only OTU table', make_otu_table_cmd)]) # Prep the pynast alignment command try: alignment_method = params['align_seqs']['alignment_method'] except KeyError: alignment_method = 'pynast' pynast_dir = '%s/%s_aligned_seqs' % (output_dir, alignment_method) aln_fp = '%s/%s_rep_set_aligned.fasta' % (pynast_dir, input_basename) if parallel and alignment_method == 'pynast': # Grab the parallel-specific parameters try: params_str = get_params_str(params['parallel']) except KeyError: params_str = '' # Grab the alignment parameters # Want to find a cleaner strategy for this: the parallel script # is method-specific, so doesn't take a --alignment_method # option. This works for now though. 
try: d = params['align_seqs'].copy() except KeyError: d = {} try: del d['alignment_method'] except KeyError: pass params_str += ' %s' % get_params_str(d) # Build the parallel pynast alignment command align_seqs_cmd = 'parallel_align_seqs_pynast.py -i %s -o %s -T %s' %\ (rep_set_fp, pynast_dir, params_str) else: try: params_str = get_params_str(params['align_seqs']) except KeyError: params_str = '' # Build the pynast alignment command align_seqs_cmd = 'align_seqs.py -i %s -o %s %s' %\ (rep_set_fp, pynast_dir, params_str) commands.append([('Align sequences', align_seqs_cmd)]) # Prep the alignment filtering command filtered_aln_fp = '%s/%s_rep_set_aligned_pfiltered.fasta' %\ (pynast_dir, input_basename) try: params_str = get_params_str(params['filter_alignment']) except KeyError: params_str = '' # Build the alignment filtering command filter_alignment_cmd = 'filter_alignment.py -o %s -i %s %s' %\ (pynast_dir, aln_fp, params_str) commands.append([('Filter alignment', filter_alignment_cmd)]) # Prep the tree building command tree_fp = '%s/rep_set.tre' % output_dir try: params_str = get_params_str(params['make_phylogeny']) except KeyError: params_str = '' # Build the tree building command make_phylogeny_cmd = 'make_phylogeny.py -i %s -o %s %s' %\ (filtered_aln_fp, tree_fp, params_str) commands.append([('Build phylogenetic tree', make_phylogeny_cmd)]) # Call the command handler on the list of commands command_handler(commands, status_update_callback, logger=logger, close_logger_on_success=close_logger_on_success) return abspath(tree_fp), abspath(otu_table_fp) run_qiime_data_preparation = run_pick_otus_through_otu_table = run_pick_de_novo_otus def run_pick_closed_reference_otus( input_fp, refseqs_fp, output_dir, taxonomy_fp, command_handler, params, qiime_config, assign_taxonomy=False, parallel=False, logger=None, suppress_md5=False, status_update_callback=print_to_stdout): """ Run the data preparation steps of Qiime The steps performed by this function are: 1) Pick OTUs; 2) If assignment_taxonomy is True, choose representative sequence for OTUs and assign taxonomy using a classifier. 3) Build an OTU table with optional predefined taxonomy (if assign_taxonomy=False) or taxonomic assignments from step 2 (if assign_taxonomy=True). """ # confirm that a valid otu picking method was supplied before doing # any work reference_otu_picking_methods = ['blast', 'uclust_ref', 'usearch61_ref', 'usearch_ref', 'sortmerna'] try: otu_picking_method = params['pick_otus']['otu_picking_method'] except KeyError: otu_picking_method = 'uclust_ref' assert otu_picking_method in reference_otu_picking_methods,\ "Invalid OTU picking method supplied: %s. 
Valid choices are: %s"\ % (otu_picking_method, ' '.join(reference_otu_picking_methods)) # Prepare some variables for the later steps input_dir, input_filename = split(input_fp) input_basename, input_ext = splitext(input_filename) create_dir(output_dir) commands = [] if logger is None: logger = WorkflowLogger(generate_log_fp(output_dir), params=params, qiime_config=qiime_config) close_logger_on_success = True else: close_logger_on_success = False if not suppress_md5: log_input_md5s(logger, [input_fp, refseqs_fp, taxonomy_fp]) # Prep the OTU picking command pick_otu_dir = '%s/%s_picked_otus' % (output_dir, otu_picking_method) otu_fp = '%s/%s_otus.txt' % (pick_otu_dir, input_basename) if parallel and (otu_picking_method == 'blast' or otu_picking_method == 'uclust_ref' or otu_picking_method == 'usearch61_ref' or otu_picking_method == 'sortmerna'): # Grab the parallel-specific parameters try: params_str = get_params_str(params['parallel']) except KeyError: params_str = '' # Grab the OTU picker parameters try: # Want to find a cleaner strategy for this: the parallel script # is method-specific, so doesn't take a --alignment_method # option. This works for now though. d = params['pick_otus'].copy() if 'otu_picking_method' in d: del d['otu_picking_method'] params_str += ' %s' % get_params_str(d) except KeyError: pass otu_picking_script = 'parallel_pick_otus_%s.py' % otu_picking_method # Build the OTU picking command pick_otus_cmd = '%s -i %s -o %s -r %s -T %s' %\ (otu_picking_script, input_fp, pick_otu_dir, refseqs_fp, params_str) else: try: params_str = get_params_str(params['pick_otus']) except KeyError: params_str = '' # Since this is reference-based OTU picking we always want to # suppress new clusters -- force it here. params_str += ' --suppress_new_clusters' logger.write( "Forcing --suppress_new_clusters as this is " "closed-reference OTU picking.\n\n") # Build the OTU picking command pick_otus_cmd = 'pick_otus.py -i %s -o %s -r %s -m %s %s' %\ (input_fp, pick_otu_dir, refseqs_fp, otu_picking_method, params_str) commands.append([('Pick OTUs', pick_otus_cmd)]) # Assign taxonomy using a taxonomy classifier, if request by the user. # (Alternatively predefined taxonomic assignments will be used, if provided.) 
if assign_taxonomy: # Prep the representative set picking command rep_set_dir = '%s/rep_set/' % output_dir create_dir(rep_set_dir) rep_set_fp = '%s/%s_rep_set.fasta' % (rep_set_dir, input_basename) rep_set_log_fp = '%s/%s_rep_set.log' % (rep_set_dir, input_basename) try: params_str = get_params_str(params['pick_rep_set']) except KeyError: params_str = '' # Build the representative set picking command pick_rep_set_cmd = 'pick_rep_set.py -i %s -f %s -l %s -o %s %s' %\ (otu_fp, input_fp, rep_set_log_fp, rep_set_fp, params_str) commands.append([('Pick representative set', pick_rep_set_cmd)]) # Prep the taxonomy assignment command try: assignment_method = params['assign_taxonomy']['assignment_method'] except KeyError: assignment_method = 'uclust' assign_taxonomy_dir = '%s/%s_assigned_taxonomy' %\ (output_dir, assignment_method) taxonomy_fp = '%s/%s_rep_set_tax_assignments.txt' % \ (assign_taxonomy_dir, input_basename) if parallel and (assignment_method == 'rdp' or assignment_method == 'blast' or assignment_method == 'uclust'): # Grab the parallel-specific parameters try: params_str = get_params_str(params['parallel']) except KeyError: params_str = '' # Grab the taxonomy assignment parameters try: # Want to find a cleaner strategy for this: the parallel script # is method-specific, so doesn't take a --assignment_method # option. This works for now though. d = params['assign_taxonomy'].copy() if 'assignment_method' in d: del d['assignment_method'] params_str += ' %s' % get_params_str(d) except KeyError: pass # Build the parallel taxonomy assignment command assign_taxonomy_cmd = \ 'parallel_assign_taxonomy_%s.py -i %s -o %s -T %s' %\ (assignment_method, rep_set_fp, assign_taxonomy_dir, params_str) else: try: params_str = get_params_str(params['assign_taxonomy']) except KeyError: params_str = '' # Build the taxonomy assignment command<|fim▁hole|> commands.append([('Assign taxonomy', assign_taxonomy_cmd)]) # Prep the OTU table building command otu_table_fp = '%s/otu_table.biom' % output_dir try: params_str = get_params_str(params['make_otu_table']) except KeyError: params_str = '' # If assign_taxonomy is True, this will be the path to the taxonomic # assignment results. If assign_taxonomy is False this will be either # the precomputed taxonomic assignments that the user passed in, # or None. if taxonomy_fp: taxonomy_str = '-t %s' % taxonomy_fp else: taxonomy_str = '' # Build the OTU table building command make_otu_table_cmd = 'make_otu_table.py -i %s %s -o %s %s' %\ (otu_fp, taxonomy_str, otu_table_fp, params_str) commands.append([('Make OTU table', make_otu_table_cmd)]) # Call the command handler on the list of commands command_handler(commands, status_update_callback, logger=logger, close_logger_on_success=close_logger_on_success) run_pick_reference_otus_through_otu_table = run_pick_closed_reference_otus<|fim▁end|>
assign_taxonomy_cmd = 'assign_taxonomy.py -o %s -i %s %s' %\ (assign_taxonomy_dir, rep_set_fp, params_str)
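The workflow above builds each pipeline step as a shell command string and folds optional parameters in through get_params_str. A rough stand-alone sketch of what that flattening step does -- an assumption about its behavior, since the real helper ships with QIIME's workflow utilities:

def get_params_str(params):
    # Sketch: turn a parameter dict into the "--opt value" string the
    # workflow appends to each command. Bare True values become flags.
    parts = []
    for name, value in sorted(params.items()):
        if value is True or value is None:
            parts.append('--%s' % name)
        else:
            parts.append('--%s %s' % (name, value))
    return ' '.join(parts)

print(get_params_str({'similarity': 0.97, 'enable_rev_strand_match': True}))
# --enable_rev_strand_match --similarity 0.97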
<|file_name|>encrypted_transport_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Vitess Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/*
This test makes sure encrypted transport over gRPC works.

The security chains are set up the following way:

* root CA
  * vttablet server CA
    * vttablet server instance cert/key
  * vttablet client CA
    * vttablet client 1 cert/key
  * vtgate server CA
    * vtgate server instance cert/key (common name is 'localhost')
  * vtgate client CA
    * vtgate client 1 cert/key
    * vtgate client 2 cert/key

The following table shows all the checks we perform:

process:   will check its peer is signed by:   for link:

vttablet   vttablet client CA                  vtgate -> vttablet
vtgate     vttablet server CA                  vtgate -> vttablet
vtgate     vtgate client CA                    client -> vtgate
client     vtgate server CA                    client -> vtgate

Additionally, we have the following constraints:
- the client certificate common name is used as immediate
caller ID by vtgate, and forwarded to vttablet. This allows us to use
table ACLs on the vttablet side.
- the vtgate server certificate common name is set to 'localhost' so it
matches the hostname dialed by the vtgate clients. This is not a
requirement for the Go client, which can set its expected server name.
However, the Python gRPC client doesn't have the ability to set the server
name, so they must match.
- the Python client needs to have the full chain for the server validation
(that is 'vtgate server CA' + 'root CA'). A Go client doesn't. So we read
both below when using the Python client, but we only pass the intermediate
cert to the Go clients (for the vtgate -> vttablet link).
*/ package encryptedtransport import ( "flag" "fmt" "io/ioutil" "os" "os/exec" "path" "testing" "vitess.io/vitess/go/test/endtoend/encryption" "vitess.io/vitess/go/vt/proto/vtrpc" "vitess.io/vitess/go/vt/vterrors" "context" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "vitess.io/vitess/go/test/endtoend/cluster" "vitess.io/vitess/go/vt/grpcclient" "vitess.io/vitess/go/vt/log" querypb "vitess.io/vitess/go/vt/proto/query" vtgatepb "vitess.io/vitess/go/vt/proto/vtgate" vtgateservicepb "vitess.io/vitess/go/vt/proto/vtgateservice" ) var ( clusterInstance *cluster.LocalProcessCluster createVtInsertTest = `create table vt_insert_test ( id bigint auto_increment, msg varchar(64), keyspace_id bigint(20) unsigned NOT NULL, primary key (id) ) Engine = InnoDB` keyspace = "test_keyspace" hostname = "localhost" shardName = "0" cell = "zone1" certDirectory string grpcCert = "" grpcKey = "" grpcCa = "" grpcName = "" ) func TestSecureTransport(t *testing.T) { defer cluster.PanicHandler(t) flag.Parse() // initialize cluster _, err := clusterSetUp(t) require.Nil(t, err, "setup failed") masterTablet := *clusterInstance.Keyspaces[0].Shards[0].Vttablets[0] replicaTablet := *clusterInstance.Keyspaces[0].Shards[0].Vttablets[1] // creating table_acl_config.json file tableACLConfigJSON := path.Join(certDirectory, "table_acl_config.json") f, err := os.Create(tableACLConfigJSON) require.NoError(t, err) _, err = f.WriteString(`{ "table_groups": [ { "table_names_or_prefixes": ["vt_insert_test"], "readers": ["vtgate client 1"], "writers": ["vtgate client 1"], "admins": ["vtgate client 1"] } ] }`) require.NoError(t, err) err = f.Close() require.NoError(t, err) // start the tablets for _, tablet := range []cluster.Vttablet{masterTablet, replicaTablet} { tablet.VttabletProcess.ExtraArgs = append(tablet.VttabletProcess.ExtraArgs, "-table-acl-config", tableACLConfigJSON, "-queryserver-config-strict-table-acl") tablet.VttabletProcess.ExtraArgs = append(tablet.VttabletProcess.ExtraArgs, serverExtraArguments("vttablet-server-instance", "vttablet-client")...) err = tablet.VttabletProcess.Setup() require.NoError(t, err) } // setup replication var vtctlClientArgs []string vtctlClientTmArgs := append(vtctlClientArgs, tmclientExtraArgs("vttablet-client-1")...) // Reparenting vtctlClientArgs = append(vtctlClientTmArgs, "InitShardMaster", "-force", "test_keyspace/0", masterTablet.Alias) err = clusterInstance.VtctlProcess.ExecuteCommand(vtctlClientArgs...) require.NoError(t, err) // Apply schema var vtctlApplySchemaArgs = append(vtctlClientTmArgs, "ApplySchema", "-sql", createVtInsertTest, "test_keyspace") err = clusterInstance.VtctlProcess.ExecuteCommand(vtctlApplySchemaArgs...) require.NoError(t, err) for _, tablet := range []cluster.Vttablet{masterTablet, replicaTablet} { var vtctlTabletArgs []string vtctlTabletArgs = append(vtctlTabletArgs, tmclientExtraArgs("vttablet-client-1")...) vtctlTabletArgs = append(vtctlTabletArgs, "RunHealthCheck", tablet.Alias) _, err = clusterInstance.VtctlProcess.ExecuteCommandWithOutput(vtctlTabletArgs...) require.NoError(t, err) } // start vtgate clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, tabletConnExtraArgs("vttablet-client-1")...) clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, serverExtraArguments("vtgate-server-instance", "vtgate-client")...) 
err = clusterInstance.StartVtgate() require.NoError(t, err) grpcAddress := fmt.Sprintf("%s:%d", "localhost", clusterInstance.VtgateProcess.GrpcPort) // 'vtgate client 1' is authorized to access vt_insert_test setCreds(t, "vtgate-client-1", "vtgate-server") ctx := context.Background() request := getRequest("select * from vt_insert_test") vc, err := getVitessClient(grpcAddress) require.NoError(t, err) qr, err := vc.Execute(ctx, request) require.NoError(t, err) err = vterrors.FromVTRPC(qr.Error) require.NoError(t, err) // 'vtgate client 2' is not authorized to access vt_insert_test setCreds(t, "vtgate-client-2", "vtgate-server") request = getRequest("select * from vt_insert_test") vc, err = getVitessClient(grpcAddress) require.NoError(t, err) qr, err = vc.Execute(ctx, request) require.NoError(t, err) err = vterrors.FromVTRPC(qr.Error) require.Error(t, err) assert.Contains(t, err.Error(), "table acl error") assert.Contains(t, err.Error(), "cannot run Select on table") // now restart vtgate in the mode where we don't use SSL // for client connections, but we copy effective caller id // into immediate caller id. clusterInstance.VtGateExtraArgs = []string{"-grpc_use_effective_callerid"} clusterInstance.VtGateExtraArgs = append(clusterInstance.VtGateExtraArgs, tabletConnExtraArgs("vttablet-client-1")...) err = clusterInstance.RestartVtgate() require.NoError(t, err) grpcAddress = fmt.Sprintf("%s:%d", "localhost", clusterInstance.VtgateProcess.GrpcPort) setSSLInfoEmpty() // get vitess client vc, err = getVitessClient(grpcAddress) require.NoError(t, err) // test with empty effective caller Id request = getRequest("select * from vt_insert_test") qr, err = vc.Execute(ctx, request) require.NoError(t, err) err = vterrors.FromVTRPC(qr.Error) require.Error(t, err) assert.Contains(t, err.Error(), "table acl error") assert.Contains(t, err.Error(), "cannot run Select on table") // 'vtgate client 1' is authorized to access vt_insert_test callerID := &vtrpc.CallerID{ Principal: "vtgate client 1", } request = getRequestWithCallerID(callerID, "select * from vt_insert_test") qr, err = vc.Execute(ctx, request) require.NoError(t, err) err = vterrors.FromVTRPC(qr.Error) require.NoError(t, err) // 'vtgate client 2' is not authorized to access vt_insert_test callerID = &vtrpc.CallerID{ Principal: "vtgate client 2", } request = getRequestWithCallerID(callerID, "select * from vt_insert_test") qr, err = vc.Execute(ctx, request) require.NoError(t, err) err = vterrors.FromVTRPC(qr.Error) require.Error(t, err) assert.Contains(t, err.Error(), "table acl error") assert.Contains(t, err.Error(), "cannot run Select on table") clusterInstance.Teardown() } func clusterSetUp(t *testing.T) (int, error) { var mysqlProcesses []*exec.Cmd clusterInstance = cluster.NewCluster(cell, hostname) // Start topo server if err := clusterInstance.StartTopo(); err != nil { return 1, err } // create all certs log.Info("Creating certificates") certDirectory = path.Join(clusterInstance.TmpDirectory, "certs") _ = encryption.CreateDirectory(certDirectory, 0700) err := encryption.ExecuteVttlstestCommand("-root", certDirectory, "CreateCA") require.NoError(t, err) err = createSignedCert("ca", "01", "vttablet-server", "vttablet server CA") require.NoError(t, err) err = createSignedCert("ca", "02", "vttablet-client", "vttablet client CA") require.NoError(t, err) err = createSignedCert("ca", "03", "vtgate-server", "vtgate server CA") require.NoError(t, err) err = createSignedCert("ca", "04", "vtgate-client", "vtgate client CA") require.NoError(t, err) err = 
createSignedCert("vttablet-server", "01", "vttablet-server-instance", "vttablet server instance") require.NoError(t, err) err = createSignedCert("vttablet-client", "01", "vttablet-client-1", "vttablet client 1") require.NoError(t, err) err = createSignedCert("vtgate-server", "01", "vtgate-server-instance", "localhost") require.NoError(t, err) err = createSignedCert("vtgate-client", "01", "vtgate-client-1", "vtgate client 1") require.NoError(t, err) err = createSignedCert("vtgate-client", "02", "vtgate-client-2", "vtgate client 2") require.NoError(t, err) for _, keyspaceStr := range []string{keyspace} { KeyspacePtr := &cluster.Keyspace{Name: keyspaceStr} keyspace := *KeyspacePtr if err := clusterInstance.VtctlProcess.CreateKeyspace(keyspace.Name); err != nil { return 1, err } shard := &cluster.Shard{ Name: shardName, } for i := 0; i < 2; i++ { // instantiate vttablet object with reserved ports tablet := clusterInstance.NewVttabletInstance("replica", 0, cell) // Start Mysqlctl process tablet.MysqlctlProcess = *cluster.MysqlCtlProcessInstance(tablet.TabletUID, tablet.MySQLPort, clusterInstance.TmpDirectory) proc, err := tablet.MysqlctlProcess.StartProcess() if err != nil { return 1, err } mysqlProcesses = append(mysqlProcesses, proc) // start vttablet process tablet.VttabletProcess = cluster.VttabletProcessInstance(tablet.HTTPPort, tablet.GrpcPort, tablet.TabletUID, clusterInstance.Cell, shardName, keyspace.Name, clusterInstance.VtctldProcess.Port, tablet.Type, clusterInstance.TopoProcess.Port, clusterInstance.Hostname, clusterInstance.TmpDirectory, clusterInstance.VtTabletExtraArgs, clusterInstance.EnableSemiSync) tablet.Alias = tablet.VttabletProcess.TabletPath shard.Vttablets = append(shard.Vttablets, tablet) } keyspace.Shards = append(keyspace.Shards, *shard) clusterInstance.Keyspaces = append(clusterInstance.Keyspaces, keyspace) } for _, proc := range mysqlProcesses { err := proc.Wait() if err != nil { return 1, err } } return 0, nil } func createSignedCert(ca string, serial string, name string, commonName string) error { log.Infof("Creating signed cert and key %s", commonName) tmpProcess := exec.Command( "vttlstest", "-root", certDirectory, "CreateSignedCert", "-parent", ca, "-serial", serial, "-common_name", commonName, name) return tmpProcess.Run() } func serverExtraArguments(name string, ca string) []string { args := []string{"-grpc_cert", certDirectory + "/" + name + "-cert.pem", "-grpc_key", certDirectory + "/" + name + "-key.pem", "-grpc_ca", certDirectory + "/" + ca + "-cert.pem"} return args } func tmclientExtraArgs(name string) []string { ca := "vttablet-server" var args = []string{"-tablet_manager_grpc_cert", certDirectory + "/" + name + "-cert.pem", "-tablet_manager_grpc_key", certDirectory + "/" + name + "-key.pem", "-tablet_manager_grpc_ca", certDirectory + "/" + ca + "-cert.pem", "-tablet_manager_grpc_server_name", "vttablet server instance"} return args } func tabletConnExtraArgs(name string) []string { ca := "vttablet-server" args := []string{"-tablet_grpc_cert", certDirectory + "/" + name + "-cert.pem", "-tablet_grpc_key", certDirectory + "/" + name + "-key.pem", "-tablet_grpc_ca", certDirectory + "/" + ca + "-cert.pem", "-tablet_grpc_server_name", "vttablet server instance"} return args } func getVitessClient(addr string) (vtgateservicepb.VitessClient, error) { opt, err := grpcclient.SecureDialOption(grpcCert, grpcKey, grpcCa, grpcName) if err != nil { return nil, err } cc, err := grpcclient.Dial(addr, grpcclient.FailFast(false), opt) if err != nil { return nil, err } 
c := vtgateservicepb.NewVitessClient(cc) return c, nil } func setCreds(t *testing.T, name string, ca string) { f1, err := os.Open(path.Join(certDirectory, "ca-cert.pem")) require.NoError(t, err) b1, err := ioutil.ReadAll(f1) require.NoError(t, err) f2, err := os.Open(path.Join(certDirectory, ca+"-cert.pem")) require.NoError(t, err) b2, err := ioutil.ReadAll(f2) require.NoError(t, err) caContent := append(b1, b2...) fileName := "ca-" + name + ".pem" caVtgateClient := path.Join(certDirectory, fileName) f, err := os.Create(caVtgateClient) require.NoError(t, err) _, err = f.Write(caContent) require.NoError(t, err) grpcCa = caVtgateClient grpcKey = path.Join(certDirectory, name+"-key.pem") grpcCert = path.Join(certDirectory, name+"-cert.pem") err = f.Close() require.NoError(t, err) err = f2.Close() require.NoError(t, err) err = f1.Close() require.NoError(t, err) } func setSSLInfoEmpty() { grpcCa = "" grpcCert = "" grpcKey = "" grpcName = "" } func getSession() *vtgatepb.Session { return &vtgatepb.Session{ TargetString: "test_keyspace:0@master", } } func getRequestWithCallerID(callerID *vtrpc.CallerID, sql string) *vtgatepb.ExecuteRequest { session := getSession() return &vtgatepb.ExecuteRequest{ CallerId: callerID, Session: session, Query: &querypb.BoundQuery{ Sql: sql, }, } } <|fim▁hole|>func getRequest(sql string) *vtgatepb.ExecuteRequest { session := getSession() return &vtgatepb.ExecuteRequest{ Session: session, Query: &querypb.BoundQuery{ Sql: sql, }, } }<|fim▁end|>
<|file_name|>cart.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Inject } from '@angular/core'; import { MatDialog, MatDialogRef, MAT_DIALOG_DATA } from '@angular/material'; import { Materiaal, Reservering } from '../../models/index'; import { Subscription } from 'rxjs/Subscription'; import { MaterialenService, ReserveringService } from '../../services/index' @Component({ selector: 'cart', templateUrl: './cart.component.html', styleUrls: ['./cart.component.css'] }) export class CartComponent { subscription: Subscription; confirmStep1 = false; confirmStep2 = false; materiaalCart: Reservering [] =[]; materialenInCart: Materiaal[] = []; constructor( public materialenService: MaterialenService, public reserveringSerivce: ReserveringService, public dialogRef: MatDialogRef<CartComponent>, // data binnengekomen via de NavbarComponent @Inject(MAT_DIALOG_DATA) public data: Reservering[]) { this.confirmStep1 = false; this.confirmStep2 = false; this.materiaalCart = data; // haal de materialen op welke gereserveerd zijn this.materiaalCart.forEach(x => { this.materialenService.getMateriaalById(x.materiaal_id).subscribe(materiaal => { // voeg de $key toe, omdat deze niet wordt gereturned const addMateriaal = materiaal; addMateriaal.$key = x.materiaal_id; this.materialenInCart.push(materiaal); }); }); } /** sluiten van de dialog */ onNoClick(): void { this.dialogRef.close(); } /** aantal verlagen*/ checkRemove(key) { this.materiaalCart.forEach(x => { if (x.materiaal_id === key) { x.aantal = Number(x.aantal) - 1; this.pushToService(); } }); } /** aantal verhogen */ checkAdd(key) { this.materiaalCart.forEach(x => { if (x.materiaal_id === key) { x.aantal = Number(x.aantal) + 1; this.pushToService(); }<|fim▁hole|> /** verwijderen van Reservering */ deleteReservering(key) { // delete Reservering van Cart this.materiaalCart.forEach(x => { if (x.materiaal_id === key) { const index = this.materiaalCart.indexOf(x); this.materiaalCart.splice(index, 1); this.pushToService(); } }); // delete Materiaal van materialenInCart this.materialenInCart.forEach(x => { if (x.$key === key) { const index = this.materialenInCart.indexOf(x); this.materialenInCart.splice(index, 1); } }); } /** bevestigen van Reservering */ confirmReservering() { if (this.reserveringSerivce.addReservering()) { this.onNoClick(); } } /** push Cart naar reserveringsService */ pushToService(): void { this.reserveringSerivce.addToCart(this.materiaalCart); } }<|fim▁end|>
}); }
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from .circleclient import __version__
<|file_name|>driver.py<|end_file_name|><|fim▁begin|># Copyright (C) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the Google name nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import base64 import copy import logging import re import shlex import sys import time import os from webkitpy.common.system import path from webkitpy.common.system.profiler import ProfilerFactory _log = logging.getLogger(__name__) DRIVER_START_TIMEOUT_SECS = 30 class DriverInput(object): def __init__(self, test_name, timeout, image_hash, should_run_pixel_test, args): self.test_name = test_name self.timeout = timeout # in ms self.image_hash = image_hash self.should_run_pixel_test = should_run_pixel_test self.args = args class DriverOutput(object): """Groups information about a output from driver for easy passing and post-processing of data.""" def __init__(self, text, image, image_hash, audio, crash=False, test_time=0, measurements=None, timeout=False, error='', crashed_process_name='??', crashed_pid=None, crash_log=None, leak=False, leak_log=None, pid=None): # FIXME: Args could be renamed to better clarify what they do. self.text = text self.image = image # May be empty-string if the test crashes. self.image_hash = image_hash self.image_diff = None # image_diff gets filled in after construction. self.audio = audio # Binary format is port-dependent. self.crash = crash self.crashed_process_name = crashed_process_name self.crashed_pid = crashed_pid self.crash_log = crash_log self.leak = leak self.leak_log = leak_log self.test_time = test_time self.measurements = measurements self.timeout = timeout self.error = error # stderr output self.pid = pid def has_stderr(self): return bool(self.error) class DeviceFailure(Exception): pass class Driver(object): """object for running test(s) using content_shell or other driver.""" def __init__(self, port, worker_number, pixel_tests, no_timeout=False): """Initialize a Driver to subsequently run tests. Typically this routine will spawn content_shell in a config ready for subsequent input. port - reference back to the port object. 
worker_number - identifier for a particular worker/driver instance """ self._port = port self._worker_number = worker_number self._no_timeout = no_timeout self._driver_tempdir = None # content_shell can report back subprocess crashes by printing # "#CRASHED - PROCESSNAME". Since those can happen at any time # and ServerProcess won't be aware of them (since the actual tool # didn't crash, just a subprocess) we record the crashed subprocess name here. self._crashed_process_name = None self._crashed_pid = None # content_shell can report back subprocesses that became unresponsive # This could mean they crashed. self._subprocess_was_unresponsive = False # content_shell can report back subprocess DOM-object leaks by printing # "#LEAK". This leak detection is enabled only when the flag # --enable-leak-detection is passed to content_shell. self._leaked = False # stderr reading is scoped on a per-test (not per-block) basis, so we store the accumulated # stderr output, as well as if we've seen #EOF on this driver instance. # FIXME: We should probably remove _read_first_block and _read_optional_image_block and # instead scope these locally in run_test. self.error_from_test = str() self.err_seen_eof = False self._server_process = None self._current_cmd_line = None self._measurements = {} if self._port.get_option("profile"): profiler_name = self._port.get_option("profiler") self._profiler = ProfilerFactory.create_profiler(self._port.host, self._port._path_to_driver(), self._port.results_directory(), profiler_name) else: self._profiler = None def __del__(self): self.stop() def run_test(self, driver_input, stop_when_done): """Run a single test and return the results. Note that it is okay if a test times out or crashes and leaves the driver in an indeterminate state. The upper layers of the program are responsible for cleaning up and ensuring things are okay. Returns a DriverOutput object. """ start_time = time.time() stdin_deadline = start_time + int(driver_input.timeout) / 2000.0 self.start(driver_input.should_run_pixel_test, driver_input.args, stdin_deadline) test_begin_time = time.time() self.error_from_test = str() self.err_seen_eof = False command = self._command_from_driver_input(driver_input) deadline = test_begin_time + int(driver_input.timeout) / 1000.0 self._server_process.write(command) text, audio = self._read_first_block(deadline) # First block is either text or audio image, actual_image_hash = self._read_optional_image_block(deadline) # The second (optional) block is image data. crashed = self.has_crashed() timed_out = self._server_process.timed_out pid = self._server_process.pid() leaked = self._leaked if not crashed: sanitizer = self._port.output_contains_sanitizer_messages(self.error_from_test) if sanitizer: self.error_from_test = 'OUTPUT CONTAINS "' + sanitizer + '", so we are treating this test as if it crashed, even though it did not.\n\n' + self.error_from_test crashed = True self._crashed_process_name = "unknown process name" self._crashed_pid = 0 if stop_when_done or crashed or timed_out or leaked: # We call stop() even if we crashed or timed out in order to get any remaining stdout/stderr output. # In the timeout case, we kill the hung process as well. 
out, err = self._server_process.stop(self._port.driver_stop_timeout() if stop_when_done else 0.0) if out: text += out if err: self.error_from_test += err self._server_process = None crash_log = None if crashed: self.error_from_test, crash_log = self._get_crash_log(text, self.error_from_test, newer_than=start_time) # If we don't find a crash log use a placeholder error message instead. if not crash_log: pid_str = str(self._crashed_pid) if self._crashed_pid else "unknown pid" crash_log = 'No crash log found for %s:%s.\n' % (self._crashed_process_name, pid_str) # If we were unresponsive append a message informing there may not have been a crash. if self._subprocess_was_unresponsive: crash_log += 'Process failed to become responsive before timing out.\n' # Print stdout and stderr to the placeholder crash log; we want as much context as possible. if self.error_from_test: crash_log += '\nstdout:\n%s\nstderr:\n%s\n' % (text, self.error_from_test) return DriverOutput(text, image, actual_image_hash, audio, crash=crashed, test_time=time.time() - test_begin_time, measurements=self._measurements, timeout=timed_out, error=self.error_from_test, crashed_process_name=self._crashed_process_name, crashed_pid=self._crashed_pid, crash_log=crash_log, leak=leaked, leak_log=self._leak_log, pid=pid) def _get_crash_log(self, stdout, stderr, newer_than): return self._port._get_crash_log(self._crashed_process_name, self._crashed_pid, stdout, stderr, newer_than) # FIXME: Seems this could just be inlined into callers. @classmethod def _command_wrapper(cls, wrapper_option): # Hook for injecting valgrind or other runtime instrumentation, # used by e.g. tools/valgrind/valgrind_tests.py. return shlex.split(wrapper_option) if wrapper_option else [] HTTP_DIR = "http/tests/" HTTP_LOCAL_DIR = "http/tests/local/" def is_http_test(self, test_name): return test_name.startswith(self.HTTP_DIR) and not test_name.startswith(self.HTTP_LOCAL_DIR) def test_to_uri(self, test_name): """Convert a test name to a URI. Tests which have an 'https' directory in their paths (e.g. '/http/tests/security/mixedContent/https/test1.html') or '.https.' in their name (e.g. 'http/tests/security/mixedContent/test1.https.html') will be loaded over HTTPS; all other tests over HTTP. """ if not self.is_http_test(test_name): return path.abspath_to_uri(self._port.host.platform, self._port.abspath_for_test(test_name)) relative_path = test_name[len(self.HTTP_DIR):] if "/https/" in test_name or ".https." in test_name: return "https://127.0.0.1:8443/" + relative_path return "http://127.0.0.1:8000/" + relative_path def uri_to_test(self, uri): """Return the base layout test name for a given URI. This returns the test name for a given URI, e.g., if you passed in "file:///src/LayoutTests/fast/html/keygen.html" it would return "fast/html/keygen.html". 
""" if uri.startswith("file:///"): prefix = path.abspath_to_uri(self._port.host.platform, self._port.layout_tests_dir()) if not prefix.endswith('/'): prefix += '/' return uri[len(prefix):] if uri.startswith("http://"): return uri.replace('http://127.0.0.1:8000/', self.HTTP_DIR) if uri.startswith("https://"): return uri.replace('https://127.0.0.1:8443/', self.HTTP_DIR) raise NotImplementedError('unknown url type: %s' % uri) def has_crashed(self): if self._server_process is None: return False if self._crashed_process_name: return True if self._server_process.has_crashed(): self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() return True return False def start(self, pixel_tests, per_test_args, deadline): new_cmd_line = self.cmd_line(pixel_tests, per_test_args) if not self._server_process or new_cmd_line != self._current_cmd_line: self._start(pixel_tests, per_test_args) self._run_post_start_tasks() def _setup_environ_for_driver(self, environment): if self._profiler: environment = self._profiler.adjusted_environment(environment) return environment def _start(self, pixel_tests, per_test_args, wait_for_ready=True): self.stop() self._driver_tempdir = self._port._filesystem.mkdtemp(prefix='%s-' % self._port.driver_name()) server_name = self._port.driver_name() environment = self._port.setup_environ_for_server(server_name) environment = self._setup_environ_for_driver(environment) self._crashed_process_name = None self._crashed_pid = None self._leaked = False self._leak_log = None cmd_line = self.cmd_line(pixel_tests, per_test_args) self._server_process = self._port._server_process_constructor(self._port, server_name, cmd_line, environment, logging=self._port.get_option("driver_logging")) self._server_process.start() self._current_cmd_line = cmd_line if wait_for_ready: deadline = time.time() + DRIVER_START_TIMEOUT_SECS if not self._wait_for_server_process_output(self._server_process, deadline, '#READY'): _log.error("content_shell took too long to startup.") def _wait_for_server_process_output(self, server_process, deadline, text): output = '' line = server_process.read_stdout_line(deadline) while not server_process.timed_out and not server_process.has_crashed() and not text in line.rstrip(): output += line line = server_process.read_stdout_line(deadline) if server_process.timed_out or server_process.has_crashed(): _log.error('Failed to start the %s process: \n%s' % (server_process.name(), output)) return False return True def _run_post_start_tasks(self): # Remote drivers may override this to delay post-start tasks until the server has ack'd. if self._profiler: self._profiler.attach_to_pid(self._pid_on_target()) def _pid_on_target(self): # Remote drivers will override this method to return the pid on the device. 
return self._server_process.pid() def stop(self, timeout_secs=0.0): if self._server_process: self._server_process.stop(timeout_secs) self._server_process = None if self._profiler: self._profiler.profile_after_exit() if self._driver_tempdir: self._port._filesystem.rmtree(str(self._driver_tempdir)) self._driver_tempdir = None self._current_cmd_line = None def cmd_line(self, pixel_tests, per_test_args): cmd = self._command_wrapper(self._port.get_option('wrapper')) cmd.append(self._port._path_to_driver()) if self._no_timeout: cmd.append('--no-timeout') cmd.extend(self._port.get_option('additional_driver_flag', [])) cmd.extend(self._port.additional_driver_flag()) if self._port.get_option('enable_leak_detection'): cmd.append('--enable-leak-detection') cmd.extend(per_test_args) cmd.append('-') return cmd def _check_for_driver_crash(self, error_line): if error_line == "#CRASHED\n": # This is used on Windows to report that the process has crashed # See http://trac.webkit.org/changeset/65537. self._crashed_process_name = self._server_process.name() self._crashed_pid = self._server_process.pid() elif (error_line.startswith("#CRASHED - ") or error_line.startswith("#PROCESS UNRESPONSIVE - ")): # WebKitTestRunner uses this to report that the WebProcess subprocess crashed. match = re.match('#(?:CRASHED|PROCESS UNRESPONSIVE) - (\S+)', error_line) self._crashed_process_name = match.group(1) if match else 'WebProcess' match = re.search('pid (\d+)', error_line) pid = int(match.group(1)) if match else None self._crashed_pid = pid # FIXME: delete this after we're sure this code is working :) _log.debug('%s crash, pid = %s, error_line = %s' % (self._crashed_process_name, str(pid), error_line)) if error_line.startswith("#PROCESS UNRESPONSIVE - "): self._subprocess_was_unresponsive = True self._port.sample_process(self._crashed_process_name, self._crashed_pid) # We want to show this since it's not a regular crash and probably we don't have a crash log. self.error_from_test += error_line return True return self.has_crashed() def _check_for_leak(self, error_line): if error_line.startswith("#LEAK - "): self._leaked = True match = re.match('#LEAK - (\S+) pid (\d+) (.+)\n', error_line) self._leak_log = match.group(3) return self._leaked def _command_from_driver_input(self, driver_input): # FIXME: performance tests pass in full URLs instead of test names. if driver_input.test_name.startswith('http://') or driver_input.test_name.startswith('https://') or driver_input.test_name == ('about:blank'): command = driver_input.test_name elif self.is_http_test(driver_input.test_name): command = self.test_to_uri(driver_input.test_name) else: command = self._port.abspath_for_test(driver_input.test_name) if sys.platform == 'cygwin': command = path.cygpath(command) assert not driver_input.image_hash or driver_input.should_run_pixel_test # ' is the separator between arguments. 
if self._port.supports_per_test_timeout(): command += "'--timeout'%s" % driver_input.timeout if driver_input.should_run_pixel_test: command += "'--pixel-test" if driver_input.image_hash: command += "'" + driver_input.image_hash return command + "\n" def _read_first_block(self, deadline): # returns (text_content, audio_content) block = self._read_block(deadline) if block.malloc: self._measurements['Malloc'] = float(block.malloc)<|fim▁hole|> if block.js_heap: self._measurements['JSHeap'] = float(block.js_heap) if block.content_type == 'audio/wav': return (None, block.decoded_content) return (block.decoded_content, None) def _read_optional_image_block(self, deadline): # returns (image, actual_image_hash) block = self._read_block(deadline, wait_for_stderr_eof=True) if block.content and block.content_type == 'image/png': return (block.decoded_content, block.content_hash) return (None, block.content_hash) def _read_header(self, block, line, header_text, header_attr, header_filter=None): if line.startswith(header_text) and getattr(block, header_attr) is None: value = line.split()[1] if header_filter: value = header_filter(value) setattr(block, header_attr, value) return True return False def _process_stdout_line(self, block, line): if (self._read_header(block, line, 'Content-Type: ', 'content_type') or self._read_header(block, line, 'Content-Transfer-Encoding: ', 'encoding') or self._read_header(block, line, 'Content-Length: ', '_content_length', int) or self._read_header(block, line, 'ActualHash: ', 'content_hash') or self._read_header(block, line, 'DumpMalloc: ', 'malloc') or self._read_header(block, line, 'DumpJSHeap: ', 'js_heap') or self._read_header(block, line, 'StdinPath', 'stdin_path')): return # Note, we're not reading ExpectedHash: here, but we could. # If the line wasn't a header, we just append it to the content. block.content += line def _strip_eof(self, line): if line and line.endswith("#EOF\n"): return line[:-5], True if line and line.endswith("#EOF\r\n"): _log.error("Got a CRLF-terminated #EOF - this is a driver bug.") return line[:-6], True return line, False def _read_block(self, deadline, wait_for_stderr_eof=False): block = ContentBlock() out_seen_eof = False while not self.has_crashed(): if out_seen_eof and (self.err_seen_eof or not wait_for_stderr_eof): break if self.err_seen_eof: out_line = self._server_process.read_stdout_line(deadline) err_line = None elif out_seen_eof: out_line = None err_line = self._server_process.read_stderr_line(deadline) else: out_line, err_line = self._server_process.read_either_stdout_or_stderr_line(deadline) if self._server_process.timed_out or self.has_crashed(): break if out_line: assert not out_seen_eof out_line, out_seen_eof = self._strip_eof(out_line) if err_line: assert not self.err_seen_eof err_line, self.err_seen_eof = self._strip_eof(err_line) if out_line: if out_line[-1] != "\n": _log.error("Last character read from DRT stdout line was not a newline! This indicates either a NRWT or DRT bug.") content_length_before_header_check = block._content_length self._process_stdout_line(block, out_line) # FIXME: Unlike HTTP, DRT dumps the content right after printing a Content-Length header. # Don't wait until we're done with headers, just read the binary blob right now. if content_length_before_header_check != block._content_length: if block._content_length > 0: block.content = self._server_process.read_stdout(deadline, block._content_length) else: _log.error("Received content of type %s with Content-Length of 0! 
This indicates a bug in %s.", block.content_type, self._server_process.name()) if err_line: if self._check_for_driver_crash(err_line): break if self._check_for_leak(err_line): break self.error_from_test += err_line block.decode_content() return block class ContentBlock(object): def __init__(self): self.content_type = None self.encoding = None self.content_hash = None self._content_length = None # Content is treated as binary data even though the text output is usually UTF-8. self.content = str() # FIXME: Should be bytearray() once we require Python 2.6. self.decoded_content = None self.malloc = None self.js_heap = None self.stdin_path = None def decode_content(self): if self.encoding == 'base64' and self.content is not None: self.decoded_content = base64.b64decode(self.content) else: self.decoded_content = self.content<|fim▁end|>
<|file_name|>DateTime.js<|end_file_name|><|fim▁begin|>import DateTimeCore from './DateTimeCore'; let DateTime = { mixins : [DateTimeCore], data : function () { return {}; }, methods : { _dtParseRelativeDatetimeToObj : function (val) { return this._dtcoreParseRelativeToObj('datetime', val); }, _dtIsRelativeDatetime : function (val) { <|fim▁hole|> return this._dtcoreGetRelativeObj('datetime', relativeDate); } } }; export default DateTime;<|fim▁end|>
return this._dtcoreIsRelative('datetime', val); }, _dtGetRelativeDatetime : function (relativeDate) {
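The mixin above is a set of thin kind-specific wrappers over a generic core keyed by the 'datetime' tag. The same delegation pattern, sketched in Python with hypothetical names:

class DateTimeCore:
    # generic core: one implementation, parameterised by a kind tag
    def _core_is_relative(self, kind, value):
        return value.startswith(kind + ':relative')

class DateTime(DateTimeCore):
    # thin wrapper pinning the kind, like _dtIsRelativeDatetime above
    def is_relative_datetime(self, value):
        return self._core_is_relative('datetime', value)

print(DateTime().is_relative_datetime('datetime:relative+2d'))  # True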
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use libc::{c_char, c_int, c_uint, c_void, size_t}; pub use libgpg_error_sys::gpg_error_t as gcry_error_t; // extern { // pub type gcry_thread_cbs; // pub type gcry_context; // pub type gcry_sexp; // pub type gcry_mpi; // pub type gcry_mpi_point; // pub type gcry_cipher_handle; // pub type gcry_md_handle; // pub type gcry_mac_handle; // } #[repr(C)] pub struct gcry_thread_cbs { _priv: [u8; 0], } #[repr(C)] pub struct gcry_context { _priv: [u8; 0], } #[repr(C)] pub struct gcry_sexp { _priv: [u8; 0], } #[repr(C)] pub struct gcry_mpi { _priv: [u8; 0], } #[repr(C)] pub struct gcry_mpi_point { _priv: [u8; 0], } #[repr(C)] pub struct gcry_cipher_handle { _priv: [u8; 0], } #[repr(C)] pub struct gcry_md_handle { _priv: [u8; 0], } #[repr(C)] pub struct gcry_mac_handle { _priv: [u8; 0], }<|fim▁hole|>pub type gcry_mpi_point_t = *mut gcry_mpi_point; pub type gcry_cipher_hd_t = *mut gcry_cipher_handle; pub type gcry_md_hd_t = *mut gcry_md_handle; pub type gcry_mac_hd_t = *mut gcry_mac_handle; pub type gcry_prime_check_func_t = Option<unsafe extern "C" fn(*mut c_void, c_int, gcry_mpi_t) -> c_int>; pub type gcry_handler_progress_t = Option<unsafe extern "C" fn(*mut c_void, *const c_char, c_int, c_int, c_int)>; pub type gcry_handler_alloc_t = Option<unsafe extern "C" fn(size_t) -> *mut c_void>; pub type gcry_handler_secure_check_t = Option<unsafe extern "C" fn(*const c_void) -> c_int>; pub type gcry_handler_realloc_t = Option<unsafe extern "C" fn(*mut c_void, size_t) -> *mut c_void>; pub type gcry_handler_free_t = Option<unsafe extern "C" fn(*mut c_void)>; pub type gcry_handler_no_mem_t = Option<unsafe extern "C" fn(*mut c_void, size_t, c_uint) -> c_int>; pub type gcry_handler_error_t = Option<unsafe extern "C" fn(*mut c_void, c_int, *const c_char)>; //pub type gcry_handler_log_t = Option<unsafe extern fn(*mut c_void, c_int, *const c_char, va_list)><|fim▁end|>
pub type gcry_ctx_t = *mut gcry_context; pub type gcry_sexp_t = *mut gcry_sexp; pub type gcry_mpi_t = *mut gcry_mpi;
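The zero-sized #[repr(C)] structs above stand in for types the C library never lets callers inspect; only pointers to them cross the FFI boundary. The ctypes equivalent of that opaque-handle idea, as a sketch:

import ctypes

class gcry_context(ctypes.Structure):
    pass  # opaque: the fields stay private to the C library

# Only pointers to the opaque struct are ever passed around, mirroring
# `pub type gcry_ctx_t = *mut gcry_context;` above.
gcry_ctx_t = ctypes.POINTER(gcry_context)
assert gcry_ctx_t.__name__ == 'LP_gcry_context'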
<|file_name|>dna_r_test.cpp<|end_file_name|><|fim▁begin|>#include "dna_r.h" <|fim▁hole|> #include "dnasequence.h" #include "fileio.h" #include "testtimer.h" #include "ribi_rinside.h" #include "trace.h" #ifndef NDEBUG void ribi::DnaR::Test() noexcept { { static bool is_tested{false}; if (is_tested) return; is_tested = true; } { ribi::fileio::FileIo(); auto& r = ribi::Rinside().Get(); r.parseEval("library(ape)"); } const ribi::TestTimer test_timer(__func__,__FILE__,1.0); ribi::fileio::FileIo f; DnaR d; using ribi::DnaSequence; const bool verbose{false}; if (verbose) { TRACE("PlotSequences"); } { std::vector<DnaSequence> v; v.push_back(DnaSequence("1","ACGTA")); v.push_back(DnaSequence("2","ACGTC")); v.push_back(DnaSequence("3","ACGTG")); v.push_back(DnaSequence("4","ACGTT")); const std::string filename{f.GetTempFileName(".png")}; d.PlotSequences(v,filename); assert(f.IsRegularFile(filename)); } if (!"Speed comparison PlotSequences") { std::vector<DnaSequence> v; v.push_back(DnaSequence("1","ACGTA")); v.push_back(DnaSequence("2","ACGTC")); v.push_back(DnaSequence("3","ACGTG")); v.push_back(DnaSequence("4","ACGTT")); const std::string filename{f.GetTempFileName(".png")}; const auto t2 = std::chrono::system_clock::now(); d.PlotSequencesRinside(v,filename); const auto d2 = std::chrono::system_clock::now() - t2; const auto t1 = std::chrono::system_clock::now(); d.PlotSequencesRscript(v,filename); const auto d1 = std::chrono::system_clock::now() - t1; assert(d2 < d1 / 10); } } #endif<|fim▁end|>
#include <cassert> #include <chrono> #include "RInside.h"
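The disabled block above times the Rinside and Rscript paths with std::chrono and asserts the first is at least 10x faster. The same measurement pattern, sketched in Python with placeholder workloads:

import time

def measure(fn):
    # time one call with a monotonic high-resolution clock
    start = time.perf_counter()
    fn()
    return time.perf_counter() - start

fast = measure(lambda: sum(range(10_000)))      # stand-in for the faster path
slow = measure(lambda: sum(range(1_000_000)))   # stand-in for the slower path
assert fast < slow / 10, 'expected the first path to be at least 10x faster'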
<|file_name|>keras_metrics.py<|end_file_name|><|fim▁begin|># Copyright 2019, Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Libraries of Keras metrics.""" import tensorflow as tf def _apply_mask(y_true, sample_weight, masked_tokens, dtype): if sample_weight is None: sample_weight = tf.ones_like(y_true, dtype)<|fim▁hole|> sample_weight = tf.cast(sample_weight, dtype) for token in masked_tokens: mask = tf.cast(tf.not_equal(y_true, token), dtype) sample_weight = sample_weight * mask return sample_weight class NumTokensCounter(tf.keras.metrics.Sum): """A `tf.keras.metrics.Metric` that counts tokens seen after masking.""" def __init__(self, masked_tokens=None, name='num_tokens', dtype=tf.int64): self._masked_tokens = masked_tokens or [] super().__init__(name, dtype) def update_state(self, y_true, y_pred, sample_weight=None): sample_weight = _apply_mask(y_true, sample_weight, self._masked_tokens, self._dtype) sample_weight = tf.reshape(sample_weight, [-1]) super().update_state(sample_weight) def get_config(self): config = super().get_config() config['masked_tokens'] = tuple(self._masked_tokens) return config class MaskedCategoricalAccuracy(tf.keras.metrics.SparseCategoricalAccuracy): """An accuracy metric that masks some tokens.""" def __init__(self, masked_tokens=None, name='accuracy', dtype=None): self._masked_tokens = masked_tokens or [] super().__init__(name, dtype=dtype) def update_state(self, y_true, y_pred, sample_weight=None): sample_weight = _apply_mask(y_true, sample_weight, self._masked_tokens, self._dtype) num_classes = tf.shape(y_pred)[-1] y_true = tf.reshape(y_true, [-1]) y_pred = tf.reshape(y_pred, [-1, num_classes]) sample_weight = tf.reshape(sample_weight, [-1]) super().update_state(y_true, y_pred, sample_weight) def get_config(self): config = super().get_config() config['masked_tokens'] = tuple(self._masked_tokens) return config<|fim▁end|>
else:
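_apply_mask() above zeroes the sample weight of every masked token id before the accuracy or token count is accumulated. The same rule restated with plain NumPy so the arithmetic is easy to check:

import numpy as np

def apply_mask(y_true, sample_weight, masked_tokens):
    # default to unit weights, then zero out each masked token id
    if sample_weight is None:
        sample_weight = np.ones_like(y_true, dtype=float)
    for token in masked_tokens:
        sample_weight = sample_weight * (y_true != token)
    return sample_weight

print(apply_mask(np.array([0, 5, 7, 5]), None, masked_tokens=[5]))
# [1. 0. 1. 0.]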
<|file_name|>media_queries.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use euclid::size::Size2D; use std::borrow::ToOwned; use style::media_queries::*; use style::stylesheets::{Origin, Stylesheet, CSSRuleIteratorExt}; use style::values::specified; use url::Url; use util::geometry::Au; fn test_media_rule<F>(css: &str, callback: F) where F: Fn(&MediaQueryList, &str) { let url = Url::parse("http://localhost").unwrap(); let stylesheet = Stylesheet::from_str(css, url, Origin::Author); let mut rule_count = 0; for rule in stylesheet.rules().media() { rule_count += 1; callback(&rule.media_queries, css); } assert!(rule_count > 0); } fn media_query_test(device: &Device, css: &str, expected_rule_count: usize) { let url = Url::parse("http://localhost").unwrap(); let ss = Stylesheet::from_str(css, url, Origin::Author); let rule_count = ss.effective_rules(device).style().count(); assert!(rule_count == expected_rule_count, css.to_owned()); } #[test] fn test_mq_empty() { test_media_rule("@media { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_mq_screen() { test_media_rule("@media screen { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media only screen { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Only), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not screen { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_mq_print() { test_media_rule("@media print { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media only print { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Only), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not print { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); 
}); } #[test] fn test_mq_unknown() { test_media_rule("@media fridge { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Unknown), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media only glass { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Only), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Unknown), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not wood { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Unknown), css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_mq_all() { test_media_rule("@media all { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media only all { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Only), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not all { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_mq_or() { test_media_rule("@media screen, print { }", |list, css| { assert!(list.media_queries.len() == 2, css.to_owned()); let q0 = &list.media_queries[0]; assert!(q0.qualifier == None, css.to_owned()); assert!(q0.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q0.expressions.len() == 0, css.to_owned()); let q1 = &list.media_queries[1]; assert!(q1.qualifier == None, css.to_owned()); assert!(q1.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q1.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_mq_default_expressions() { test_media_rule("@media (min-width: 100px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 1, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Min(w)) => assert!(w == specified::Length::Absolute(Au::from_px(100))), _ => panic!("wrong expression type"), } }); test_media_rule("@media (max-width: 43px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 1, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Max(w)) => assert!(w == 
specified::Length::Absolute(Au::from_px(43))), _ => panic!("wrong expression type"), } }); } #[test] fn test_mq_expressions() { test_media_rule("@media screen and (min-width: 100px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q.expressions.len() == 1, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Min(w)) => assert!(w == specified::Length::Absolute(Au::from_px(100))), _ => panic!("wrong expression type"), } }); test_media_rule("@media print and (max-width: 43px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q.expressions.len() == 1, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Max(w)) => assert!(w == specified::Length::Absolute(Au::from_px(43))), _ => panic!("wrong expression type"), } }); test_media_rule("@media fridge and (max-width: 52px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Unknown), css.to_owned()); assert!(q.expressions.len() == 1, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Max(w)) => assert!(w == specified::Length::Absolute(Au::from_px(52))), _ => panic!("wrong expression type"), } }); } #[test] fn test_mq_multiple_expressions() { test_media_rule("@media (min-width: 100px) and (max-width: 200px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == None, css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 2, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Min(w)) => assert!(w == specified::Length::Absolute(Au::from_px(100))), _ => panic!("wrong expression type"), } match q.expressions[1] { Expression::Width(Range::Max(w)) => assert!(w == specified::Length::Absolute(Au::from_px(200))), _ => panic!("wrong expression type"), } }); test_media_rule("@media not screen and (min-width: 100px) and (max-width: 200px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q.expressions.len() == 2, css.to_owned()); match q.expressions[0] { Expression::Width(Range::Min(w)) => assert!(w == specified::Length::Absolute(Au::from_px(100))), _ => panic!("wrong expression type"), } match q.expressions[1] { Expression::Width(Range::Max(w)) => assert!(w == specified::Length::Absolute(Au::from_px(200))), _ => panic!("wrong expression type"), } }); } #[test] fn test_mq_malformed_expressions() { test_media_rule("@media (min-width: 100blah) and (max-width: 200px) { }", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media screen and (height: 200px) { }", |list, css| 
{ assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media (min-width: 30em foo bar) {}", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not {}", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media not (min-width: 300px) {}", |list, css| { assert!(list.media_queries.len() == 1, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media , {}", |list, css| { assert!(list.media_queries.len() == 2, css.to_owned()); let q = &list.media_queries[0]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); let q = &list.media_queries[1]; assert!(q.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q.media_type == MediaQueryType::All, css.to_owned()); assert!(q.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media screen 4px, print {}", |list, css| { assert!(list.media_queries.len() == 2, css.to_owned()); let q0 = &list.media_queries[0]; assert!(q0.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q0.media_type == MediaQueryType::All, css.to_owned()); assert!(q0.expressions.len() == 0, css.to_owned()); let q1 = &list.media_queries[1]; assert!(q1.qualifier == None, css.to_owned()); assert!(q1.media_type == MediaQueryType::MediaType(MediaType::Print), css.to_owned()); assert!(q1.expressions.len() == 0, css.to_owned()); }); test_media_rule("@media screen, {}", |list, css| { assert!(list.media_queries.len() == 2, css.to_owned()); let q0 = &list.media_queries[0];<|fim▁hole|> assert!(q0.media_type == MediaQueryType::MediaType(MediaType::Screen), css.to_owned()); assert!(q0.expressions.len() == 0, css.to_owned()); let q1 = &list.media_queries[1]; assert!(q1.qualifier == Some(Qualifier::Not), css.to_owned()); assert!(q1.media_type == MediaQueryType::All, css.to_owned()); assert!(q1.expressions.len() == 0, css.to_owned()); }); } #[test] fn test_matching_simple() { let device = Device { media_type: MediaType::Screen, viewport_size: Size2D::typed(200.0, 100.0), }; media_query_test(&device, "@media not all { a { color: red; } }", 0); media_query_test(&device, "@media not screen { a { color: red; } }", 0); media_query_test(&device, "@media not print { a { color: red; } }", 1); media_query_test(&device, "@media unknown { a { color: red; } }", 0); media_query_test(&device, "@media not unknown { a { color: red; } }", 1); media_query_test(&device, "@media { a { color: red; } }", 1); media_query_test(&device, "@media screen { a { color: red; } }", 1); media_query_test(&device, "@media print { a { color: red; } }", 0); } #[test] fn 
test_matching_width() { let device = Device { media_type: MediaType::Screen, viewport_size: Size2D::typed(200.0, 100.0), }; media_query_test(&device, "@media { a { color: red; } }", 1); media_query_test(&device, "@media (min-width: 50px) { a { color: red; } }", 1); media_query_test(&device, "@media (min-width: 150px) { a { color: red; } }", 1); media_query_test(&device, "@media (min-width: 300px) { a { color: red; } }", 0); media_query_test(&device, "@media screen and (min-width: 50px) { a { color: red; } }", 1); media_query_test(&device, "@media screen and (min-width: 150px) { a { color: red; } }", 1); media_query_test(&device, "@media screen and (min-width: 300px) { a { color: red; } }", 0); media_query_test(&device, "@media not screen and (min-width: 50px) { a { color: red; } }", 0); media_query_test(&device, "@media not screen and (min-width: 150px) { a { color: red; } }", 0); media_query_test(&device, "@media not screen and (min-width: 300px) { a { color: red; } }", 1); media_query_test(&device, "@media (max-width: 50px) { a { color: red; } }", 0); media_query_test(&device, "@media (max-width: 150px) { a { color: red; } }", 0); media_query_test(&device, "@media (max-width: 300px) { a { color: red; } }", 1); media_query_test(&device, "@media screen and (min-width: 50px) and (max-width: 100px) { a { color: red; } }", 0); media_query_test(&device, "@media screen and (min-width: 250px) and (max-width: 300px) { a { color: red; } }", 0); media_query_test(&device, "@media screen and (min-width: 50px) and (max-width: 250px) { a { color: red; } }", 1); media_query_test( &device, "@media not screen and (min-width: 50px) and (max-width: 100px) { a { color: red; } }", 1); media_query_test( &device, "@media not screen and (min-width: 250px) and (max-width: 300px) { a { color: red; } }", 1); media_query_test( &device, "@media not screen and (min-width: 50px) and (max-width: 250px) { a { color: red; } }", 0); media_query_test( &device, "@media not screen and (min-width: 3.1em) and (max-width: 6em) { a { color: red; } }", 1); media_query_test( &device, "@media not screen and (min-width: 16em) and (max-width: 19.75em) { a { color: red; } }", 1); media_query_test( &device, "@media not screen and (min-width: 3em) and (max-width: 250px) { a { color: red; } }", 0); } #[test] fn test_matching_invalid() { let device = Device { media_type: MediaType::Screen, viewport_size: Size2D::typed(200.0, 100.0), }; media_query_test(&device, "@media fridge { a { color: red; } }", 0); media_query_test(&device, "@media screen and (height: 100px) { a { color: red; } }", 0); media_query_test(&device, "@media not print and (width: 100) { a { color: red; } }", 0); }<|fim▁end|>
assert!(q0.qualifier == None, css.to_owned());
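Each row in this dump pairs a prompt, in which a single <|fim▁hole|> marker sits between the <|fim▁begin|> and <|fim▁end|> sentinels, with the completion that fills the hole (above, the missing qualifier assertion). A minimal sketch of splicing a row back into complete source; the helper name and argument shapes are illustrative assumptions, not part of the dataset:

def reassemble(prompt, completion):
    # Drop the file-name header and the begin/end sentinels around the body.
    body = prompt.split("<|fim▁begin|>", 1)[1].rsplit("<|fim▁end|>", 1)[0]
    # The body is prefix + hole + suffix; the completion replaces the hole.
    prefix, suffix = body.split("<|fim▁hole|>", 1)
    return prefix + completion + suffix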
<|file_name|>example_pNH2D.py<|end_file_name|><|fim▁begin|>import pyspeckit import os from pyspeckit.spectrum.models import nh2d import numpy as np import astropy.units as u if not os.path.exists('p-nh2d_spec.fits'): import astropy.utils.data as aud from astropy.io import fits f = aud.download_file('https://github.com/pyspeckit/pyspeckit-example-files/raw/master/p-nh2d_spec.fits') with fits.open(f) as ff: ff.writeto('p-nh2d_spec.fits') # Load the spectrum spec = pyspeckit.Spectrum('p-nh2d_spec.fits') # Determine rms from line-free section and load it into the spectrum's error array rms = np.std(spec.data[10:340])<|fim▁hole|>spec.xarr.convert_to_unit('km/s') # define useful shortcuts for True and False F=False T=True # Setup of matplotlib import matplotlib.pyplot as plt plt.ion() # Add NH2D fitter spec.Registry.add_fitter('nh2d_vtau', pyspeckit.models.nh2d.nh2d_vtau_fitter,4) # run spectral fit using some reasonable guesses spec.specfit(fittype='nh2d_vtau', guesses=[5.52, 2.15, 0.166, 0.09067], verbose_level=4, signal_cut=1.5, limitedmax=[F,T,T,T], limitedmin=[T,T,T,T], minpars=[0, 0, -1, 0.05], maxpars=[30.,50.,1,0.5], fixed=[F,F,F,F]) # plot best fit spec.plotter(errstyle='fill') spec.specfit.plot_fit() # save figure plt.savefig('example_p-NH2D.png')<|fim▁end|>
spec.error[:] = rms # setup spectral axis spec.xarr.refX = 110.153594*u.GHz spec.xarr.velocity_convention = 'radio'
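The completion above copies one scalar noise estimate into every channel of the error array. The same pattern in plain numpy, sketched with stand-in data since the FITS file is not available here:

import numpy as np

data = np.random.default_rng(0).normal(size=400)  # stand-in for spec.data
rms = np.std(data[10:340])       # rms over a line-free window, as in the row above
error = np.full_like(data, rms)  # per-channel error, the effect of spec.error[:] = rms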
<|file_name|>attributes.ts<|end_file_name|><|fim▁begin|>import {VNode} from '../VNode'; const booleanAttrs = [ 'allowfullscreen', 'async', 'autofocus', 'autoplay', 'checked', 'compact', 'controls', 'declare', 'default', 'defaultchecked', 'defaultmuted', 'defaultselected', 'defer', 'disabled', 'draggable', 'enabled', 'formnovalidate', 'hidden', 'indeterminate', 'inert', 'ismap', 'itemscope', 'loop', 'multiple', 'muted', 'nohref', 'noresize', 'noshade', 'novalidate', 'nowrap', 'open', 'pauseonexit', 'readonly', 'required', 'reversed', 'scoped', 'seamless', 'selected', 'sortable', 'spellcheck', 'translate', 'truespeed', 'typemustmatch', 'visible' ]; const booleanAttrsDict = {}; for (let i = 0, len = booleanAttrs.length; i < len; i++) {<|fim▁hole|>}; function updateAttrs(oldVnode: VNode, vnode: VNode) { let key: any; let cur: any; let old: any; let elm = vnode.elm; let oldAttrs = oldVnode.data.attrs || {}; let attrs = vnode.data.attrs || {}; // update modified attributes, add new attributes for (key in attrs) { cur = attrs[key]; old = oldAttrs[key]; if (old !== cur) { // TODO: add support to namespaced attributes (setAttributeNS) if (!cur && booleanAttrsDict[key]) { (<HTMLElement> elm).removeAttribute(key); } else { (<HTMLElement> elm).setAttribute(key, cur); } } } //remove removed attributes for (key in oldAttrs) { if (!(key in attrs)) { (<HTMLElement> elm).removeAttribute(key); } } } const AttrsModule = { update: updateAttrs, create: updateAttrs, }; export default AttrsModule;<|fim▁end|>
booleanAttrsDict[booleanAttrs[i]] = true;
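The hole in attributes.ts builds a constant-time membership table from the list of boolean attributes. The same lookup-table idiom, sketched in Python with an abbreviated list:

boolean_attrs = ['checked', 'disabled', 'hidden', 'selected']  # abbreviated
boolean_attrs_dict = {attr: True for attr in boolean_attrs}    # mirrors booleanAttrsDict
assert boolean_attrs_dict.get('checked') is True  # boolean attribute: removed when falsy
assert boolean_attrs_dict.get('src') is None      # ordinary attribute: always setAttribute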
<|file_name|>Confirm.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; import { renderLog } from '../../helpers/logger'; import { Dialog, Bg, Content } from './StyledDialog'; interface IConfirm { yes: () => {}; no: () => {};<|fim▁hole|>const Confirm: any = ({msg, yes, no}: IConfirm) => { renderLog('DIALOG'); return ( <Dialog className='confirm'> <Content className='dialog-content'> <p>{ msg }</p> <footer> <button className='btn' onClick={ yes }>Yes</button> { ' ' } <button className='btn' onClick={ no }>No</button> </footer> </Content> <Bg className='dialog-background'></Bg> </Dialog> ); }; export default Confirm;<|fim▁end|>
msg: string; }
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Tests for parsing and serialization of values/properties use cssparser::{Parser, ParserInput}; use style::context::QuirksMode; use style::parser::ParserContext; use style::stylesheets::{CssRuleType, Origin}; use style_traits::{ParsingMode, ParseError}; fn parse<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>> where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> { let mut input = ParserInput::new(s); parse_input(f, &mut input) } fn parse_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>> where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> { let url = ::servo_url::ServoUrl::parse("http://localhost").unwrap(); let context = ParserContext::new( Origin::Author, &url, Some(CssRuleType::Style), ParsingMode::DEFAULT, QuirksMode::NoQuirks, None, None, ); let mut parser = Parser::new(input);<|fim▁hole|> fn parse_entirely<T, F>(f: F, s: &'static str) -> Result<T, ParseError<'static>> where F: for<'t> Fn(&ParserContext, &mut Parser<'static, 't>) -> Result<T, ParseError<'static>> { let mut input = ParserInput::new(s); parse_entirely_input(f, &mut input) } fn parse_entirely_input<'i: 't, 't, T, F>(f: F, input: &'t mut ParserInput<'i>) -> Result<T, ParseError<'i>> where F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<T, ParseError<'i>> { parse_input(|context, parser| parser.parse_entirely(|p| f(context, p)), input) } // This is a macro so that the file/line information // is preserved in the panic macro_rules! assert_roundtrip_with_context { ($fun:expr, $string:expr) => { assert_roundtrip_with_context!($fun, $string, $string); }; ($fun:expr, $input:expr, $output:expr) => {{ let mut input = ::cssparser::ParserInput::new($input); let serialized = super::parse_input(|context, i| { let parsed = $fun(context, i) .expect(&format!("Failed to parse {}", $input)); let serialized = ToCss::to_css_string(&parsed); assert_eq!(serialized, $output); Ok(serialized) }, &mut input).unwrap(); let mut input = ::cssparser::ParserInput::new(&serialized); let unwrapped = super::parse_input(|context, i| { let re_parsed = $fun(context, i) .expect(&format!("Failed to parse serialization {}", $input)); let re_serialized = ToCss::to_css_string(&re_parsed); assert_eq!(serialized, re_serialized); Ok(()) }, &mut input).unwrap(); unwrapped }} } macro_rules! assert_roundtrip { ($fun:expr, $string:expr) => { assert_roundtrip!($fun, $string, $string); }; ($fun:expr, $input:expr, $output:expr) => { let mut input = ParserInput::new($input); let mut parser = Parser::new(&mut input); let parsed = $fun(&mut parser) .expect(&format!("Failed to parse {}", $input)); let serialized = ToCss::to_css_string(&parsed); assert_eq!(serialized, $output); let mut input = ParserInput::new(&serialized); let mut parser = Parser::new(&mut input); let re_parsed = $fun(&mut parser) .expect(&format!("Failed to parse serialization {}", $input)); let re_serialized = ToCss::to_css_string(&re_parsed); assert_eq!(serialized, re_serialized) } } macro_rules! 
assert_parser_exhausted { ($fun:expr, $string:expr, $should_exhausted:expr) => {{ parse(|context, input| { let parsed = $fun(context, input); assert_eq!(parsed.is_ok(), true); assert_eq!(input.is_exhausted(), $should_exhausted); Ok(()) }, $string).unwrap() }} } macro_rules! parse_longhand { ($name:ident, $s:expr) => { parse($name::parse, $s).unwrap() }; } mod animation; mod background; mod border; mod box_; mod column; mod effects; mod image; mod inherited_text; mod outline; mod position; mod selectors; mod supports; mod text_overflow; mod transition_duration; mod transition_timing_function;<|fim▁end|>
f(&context, &mut parser) }
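The assert_roundtrip macros above encode a parse/serialize fixpoint: a parsed value is serialized, the serialization is parsed again, and the two serializations must match. The invariant reduced to a sketch, with parse and serialize as assumed stand-ins for the macro arguments:

def assert_roundtrip(parse, serialize, source):
    first = serialize(parse(source))         # serialize the parsed value
    second = serialize(parse(first))         # re-parse that serialization
    assert first == second, (first, second)  # serializing must be stable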
<|file_name|>detectorSpec.js<|end_file_name|><|fim▁begin|>var expect = require('expect.js'); var EventEmitter = require('events').EventEmitter; var fixtures = require('../fixtures'); var Detector = require('../../lib/detector.js'); describe('Detector', function() { // Used to test emitted events var found; var listener = function(magicNumber) { found.push(magicNumber); }; beforeEach(function() { found = []; }); describe('constructor', function() { it('inherits from EventEmitter', function() { expect(new Detector()).to.be.an(EventEmitter); }); it('accepts an array of file paths', function() { var filePaths = ['path1.js', 'path2.js']; var detector = new Detector(filePaths); expect(detector._filePaths).to.be(filePaths); }); it('accepts a boolean to enforce the use of const', function() { var detector = new Detector([], { enforceConst: true }); expect(detector._enforceConst).to.be(true); }); it('accepts an array of numbers to ignore', function() { var ignore = [1, 2, 3.4]; var detector = new Detector([], { ignore: ignore }); expect(detector._ignore).to.be(ignore); }); }); describe('run', function() { it('is compatible with callbacks', function(done) { var detector = new Detector([fixtures.emptyFile]); detector.run(function(err) { done(err); }); }); it('is compatible with promises', function(done) { var detector = new Detector([fixtures.emptyFile]);<|fim▁hole|> }); it('returns an Error if not given an array of file paths', function(done) { var detector = new Detector(); detector.run().catch(function(err) { expect(err).to.be.an(Error); expect(err.message).to.be('filePaths must be a non-empty array of paths'); done(); }); }); }); it('emits end on completion, passing the number of files parsed', function(done) { var detector = new Detector([fixtures.emptyFile, fixtures.singleVariable]); detector.on('end', function(numFiles) { expect(numFiles).to.be(2); done(); }); detector.run().catch(done); }); it('emits no events when parsing an empty file', function(done) { var detector = new Detector([fixtures.emptyFile]); detector.on('found', listener); detector.run().then(function() { expect(found).to.be.empty(); done(); }).catch(done); }); it('emits no events when the file contains only named constants', function(done) { var detector = new Detector([fixtures.singleVariable]); detector.on('found', listener); detector.run().then(function() { expect(found).to.be.empty(); done(); }).catch(done); }); it('emits no events for literals assigned to object properties', function(done) { var detector = new Detector([fixtures.objectProperties]); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(2); expect(found[0].value).to.be('4'); expect(found[1].value).to.be('5'); done(); }).catch(done); }); it('emits no events for literals used in AssignmentExpressions', function(done) { var detector = new Detector([fixtures.assignmentExpressions]); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(0); done(); }).catch(done); }); it('emits no events for numbers marked by ignore:line', function(done) { var detector = new Detector([fixtures.lineIgnore]); detector.on('found', listener); detector.run().then(function() { expect(found).to.be.empty(); done(); }).catch(done); }); it('emits no events between ignore:start / ignore:end', function(done) { var detector = new Detector([fixtures.blockIgnore]); detector.on('found', listener); detector.run().then(function() { expect(found).to.be.empty(); done(); }).catch(done); }); it('emits a "found" event 
containing a magic number, when found', function(done) { var detector = new Detector([fixtures.secondsInMinute]); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].value).to.be('60'); expect(found[0].file.substr(-18)).to.be('secondsInMinute.js'); expect(found[0].startColumn).to.be(9); expect(found[0].endColumn).to.be(11); expect(found[0].fileLength).to.be(4); expect(found[0].lineNumber).to.be(2); expect(found[0].lineSource).to.be(' return 60;'); expect(found[0].contextLines).to.eql([ 'function getSecondsInMinute() {', ' return 60;', '}' ]); expect(found[0].contextIndex).to.eql(1); done(); }).catch(done); }); it('correctly emits hex and octal values', function(done) { var detector = new Detector([fixtures.hexOctal]); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(3); expect(found[0].value).to.be('0x1A'); expect(found[1].value).to.be('0x02'); expect(found[2].value).to.be('071'); done(); }).catch(done); }); it('skips unnamed constants within the ignore list', function(done) { var detector = new Detector([fixtures.ignore], { ignore: [0] }); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].value).to.be('1'); done(); }).catch(done); }); it('ignores the shebang at the start of a file', function(done) { var detector = new Detector([fixtures.shebang]); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].lineNumber).to.be(4); expect(found[0].value).to.be('100'); done(); }).catch(done); }); describe('with detectObjects set to true', function() { it('emits a "found" event for object literals', function(done) { var detector = new Detector([fixtures.objectLiterals], { detectObjects: true }); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].value).to.be('42'); done(); }).catch(done); }); it('emits a "found" event for property assignments', function(done) { var detector = new Detector([fixtures.objectProperties], { detectObjects: true }); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(4); expect(found[0].value).to.be('2'); expect(found[1].value).to.be('3'); expect(found[2].value).to.be('4'); expect(found[3].value).to.be('5'); done(); }).catch(done); }); }); describe('with enforceConst set to true', function() { it('emits a "found" event for variable declarations', function(done) { var detector = new Detector([fixtures.constVariable], { enforceConst: true }); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].value).to.be('10'); done(); }).catch(done); }); it('emits a "found" event for object expressions', function(done) { var detector = new Detector([fixtures.constObject], { enforceConst: true }); detector.on('found', listener); detector.run().then(function() { expect(found).to.have.length(1); expect(found[0].value).to.be('10'); done(); }).catch(done); }); }); });<|fim▁end|>
detector.run().then(function() { done(); }).catch(done);
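The spec above exercises a detector that emits a 'found' event per numeric literal, with positions and context attached. A rough analogue of the core scan using Python's tokenizer; the whitelist, const enforcement, and object-property rules of the real Detector are deliberately omitted:

import io
import tokenize

def magic_numbers(source):
    # Yield (line number, literal) for every numeric token in the source.
    tokens = tokenize.generate_tokens(io.StringIO(source).readline)
    return [(tok.start[0], tok.string) for tok in tokens
            if tok.type == tokenize.NUMBER]

# magic_numbers("return 60\n") -> [(1, '60')]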
<|file_name|>addUser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # # # # This file is part of librix-thinclient. # # librix-thinclient is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # librix-thinclient is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with librix-thinclient. If not, see <http://www.gnu.org/licenses/>. import os from PyQt4 import QtGui <|fim▁hole|> """This class provides an add-user dialog feature for the users page of LTMT""" def __init__(self, configparser, parent=None): """Init method @param self An AddUser instance @param parent Parent QtGui.QWidget object """ self.configparser = configparser self.parent = parent QtGui.QDialog.__init__(self) self.ui = Ui_AddUser() self.ui.setupUi(self) self.parseDefaults() self.ui.detailsWid.hide() def parseDefaults(self): """Parse some default values for new user accounts @param self An AddUser instance """ with open("/etc/default/useradd", 'r') as ua: for l in ua: L = l.strip().split('=') if len(L) >= 2: if L[0] == "GROUP": self.group = L[1] elif L[0] == "HOME": self.home = L[1] elif L[0] == "SHELL": self.shell = L[1] def userChanged(self, username): """Slot called when the user name is changed; updates the entries @param self An AddUser instance @param username String username """ self.ui.initGLine.setText(self.group) self.ui.homeLine.setText(os.path.join(self.home, username)) self.ui.shellLine.setText(self.shell) def accept(self): """Reimplemented QtGui.QDialog.accept method. Adds the user to configparser before accepting the dialog. @param self An AddUser instance """ user = self.ui.nameLine.text() print("__accepted__", user) if user in self.configparser.getUsersList(): if QtGui.QMessageBox.warning(self, self.tr("Replace User"), self.tr("Are you sure you want to overwrite \"{0}\" user?")\ .format(user), QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No) == QtGui.QMessageBox.Yes: self.configparser.delUser(user) else: return self.configparser.addUser(user) if self.ui.syncCheck.isChecked(): self.configparser.setUserSync(user, passwd=self.ui.pwLine.text(), uid=self.ui.uidSpin.text(), init_group=self.ui.initGLine.text(), groups=[g.strip() for g in self.ui.groupsLine.text().split(',')], home=self.ui.homeLine.text(), shell=self.ui.shellLine.text()) QtGui.QDialog.accept(self)<|fim▁end|>
from ltmt.ui.users.add_user.Ui_addUser import Ui_AddUser class AddUser(QtGui.QDialog):
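parseDefaults above scans /etc/default/useradd for KEY=VALUE lines; the same minimal parse, sketched standalone with an inline sample instead of the real file:

sample = "GROUP=100\nHOME=/home\nSHELL=/bin/sh\n"
defaults = {}
for line in sample.splitlines():
    parts = line.strip().split('=')
    if len(parts) >= 2:  # same guard as in parseDefaults
        defaults[parts[0]] = parts[1]
assert defaults['SHELL'] == '/bin/sh'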
<|file_name|>create-multi-path-topology.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright 2017 Telstra Open Source # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from clean_topology import cleanup from create_topology import create_topo<|fim▁hole|>print "\n -- " cleanup() create_topo('multi-path-topology.json') print "\n -- "<|fim▁end|>
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/** * Copyright 2017 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @fileoverview Creates an http server to handle static * files and list directories for use with the gulp live server */ var BBPromise = require('bluebird'); var app = require('express')(); var bacon = require('baconipsum'); var bodyParser = require('body-parser'); var fs = BBPromise.promisifyAll(require('fs')); var formidable = require('formidable'); var jsdom = require('jsdom'); var path = require('path'); var request = require('request'); var url = require('url'); app.use(bodyParser.json()); app.use('/request-bank', require('./request-bank')); // Append ?csp=1 to the URL to turn on the CSP header. // TODO: shall we turn on CSP all the time? app.use(function(req, res, next) { if (req.query.csp) { res.set({ 'content-security-policy': "default-src * blob: data:; script-src https://cdn.ampproject.org/rtv/ https://cdn.ampproject.org/v0.js https://cdn.ampproject.org/v0/ https://cdn.ampproject.org/viewer/ http://localhost:8000 https://localhost:8000; object-src 'none'; style-src 'unsafe-inline' https://cloud.typography.com https://fast.fonts.net https://fonts.googleapis.com https://maxcdn.bootstrapcdn.com; report-uri https://csp-collector.appspot.com/csp/amp", }); } next(); }); app.use('/pwa', function(req, res, next) { var file; var contentType; if (!req.url || req.url == '/') { // pwa.html contentType = 'text/html'; file = '/examples/pwa/pwa.html'; } else if (req.url == '/pwa.js') { // pwa.js contentType = 'application/javascript'; file = '/examples/pwa/pwa.js'; } else if (req.url == '/pwa-sw.js') { // pwa.js contentType = 'application/javascript'; file = '/examples/pwa/pwa-sw.js'; } else { // Redirect to the underlying resource. // TODO(dvoytenko): would be nicer to do forward instead of redirect. res.writeHead(302, {'Location': req.url}); res.end(); return; } res.statusCode = 200; res.setHeader('Content-Type', contentType); fs.readFileAsync(process.cwd() + file).then((file) => { res.end(file); }); }); app.use('/api/show', function(req, res) { res.json({ showNotification: true }); }); app.use('/api/dont-show', function(req, res) { res.json({ showNotification: false }); }); app.use('/api/echo/post', function(req, res) { res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify(req.body, null, 2)); }); /** * In practice this would be *.ampproject.org and the publishers * origin. Please see AMP CORS docs for more details: * https://goo.gl/F6uCAY * @type {RegExp} */ const ORIGIN_REGEX = new RegExp('^http://localhost:8000|' + '^https?://.+\.herokuapp\.com'); /** * In practice this would be the publishers origin. 
* Please see AMP CORS docs for more details: * https://goo.gl/F6uCAY * @type {RegExp} */ const SOURCE_ORIGIN_REGEX = new RegExp('^http://localhost:8000|' + '^https?://.+\.herokuapp\.com'); app.use('/form/html/post', function(req, res) { assertCors(req, res, ['POST']); var form = new formidable.IncomingForm(); form.parse(req, function(err, fields) { res.setHeader('Content-Type', 'text/html'); if (fields['email'] == '[email protected]') { res.statusCode = 500; res.end(` <h1 style="color:red;">Sorry ${fields['name']}!</h1> <p>The email ${fields['email']} is already subscribed!</p> `); } else { res.end(` <h1>Thanks ${fields['name']}!</h1> <p>Please make sure to confirm your email ${fields['email']}</p> `); } }); }); app.use('/form/redirect-to/post', function(req, res) { assertCors(req, res, ['POST'], ['AMP-Redirect-To']); res.setHeader('AMP-Redirect-To', 'https://google.com'); res.end('{}'); }); app.use('/form/echo-json/post', function(req, res) { assertCors(req, res, ['POST']); var form = new formidable.IncomingForm(); form.parse(req, function(err, fields) { res.setHeader('Content-Type', 'application/json; charset=utf-8'); if (fields['email'] == '[email protected]') { res.statusCode = 500; } res.end(JSON.stringify(fields)); }); }); app.use('/form/json/poll1', function(req, res) { assertCors(req, res, ['POST']); var form = new formidable.IncomingForm(); form.parse(req, function(err, fields) { res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify({ result: [{ answer: 'Penguins', percentage: new Array(77), }, { answer: 'Ostriches', percentage: new Array(8), }, { answer: 'Kiwis', percentage: new Array(14), }, { answer: 'Wekas', percentage: new Array(1), },] })); }); }); app.use('/form/search-html/get', function(req, res) { res.setHeader('Content-Type', 'text/html'); res.end(` <h1>Here's results for your search<h1> <ul> <li>Result 1</li> <li>Result 2</li> <li>Result 3</li> </ul> `); }); app.use('/form/search-json/get', function(req, res) { assertCors(req, res, ['GET']); res.json({ results: [{title: 'Result 1'}, {title: 'Result 2'}, {title: 'Result 3'}] }); }); app.use('/share-tracking/get-outgoing-fragment', function(req, res) { res.setHeader('AMP-Access-Control-Allow-Source-Origin', req.protocol + '://' + req.headers.host); res.json({ fragment: '54321' }); }); // Fetches an AMP document from the AMP proxy and replaces JS // URLs, so that they point to localhost. function proxyToAmpProxy(req, res, minify) { var url = 'https://cdn.ampproject.org/' + (req.query['amp_js_v'] ? 'v' : 'c') + req.url; console.log('Fetching URL: ' + url); request(url, function(error, response, body) { body = body // Unversion URLs. .replace(/https\:\/\/cdn\.ampproject\.org\/rtv\/\d+\//g, 'https://cdn.ampproject.org/') // <base> href pointing to the proxy, so that images, etc. still work. .replace('<head>', '<head><base href="https://cdn.ampproject.org/">'); const inabox = req.query['inabox'] == '1'; const urlPrefix = getUrlPrefix(req); body = replaceUrls(minify ? 'min' : 'max', body, urlPrefix, inabox); if (inabox) { // Allow CORS requests for A4A. 
const origin = req.headers.origin || urlPrefix; enableCors(req, res, origin); } res.status(response.statusCode).send(body); }); } var liveListUpdateFile = '/examples/live-list-update.amp.html'; var liveListCtr = 0; var itemCtr = 2; var liveListDoc = null; var doctype = '<!doctype html>\n'; // Only handle min/max app.use('/examples/live-list-update.amp.(min|max).html', function(req, res) { var filePath = req.baseUrl; var mode = getPathMode(filePath); // When we already have state in memory and user refreshes page, we flush // the dom we maintain on the server. if (!('amp_latest_update_time' in req.query) && liveListDoc) { var outerHTML = liveListDoc.documentElement./*OK*/outerHTML; outerHTML = replaceUrls(mode, outerHTML); res.send(`${doctype}${outerHTML}`); return; } if (!liveListDoc) { var liveListUpdateFullPath = `${process.cwd()}${liveListUpdateFile}`; var liveListFile = fs.readFileSync(liveListUpdateFullPath); liveListDoc = jsdom.jsdom(liveListFile); } var action = Math.floor(Math.random() * 3); var liveList = liveListDoc.querySelector('#my-live-list'); var perPage = Number(liveList.getAttribute('data-max-items-per-page')); var items = liveList.querySelector('[items]'); var pagination = liveListDoc.querySelector('#my-live-list [pagination]'); var item1 = liveList.querySelector('#list-item-1'); if (liveListCtr != 0) { if (Math.random() < .8) { // Always run a replace on the first item liveListReplace(item1); if (Math.random() < .5) { liveListTombstone(liveList); } if (Math.random() < .8) { liveListInsert(liveList, item1); } pagination.textContent = ''; var liveChildren = [].slice.call(items.children) .filter(x => !x.hasAttribute('data-tombstone')); var pageCount = Math.ceil(liveChildren.length / perPage); var pageListItems = Array.apply(null, Array(pageCount)) .map((_, i) => `<li>${i + 1}</li>`).join(''); var newPagination = '<nav aria-label="amp live list pagination">' + `<ul class="pagination">${pageListItems}</ul>` + '</nav>'; pagination./*OK*/innerHTML = newPagination; } else { // Sometimes we want an empty response to simulate no changes. res.send(`${doctype}<html></html>`); return; } } var outerHTML = liveListDoc.documentElement./*OK*/outerHTML; outerHTML = replaceUrls(mode, outerHTML); liveListCtr++; res.send(`${doctype}${outerHTML}`); }); function liveListReplace(item) { item.setAttribute('data-update-time', Date.now()); var itemContents = item.querySelectorAll('.content'); itemContents[0].textContent = Math.floor(Math.random() * 10); itemContents[1].textContent = Math.floor(Math.random() * 10); } function liveListInsert(liveList, node) { var iterCount = Math.floor(Math.random() * 2) + 1; console.log(`inserting ${iterCount} item(s)`); for (var i = 0; i < iterCount; i++) { var child = node.cloneNode(true); child.setAttribute('id', `list-item-${itemCtr++}`); child.setAttribute('data-sort-time', Date.now()); liveList.querySelector('[items]').appendChild(child); } } function liveListTombstone(liveList) { var tombstoneId = Math.floor(Math.random() * itemCtr); console.log(`trying to tombstone #list-item-${tombstoneId}`); // We can tombstone any list item except item-1 since we always do a // replace example on item-1. if (tombstoneId != 1) { var item = liveList./*OK*/querySelector(`#list-item-${tombstoneId}`); if (item) { item.setAttribute('data-tombstone', ''); } } } // Generate a random number between min and max // Value is inclusive of both min and max values. 
function range(min, max) { var values = Array.apply(null, Array(max - min + 1)).map((_, i) => min + i); return values[Math.round(Math.random() * (max - min))] } // Returns the result of a coin flip, true or false function flip() { return !!Math.floor(Math.random() * 2); } function getLiveBlogItem() { var now = Date.now(); // Generate a 3 to 7 worded headline var headline = bacon(range(3, 7)); var numOfParagraphs = range(1, 2); var body = Array.apply(null, Array(numOfParagraphs)).map(x => { return `<p>${bacon(range(50, 90))}</p>`;<|fim▁hole|> layout="responsive" height="300" width="350"> </amp-img>`; return `<!doctype html> <html amp><body> <amp-live-list id="live-blog-1"> <div items> <div id="live-blog-item-${now}" data-sort-time="${now}"> <h3 class="headline"> <a href="#live-blog-item-${now}">${headline}</a> </h3> <div class="author"> <div class="byline"> <p> by <span itemscope itemtype="http://schema.org/Person" itemprop="author"><b>Lorem Ipsum</b> <a class="mailto" href="mailto:lorem.ipsum@"> lorem.ipsum@</a></span> </p> <p class="brand">PublisherName News Reporter<p> <p><span itemscope itemtype="http://schema.org/Date" itemprop="Date">${Date(now).replace(/ GMT.*$/, '')}<span></p> </div> </div> <div class="article-body">${body}</div> ${img} <div class="social-box"> <amp-social-share type="facebook" data-param-text="Hello world" data-param-href="https://example.com/?ref=URL" data-param-app_id="145634995501895"></amp-social-share> <amp-social-share type="twitter"></amp-social-share> </div> </div> </div> </amp-live-list></body></html>`; } function getLiveBlogItemWithBindAttributes() { var now = Date.now(); // Generate a 3 to 7 worded headline var headline = bacon(range(3, 7)); var numOfParagraphs = range(1, 2); var body = Array.apply(null, Array(numOfParagraphs)).map(x => { return `<p>${bacon(range(50, 90))}</p>`; }).join('\n'); return `<!doctype html> <html amp><body> <amp-live-list id="live-blog-1"> <div items> <div id="live-blog-item-${now}" data-sort-time="${now}"> <div class="article-body"> ${body} <p> As you can see, bacon is far superior to <b><span [text]='favoriteFood'>everything!</span></b>!</p> </div> </div> </div> </amp-live-list></body></html>`; } app.use('/examples/live-blog(-non-floating-button)?.amp.(min.|max.)?html', function(req, res, next) { if ('amp_latest_update_time' in req.query) { res.setHeader('Content-Type', 'text/html'); res.end(getLiveBlogItem()); return; } next(); }); app.use('/examples/bind/live-list.amp.(min.|max.)?html', function(req, res, next) { if ('amp_latest_update_time' in req.query) { res.setHeader('Content-Type', 'text/html'); res.end(getLiveBlogItemWithBindAttributes()); return; } next(); }); app.use('/examples/amp-fresh.amp.(min.|max.)?html', function(req, res, next) { if ('amp-fresh' in req.query && req.query['amp-fresh']) { res.setHeader('Content-Type', 'text/html'); res.end(`<!doctype html> <html ⚡> <body> <amp-fresh id="amp-fresh-1"><span>hello</span> world!</amp-fresh> <amp-fresh id="amp-fresh-2">foo bar</amp-fresh> </body> </html>`); return; } next(); }); app.use('/impression-proxy/', function(req, res) { assertCors(req, res, ['GET']); // Fake response with the following optional fields: // location: The Url the that server would have sent redirect to w/o ALP // tracking_url: URL that should be requested to track click // gclid: The conversion tracking value const body = { 'location': 'localhost:8000/examples/?gclid=1234&foo=bar&example=123', 'tracking_url': 'tracking_url', 'gclid': '1234', }; res.send(body); }); // Proxy with unminified JS. 
// Example: // http://localhost:8000/max/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ app.use('/max/', function(req, res) { proxyToAmpProxy(req, res, /* minify */ false); }); // Proxy with minified JS. // Example: // http://localhost:8000/min/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ app.use('/min/', function(req, res) { proxyToAmpProxy(req, res, /* minify */ true); }); // Nest the response in an iframe. // Example: // http://localhost:8000/iframe/examples/ads.amp.max.html app.get('/iframe/*', function(req, res) { // Returns an html blob with an iframe pointing to the url after /iframe/. res.send(`<!doctype html> <html style="width:100%; height:100%;"> <body style="width:98%; height:98%;"> <iframe src="${req.url.substr(7)}" style="width:100%; height:100%;"> </iframe> </body> </html>`); }); // Returns a document that echoes any post messages received from parent. // An optional `message` query param can be appended for an initial post // message sent on document load. // Example: // http://localhost:8000/iframe-echo-message?message=${payload} app.get('/iframe-echo-message', function(req, res) { const message = req.query.message; res.send( `<!doctype html> <body style="background-color: yellow"> <script> if (${message}) { echoMessage(${message}); } window.addEventListener('message', function(event) { echoMessage(event.data); }); function echoMessage(message) { parent.postMessage(message, '*'); } </script> </body> </html>`); }); // A4A envelope. // Examples: // http://localhost:8000/a4a[-3p]/examples/animations.amp.max.html // http://localhost:8000/a4a[-3p]/max/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ // http://localhost:8000/a4a[-3p]/min/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ app.use('/a4a(|-3p)/', function(req, res) { var force3p = req.baseUrl.indexOf('/a4a-3p') == 0; var adUrl = req.url; var templatePath = '/build-system/server-a4a-template.html'; var urlPrefix = getUrlPrefix(req); if (!adUrl.startsWith('/m') && urlPrefix.indexOf('//localhost') != -1) { // This is a special case for testing. `localhost` URLs are transformed to // `ads.localhost` to ensure that the iframe is fully x-origin. adUrl = urlPrefix.replace('localhost', 'ads.localhost') + adUrl; } adUrl = addQueryParam(adUrl, 'inabox', 1); fs.readFileAsync(process.cwd() + templatePath, 'utf8').then(template => { var result = template .replace(/FORCE3P/g, force3p) .replace(/DISABLE3PFALLBACK/g, !force3p) .replace(/OFFSET/g, req.query.offset || '0px') .replace(/AD_URL/g, adUrl) .replace(/AD_WIDTH/g, req.query.width || '300') .replace(/AD_HEIGHT/g, req.query.height || '250'); res.end(result); }); }); // In-a-box envelope. 
// Examples: // http://localhost:8000/inabox/examples/animations.amp.max.html // http://localhost:8000/inabox/max/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ // http://localhost:8000/inabox/min/s/www.washingtonpost.com/amphtml/news/post-politics/wp/2016/02/21/bernie-sanders-says-lower-turnout-contributed-to-his-nevada-loss-to-hillary-clinton/ app.use('/inabox/', function(req, res) { var adUrl = req.url; var templatePath = '/build-system/server-inabox-template.html'; var urlPrefix = getUrlPrefix(req); if (!adUrl.startsWith('/m') && // Ignore /min and /max urlPrefix.indexOf('//localhost') != -1) { // This is a special case for testing. `localhost` URLs are transformed to // `ads.localhost` to ensure that the iframe is fully x-origin. adUrl = urlPrefix.replace('localhost', 'ads.localhost') + adUrl; } adUrl = addQueryParam(adUrl, 'inabox', 1); fs.readFileAsync(process.cwd() + templatePath, 'utf8').then(template => { var result = template .replace(/AD_URL/g, adUrl) .replace(/OFFSET/g, req.query.offset || '0px') .replace(/AD_WIDTH/g, req.query.width || '300') .replace(/AD_HEIGHT/g, req.query.height || '250'); res.end(result); }); }); app.use('/examples/analytics.config.json', function(req, res, next) { res.setHeader('AMP-Access-Control-Allow-Source-Origin', getUrlPrefix(req)); next(); }); app.use(['/examples/*', '/extensions/*'], function (req, res, next) { var sourceOrigin = req.query['__amp_source_origin']; if (sourceOrigin) { res.setHeader('AMP-Access-Control-Allow-Source-Origin', sourceOrigin); } next(); }); /** * Append ?sleep=5 to any included JS file in examples to emulate delay in loading that * file. This allows you to test issues with your extension being late to load * and testing user interaction with your element before your code loads. * * Example delay loading amp-form script by 5 seconds: * <script async custom-element="amp-form" * src="https://cdn.ampproject.org/v0/amp-form-0.1.js?sleep=5"></script> */ app.use(['/dist/v0/amp-*.js'], function(req, res, next) { var sleep = parseInt(req.query.sleep || 0) * 1000; setTimeout(next, sleep); }); app.get(['/examples/*', '/test/manual/*'], function(req, res, next) { var filePath = req.path; var mode = getPathMode(filePath); if (!mode) { return next(); } const inabox = req.query['inabox'] == '1'; filePath = filePath.substr(0, filePath.length - 9) + '.html'; fs.readFileAsync(process.cwd() + filePath, 'utf8').then(file => { if (req.query['amp_js_v']) { file = addViewerIntegrationScript(req.query['amp_js_v'], file); } file = replaceUrls(mode, file, '', inabox); if (inabox && req.headers.origin && req.query.__amp_source_origin) { // Allow CORS requests for A4A. enableCors(req, res, req.headers.origin); } // Extract amp-ad for the given 'type' specified in URL query. if (req.path.indexOf('/examples/ads.amp') == 0 && req.query.type) { var ads = file.match(new RegExp('<(amp-ad|amp-embed) [^>]*[\'"]' + req.query.type + '[\'"][^>]*>([\\s\\S]+?)<\/(amp-ad|amp-embed)>', 'gm')); file = file.replace( /<body>[\s\S]+<\/body>/m, '<body>' + ads.join('') + '</body>'); } res.send(file); }).catch(() => { next(); }); }); // Data for example: http://localhost:8000/examples/bind/xhr.amp.max.html app.use('/bind/form/get', function(req, res, next) { assertCors(req, res, ['GET']); res.json({ bindXhrResult: 'I was fetched from the server!' 
}); }); // Data for example: http://localhost:8000/examples/bind/ecommerce.amp.max.html app.use('/bind/ecommerce/sizes', function(req, res, next) { assertCors(req, res, ['GET']); setTimeout(() => { var prices = { "0": { "sizes": ["XS"] }, "1": { "sizes": ["S", "M", "L"] }, "2": { "sizes": ["XL"] }, "3": { "sizes": ["M", "XL"] }, "4": { "sizes": ["S", "L"] }, "5": { "sizes": ["S", "XL"] }, "6": { "sizes": ["XS", "M"] }, "7": { "sizes": ["M", "L", "XL"] }, "8": { "sizes": ["XS", "M", "XL"] } }; const object = {}; object[req.query.shirt] = prices[req.query.shirt]; res.json(object); }, 1000); // Simulate network delay. }); app.use('/list/fruit-data/get', function(req, res, next) { assertCors(req, res, ['GET']); res.json({ items: [ {name: 'apple', quantity: 47, unitPrice: '0.33'}, {name: 'pear', quantity: 538, unitPrice: '0.54'}, {name: 'tomato', quantity: 0, unitPrice: '0.23'}, ], }); }); app.use('/list/vegetable-data/get', function(req, res, next) { assertCors(req, res, ['GET']); res.json({ items: [ {name: 'cabbage', quantity: 5, unitPrice: '1.05'}, {name: 'carrot', quantity: 10, unitPrice: '0.01'}, {name: 'broccoli', quantity: 7, unitPrice: '0.02'}, ], }); }); // Simulated Cloudflare signed Ad server const cloudflareDataDir = '/extensions/amp-ad-network-cloudflare-impl/0.1/data'; const fakeAdNetworkDataDir = '/extensions/amp-ad-network-fake-impl/0.1/data' /** * Handle CORS headers */ app.use([cloudflareDataDir], function fakeCors(req, res, next) { assertCors(req, res, ['GET', 'OPTIONS'], ['X-AmpAdSignature']); if (req.method=='OPTIONS') { res.status(204).end(); } else { next(); } }); /** * Handle fake a4a data */ app.get([ fakeAdNetworkDataDir + '/*', cloudflareDataDir + '/*'], function(req, res) { var filePath = req.path; var unwrap = false; if (req.path.endsWith('.html')) { filePath = req.path.slice(0,-5) unwrap = true } filePath = process.cwd() + filePath fs.readFileAsync(filePath).then(file => { if (!unwrap) { res.end(file) return } const metadata = JSON.parse(file); res.setHeader('Content-Type', 'text/html'); res.setHeader('X-AmpAdSignature', metadata.signature); res.end(metadata.creative); }).error( () => { res.status(404); res.end("Not found: " + filePath); }); }); /* * Start Cache SW LOCALDEV section */ app.get(['/dist/sw.js', '/dist/sw.max.js'], function(req, res, next) { var filePath = req.path; fs.readFileAsync(process.cwd() + filePath, 'utf8').then(file => { var n = new Date(); // Round down to the nearest 5 minutes. 
n -= ((n.getMinutes() % 5) * 1000 * 60) + (n.getSeconds() * 1000) + n.getMilliseconds(); file = 'self.AMP_CONFIG = {v: "99' + n + '",' + 'cdnUrl: "http://localhost:8000/dist"};' + file; res.setHeader('Content-Type', 'application/javascript'); res.setHeader('Date', new Date().toUTCString()); res.setHeader('Cache-Control', 'no-cache;max-age=150'); res.end(file); }).catch(next); }); app.get('/dist/rtv/9[89]*/*.js', function(req, res, next) { res.setHeader('Content-Type', 'application/javascript'); res.setHeader('Date', new Date().toUTCString()); res.setHeader('Cache-Control', 'no-cache;max-age=31536000'); setTimeout(() => { // Cause a delay, to show the "stale-while-revalidate" if (req.path.includes('v0.js')) { var path = req.path.replace(/rtv\/\d+/, ''); return fs.readFileAsync(process.cwd() + path, 'utf8') .then(file => { res.end(file); }).catch(next); } res.end(` var li = document.createElement('li'); li.textContent = '${req.path}'; loaded.appendChild(li); `); }, 2000); }); app.get(['/dist/cache-sw.min.html', '/dist/cache-sw.max.html'], function(req, res, next) { var filePath = '/test/manual/cache-sw.html'; fs.readFileAsync(process.cwd() + filePath, 'utf8').then(file => { var n = new Date(); // Round down to the nearest 5 minutes. n -= ((n.getMinutes() % 5) * 1000 * 60) + (n.getSeconds() * 1000) + n.getMilliseconds(); var percent = parseFloat(req.query.canary) || 0.01; var env = '99'; if (Math.random() < percent) { env = '98'; n += 5 * 1000 * 60; } file = file.replace(/dist\/v0/g, `dist/rtv/${env}${n}/v0`); file = file.replace(/CURRENT_RTV/, env + n); res.setHeader('Content-Type', 'text/html'); res.end(file); }).catch(next); }); app.get('/dist/diversions', function(req, res, next) { var n = new Date(); // Round down to the nearest 5 minutes. n -= ((n.getMinutes() % 5) * 1000 * 60) + (n.getSeconds() * 1000) + n.getMilliseconds(); n += 5 * 1000 * 60; res.setHeader('Content-Type', 'application/json'); res.setHeader('Date', new Date().toUTCString()); res.setHeader('Cache-Control', 'no-cache;max-age=150'); res.end(JSON.stringify(["98" + n])); }); /* * End Cache SW LOCALDEV section */ /** * Web worker binary. 
*/ app.get(['/dist/ww.js', '/dist/ww.max.js'], function(req, res) { fs.readFileAsync(process.cwd() + req.path).then(file => { res.setHeader('Content-Type', 'text/javascript'); res.setHeader('Access-Control-Allow-Origin', '*'); res.end(file); }); }); /** * @param {string} mode * @param {string} file * @param {string=} hostName * @param {boolean=} inabox */ function replaceUrls(mode, file, hostName, inabox) { hostName = hostName || ''; if (mode == 'max') { file = file.replace('https://cdn.ampproject.org/v0.js', hostName + '/dist/amp.js'); file = file.replace('https://cdn.ampproject.org/amp4ads-v0.js', hostName + '/dist/amp-inabox.js'); file = file.replace(/https:\/\/cdn.ampproject.org\/v0\/(.+?).js/g, hostName + '/dist/v0/$1.max.js'); if (inabox) { file = file.replace('/dist/amp.js', '/dist/amp-inabox.js'); } } else if (mode == 'min') { file = file.replace('https://cdn.ampproject.org/v0.js', hostName + '/dist/v0.js'); file = file.replace('https://cdn.ampproject.org/amp4ads-v0.js', hostName + '/dist/amp4ads-v0.js'); file = file.replace(/https:\/\/cdn.ampproject.org\/v0\/(.+?).js/g, hostName + '/dist/v0/$1.js'); file = file.replace(/\/dist.3p\/current\/(.*)\.max.html/, hostName + '/dist.3p/current-min/$1.html'); if (inabox) { file = file.replace('/dist/v0.js', '/dist/amp4ads-v0.js'); } } return file; } /** * @param {string} ampJsVersion * @param {string} file */ function addViewerIntegrationScript(ampJsVersion, file) { ampJsVersion = parseFloat(ampJsVersion); if (!ampJsVersion) { return file; } var viewerScript; if (Number.isInteger(ampJsVersion)) { // Viewer integration script from gws, such as // https://cdn.ampproject.org/viewer/google/v7.js viewerScript = '<script async src="https://cdn.ampproject.org/viewer/google/v' + ampJsVersion + '.js"></script>'; } else { // Viewer integration script from runtime, such as // https://cdn.ampproject.org/v0/amp-viewer-integration-0.1.js viewerScript = '<script async src="https://cdn.ampproject.org/v0/amp-viewer-integration-' + ampJsVersion + '.js" data-amp-report-test="viewer-integr.js"></script>'; } file = file.replace('</head>', viewerScript + '</head>'); return file; } /** * @param {string} path * @return {string} */ function extractFilePathSuffix(path) { return path.substr(-9); } /** * @param {string} path * @return {?string} */ function getPathMode(path) { var suffix = extractFilePathSuffix(path); if (suffix == '.max.html') { return 'max'; } else if (suffix == '.min.html') { return 'min'; } else { return null; } } function getUrlPrefix(req) { return req.protocol + '://' + req.headers.host; } /** * @param {string} url * @param {string} param * @param {*} value * @return {string} */ function addQueryParam(url, param, value) { const paramValue = encodeURIComponent(param) + '=' + encodeURIComponent(value); if (!url.includes('?')) { url += '?' + paramValue; } else { url += '&' + paramValue; } return url; } function enableCors(req, res, origin, opt_exposeHeaders) { res.setHeader('Access-Control-Allow-Credentials', 'true'); res.setHeader('Access-Control-Allow-Origin', origin); res.setHeader('Access-Control-Expose-Headers', ['AMP-Access-Control-Allow-Source-Origin'] .concat(opt_exposeHeaders || []).join(', ')); res.setHeader('AMP-Access-Control-Allow-Source-Origin', req.query.__amp_source_origin); } function assertCors(req, res, opt_validMethods, opt_exposeHeaders) { const validMethods = opt_validMethods || ['GET', 'POST', 'OPTIONS']; const invalidMethod = req.method + ' method is not allowed. 
Use POST.'; const invalidOrigin = 'Origin header is invalid.'; const invalidSourceOrigin = '__amp_source_origin parameter is invalid.'; const unauthorized = 'Unauthorized Request'; var origin; if (validMethods.indexOf(req.method) == -1) { res.statusCode = 405; res.end(JSON.stringify({message: invalidMethod})); throw invalidMethod; } if (req.headers.origin) { origin = req.headers.origin; if (!ORIGIN_REGEX.test(req.headers.origin)) { res.statusCode = 500; res.end(JSON.stringify({message: invalidOrigin})); throw invalidOrigin; } if (!SOURCE_ORIGIN_REGEX.test(req.query.__amp_source_origin)) { res.statusCode = 500; res.end(JSON.stringify({message: invalidSourceOrigin})); throw invalidSourceOrigin; } } else if (req.headers['amp-same-origin'] == 'true') { origin = getUrlPrefix(req); } else { res.statusCode = 401; res.end(JSON.stringify({message: unauthorized})); throw unauthorized; } enableCors(req, res, origin, opt_exposeHeaders); } module.exports = app;<|fim▁end|>
}).join('\n'); var img = `<amp-img src="${flip() ? 'https://placekitten.com/300/350' : 'https://baconmockup.com/300/350'}"
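assertCors in the row above gates every mock endpoint: a method outside the allow-list is rejected, a present Origin header must match the allow-list regex together with a valid __amp_source_origin query parameter, and only then are CORS headers set on the response. The control flow condensed into a sketch; the regex is taken from the file, while the function shape and exception type are assumptions:

import re

CORS_ORIGIN_RE = re.compile(r'^http://localhost:8000|^https?://.+\.herokuapp\.com')

def assert_cors(method, origin, source_origin, valid_methods=('GET', 'POST', 'OPTIONS')):
    if method not in valid_methods:
        raise PermissionError(method + ' method is not allowed.')
    if origin is not None:
        if not CORS_ORIGIN_RE.match(origin):
            raise PermissionError('Origin header is invalid.')
        if source_origin is None or not CORS_ORIGIN_RE.match(source_origin):
            raise PermissionError('__amp_source_origin parameter is invalid.')
    # requests without an Origin header must instead carry amp-same-origin: true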
<|file_name|>Elan.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import time import EafIO import warnings class Eaf: """Read and write Elan's Eaf files. .. note:: All times are in milliseconds and can't have decimals. :var dict annotation_document: Annotation document TAG entries. :var dict licences: Licences included in the file. :var dict header: XML header. :var list media_descriptors: Linked files, where every file is of the form: ``{attrib}``. :var list properties: Properties, where every property is of the form: ``(value, {attrib})``. :var list linked_file_descriptors: Secondary linked files, where every linked file is of the form: ``{attrib}``. :var dict timeslots: Timeslot data of the form: ``{TimeslotID -> time(ms)}``. :var dict tiers: Tier data of the form: ``{tier_name -> (aligned_annotations, reference_annotations, attributes, ordinal)}``, aligned_annotations of the form: ``[{annotation_id -> (begin_ts, end_ts, value, svg_ref)}]``, reference annotations of the form: ``[{annotation_id -> (reference, value, previous, svg_ref)}]``. :var list linguistic_types: Linguistic types, where every type is of the form: ``{id -> attrib}``. :var list locales: Locales, where every locale is of the form: ``{attrib}``. :var dict constraints: Constraint data of the form: ``{stereotype -> description}``. :var dict controlled_vocabularies: Controlled vocabulary data of the form: ``{id -> (descriptions, entries, ext_ref)}``, descriptions of the form: ``[(lang_ref, text)]``, entries of the form: ``{id -> (values, ext_ref)}``, values of the form: ``[(lang_ref, description, text)]``. :var list external_refs: External references, where every reference is of the form ``[id, type, value]``. :var list lexicon_refs: Lexicon references, where every reference is of the form: ``[{attribs}]``. """ def __init__(self, file_path=None, author='pympi'): """Construct either a new Eaf file or read one from a file/stream. :param str file_path: Path to read from, - for stdin. If ``None`` an empty Eaf file will be created. :param str author: Author of the file. """ self.naive_gen_ann, self.naive_gen_ts = False, False self.annotation_document = { 'AUTHOR': author, 'DATE': time.strftime("%Y-%m-%dT%H:%M:%S%z"), 'VERSION': '2.8', 'FORMAT': '2.8', 'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance', 'xsi:noNamespaceSchemaLocation': 'http://www.mpi.nl/tools/elan/EAFv2.8.xsd'} self.constraints = {} self.controlled_vocabularies = {} self.header = {} self.licences = {} self.linguistic_types = {} self.tiers = {} self.timeslots = {} self.external_refs = [] self.lexicon_refs = [] self.linked_file_descriptors = [] self.locales = [] self.media_descriptors = [] self.properties = [] self.new_time, self.new_ann = 0, 0 if file_path is None: self.add_linguistic_type('default-lt', None) self.constraints = {'Time_Subdivision': 'Time subdivision of paren' 't annotation\'s time interval, no time gaps a' 'llowed within this interval', 'Symbolic_Subdivision': 'Symbolic subdivision ' 'of a parent annotation. Annotations refering ' 'to the same parent are ordered', 'Symbolic_Association': '1-1 association with ' 'a parent annotation', 'Included_In': 'Time alignable annotations wit' 'hin the parent annotation\'s time interval, g' 'aps are allowed'} self.properties.append(('0', {'NAME': 'lastUsedAnnotation'})) self.add_tier('default') else: EafIO.parse_eaf(file_path, self) def to_file(self, file_path, pretty=True): """Write the object to a file, if the file already exists a backup will be created with the ``.bak`` suffix. 
:param str file_path: Path to write to, - for stdout. :param bool pretty: Flag for pretty XML printing. """ EafIO.to_eaf(file_path, self, pretty) def to_textgrid(self, excluded_tiers=[], included_tiers=[]): """Convert the object to a :class:`pympi.Praat.TextGrid` object. :param list excluded_tiers: Specifically exclude these tiers. :param list included_tiers: Only include these tiers, when empty all are included. :returns: :class:`pympi.Praat.TextGrid` object :raises ImportError: If the pympi.Praat module can't be loaded. """ from Praat import TextGrid tgout = TextGrid() tiers = [a for a in self.tiers if a not in excluded_tiers] if included_tiers: tiers = [a for a in tiers if a in included_tiers] for tier in tiers: currentTier = tgout.add_tier(tier) for interval in self.get_annotation_data_for_tier(tier): if interval[0] == interval[1]: continue currentTier.add_interval(interval[0]/1000.0, interval[1]/1000.0, interval[2]) return tgout def extract(self, start, end): """Extracts the selected time frame as a new object. :param int start: Start time. :param int end: End time. :returns: The extracted frame in a new object. """ from copy import deepcopy eaf_out = deepcopy(self) for tier in eaf_out.tiers.itervalues(): rems = [] for ann in tier[0]: if eaf_out.timeslots[tier[0][ann][1]] > end or\ eaf_out.timeslots[tier[0][ann][0]] < start: rems.append(ann) for r in rems: del tier[0][r] return eaf_out def get_linked_files(self): """Give all linked files.""" return self.media_descriptors def add_linked_file(self, file_path, relpath=None, mimetype=None, time_origin=None, ex_from=None): """Add a linked file. :param str file_path: Path of the file. :param str relpath: Relative path of the file. :param str mimetype: Mimetype of the file, if ``None`` it tries to guess it according to the file extension which currently only works for wav, mpg, mpeg and xml. :param int time_origin: Time origin for the media file. :param str ex_from: Extracted from field. :raises KeyError: If mimetype had to be guessed and a non standard extension or an unknown mimetype. """ if mimetype is None: mimes = {'wav': 'audio/x-wav', 'mpg': 'video/mpeg', 'mpeg': 'video/mpg', 'xml': 'text/xml'} mimetype = mimes[file_path.split('.')[-1]] self.media_descriptors.append({ 'MEDIA_URL': file_path, 'RELATIVE_MEDIA_URL': relpath, 'MIME_TYPE': mimetype, 'TIME_ORIGIN': time_origin, 'EXTRACTED_FROM': ex_from}) def copy_tier(self, eaf_obj, tier_name): """Copies a tier to another :class:`pympi.Elan.Eaf` object. :param pympi.Elan.Eaf eaf_obj: Target Eaf object. :param str tier_name: Name of the tier. :raises KeyError: If the tier doesn't exist. """ eaf_obj.remove_tier(tier_name) eaf_obj.add_tier(tier_name, tier_dict=self.tiers[tier_name][2]) for ann in self.get_annotation_data_for_tier(tier_name): eaf_obj.insert_annotation(tier_name, ann[0], ann[1], ann[2]) def add_tier(self, tier_id, ling='default-lt', parent=None, locale=None, part=None, ann=None, tier_dict=None): """Add a tier. :param str tier_id: Name of the tier. :param str ling: Linguistic type, if the type is not available it will warn and pick the first available type. :param str parent: Parent tier name. :param str locale: Locale. :param str part: Participant. :param str ann: Annotator. :param dict tier_dict: TAG attributes, when this is not ``None`` it will ignore all other options. 
""" if ling not in self.linguistic_types: warnings.warn( 'add_tier: Linguistic type non existent, choosing the first') ling = self.linguistic_types.keys()[0] if tier_dict is None: self.tiers[tier_id] = ({}, {}, { 'TIER_ID': tier_id, 'LINGUISTIC_TYPE_REF': ling, 'PARENT_REF': parent, 'PARTICIPANT': part, 'DEFAULT_LOCALE': locale, 'ANNOTATOR': ann}, len(self.tiers)) else: self.tiers[tier_id] = ({}, {}, tier_dict, len(self.tiers)) def remove_tiers(self, tiers): """Remove multiple tiers, note that this is a lot faster than removing them individually because of the delayed cleaning of timeslots. :param list tiers: Names of the tiers to remove. :raises KeyError: If a tier is non existent. """ for a in tiers: self.remove_tier(a, clean=False) self.clean_time_slots() def remove_tier(self, id_tier, clean=True): """Remove tier. :param str id_tier: Name of the tier. :param bool clean: Flag to also clean the timeslots. :raises KeyError: If tier is non existent. """ del(self.tiers[id_tier]) if clean: self.clean_time_slots() def get_tier_names(self): """List all the tier names. :returns: List of all tier names. """ return self.tiers.keys() def get_parameters_for_tier(self, id_tier): """Give the parameter dictionary, this is usable in :func:`add_tier`. :param str id_tier: Name of the tier. :returns: Dictionary of parameters. :raises KeyError: If the tier is non existent. """ return self.tiers[id_tier][2] def child_tiers_for(self, id_tier): """Give all child tiers for a tier. :param str id_tier: Name of the tier. :returns: List of all children. :raises KeyError: If the tier is non existent. """ return [m for m in self.tiers if 'PARENT_REF' in self.tiers[m][2] and self.tiers[m][2]['PARENT_REF'] == id_tier] def get_annotation_data_for_tier(self, id_tier): """Gives a list of annotations of the form: ``(begin, end, value)``. :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. """ a = self.tiers[id_tier][0] return [(self.timeslots[a[b][0]], self.timeslots[a[b][1]], a[b][2]) for b in a] def get_annotation_data_at_time(self, id_tier, time): """Give the annotations at the given time. :param str id_tier: Name of the tier.<|fim▁hole|> :returns: List of annotations at that time. :raises KeyError: If the tier is non existent. """ anns = self.tiers[id_tier][0] return sorted( [(self.timeslots[m[0]], self.timeslots[m[1]], m[2]) for m in anns.itervalues() if self.timeslots[m[0]] <= time and self.timeslots[m[1]] >= time]) def get_annotation_datas_between_times(self, id_tier, start, end): """Gives the annotations within the times. :param str id_tier: Name of the tier. :param int start: Start time of the annotation. :param int end: End time of the annotation. :returns: List of annotations within that time. :raises KeyError: If the tier is non existent. """ anns = self.tiers[id_tier][0] return sorted([ (self.timeslots[m[0]], self.timeslots[m[1]], m[2]) for m in anns.itervalues() if self.timeslots[m[1]] >= start and self.timeslots[m[0]] <= end]) def remove_all_annotations_from_tier(self, id_tier): """Remove all annotations from a tier. :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. """ self.tiers[id_tier][0], self.tiers[id_tier][1] = {}, {} self.clean_time_slots() def insert_annotation(self, id_tier, start, end, value='', svg_ref=None): """Insert an annotation. :param str id_tier: Name of the tier. :param int start: Start time of the annotation. :param int end: End time of the annotation. :param str value: Value of the annotation. 
:param str svg_ref: Svg reference.
        :raises KeyError: If the tier is non existent.
        """
        start_ts = self.generate_ts_id(start)
        end_ts = self.generate_ts_id(end)
        self.tiers[id_tier][0][self.generate_annotation_id()] =\
            (start_ts, end_ts, value, svg_ref)

    def remove_annotation(self, id_tier, time, clean=True):
        """Remove an annotation in a tier, if you need speed the best thing
        is to clean the timeslots after the last removal.

        :param str id_tier: Name of the tier.
        :param int time: Timepoint within the annotation.
        :param bool clean: Flag to clean the timeslots afterwards.
        :raises KeyError: If the tier is non existent.
        """
        for b in [a for a in self.tiers[id_tier][0].iteritems() if
                  self.timeslots[a[1][0]] <= time and
                  self.timeslots[a[1][1]] >= time]:
            del(self.tiers[id_tier][0][b[0]])
        if clean:
            self.clean_time_slots()

    def insert_ref_annotation(self, id_tier, ref, value, prev, svg_ref=None):
        """Insert a reference annotation.

        :param str id_tier: Name of the tier.
        :param str ref: Id of the referenced annotation.
        :param str value: Value of the annotation.
        :param str prev: Id of the previous annotation.
        :param str svg_ref: Svg reference.
        :raises KeyError: If the tier is non existent.
        """
        self.tiers[id_tier][1][self.generate_annotation_id()] =\
            (ref, value, prev, svg_ref)

    def get_ref_annotation_data_for_tier(self, id_tier):
        """Give a list of all reference annotations of the form:
        ``{id -> (ref, value, previous, svg_ref)}``

        :param str id_tier: Name of the tier.
        :raises KeyError: If the tier is non existent.
        """
        return self.tiers[id_tier][1]

    def remove_controlled_vocabulary(self, cv):
        """Remove a controlled vocabulary.

        :param str cv: Controlled vocabulary id.
        :raises KeyError: If the controlled vocabulary is non existent.
        """
        del(self.controlled_vocabularies[cv])

    def generate_annotation_id(self):
        """Generate the next annotation id, this function is mainly used
        internally.
        """
        if self.naive_gen_ann:
            new = self.last_ann+1
            self.last_ann = new
        else:
            new = 1
            anns = {int(ann[1:]) for tier in self.tiers.itervalues()
                    for ann in tier[0]}
            if len(anns) > 0:
                newann = set(xrange(1, max(anns))).difference(anns)
                if len(newann) == 0:
                    new = max(anns)+1
                    self.naive_gen_ann = True
                    self.last_ann = new
                else:
                    new = sorted(newann)[0]
        return 'a%d' % new

    def generate_ts_id(self, time=None):
        """Generate the next timeslot id, this function is mainly used
        internally.

        :param int time: Initial time to assign to the timeslot
        """
        if self.naive_gen_ts:
            new = self.last_ts+1
            self.last_ts = new
        else:
            new = 1
            tss = {int(x[2:]) for x in self.timeslots}
            if len(tss) > 0:
                newts = set(xrange(1, max(tss))).difference(tss)
                if len(newts) == 0:
                    new = max(tss)+1
                    self.naive_gen_ts = True
                    self.last_ts = new
                else:
                    new = sorted(newts)[0]
        ts = 'ts%d' % new
        self.timeslots[ts] = time
        return ts

    def clean_time_slots(self):
        """Clean up all unused timeslots.

        .. warning:: This can and will take time for larger tiers. When you
                     want to do a lot of operations on a lot of tiers please
                     unset the flags for cleaning in the functions so that
                     the cleaning is only performed afterwards.
        """
        ts_in_tier = set(sum([a[0:2] for tier in self.tiers.itervalues()
                              for a in tier[0].itervalues()], ()))
        ts_avail = set(self.timeslots)
        for a in ts_in_tier.symmetric_difference(ts_avail):
            del(self.timeslots[a])
        self.naive_gen_ts = False
        self.naive_gen_ann = False

    def generate_annotation_concat(self, tiers, start, end, sep='-'):
        """Give a string of concatenated annotation values for annotations
        within a timeframe.

        :param list tiers: List of tier names.
        :param int start: Start time.
        :param int end: End time.
:param str sep: Separator string to use.
        :returns: String containing a concatenation of annotation values.
        :raises KeyError: If a tier is non existent.
        """
        return sep.join(
            set(d[2] for t in tiers if t in self.tiers
                for d in self.get_annotation_datas_between_times(
                    t, start, end)))

    def merge_tiers(self, tiers, tiernew=None, gapthresh=1):
        """Merge tiers into a new tier and when the gap is lower than the
        threshold glue the annotations together.

        :param list tiers: List of tier names.
        :param str tiernew: Name for the new tier, if ``None`` the name will
                            be generated.
        :param int gapthresh: Threshold for the gaps.
        :raises KeyError: If a tier is non existent.
        :raises TypeError: If there are no annotations within the tiers.
        """
        if tiernew is None:
            tiernew = '%s_Merged' % '_'.join(tiers)
        self.remove_tier(tiernew)
        self.add_tier(tiernew)
        timepts = sorted(set.union(
            *[set(j for j in xrange(d[0], d[1])) for d in
              [ann for tier in tiers for ann in
               self.get_annotation_data_for_tier(tier)]]))
        if len(timepts) > 1:
            start = timepts[0]
            for i in xrange(1, len(timepts)):
                if timepts[i]-timepts[i-1] > gapthresh:
                    self.insert_annotation(
                        tiernew, start, timepts[i-1],
                        self.generate_annotation_concat(tiers, start,
                                                        timepts[i-1]))
                    start = timepts[i]
            self.insert_annotation(
                tiernew, start, timepts[i-1],
                self.generate_annotation_concat(tiers, start, timepts[i-1]))

    def shift_annotations(self, time):
        """Shift all annotations in time, this creates a new object.

        :param int time: Time shift width, negative numbers make a right
                         shift.
        :returns: Shifted :class:`pympi.Elan.Eaf` object.
        """
        e = self.extract(
            -1*time, self.get_full_time_interval()[1]) if time < 0 else\
            self.extract(0, self.get_full_time_interval()[1]-time)
        for tier in e.tiers.itervalues():
            for ann in tier[0].itervalues():
                e.timeslots[ann[0]] = e.timeslots[ann[0]]+time
                e.timeslots[ann[1]] = e.timeslots[ann[1]]+time
        e.clean_time_slots()
        return e

    def filterAnnotations(self, tier, tier_name=None, filtin=None,
                          filtex=None):
        """Filter annotations in a tier.

        :param str tier: Name of the tier.
        :param str tier_name: Name of the new tier, when ``None`` the name
                              will be generated.
        :param list filtin: List of strings to be included, if None all
                            annotations are included.
        :param list filtex: List of strings to be excluded, if None no
                            strings are excluded.
        :raises KeyError: If the tier is non existent.
        """
        if tier_name is None:
            tier_name = '%s_filter' % tier
        self.remove_tier(tier_name)
        self.add_tier(tier_name)
        for a in [b for b in self.get_annotation_data_for_tier(tier)
                  if (filtex is None or b[2] not in filtex) and
                  (filtin is None or b[2] in filtin)]:
            self.insert_annotation(tier_name, a[0], a[1], a[2])

    def glue_annotations_in_tier(self, tier, tier_name=None, threshold=85,
                                 filtin=None, filtex=None):
        """Glue annotations together in a tier.

        :param str tier: Name of the tier.
        :param str tier_name: Name of the new tier, if ``None`` the name
                              will be generated.
        :param int threshold: Threshold for the maximum gap to still glue.
        :param list filtin: List of strings to be included, if None all
                            annotations are included.
        :param list filtex: List of strings to be excluded, if None no
                            strings are excluded.
        :raises KeyError: If the tier is non existent.
""" if tier_name is None: tier_name = '%s_glued' % tier self.remove_tier(tier_name) self.add_tier(tier_name) tier_data = sorted(self.get_annotation_data_for_tier(tier)) tier_data = [t for t in tier_data if (filtin is None or t[2] in filtin) and (filtex is None or t[2] not in filtex)] currentAnn = None for i in xrange(0, len(tier_data)): if currentAnn is None: currentAnn = (tier_data[i][0], tier_data[i][1], tier_data[i][2]) elif tier_data[i][0] - currentAnn[1] < treshhold: currentAnn = (currentAnn[0], tier_data[i][1], '%s_%s' % (currentAnn[2], tier_data[i][2])) else: self.insert_annotation(tier_name, currentAnn[0], currentAnn[1], currentAnn[2]) currentAnn = tier_data[i] if currentAnn is not None: self.insert_annotation(tier_name, currentAnn[0], tier_data[len(tier_data)-1][1], currentAnn[2]) def get_full_time_interval(self): """Give the full time interval of the file. :returns: Tuple of the form: ``(min_time, max_time``. """ return (min(self.timeslots.itervalues()), max(self.timeslots.itervalues())) def create_gaps_and_overlaps_tier(self, tier1, tier2, tier_name=None, maxlen=-1): """Create a tier with the gaps and overlaps of the annotations. For types see :func:`get_gaps_and_overlaps_duration` :param str tier1: Name of the first tier. :param str tier2: Name of the second tier. :param str tier_name: Name of the new tier, if ``None`` the name will be generated. :param int maxlen: Maximum length of gaps (skip longer ones), if ``-1`` no maximum will be used. :returns: List of gaps and overlaps of the form: ``[(type, start, end)]``. :raises KeyError: If a tier is non existent. :raises IndexError: If no annotations are available in the tiers. """ if tier_name is None: tier_name = '%s_%s_ftos' % (tier1, tier2) self.remove_tier(tier_name) self.add_tier(tier_name) ftos = self.get_gaps_and_overlaps_duration(tier1, tier2, maxlen) for fto in ftos: self.insert_annotation(tier_name, fto[1], fto[2], fto[0]) return ftos def get_gaps_and_overlaps_duration(self, tier1, tier2, maxlen=-1, progressbar=False): """Give gaps and overlaps. The return types are shown in the table below. The string will be of the format: ``id_tiername_tiername``. For example when a gap occurs between tier1 and tier2 and they are called ``speakerA`` and ``speakerB`` the annotation value of that gap will be ``G12_speakerA_speakerB``. | The gaps and overlaps are calculated using Heldner and Edlunds method found in: | *Heldner, M., & Edlund, J. (2010). Pauses, gaps and overlaps in conversations. Journal of Phonetics, 38(4), 555–568. doi:10.1016/j.wocn.2010.08.002* +-----+--------------------------------------------+ | id | Description | +=====+============================================+ | O12 | Overlap from tier1 to tier2 | +-----+--------------------------------------------+ | O21 | Overlap from tier2 to tier1 | +-----+--------------------------------------------+ | G12 | Gap from tier1 to tier2 | +-----+--------------------------------------------+ | G21 | Gap from tier2 to tier1 | +-----+--------------------------------------------+ | P1 | Pause for tier1 | +-----+--------------------------------------------+ | P2 | Pause for tier2 | +-----+--------------------------------------------+ | B12 | Within speaker overlap from tier1 to tier2 | +-----+--------------------------------------------+ | B21 | Within speaker overlap from tier2 to tier1 | +-----+--------------------------------------------+ :param str tier1: Name of the first tier. :param str tier2: Name of the second tier. 
:param int maxlen: Maximum length of gaps (skip longer ones), if
                           ``-1`` no maximum will be used.
        :param bool progressbar: Flag for debugging purposes that shows the
                                 progress during the process.
        :returns: List of gaps and overlaps of the form:
                  ``[(type, start, end)]``.
        :raises KeyError: If a tier is non existent.
        :raises IndexError: If no annotations are available in the tiers.
        """
        spkr1anns = sorted((self.timeslots[a[0]], self.timeslots[a[1]])
                           for a in self.tiers[tier1][0].values())
        spkr2anns = sorted((self.timeslots[a[0]], self.timeslots[a[1]])
                           for a in self.tiers[tier2][0].values())
        line1 = []
        isin = lambda x, lst: False if\
            len([i for i in lst if i[0] <= x and i[1] >= x]) == 0 else True
        minmax = (min(spkr1anns[0][0], spkr2anns[0][0]),
                  max(spkr1anns[-1][1], spkr2anns[-1][1]))
        last = (1, minmax[0])
        lastP = 0
        for ts in xrange(*minmax):
            in1, in2 = isin(ts, spkr1anns), isin(ts, spkr2anns)
            if in1 and in2:      # Both speaking
                if last[0] == 'B':
                    continue
                ty = 'B'
            elif in1:            # Only 1 speaking
                if last[0] == '1':
                    continue
                ty = '1'
            elif in2:            # Only 2 speaking
                if last[0] == '2':
                    continue
                ty = '2'
            else:                # None speaking
                if last[0] == 'N':
                    continue
                ty = 'N'
            line1.append((last[0], last[1], ts))
            last = (ty, ts)
            if progressbar and int((ts*1.0/minmax[1])*100) > lastP:
                lastP = int((ts*1.0/minmax[1])*100)
                print '%d%%' % lastP
        line1.append((last[0], last[1], minmax[1]))
        ftos = []
        for i in xrange(len(line1)):
            if line1[i][0] == 'N':
                if i != 0 and i < len(line1) - 1 and\
                        line1[i-1][0] != line1[i+1][0]:
                    ftos.append(('G12_%s_%s' % (tier1, tier2)
                                 if line1[i-1][0] == '1' else
                                 'G21_%s_%s' % (tier2, tier1),
                                 line1[i][1], line1[i][2]))
                else:
                    ftos.append(('P_%s' %
                                 (tier1 if line1[i-1][0] == '1' else tier2),
                                 line1[i][1], line1[i][2]))
            elif line1[i][0] == 'B':
                if i != 0 and i < len(line1) - 1 and\
                        line1[i-1][0] != line1[i+1][0]:
                    ftos.append(('O12_%s_%s' % (tier1, tier2)
                                 if line1[i-1][0] == '1' else
                                 'O21_%s_%s' % (tier2, tier1),
                                 line1[i][1], line1[i][2]))
                else:
                    ftos.append(('B_%s_%s' % ((tier1, tier2)
                                 if line1[i-1][0] == '1' else
                                 (tier2, tier1)),
                                 line1[i][1], line1[i][2]))
        return [f for f in ftos if maxlen == -1 or abs(f[2] - f[1]) < maxlen]

    def create_controlled_vocabulary(self, cv_id, descriptions, entries,
                                     ext_ref=None):
        """Create a controlled vocabulary.

        .. warning:: This is a very raw implementation and you should check
                     the Eaf file format specification for the entries.

        :param str cv_id: Name of the controlled vocabulary.
        :param list descriptions: List of descriptions.
        :param dict entries: Entries dictionary.
        :param str ext_ref: External reference.
        """
        self.controlled_vocabularies[cv_id] = (descriptions, entries,
                                               ext_ref)

    def get_tier_ids_for_linguistic_type(self, ling_type, parent=None):
        """Give a list of all tiers matching a linguistic type.

        :param str ling_type: Name of the linguistic type.
        :param str parent: Only match tiers from this parent, when ``None``
                           this option will be ignored.
        :returns: List of tiernames.
        :raises KeyError: If a tier or linguistic type is non existent.
        """
        return [t for t in self.tiers if
                self.tiers[t][2]['LINGUISTIC_TYPE_REF'] == ling_type and
                (parent is None or self.tiers[t][2]['PARENT_REF'] == parent)]

    def remove_linguistic_type(self, ling_type):
        """Remove a linguistic type.

        :param str ling_type: Name of the linguistic type.
        """
        del(self.linguistic_types[ling_type])

    def add_linguistic_type(self, lingtype, constraints=None,
                            timealignable=True, graphicreferences=False,
                            extref=None):
        """Add a linguistic type.

        :param str lingtype: Name of the linguistic type.
        :param list constraints: Constraint names.
:param bool timealignable: Flag for time alignable. :param bool graphicreferences: Flag for graphic references. :param str extref: External reference. """ self.linguistic_types[lingtype] = { 'LINGUISTIC_TYPE_ID': lingtype, 'TIME_ALIGNABLE': str(timealignable).lower(), 'GRAPHIC_REFERENCES': str(graphicreferences).lower(), 'CONSTRAINTS': constraints} if extref is not None: self.linguistic_types[lingtype]['EXT_REF'] = extref def get_linguistic_types(self): """Give a list of available linguistic types. :returns: List of linguistic type names. """ return self.linguistic_types.keys()<|fim▁end|>
:param int time: Time of the annotation.
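A minimal usage sketch of the Eaf tier and annotation API above; the `eaf` instance and the tier, value, and file names are illustrative assumptions (the constructor is not shown in this excerpt), and the Python 2 idiom matches the file:

    # Hypothetical usage, assuming `eaf` is an existing pympi.Elan.Eaf object.
    eaf.add_tier('speakerA')                          # new tier, default linguistic type
    eaf.insert_annotation('speakerA', 0, 1500, 'hello')
    eaf.insert_annotation('speakerA', 2000, 3200, 'world')
    print eaf.get_annotation_data_for_tier('speakerA')
    # -> [(0, 1500, 'hello'), (2000, 3200, 'world')]
    eaf.to_eaf('out.eaf')                             # write the result back to disk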
<|file_name|>LifeNode.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013 Triforce - in association with the University of Pretoria and Epi-Use <Advance/> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package afk.ge.tokyo.ems.nodes; import afk.ge.ems.Node; import afk.ge.tokyo.ems.components.Life; /** * * @author Daniel */ <|fim▁hole|> public Life life; }<|fim▁end|>
public class LifeNode extends Node {
<|file_name|>test_resources.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import unittest import bokeh.resources as resources from bokeh.resources import _get_cdn_urls WRAPPER = """Bokeh.$(function() { foo });""" WRAPPER_DEV = '''require(["jquery", "main"], function($, Bokeh) { Bokeh.set_log_level("info"); Bokeh.$(function() { foo }); });''' LOG_LEVELS = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] DEFAULT_LOG_JS_RAW = 'Bokeh.set_log_level("info");' ## Test JSResources def test_js_resources_default_mode_is_inline(): r = resources.JSResources() assert r.mode == "inline" def test_js_resources_inline_has_no_css_resources(): r = resources.JSResources(mode="inline") assert r.mode == "inline" assert r.dev is False assert len(r.js_raw) == 3 assert r.js_raw[-1] == DEFAULT_LOG_JS_RAW assert hasattr(r, 'css_raw') is False assert r.messages == [] ## Test CSSResources def test_css_resources_default_mode_is_inline(): r = resources.CSSResources() assert r.mode == "inline" def test_inline_css_resources():<|fim▁hole|> r = resources.CSSResources(mode="inline") assert r.mode == "inline" assert r.dev is False assert len(r.css_raw) == 2 assert hasattr(r, 'js_raw') is False assert r.messages == [] class TestResources(unittest.TestCase): def test_basic(self): r = resources.Resources() self.assertEqual(r.mode, "inline") def test_log_level(self): r = resources.Resources() for level in LOG_LEVELS: r.log_level = level self.assertEqual(r.log_level, level) if not r.dev: self.assertEqual(r.js_raw[-1], 'Bokeh.set_log_level("%s");' % level) self.assertRaises(ValueError, setattr, r, "log_level", "foo") def test_module_attrs(self): self.assertEqual(resources.CDN.mode, "cdn") self.assertEqual(resources.INLINE.mode, "inline") def test_inline(self): r = resources.Resources(mode="inline") self.assertEqual(r.mode, "inline") self.assertEqual(r.dev, False) self.assertEqual(len(r.js_raw), 3) self.assertEqual(r.js_raw[-1], DEFAULT_LOG_JS_RAW) self.assertEqual(len(r.css_raw), 2) self.assertEqual(r.messages, []) def test_get_cdn_urls(self): dev_version = "0.0.1dev" result = _get_cdn_urls(dev_version) url = result['js_files'][0] self.assertIn('bokeh/dev', url) def test_cdn(self): resources.__version__ = "1.0" r = resources.Resources(mode="cdn", version="1.0") self.assertEqual(r.mode, "cdn") self.assertEqual(r.dev, False) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) resources.__version__ = "1.0-1-abc" r = resources.Resources(mode="cdn", version="1.0") self.assertEqual(r.messages, [ {'text': "Requesting CDN BokehJS version '1.0' from Bokeh development version '1.0-1-abc'. 
This configuration is unsupported and may not work!", 'type': 'warn'} ]) def test_server(self): r = resources.Resources(mode="server") self.assertEqual(r.mode, "server") self.assertEqual(r.dev, False) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) r = resources.Resources(mode="server", root_url="http://foo/") self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_server_dev(self): r = resources.Resources(mode="server-dev") self.assertEqual(r.mode, "server") self.assertEqual(r.dev, True) self.assertEqual(len(r.js_raw), 1) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) r = resources.Resources(mode="server-dev", root_url="http://foo/") self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_relative(self): r = resources.Resources(mode="relative") self.assertEqual(r.mode, "relative") self.assertEqual(r.dev, False) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_relative_dev(self): r = resources.Resources(mode="relative-dev") self.assertEqual(r.mode, "relative") self.assertEqual(r.dev, True) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_absolute(self): r = resources.Resources(mode="absolute") self.assertEqual(r.mode, "absolute") self.assertEqual(r.dev, False) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_absolute_dev(self): r = resources.Resources(mode="absolute-dev") self.assertEqual(r.mode, "absolute") self.assertEqual(r.dev, True) self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW]) self.assertEqual(r.css_raw, []) self.assertEqual(r.messages, []) def test_argument_checks(self): self.assertRaises(ValueError, resources.Resources, "foo") for mode in ("inline", "cdn", "server", "server-dev", "absolute", "absolute-dev"): self.assertRaises(ValueError, resources.Resources, mode, root_dir="foo") for mode in ("inline", "server", "server-dev", "relative", "relative-dev", "absolute", "absolute-dev"): self.assertRaises(ValueError, resources.Resources, mode, version="foo") for mode in ("inline", "cdn", "relative", "relative-dev", "absolute", "absolute-dev"): self.assertRaises(ValueError, resources.Resources, mode, root_url="foo")<|fim▁end|>
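A short sketch of the behaviour these tests pin down; the mode string and the trailing log-level snippet come straight from the assertions above:

    from bokeh.resources import Resources

    r = Resources(mode="cdn", version="1.0")
    # In cdn mode js_raw carries only the log-level setter, per test_cdn above.
    assert r.js_raw == ['Bokeh.set_log_level("info");']
    r.log_level = "debug"   # must be one of LOG_LEVELS, otherwise ValueError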
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>#!python # log/urls.py from django.conf.urls import url from . import views # We are adding a URL called /home urlpatterns = [ url(r'^$', views.home, name='home'), url(r'^clients/$', views.clients, name='clients'), url(r'^clients/(?P<id>\d+)/$', views.client_detail, name='client_detail'), url(r'^clients/new/$', views.client_new, name='client_new'), url(r'^clients/(?P<id>\d+)/edit/$', views.client_edit, name='client_edit'), url(r'^clients/sevices/$', views.clients_services_count, name='clients_services_count'), url(r'^clients/bills/(?P<id>\d+)/$', views.all_clients_bills, name='all_clients_bills'),<|fim▁hole|> url(r'^clients/bills/$', views.fresh_clients, name='fresh_clients'), url(r'^clients/del/(?P<id>\d+)/$', views.delete_client, name='delete_client'), url(r'^contracts/$', views.contracts, name='contracts'), url(r'^contracts/(?P<id>\d+)/$', views.contract_detail, name='contract_detail'), url(r'^contracts/new/$', views.contract_new, name='contract_new'), url(r'^contracts/(?P<id>\d+)/edit/$', views.contract_edit, name='contract_edit'), url(r'^contracts/list/(?P<id>\d+)/$', views.all_clients_contracts, name='all_clients_contracts'), url(r'^contracts/list/$', views.contracts_services, name='contracts_services'), url(r'^contracts/del/(?P<id>\d+)/$', views.delete_contract, name='delete_contract'), url(r'^manager/$', views.managers, name='managers'), url(r'^manager/(?P<id>\d+)/$', views.manager_detail, name='manager_detail'), url(r'^manager/new/$', views.manager_new, name='manager_new'), url(r'^manager/(?P<id>\d+)/edit/$', views.manager_edit, name='manager_edit'), url(r'^manager/clients/$', views.managers_clients_count, name='managers_clients_count'), url(r'^managers/del/(?P<id>\d+)/$', views.delete_manager, name='delete_manager'), url(r'^briefs/$', views.brief, name='briefs'), url(r'^briefs/(?P<id>\d+)/$', views.brief_detail, name='brief_detail'), url(r'^briefs/new/$', views.brief_new, name='brief_new'), url(r'^briefs/(?P<id>\d+)/edit/$', views.brief_edit, name='brief_edit'), url(r'^briefs/del/(?P<id>\d+)/$', views.delete_brief, name='delete_brief'), url(r'^briefs/list/(?P<id>\d+)/$', views.all_clients_briefs, name='all_clients_briefs'), url(r'^services/$', views.services, name='services'), url(r'^services/(?P<id>\d+)/$', views.service_detail, name='service_detail'), url(r'^services/new/$', views.services_new, name='services_new'), url(r'^services/(?P<id>\d+)/edit/$', views.service_edit, name='service_edit'), url(r'^services/table/(?P<id>\d+)/$', views.service_all_clients, name='service_all_clients'), url(r'^services/del/(?P<id>\d+)/$', views.delete_service, name='delete_service'), url(r'^contractors/$', views.contractors, name='contractors'), url(r'^contractors/(?P<id>\d+)/$', views.contractor_detail, name='contractor_detail'), url(r'^contractors/new/$', views.contractors_new, name='contractors_new'), url(r'^contractors/(?P<id>\d+)/edit/$', views.contractor_edit, name='contractor_edit'), url(r'^contractors/newest/$', views.newest_contractors, name='newest_contractors'), url(r'^contractors/del/(?P<id>\d+)/$', views.delete_contractor, name='delete_contractor'), url(r'^acts/$', views.acts, name='acts'), url(r'^acts/(?P<id>\d+)/$', views.act_detail, name='act_detail'), url(r'^acts/new/$', views.act_new, name='act_new'), url(r'^acts/(?P<id>\d+)/edit/$', views.act_edit, name='act_edit'), url(r'^acts/del/(?P<id>\d+)/$', views.delete_act, name='delete_act'), url(r'^bills/$', views.bills, name='bills'), url(r'^bills/(?P<id>\d+)/$', views.bills_detail, 
name='bills_detail'), url(r'^bills/new/$', views.bills_new, name='bills_new'), url(r'^bills/(?P<id>\d+)/edit/$', views.bills_edit, name='bills_edit'), url(r'^bill/del/(?P<id>\d+)/$', views.delete_bill, name='delete_bill'), ]<|fim▁end|>
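Each named group in the patterns above is passed to its view as a keyword argument; a hypothetical stub compatible with the `(?P<id>\d+)` capture (the real views live in log/views.py, which is not part of this excerpt):

    # log/views.py (illustrative stub, not the project's actual view)
    from django.http import HttpResponse

    def client_detail(request, id):
        # `id` arrives as the string captured by (?P<id>\d+)
        return HttpResponse('client %s' % id)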
<|file_name|>_imaging.py<|end_file_name|><|fim▁begin|># encoding: utf-8 # module PIL._imaging # from /usr/lib/python2.7/dist-packages/PIL/_imaging.so # by generator 1.135 # no doc # no imports # Variables with simple values DEFAULT_STRATEGY = 0 FILTERED = 1 FIXED = 4 HUFFMAN_ONLY = 2 jpeglib_version = '8.0' PILLOW_VERSION = '2.5.1' RLE = 3 zlib_version = '1.2.8' # functions def alpha_composite(*args, **kwargs): # real signature unknown pass def bit_decoder(*args, **kwargs): # real signature unknown pass def blend(*args, **kwargs): # real signature unknown pass def convert(*args, **kwargs): # real signature unknown pass def copy(*args, **kwargs): # real signature unknown pass def crc32(*args, **kwargs): # real signature unknown pass def draw(*args, **kwargs): # real signature unknown pass def effect_mandelbrot(*args, **kwargs): # real signature unknown<|fim▁hole|> pass def effect_noise(*args, **kwargs): # real signature unknown pass def eps_encoder(*args, **kwargs): # real signature unknown pass def fill(*args, **kwargs): # real signature unknown pass def fli_decoder(*args, **kwargs): # real signature unknown pass def font(*args, **kwargs): # real signature unknown pass def getcodecstatus(*args, **kwargs): # real signature unknown pass def getcount(*args, **kwargs): # real signature unknown pass def gif_decoder(*args, **kwargs): # real signature unknown pass def gif_encoder(*args, **kwargs): # real signature unknown pass def hex_decoder(*args, **kwargs): # real signature unknown pass def hex_encoder(*args, **kwargs): # real signature unknown pass def jpeg_decoder(*args, **kwargs): # real signature unknown pass def jpeg_encoder(*args, **kwargs): # real signature unknown pass def libtiff_decoder(*args, **kwargs): # real signature unknown pass def libtiff_encoder(*args, **kwargs): # real signature unknown pass def linear_gradient(*args, **kwargs): # real signature unknown pass def map_buffer(*args, **kwargs): # real signature unknown pass def msp_decoder(*args, **kwargs): # real signature unknown pass def new(*args, **kwargs): # real signature unknown pass def open_ppm(*args, **kwargs): # real signature unknown pass def outline(*args, **kwargs): # real signature unknown pass def packbits_decoder(*args, **kwargs): # real signature unknown pass def path(*args, **kwargs): # real signature unknown pass def pcd_decoder(*args, **kwargs): # real signature unknown pass def pcx_decoder(*args, **kwargs): # real signature unknown pass def pcx_encoder(*args, **kwargs): # real signature unknown pass def radial_gradient(*args, **kwargs): # real signature unknown pass def raw_decoder(*args, **kwargs): # real signature unknown pass def raw_encoder(*args, **kwargs): # real signature unknown pass def sun_rle_decoder(*args, **kwargs): # real signature unknown pass def tga_rle_decoder(*args, **kwargs): # real signature unknown pass def tiff_lzw_decoder(*args, **kwargs): # real signature unknown pass def wedge(*args, **kwargs): # real signature unknown pass def xbm_decoder(*args, **kwargs): # real signature unknown pass def xbm_encoder(*args, **kwargs): # real signature unknown pass def zip_decoder(*args, **kwargs): # real signature unknown pass def zip_encoder(*args, **kwargs): # real signature unknown pass # no classes<|fim▁end|>
<|file_name|>api_1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ rio.blueprints.api_1 ~~~~~~~~~~~~~~~~~~~~~ """ <|fim▁hole|><|fim▁end|>
from flask import Blueprint bp = Blueprint('api_1', __name__)
<|file_name|>xevie.rs<|end_file_name|><|fim▁begin|>/* * This file generated automatically from xevie.xml by r_client.py. * Edit at your peril. */ //Make the compiler quiet #![allow(unused_imports)] #![allow(non_camel_case_types)] use std; use libc::*; use ffi; pub static XEVIE_MAJOR_VERSION : c_uint = 1; pub static XEVIE_MINOR_VERSION : c_uint = 0; pub struct query_version_cookie { sequence : c_uint } pub struct query_version_request { pub major_opcode : u8, pub minor_opcode : u8, pub length : u16, pub client_major_version : u16, pub client_minor_version : u16 } pub struct query_version_reply { pub response_type : u8, pub pad0 : u8, pub sequence : u16, pub length : u32, pub server_major_version : u16, pub server_minor_version : u16, pub pad1 : [u8;20] } pub struct start_cookie { sequence : c_uint } pub struct start_request { pub major_opcode : u8, pub minor_opcode : u8, pub length : u16, pub screen : u32 } pub struct start_reply { pub response_type : u8, pub pad0 : u8, pub sequence : u16, pub length : u32, pub pad1 : [u8;24] } pub struct end_cookie { sequence : c_uint } pub struct end_request { pub major_opcode : u8, pub minor_opcode : u8, pub length : u16, pub cmap : u32 } pub struct end_reply { pub response_type : u8, pub pad0 : u8, pub sequence : u16, pub length : u32, pub pad1 : [u8;24] } pub struct event { pub pad0 : [u8;32] } /** * @brief event_iterator **/ pub struct event_iterator { pub data : *mut event, pub rem : c_int, pub index: c_int } pub struct send_cookie { sequence : c_uint } pub struct send_request { pub major_opcode : u8, pub minor_opcode : u8, pub length : u16, pub event : event, pub data_type : u32, pub pad0 : [u8;64] } pub struct send_reply { pub response_type : u8, pub pad0 : u8, pub sequence : u16, pub length : u32, pub pad1 : [u8;24] } pub struct select_input_cookie { sequence : c_uint } pub struct select_input_request { pub major_opcode : u8, pub minor_opcode : u8, pub length : u16, pub event_mask : u32 } pub struct select_input_reply { pub response_type : u8, pub pad0 : u8, pub sequence : u16, pub length : u32, pub pad1 : [u8;24] } #[link(name="xcb-xevie")] extern "C" { /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * */ pub fn xcb_xevie_query_version (c : *mut ffi::base::connection, client_major_version : u16, client_minor_version : u16) -> query_version_cookie; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * * This form can be used only if the request will cause * a reply to be generated. Any returned error will be * placed in the event queue. */ pub fn xcb_xevie_query_version_unchecked (c : *mut ffi::base::connection, client_major_version : u16, client_minor_version : u16) -> query_version_cookie; /** * Return the reply * @param c The connection * @param cookie The cookie * @param e The generic_error supplied * * Returns the reply of the request asked by * * The parameter @p e supplied to this function must be NULL if * xcb_xevie_query_version_unchecked(). is used. * Otherwise, it stores the error if any. * * The returned value must be freed by the caller using free(). */ pub fn xcb_xevie_query_version_reply (c : *mut ffi::base::connection, cookie : query_version_cookie, e : *mut *mut ffi::base::generic_error) -> *mut query_version_reply; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. 
* */ pub fn xcb_xevie_start (c : *mut ffi::base::connection, screen : u32) -> start_cookie; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * * This form can be used only if the request will cause * a reply to be generated. Any returned error will be * placed in the event queue. */ pub fn xcb_xevie_start_unchecked (c : *mut ffi::base::connection, screen : u32) -> start_cookie; /** * Return the reply * @param c The connection * @param cookie The cookie * @param e The generic_error supplied * * Returns the reply of the request asked by * * The parameter @p e supplied to this function must be NULL if * xcb_xevie_start_unchecked(). is used. * Otherwise, it stores the error if any. * * The returned value must be freed by the caller using free(). */ pub fn xcb_xevie_start_reply (c : *mut ffi::base::connection, cookie : start_cookie, e : *mut *mut ffi::base::generic_error) -> *mut start_reply; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * */ pub fn xcb_xevie_end (c : *mut ffi::base::connection, cmap : u32) -> end_cookie; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * * This form can be used only if the request will cause * a reply to be generated. Any returned error will be * placed in the event queue. */ pub fn xcb_xevie_end_unchecked (c : *mut ffi::base::connection, cmap : u32) -> end_cookie; /** * Return the reply * @param c The connection * @param cookie The cookie * @param e The generic_error supplied * * Returns the reply of the request asked by * * The parameter @p e supplied to this function must be NULL if * xcb_xevie_end_unchecked(). is used. * Otherwise, it stores the error if any. * * The returned value must be freed by the caller using free(). */ pub fn xcb_xevie_end_reply (c : *mut ffi::base::connection, cookie : end_cookie, e : *mut *mut ffi::base::generic_error) -> *mut end_reply; /** * Get the next element of the iterator * @param i Pointer to a event_iterator * * Get the next element in the iterator. The member rem is * decreased by one. The member data points to the next * element. The member index is increased by sizeof(event) * * */ pub fn xcb_xevie_event_next (i:*mut event_iterator) -> c_void; /** * Return the iterator pointing to the last element * @param i An event_iterator * @return The iterator pointing to the last element * * Set the current element in the iterator to the last element. * The member rem is set to 0. The member data points to the * last element. */ pub fn xcb_xevie_event_end (i:event_iterator) -> ffi::base::generic_iterator; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * */ pub fn xcb_xevie_send (c : *mut ffi::base::connection, event : event, data_type : u32) -> send_cookie; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * * This form can be used only if the request will cause * a reply to be generated. Any returned error will be * placed in the event queue. */ pub fn xcb_xevie_send_unchecked (c : *mut ffi::base::connection, event : event, data_type : u32) -> send_cookie; /** * Return the reply * @param c The connection * @param cookie The cookie * @param e The generic_error supplied * * Returns the reply of the request asked by * * The parameter @p e supplied to this function must be NULL if * xcb_xevie_send_unchecked(). is used. * Otherwise, it stores the error if any. 
* * The returned value must be freed by the caller using free(). */ pub fn xcb_xevie_send_reply (c : *mut ffi::base::connection, cookie : send_cookie, e : *mut *mut ffi::base::generic_error) -> *mut send_reply; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * */ pub fn xcb_xevie_select_input (c : *mut ffi::base::connection, event_mask : u32) -> select_input_cookie; /** * * @param c The connection * @return A cookie * * Delivers a request to the X server. * * This form can be used only if the request will cause * a reply to be generated. Any returned error will be * placed in the event queue.<|fim▁hole|>/** * Return the reply * @param c The connection * @param cookie The cookie * @param e The generic_error supplied * * Returns the reply of the request asked by * * The parameter @p e supplied to this function must be NULL if * xcb_xevie_select_input_unchecked(). is used. * Otherwise, it stores the error if any. * * The returned value must be freed by the caller using free(). */ pub fn xcb_xevie_select_input_reply (c : *mut ffi::base::connection, cookie : select_input_cookie, e : *mut *mut ffi::base::generic_error) -> *mut select_input_reply; }<|fim▁end|>
*/ pub fn xcb_xevie_select_input_unchecked (c : *mut ffi::base::connection, event_mask : u32) -> select_input_cookie;
<|file_name|>list_property.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ InaSAFE Disaster risk assessment tool developed by AusAid - **metadata module.** Contact : [email protected] .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = '[email protected]' __revision__ = '$Format:%H$' __date__ = '10/12/15' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') import json from types import NoneType from safe.common.exceptions import MetadataCastError from safe.metadata.property import BaseProperty class ListProperty(BaseProperty): """A property that accepts list input.""" # if you edit this you need to adapt accordingly xml_value and is_valid _allowed_python_types = [list, NoneType] def __init__(self, name, value, xml_path): super(ListProperty, self).__init__( name, value, xml_path, self._allowed_python_types) @classmethod def is_valid(cls, value): return True <|fim▁hole|> return json.loads(value) except ValueError as e: raise MetadataCastError(e) @property def xml_value(self): if self.python_type is list: return json.dumps(self.value) elif self.python_type is NoneType: return '' else: raise RuntimeError('self._allowed_python_types and self.xml_value' 'are out of sync. This should never happen')<|fim▁end|>
def cast_from_str(self, value): try:
<|file_name|>keys.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 (c) rust-themis developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Cryptographic keys. //! //! This module contains data structures for keys supported by Themis: RSA and ECDSA key pairs. //! //! - [`EcdsaKeyPair`] consists of [`EcdsaPublicKey`] and [`EcdsaPrivateKey`] //! - [`RsaKeyPair`] consists of [`RsaPublicKey`] and [`RsaPrivateKey`] //! //! There are also generic data types which can hold keys of either kind: //! //! - [`KeyPair`] consists of [`PublicKey`] and [`PrivateKey`] //! //! `KeyPair` may hold either an `EcdsaKeyPair` or an `RsaKeyPair`. It is guaranteed to contain //! keys of matching kind, just as individual keys are guaranteed to be of the specified kind. //! //! [`EcdsaKeyPair`]: struct.EcdsaKeyPair.html //! [`EcdsaPublicKey`]: struct.EcdsaPublicKey.html //! [`EcdsaPrivateKey`]: struct.EcdsaPrivateKey.html //! [`RsaKeyPair`]: struct.RsaKeyPair.html //! [`RsaPublicKey`]: struct.RsaPublicKey.html //! [`RsaPrivateKey`]: struct.RsaPrivateKey.html //! [`KeyPair`]: struct.KeyPair.html //! [`PublicKey`]: struct.PublicKey.html //! [`PrivateKey`]: struct.PrivateKey.html //! //! # Examples //! //! ## Splitting and joining //! //! [Key generation functions][keygen] return matching key pairs. Some APIs (like Secure Message //! in encryption mode) require you to pass key pairs so you are ready to go. Sometimes you may //! need the keys separately, in which case they can be easily split into public and private half: //! //! [keygen]: ../keygen/index.html //! //! ``` //! use themis::keygen::gen_ec_key_pair; //! //! let key_pair = gen_ec_key_pair(); //! //! let (private, public) = key_pair.split(); //! ``` //! //! You may join them back into a pair if you wish: //! //! ``` //! # use themis::keygen::gen_ec_key_pair; //! use themis::keys::EcdsaKeyPair; //! //! # let key_pair = gen_ec_key_pair(); //! # let (private, public) = key_pair.split(); //! let key_pair = EcdsaKeyPair::join(private, public); //! ``` //! //! Joining is a zero-cost and error-free operation for concrete key kinds (RSA or ECDSA). //! However, when joining generic keys one must explicitly check for kind mismatch: //! //! ``` //! # fn check() -> Result<(), themis::Error> { //! use themis::keygen::{gen_ec_key_pair, gen_rsa_key_pair}; //! use themis::keys::KeyPair; //! //! let (private_ec, _) = gen_ec_key_pair().split(); //! let (_, public_rsa) = gen_rsa_key_pair().split(); //! //! // This will return an Err because ECDSA private key does not match RSA public key: //! let key_pair = KeyPair::try_join(private_ec, public_rsa)?; //! # Ok(()) //! # } //! # //! # assert!(check().is_err()); //! ``` //! //! Note that all individual keys as well as key pairs are automatically convertible into generic //! types via the standard `From`-`Into` traits. //! //! ## Serializing and deserializing //! //! All keys can be converted into bytes slices via the standard `AsRef` trait so that you can //! 
easily write them into files, send via network, pass to other Themis functions, and so on: //! //! ```no_run //! # fn main() -> Result<(), std::io::Error> { //! use std::fs::File; //! use std::io::Write; //! //! use themis::keygen::gen_rsa_key_pair; //! //! let (private, public) = gen_rsa_key_pair().split(); //! //! let mut file = File::create("private.key")?; //! file.write_all(private.as_ref())?; //! # Ok(()) //! # } //! ``` //! //! You can also restore the keys from raw bytes using `try_from_slice` methods. They check that //! the byte slice indeed contains a valid Themis key of the specified kind: //! //! ``` //! # fn main() -> Result<(), themis::Error> { //! use themis::keys::EcdsaPublicKey; //! //! # const ECDSA_PUBLIC: &[u8] = b"\x55\x45\x43\x32\x00\x00\x00\x2d\x13\x8b\xdf\x0c\x02\x1f\x09\x88\x39\xd9\x73\x3a\x84\x8f\xa8\x50\xd9\x2b\xed\x3d\x38\xcf\x1d\xd0\xce\xf4\xae\xdb\xcf\xaf\xcb\x6b\xa5\x4a\x08\x11\x21"; //! # //! # fn receive() -> Vec<u8> { //! # ECDSA_PUBLIC.to_vec() //! # } //! # //! // Obtain the key bytes somehow (e.g., read from file). //! let bytes: Vec<u8> = receive(); //! //! let public = EcdsaPublicKey::try_from_slice(&bytes)?; //! # Ok(()) //! # } //! ``` use std::fmt; use std::ptr; use bindings::{themis_gen_sym_key, themis_get_asym_key_kind, themis_is_valid_asym_key}; use zeroize::Zeroize; use crate::error::{Error, ErrorKind, Result}; use crate::utils::into_raw_parts; /// Key material. #[derive(Clone, Eq, PartialEq, Hash)] pub(crate) struct KeyBytes(Vec<u8>); impl KeyBytes { /// Makes a key from an owned byte vector. pub fn from_vec(bytes: Vec<u8>) -> Result<KeyBytes> { if bytes.is_empty() { Err(Error::with_kind(ErrorKind::InvalidParameter)) } else { Ok(KeyBytes(bytes)) } } /// Makes a key from a copy of a byte slice. pub fn copy_slice(bytes: &[u8]) -> Result<KeyBytes> { KeyBytes::from_vec(bytes.to_vec()) } /// Returns key bytes. pub fn as_bytes(&self) -> &[u8] { &self.0 } } impl fmt::Debug for KeyBytes { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "KeyBytes({} bytes)", self.0.len()) } } // Make sure that sensitive key material is removed from memory as soon as it is no longer needed. impl Drop for KeyBytes { fn drop(&mut self) { self.0.zeroize(); } } // // Key type definitions // /// RSA private key. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct RsaPrivateKey { inner: KeyBytes, } /// RSA public key. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct RsaPublicKey { inner: KeyBytes, } /// RSA key pair. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct RsaKeyPair { private_key: KeyBytes, public_key: KeyBytes, } /// ECDSA private key. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct EcdsaPrivateKey { inner: KeyBytes, } /// ECDSA public key. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct EcdsaPublicKey { inner: KeyBytes, } /// ECDSA key pair. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct EcdsaKeyPair { private_key: KeyBytes, public_key: KeyBytes, } /// A private key. /// /// This structure is used by cryptographic services which can support any kind of key. /// [`RsaPrivateKey`] or [`EcdsaPrivateKey`] can be turned into a `PrivateKey` at no cost. /// /// [`RsaPrivateKey`]: struct.RsaPrivateKey.html /// [`EcdsaPrivateKey`]: struct.EcdsaPrivateKey.html #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct PrivateKey { inner: KeyBytes, } /// A public key. /// /// This structure is used by cryptographic services which can support any kind of key. 
/// [`RsaPublicKey`] or [`EcdsaPublicKey`] can be turned into a `PublicKey` at no cost. /// /// [`RsaPublicKey`]: struct.RsaPublicKey.html /// [`EcdsaPublicKey`]: struct.EcdsaPublicKey.html #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct PublicKey { inner: KeyBytes, } /// A pair of asymmetric keys. /// /// This structure is used by cryptographic services which can support any kind of key pair. /// [`RsaKeyPair`] or [`EcdsaKeyPair`] can be turned into a `KeyPair` at no cost. A pair of /// [`PrivateKey`] and [`PublicKey`] can be joined into a `KeyPair` after a quick type check /// if their kinds match (either RSA or ECDSA). /// /// [`RsaKeyPair`]: struct.RsaKeyPair.html /// [`EcdsaKeyPair`]: struct.EcdsaKeyPair.html /// [`PrivateKey`]: struct.PrivateKey.html /// [`PublicKey`]: struct.PublicKey.html #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct KeyPair { private_key: KeyBytes, public_key: KeyBytes, } /// Kind of an asymmetric key. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum KeyKind { /// RSA private key. RsaPrivate, /// RSA public key. RsaPublic, /// ECDSA private key. EcdsaPrivate, /// ECDSA public key. EcdsaPublic, } // // Key pairs // impl RsaKeyPair { /// Splits this key pair into private and public keys. pub fn split(self) -> (RsaPrivateKey, RsaPublicKey) { ( RsaPrivateKey { inner: self.private_key, }, RsaPublicKey { inner: self.public_key, }, ) } /// Joins a pair of private and public keys. /// /// Note that this method _does not_ verify that the keys match: i.e., that it is possible /// to use the private key to decrypt data encrypted with the public key. pub fn join(private_key: RsaPrivateKey, public_key: RsaPublicKey) -> RsaKeyPair { RsaKeyPair { private_key: private_key.inner, public_key: public_key.inner, } } } impl EcdsaKeyPair { /// Splits this key pair into private and public keys. pub fn split(self) -> (EcdsaPrivateKey, EcdsaPublicKey) { ( EcdsaPrivateKey { inner: self.private_key, }, EcdsaPublicKey { inner: self.public_key, }, ) } /// Joins a pair of private and public keys. /// /// Note that this method _does not_ verify that the keys match: i.e., that it is possible /// to use the private key to decrypt data encrypted with the public key. pub fn join(private_key: EcdsaPrivateKey, public_key: EcdsaPublicKey) -> EcdsaKeyPair { EcdsaKeyPair { private_key: private_key.inner, public_key: public_key.inner, } } } impl KeyPair { /// Access bytes of the private key. pub(crate) fn private_key_bytes(&self) -> &[u8] { self.private_key.as_bytes() } /// Access bytes of the public key. pub(crate) fn public_key_bytes(&self) -> &[u8] { self.public_key.as_bytes() } /// Splits this key pair into private and public keys. pub fn split(self) -> (PrivateKey, PublicKey) { ( PrivateKey { inner: self.private_key, }, PublicKey { inner: self.public_key, }, ) } /// Joins a pair of private and public keys. /// /// Note that this method _does not_ verify that the keys match: i.e., that it is possible /// to use the private key to decrypt data encrypted with the public key. /// /// However, it does verify that _the kinds_ of the keys match: i.e., that they are both /// either RSA or ECDSA keys. An error is returned if that’s not the case. You can check /// the kind of the key beforehand via its `kind()` method. 
pub fn try_join( private_key: impl Into<PrivateKey>, public_key: impl Into<PublicKey>, ) -> Result<KeyPair> { let (private_key, public_key) = (private_key.into(), public_key.into()); match (private_key.kind(), public_key.kind()) { (KeyKind::RsaPrivate, KeyKind::RsaPublic) => {} (KeyKind::EcdsaPrivate, KeyKind::EcdsaPublic) => {} _ => { return Err(Error::with_kind(ErrorKind::InvalidParameter)); } } Ok(KeyPair { private_key: private_key.inner, public_key: public_key.inner, }) } } // // Individual keys // impl RsaPrivateKey { /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid RSA private key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? { KeyKind::RsaPrivate => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } /// Wraps an existing trusted byte vector into a key. pub(crate) fn from_vec(bytes: Vec<u8>) -> Self { let key = KeyBytes::from_vec(bytes).expect("invalid empty key"); debug_assert_eq!(get_key_kind(&key), Ok(KeyKind::RsaPrivate)); Self { inner: key } } } impl RsaPublicKey { /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid RSA public key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? { KeyKind::RsaPublic => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } /// Wraps an existing trusted byte vector into a key. pub(crate) fn from_vec(bytes: Vec<u8>) -> Self { let key = KeyBytes::from_vec(bytes).expect("invalid empty key"); debug_assert_eq!(get_key_kind(&key), Ok(KeyKind::RsaPublic)); Self { inner: key } } } impl EcdsaPrivateKey { /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid ECDSA private key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? { KeyKind::EcdsaPrivate => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } /// Wraps an existing trusted byte vector into a key. pub(crate) fn from_vec(bytes: Vec<u8>) -> Self { let key = KeyBytes::from_vec(bytes).expect("invalid empty key"); debug_assert_eq!(get_key_kind(&key), Ok(KeyKind::EcdsaPrivate)); Self { inner: key } } } impl EcdsaPublicKey { /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid ECDSA public key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? { KeyKind::EcdsaPublic => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } /// Wraps an existing trusted byte vector into a key. pub(crate) fn from_vec(bytes: Vec<u8>) -> Self { let key = KeyBytes::from_vec(bytes).expect("invalid empty key"); debug_assert_eq!(get_key_kind(&key), Ok(KeyKind::EcdsaPublic)); Self { inner: key } } } impl PrivateKey { /// Retrieves actual kind of the stored key. pub fn kind(&self) -> KeyKind { get_key_kind_trusted(&self.inner) } /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid RSA or ECDSA private key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? 
{ KeyKind::RsaPrivate => Ok(Self { inner: key }), KeyKind::EcdsaPrivate => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } } impl PublicKey { /// Retrieves actual kind of the stored key. pub fn kind(&self) -> KeyKind { get_key_kind_trusted(&self.inner) } /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid RSA or ECDSA public key. pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { let key = KeyBytes::copy_slice(bytes.as_ref())?; match get_key_kind(&key)? { KeyKind::RsaPublic => Ok(Self { inner: key }), KeyKind::EcdsaPublic => Ok(Self { inner: key }), _ => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } } // The following functions have to be called in a particular sequence in order to be safe to use. // That's why they are free functions, not methods of KeyBytes. // // You can call get_key_kind() on any byte slice. If you get an Ok result back then you can call // get_key_kind_trusted() again on the very same byte slice to get the result faster. // // There's also a reason why they receive &KeyBytes, not just &[u8]. This is to maintain correct // pointer alignment. fn get_key_kind(key: &KeyBytes) -> Result<KeyKind> { is_valid_themis_key(key)?; try_get_key_kind(key) } fn get_key_kind_trusted(key: &KeyBytes) -> KeyKind { debug_assert!(is_valid_themis_key(key).is_ok()); try_get_key_kind(key).expect("get_key_kind_trusted() called for invalid key") } fn is_valid_themis_key(key: &KeyBytes) -> Result<()> { let (ptr, len) = into_raw_parts(key.as_bytes()); let status = unsafe { themis_is_valid_asym_key(ptr, len) }; let error = Error::from_themis_status(status); if error.kind() != ErrorKind::Success { return Err(error); } Ok(()) } fn try_get_key_kind(key: &KeyBytes) -> Result<KeyKind> { use bindings::themis_key_kind::*; let (ptr, len) = into_raw_parts(key.as_bytes()); let kind = unsafe { themis_get_asym_key_kind(ptr, len) }; match kind { THEMIS_KEY_RSA_PRIVATE => Ok(KeyKind::RsaPrivate), THEMIS_KEY_RSA_PUBLIC => Ok(KeyKind::RsaPublic), THEMIS_KEY_EC_PRIVATE => Ok(KeyKind::EcdsaPrivate), THEMIS_KEY_EC_PUBLIC => Ok(KeyKind::EcdsaPublic), THEMIS_KEY_INVALID => Err(Error::with_kind(ErrorKind::InvalidParameter)), } } // // Symmetric keys // /// Symmetric encryption key. /// /// These keys are used by [`SecureCell`] objects. /// /// Note that managing keys is _your_ responsibility. You have to make sure that keys /// are stored safely and are never disclosed to untrusted parties. You can consult /// [our guidelines][key-management] for some advice on key management. 
/// /// [`SecureCell`]: ../secure_cell/index.html /// [key-management]: https://docs.cossacklabs.com/themis/crypto-theory/key-management/ /// /// # Examples /// /// Generating a new symmetric key is trivial: /// /// ``` /// # fn main() -> Result<(), themis::Error> { /// use themis::keys::SymmetricKey; /// use themis::secure_cell::SecureCell; /// /// let key = SymmetricKey::new(); /// /// let cell = SecureCell::with_key(&key)?.seal(); /// /// let encrypted = cell.encrypt(b"message")?; /// let decrypted = cell.decrypt(&encrypted)?; /// assert_eq!(decrypted, b"message"); /// # Ok(()) /// # } /// ``` /// /// Keys can be converted into a byte slice via the standard `AsRef` trait so that you can /// easily write them into files, send via network, pass to other Themis functions, and so on: /// /// ```no_run /// # fn main() -> Result<(), std::io::Error> { /// # use themis::keys::SymmetricKey; /// # let key = SymmetricKey::new(); /// use std::fs::File; /// use std::io::Write; /// /// let mut file = File::create("master.key")?; /// file.write_all(key.as_ref())?; /// # Ok(()) /// # } /// ```<|fim▁hole|>/// /// You can also restore the keys from raw bytes using `try_from_slice` method. It checks that /// the byte slice indeed contains a valid Themis key: /// /// ```no_run /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// # use themis::keys::SymmetricKey; /// use std::fs::File; /// use std::io::Read; /// /// let mut file = File::open("master.key")?; /// /// let mut buffer = Vec::new(); /// file.read_to_end(&mut buffer)?; /// /// let key = SymmetricKey::try_from_slice(&buffer)?; /// # Ok(()) /// # } /// ``` #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct SymmetricKey { inner: KeyBytes, } impl SymmetricKey { /// Generates a new symmetric key. /// /// # Panics /// /// This function may panic in case of unrecoverable errors inside the library /// (e.g., out-of-memory or assertion violations). pub fn new() -> Self { match Self::try_gen_sym_key() { Ok(key) => key, Err(e) => panic!("themis_gen_sym_key() failed: {}", e), } } /// Generates a new symmetric key. fn try_gen_sym_key() -> Result<Self> { let mut key = Vec::new(); let mut key_len = 0; unsafe { let status = themis_gen_sym_key(ptr::null_mut(), &mut key_len); let error = Error::from_themis_status(status); if error.kind() != ErrorKind::BufferTooSmall { return Err(error); } } key.reserve(key_len); unsafe { let status = themis_gen_sym_key(key.as_mut_ptr(), &mut key_len); let error = Error::from_themis_status(status); if error.kind() != ErrorKind::Success { return Err(error); } debug_assert!(key_len <= key.capacity()); key.set_len(key_len as usize); } Ok(Self { inner: KeyBytes::from_vec(key).expect("invalid empty key"), }) } /// Parses a key from a byte slice. /// /// Returns an error if the slice does not contain a valid symmetric key. 
pub fn try_from_slice(bytes: impl AsRef<[u8]>) -> Result<Self> { Ok(Self { inner: KeyBytes::copy_slice(bytes.as_ref())?, }) } } impl Default for SymmetricKey { fn default() -> Self { SymmetricKey::new() } } // // AsRef<[u8]> casts // impl AsRef<[u8]> for RsaPrivateKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for RsaPublicKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for EcdsaPrivateKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for EcdsaPublicKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for PrivateKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for PublicKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } impl AsRef<[u8]> for SymmetricKey { fn as_ref(&self) -> &[u8] { self.inner.as_bytes() } } // // From/Into conversions // impl From<RsaPrivateKey> for PrivateKey { fn from(private_key: RsaPrivateKey) -> PrivateKey { PrivateKey { inner: private_key.inner, } } } impl From<RsaPublicKey> for PublicKey { fn from(public_key: RsaPublicKey) -> PublicKey { PublicKey { inner: public_key.inner, } } } impl From<EcdsaPrivateKey> for PrivateKey { fn from(private_key: EcdsaPrivateKey) -> PrivateKey { PrivateKey { inner: private_key.inner, } } } impl From<EcdsaPublicKey> for PublicKey { fn from(public_key: EcdsaPublicKey) -> PublicKey { PublicKey { inner: public_key.inner, } } } impl From<RsaKeyPair> for KeyPair { fn from(key_pair: RsaKeyPair) -> KeyPair { KeyPair { private_key: key_pair.private_key, public_key: key_pair.public_key, } } } impl From<EcdsaKeyPair> for KeyPair { fn from(key_pair: EcdsaKeyPair) -> KeyPair { KeyPair { private_key: key_pair.private_key, public_key: key_pair.public_key, } } }<|fim▁end|>
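The Rust comments in the row above spell out a validate-then-trust contract: get_key_kind() fully checks untrusted bytes, and get_key_kind_trusted() may only be called on bytes that already passed that check. A minimal, language-neutral sketch of the same contract in Python (the one-byte tags below are invented for illustration and are not Themis' real key headers):

# Validate-then-trust split, mirroring get_key_kind()/get_key_kind_trusted().
# The 0x01..0x04 tags are made up; real Themis keys carry a binary header
# that themis_is_valid_asym_key() checks.
_KEY_KINDS = {0x01: "rsa-private", 0x02: "rsa-public",
              0x03: "ecdsa-private", 0x04: "ecdsa-public"}

def get_key_kind(key: bytes) -> str:
    """Checked path: safe to call on arbitrary untrusted bytes."""
    if not key or key[0] not in _KEY_KINDS:
        raise ValueError("invalid key material")
    return _KEY_KINDS[key[0]]

def get_key_kind_trusted(key: bytes) -> str:
    """Fast path: only legal on bytes that already passed get_key_kind()."""
    assert key and key[0] in _KEY_KINDS, "called for invalid key"
    return _KEY_KINDS[key[0]]

blob = bytes([0x02]) + b"payload"
kind = get_key_kind(blob)                   # validate once on untrusted input
assert get_key_kind_trusted(blob) == kind   # then skip re-validation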
<|file_name|>random_mover.py<|end_file_name|><|fim▁begin|>import random from ..simulator import Simulator class RandomMover(Simulator): ACTIONS = ('up', 'down', 'left', 'right') def start(self): self.init_game() while True: self._check_pygame_events() for drone in self.drones:<|fim▁hole|> self.print_map() self._draw()<|fim▁end|>
drone.do_move(random.choice(self.ACTIONS))
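Each row in this dump pairs a prompt (prefix, hole marker, suffix, end marker) with the completion that fills the hole, as in the random_mover.py row just above. A rough sketch of stitching a row back into its original source (marker spellings are taken from the rows themselves; \u2581 is the low-bar character they use):

# Rebuild one fill-in-the-middle row: prefix + completion + suffix.
HOLE = "<|fim\u2581hole|>"
END = "<|fim\u2581end|>"

def reassemble(prompt: str, completion: str) -> str:
    prefix, rest = prompt.split(HOLE, 1)   # text before the hole
    suffix = rest.split(END, 1)[0]         # text after the hole
    return prefix + completion + suffix

prompt = "for drone in self.drones:" + HOLE + "\n    self.print_map()" + END
print(reassemble(prompt, "\n    drone.do_move(...)"))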
<|file_name|>onlinemlp_backend.py<|end_file_name|><|fim▁begin|>from .base_backend import BaseBackend class MlpBackend(BaseBackend): def __init__(self, inpmulti, hidmulti, outmulti, learning_rate, inp96, hid96, out96, path, buffsize, mean, std, statspath): from neupre.misc.builders import build_model_mlp super(MlpBackend, self).__init__(int(buffsize)) self.model_multistep = build_model_mlp(inpmulti, hidmulti, outmulti) self.model_onestep96 = build_model_mlp(inp96, hid96, out96) self.initialize(False, path, mean, std, statspath) def train(self): log2 = self.model_multistep.fit(self.X_train_multistep, self.y_train_multistep, batch_size=10, nb_epoch=2, validation_split=0.1, verbose=1) log3 = self.model_onestep96.fit(self.X_train_onestep96, self.y_train_onestep96, batch_size=10, nb_epoch=2,<|fim▁hole|> p2 = self.model_multistep.predict(X_test_multistep) p3 = self.model_onestep96.predict(X_test_onestep96) return [p2, p3]<|fim▁end|>
validation_split=0.1, verbose=1) return [log2, log3] def predict(self, X_test_multistep, X_test_onestep96):
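build_model_mlp comes from neupre.misc.builders and is not shown in the row above; one plausible shape for it, sketched with Keras (the layer sizes, activations, and compile settings are guesses, not the project's actual builder):

# Hypothetical stand-in for neupre.misc.builders.build_model_mlp: a dense
# net with `inp` inputs, one hidden layer of `hid` units and `out` outputs,
# matching how MlpBackend calls it with (inp, hid, out) triples.
from keras.models import Sequential
from keras.layers import Dense

def build_model_mlp(inp, hid, out):
    model = Sequential([
        Dense(hid, activation="relu", input_shape=(inp,)),
        Dense(out, activation="linear"),
    ])
    model.compile(optimizer="adam", loss="mse")
    return model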
<|file_name|>DocumentCard.Props.ts<|end_file_name|><|fim▁begin|>import * as React from 'react'; import { DocumentCard } from './DocumentCard'; import { DocumentCardTitle } from './DocumentCardTitle'; import { DocumentCardPreview } from './DocumentCardPreview'; import { DocumentCardLocation } from './DocumentCardLocation'; import { DocumentCardActivity } from './DocumentCardActivity'; import { DocumentCardActions } from './DocumentCardActions'; import { PersonaInitialsColor } from '../../Persona'; import { ImageFit } from '../../Image'; import { IButtonProps } from '../../Button'; export interface IDocumentCard { } export interface IDocumentCardProps extends React.Props<DocumentCard> { /** * Optional callback to access the IDocumentCard interface. Use this instead of ref for accessing * the public methods and properties of the component. */ componentRef?: (component: IDocumentCard) => void; /** * The type of DocumentCard to display. * @default DocumentCardType.normal */ type?: DocumentCardType; /** * Function to call when the card is clicked or keyboard Enter/Space is pushed. */ onClick?: (ev?: React.SyntheticEvent<HTMLElement>) => void; /** * A URL to navigate to when the card is clicked. If a function has also been provided, * it will be used instead of the URL. */ onClickHref?: string; /** * Optional class for document card. */ className?: string; /** * Hex color value of the line below the card, which should correspond to the document type. * This should only be supplied when using the 'compact' card layout. */ accentColor?: string; } export enum DocumentCardType { /** * Standard DocumentCard. */ normal = 0, /** * Compact layout. Displays the preview beside the details, rather than above. */ compact = 1 } export interface IDocumentCardPreviewProps extends React.Props<DocumentCardPreview> { /** * One or more preview images to display. */ previewImages: IDocumentCardPreviewImage[]; /** * The function return string that will describe the number of overflow documents. * such as (overflowCount: number) => `+${ overflowCount } more`, */ getOverflowDocumentCountText?: (overflowCount: number) => string; } export interface IDocumentCardPreviewImage { /** * File name for the document this preview represents. */ name?: string; /** * URL to view the file. */ url?: string; /** * Path to the preview image. */ previewImageSrc?: string; /** * Deprecated at v1.3.6, to be removed at >= v2.0.0. * @deprecated */ errorImageSrc?: string; /** * Path to the icon associated with this document type. */ iconSrc?: string; /** * If provided, forces the preview image to be this width. */ width?: number; /** * If provided, forces the preview image to be this height. */ height?: number; /** * Used to determine how to size the image to fit the dimensions of the component. * If both dimensions are provided, then the image is fit using ImageFit.scale, otherwise ImageFit.none is used. */ imageFit?: ImageFit; /** * Hex color value of the line below the preview, which should correspond to the document type. */ accentColor?: string; } export interface IDocumentCardTitleProps extends React.Props<DocumentCardTitle> { /** * Title text. If the card represents more than one document, this should be the title of one document and a "+X" string. For example, a collection of four documents would have a string of "Document.docx +3". */ title: string; /** * Whether we truncate the title to fit within the box. May have a performance impact. 
* @defaultvalue true */ shouldTruncate?: boolean; } export interface IDocumentCardLocationProps extends React.Props<DocumentCardLocation> { /** * Text for the location of the document. */ location: string; /** * URL to navigate to for this location. */ locationHref?: string; /** * Function to call when the location is clicked. */ onClick?: (ev?: React.MouseEvent<HTMLElement>) => void; /** * Aria label for the link to the document location. */ ariaLabel?: string;<|fim▁hole|> /** * Describes the activity that has taken place, such as "Created Feb 23, 2016". */ activity: string; /** * One or more people who are involved in this activity. */ people: IDocumentCardActivityPerson[]; } export interface IDocumentCardActivityPerson { /** * The name of the person. */ name: string; /** * Path to the profile photo of the person. */ profileImageSrc: string; /** * The user's initials to display in the profile photo area when there is no image. */ initials?: string; /** * The background color when the user's initials are displayed. * @defaultvalue PersonaInitialsColor.blue */ initialsColor?: PersonaInitialsColor; } export interface IDocumentCardActionsProps extends React.Props<DocumentCardActions> { /** * The actions available for this document. */ actions: IButtonProps[]; /** * The number of views this document has received. */ views?: Number; }<|fim▁end|>
} export interface IDocumentCardActivityProps extends React.Props<DocumentCardActivity> {
<|file_name|>paddocks.js<|end_file_name|><|fim▁begin|>/**
 * API for paddock information
 *
 * Copyright (c) 2015. Elec Research.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/
 *
 * Author: Tim Miller.
 *
 **/
var express = require('express');

var Paddock = agquire('ag/db/models-mongo/paddock');
var Reading = agquire('ag/db/models-mongo/reading');
var permissions = require('../../utils/permissions');
var responder = agquire('ag/routes/utils/responder');
var PaddockController = agquire('ag/db/controllers/PaddockController');

var router = express.Router();

/*
Route                           HTTP Verb       Description
=====================================================
/api/paddocks                   GET             Get paddocks.
/api/paddocks/:object_id        GET             Get single paddock.
/api/paddocks                   POST            Create a paddock.
/api/paddocks/:object_id        PUT             Update a paddock.
/api/paddocks/:object_id        DELETE          Delete a paddock.
*/

/**
 * @api {post} /spatial/paddocks/ Create a Paddock
 * @apiName PostPaddocks
 * @apiGroup Paddocks
 *
 * @apiParam {String} name Name of new paddock.
 * @apiParam {Number} farm_id Id of farm that this paddock belongs to.
 * @apiParam {Object} loc Contains coordinates of paddock. Must be a GeoJSON Polygon (see example).
 * @apiParam {String} [created] Timestamp of paddock creation time.
 *
 *
 * @apiParamExample {json} Request-Example:
 *      {
 *          "name": "Demo Paddock",
 *          "farm_id": 1,
 *          "loc": {
 *              "coordinates":[
 *                  [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
 *              ]
 *          },
 *          "created": "2015-04-19"
 *      }
 *
 * @apiSuccessExample Success-Response
 *      HTTP/1.1 200 OK
 *      {
 *          "paddock":
 *          {
 *              "_id": "primary_key_string",
 *              "name": "Demo Paddock",
 *              "farm_id": 1,
 *              "created": "2015-04-19",
 *              "updated": "2015-04-19",
 *              "loc": {
 *                  "type": "Polygon",
 *                  "coordinates": [
 *                      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
 *                  ]
 *              }
 *          }
 *      }
 *
 * @apiError IncompleteRequest Request body is missing required fields.
 * @apiErrorExample IncompleteRequest
 *      HTTP/1.1 422 Unprocessable Entity
 *      {
 *          error: "Missing field response message"
 *      }
 */
router.post('/', function(req, res) {
    var paddockDetails = req.body;

    var access = {};
    access.requiredPermissions = [permissions.kEditFarmPermissions];
    access.farm = {id: paddockDetails.farm_id};

    responder.authAndRespond(req, res, access, function() {
        return {
            paddock: PaddockController.createPaddock(paddockDetails)
        }
    });<|fim▁hole|>

/**
 * @api {put} /spatial/paddocks/:id Update a Paddock
 * @apiName PutPaddocks
 * @apiGroup Paddocks
 *
 * @apiParam {string} id Primary key of updated paddock.
 * @apiParam (paddock) {String} [name] Name of updated paddock.
 * @apiParam (paddock) {Object} [loc] Corners of updated paddock. Must be a GeoJSON Polygon (see example).
 * @apiParam (paddock) {Object} [updated] Timestamp of update time.
 *
 * @apiSuccessExample Success-Response
 *      HTTP/1.1 200 OK
 *      {
 *          "paddock":
 *          {
 *              "_id": "primary_key_string",
 *              "name": "Demo Paddock",
 *              "farm_id": 1,
 *              "created": "2015-04-19",
 *              "loc":
 *              {
 *                  "type": "Polygon",
 *                  "coordinates":
 *                  [
 *                      [
 *                          [0, 0], [0, 1], [1, 1], [1, 0], [0, 0]
 *                      ]
 *                  ]
 *              }
 *          }
 *      }
 *
 * @apiError PaddockNotFound couldn't find paddock with object_id parameter. 
 * @apiErrorExample PaddockNotFound
 *      HTTP/1.1 422 Unprocessable Entity
 *      {
 *          error: "Paddock Not Found"
 *      }
 */
router.put('/:object_id', function(req, res) {

    var id = req.params.object_id;
    var paddockDetails = req.body;

    // get paddock first for purpose of checking farm permissions
    return PaddockController.getOnePaddock(id)
        .then(function(paddock) {

            var access = {};
            access.requiredPermissions = [permissions.kEditFarmPermissions];
            access.farm = {id: paddockDetails.farm_id};

            responder.authAndRespond(req, res, access, function() {
                return {
                    paddock: PaddockController.updatePaddock(id, paddockDetails)
                }
            });
        })
        .catch(function(err) {
            return res.status(422).send({error: err.message});
        });
});

/**
 * Delete paddock
 */

/**
 * @api {delete} /spatial/paddocks/:id Delete a Paddock
 * @apiName DeletePaddocks
 * @apiGroup Paddocks
 *
 * @apiParam {string} id Primary key of paddock to delete.
 *
 * @apiError PaddockNotFound couldn't find paddock with object_id parameter.
 * @apiErrorExample PaddockNotFound
 *      HTTP/1.1 422 Unprocessable Entity
 *      {
 *          error: "Paddock Not Found"
 *      }
 *
 * @apiError RemovePastureMeasurementError couldn't remove pasture measurements that belong to paddock.
 * @apiErrorExample RemovePastureMeasurementError
 *      HTTP/1.1 422 Unprocessable Entity
 *      {
 *          error: "Couldn't remove pasture measurements, operation aborted"
 *      }
 *
 * @apiSuccessExample
 *      HTTP/1.1 200 OK
 *      {
 *          "result": 1
 *      }
 */
router.delete('/:object_id', function(req, res) {
    var id = req.params.object_id;

    // get paddock first for purpose of checking farm permissions
    return PaddockController.getOnePaddock(id)
        .then(function(paddock) {

            var access = {};
            access.requiredPermissions = [permissions.kEditFarmPermissions];
            access.farm = {id: paddock.farm_id};

            responder.authAndRespond(req, res, access, function() {
                return {
                    result: PaddockController.deletePaddock(id)
                }
            });
        })
        .catch(function(err) {
            return res.status(422).send({error: err.message});
        });

    /*
    return PaddockController.deletePaddock(id).then(function() {
        return res.send({message: 'Paddock Deleted'});
    })
    .catch(function(err){
        return res.status(422).send({error: err.message});
    });
    */
});

/**
 * Get paddocks.
 */

/**
 * @api {get} /spatial/paddocks/ Get Paddocks
 * @apiName GetPaddocks
 * @apiGroup Paddocks
 *
 * @apiParam {Number} [include] List of objects to include in response (farm).
 * @apiParam {Number} [farm_id] Id of farm that contains requested paddocks.
 * @apiParam {Number} [limit] Maximum number of results that the query should return. The AgBase API cannot return more than 1000 results.
 * @apiParam {Number} [offset] The offset from the start of the query result.
 * @apiParam {String} [id] Id of paddock to return. 
* * @apiErrorExample Error-Response * HTTP/1.1 422 Unprocessable Entity * { * "error": "error message" * } * * @apiSuccessExample * HTTP/1.1 200 OK * { * "paddocks": * [ * { * "_id": "primary_key_string", * "name": "Demo Paddock", * "farm_id": 1, * "created": "2015-04-19" * "loc": * { * "type": "Polygon", * "coordinates": * [ * [ * [0, 0], [0, 1], [1, 1], [1, 0], [0, 0] * ] * ] * } * } * ] * } */ router.get('/', function(req, res) { var params = req.query; var access = {}; access.requiredPermissions = [permissions.kViewFarmPermissions]; if(params.id) { responder.authAndRespond(req, res, access, function() { return { paddock: PaddockController.getOnePaddock(params.id) } }); } else { var farms; var limit; var offset; var includeFarms = params.include === "farm"; if(params.farm_id) { farms = params.farm_id.split(','); access.farm = {id: farms}; } if(params.limit) { limit = params.limit; } if(params.offset) { offset = params.offset; } responder.authAndRespond(req, res, access, function() { return { paddocks: PaddockController.getPaddocks(includeFarms, farms, limit, offset) } }); } }); /** * countPaddocks(farms) * @api {get} /spatial/paddocks/count/ Get a count of Paddocks * @apiName CountPaddocks * @apiGroup Paddocks * * @apiParam {Number} [farm_id] Return paddocks that belong to farms defined by farm_id. * * @apiSuccessExample Success-Response * { * "count": <number of paddocks> * } */ router.get('/count/', function(req, res) { var params = req.query; var farms; if(params.farm_id) { farms = params.farm_id.split(','); } return PaddockController.countPaddocks(farms) .then(function(count) { return res.send({count: count}); }) .catch(function(err) { return res.status(422).send({error: err.message}); }); }); module.exports = router;<|fim▁end|>
});
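The apidoc blocks in the row above double as a request spec; a quick client-side sketch of the POST route using requests (the base URL is a placeholder and auth is omitted; the body mirrors the Request-Example):

# Create a paddock through the documented POST /api/paddocks route.
import requests

payload = {
    "name": "Demo Paddock",
    "farm_id": 1,
    "loc": {"coordinates": [
        [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]
    ]},
    "created": "2015-04-19",
}
resp = requests.post("https://example.invalid/api/paddocks", json=payload)
resp.raise_for_status()
print(resp.json()["paddock"]["_id"])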
<|file_name|>namespace_lookup.go<|end_file_name|><|fim▁begin|>package command import ( "fmt" "strings" "github.com/mitchellh/cli" "github.com/posener/complete" ) var _ cli.Command = (*NamespaceLookupCommand)(nil) var _ cli.CommandAutocomplete = (*NamespaceLookupCommand)(nil) type NamespaceLookupCommand struct { *BaseCommand } <|fim▁hole|> return "Look up an existing namespace" } func (c *NamespaceLookupCommand) Help() string { helpText := ` Usage: vault namespace lookup [options] PATH Get information about the namespace of the locally authenticated token: $ vault namespace lookup Get information about the namespace of a particular child token (e.g. ns1/ns2/): $ vault namespace lookup -namespace=ns1 ns2 ` + c.Flags().Help() return strings.TrimSpace(helpText) } func (c *NamespaceLookupCommand) Flags() *FlagSets { return c.flagSet(FlagSetHTTP | FlagSetOutputFormat) } func (c *NamespaceLookupCommand) AutocompleteArgs() complete.Predictor { return c.PredictVaultNamespaces() } func (c *NamespaceLookupCommand) AutocompleteFlags() complete.Flags { return c.Flags().Completions() } func (c *NamespaceLookupCommand) Run(args []string) int { f := c.Flags() if err := f.Parse(args); err != nil { c.UI.Error(err.Error()) return 1 } args = f.Args() switch { case len(args) < 1: c.UI.Error(fmt.Sprintf("Not enough arguments (expected 1, got %d)", len(args))) return 1 case len(args) > 1: c.UI.Error(fmt.Sprintf("Too many arguments (expected 1, got %d)", len(args))) return 1 } namespacePath := strings.TrimSpace(args[0]) client, err := c.Client() if err != nil { c.UI.Error(err.Error()) return 2 } secret, err := client.Logical().Read("sys/namespaces/" + namespacePath) if err != nil { c.UI.Error(fmt.Sprintf("Error looking up namespace: %s", err)) return 2 } if secret == nil { c.UI.Error("Namespace not found") return 2 } return OutputSecret(c.UI, secret) }<|fim▁end|>
func (c *NamespaceLookupCommand) Synopsis() string {
<|file_name|>describeGroups.js<|end_file_name|><|fim▁begin|>/* * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ <|fim▁hole|>'use strict'; var _ = require('underscore'); var common = require('./common'); var groups = function(groups) { var buf = this.buf; buf.appendUInt32BE(groups.length); _.each(groups, function(group) { common.appendString(buf, group); }); return this; }; exports.encode = function(version) { var ret = common.encode(common.DESCRIBEGROUP_API, version); ret.groups = groups; return ret; };<|fim▁end|>
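The groups() encoder above writes a 32-bit big-endian element count and then each group name; the same framing in Python, assuming common.appendString emits Kafka-style 16-bit length-prefixed strings (an assumption about that helper, which is not shown here):

# Kafka-style framing: UInt32BE count, then Int16BE length + bytes per string.
import struct

def encode_groups(groups):
    out = struct.pack(">I", len(groups))
    for name in groups:
        raw = name.encode("utf-8")
        out += struct.pack(">h", len(raw)) + raw
    return out

assert encode_groups(["g1"]) == b"\x00\x00\x00\x01\x00\x02g1"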
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .resnet_preact import resnet18_preact from .resnet_preact_bin import resnet18_preact_bin import torch, torch.nn as nn _model_factory = { "resnet18_preact":resnet18_preact, "resnet18_preact_bin":resnet18_preact_bin } class Classifier(torch.nn.Module): def __init__(self, feat_extractor,num_classes=None): super(Classifier,self).__init__() self.feat_extractor = feat_extractor self.class_fc = nn.Linear(feat_extractor.fc.in_features, num_classes) def forward(self,x): x = self.feat_extractor(x) class_output = self.class_fc(x) return class_output<|fim▁hole|> def get_model(arch_name, **kwargs): backbone = _model_factory[arch_name](**kwargs) return Classifier(backbone, num_classes = kwargs["num_classes"])<|fim▁end|>
<|file_name|>test_bigip_vcmp_guest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (c) 2017 F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import sys from nose.plugins.skip import SkipTest if sys.version_info < (2, 7): raise SkipTest("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_vcmp_guest import Parameters from library.modules.bigip_vcmp_guest import ModuleManager from library.modules.bigip_vcmp_guest import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.compat.mock import patch from test.units.modules.utils import set_module_args except ImportError: try: from ansible.modules.network.f5.bigip_vcmp_guest import Parameters from ansible.modules.network.f5.bigip_vcmp_guest import ModuleManager from ansible.modules.network.f5.bigip_vcmp_guest import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.compat.mock import patch from units.modules.utils import set_module_args except ImportError: raise SkipTest("F5 Ansible modules require the f5-sdk Python library") fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso', mgmt_network='bridged', mgmt_address='1.2.3.4/24', vlans=[ 'vlan1', 'vlan2' ] ) p = Parameters(params=args) assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso' assert p.mgmt_network == 'bridged' def test_module_parameters_mgmt_bridged_without_subnet(self): args = dict( mgmt_network='bridged', mgmt_address='1.2.3.4' ) p = Parameters(params=args) assert p.mgmt_network == 'bridged' assert p.mgmt_address == '1.2.3.4/32' def test_module_parameters_mgmt_address_cidr(self): args = dict( mgmt_network='bridged', mgmt_address='1.2.3.4/24' ) p = Parameters(params=args) assert p.mgmt_network == 'bridged' assert p.mgmt_address == '1.2.3.4/24' def test_module_parameters_mgmt_address_subnet(self): args = dict( mgmt_network='bridged', mgmt_address='1.2.3.4/255.255.255.0' ) p = Parameters(params=args) assert p.mgmt_network == 'bridged' assert p.mgmt_address == '1.2.3.4/24' def test_module_parameters_mgmt_route(self): args = dict( mgmt_route='1.2.3.4' ) p = Parameters(params=args) assert p.mgmt_route == '1.2.3.4' def test_module_parameters_vcmp_software_image_facts(self): # vCMP images may include a forward slash in their names. 
This is probably<|fim▁hole|> initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso/1', ) p = Parameters(params=args) assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso/1' def test_api_parameters(self): args = dict( initialImage="BIGIP-tmos-tier2-13.1.0.0.0.931.iso", managementGw="2.2.2.2", managementIp="1.1.1.1/24", managementNetwork="bridged", state="deployed", vlans=[ "/Common/vlan1", "/Common/vlan2" ] ) p = Parameters(params=args) assert p.initial_image == 'BIGIP-tmos-tier2-13.1.0.0.0.931.iso' assert p.mgmt_route == '2.2.2.2' assert p.mgmt_address == '1.1.1.1/24' assert '/Common/vlan1' in p.vlans assert '/Common/vlan2' in p.vlans class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() self.patcher1 = patch('time.sleep') self.patcher1.start() def tearDown(self): self.patcher1.stop() def test_create_vlan(self, *args): set_module_args(dict( name="guest1", mgmt_network="bridged", mgmt_address="10.10.10.10/24", initial_image="BIGIP-13.1.0.0.0.931.iso", server='localhost', password='password', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) # Override methods to force specific logic in the module to happen mm = ModuleManager(module=module) mm.create_on_device = Mock(return_value=True) mm.exists = Mock(return_value=False) mm.is_deployed = Mock(side_effect=[False, True, True, True, True]) mm.deploy_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True assert results['name'] == 'guest1'<|fim▁end|>
# related to the slots on the system, but it is not a valid value to specify # that slot when providing an initial image args = dict(
<|file_name|>gffread_extract_transcripts.py<|end_file_name|><|fim▁begin|>from logging import getLogger

from abstract_step import AbstractStep
import os

logger = getLogger('uap_logger')


class GffreadExtractTranscripts(AbstractStep):
    '''
    extract transcripts from gtf
    http://ccb.jhu.edu/software/stringtie/gff.shtml

    write a fasta file with spliced exons for each GFF transcript
    gffread -w transcripts.fa -g /path/to/genome.fa transcripts.gtf
    '''

    def __init__(self, pipeline):
        super(GffreadExtractTranscripts, self).__init__(pipeline)

        self.set_cores(1)

        self.add_connection('in/fasta')
        self.add_connection('in/anno')
        self.add_connection('out/fasta')
        self.add_connection('out/log_stderr')
        self.add_connection('out/log_stdout')

        self.require_tool('gffread')

        self.add_option('gtf', str, optional=True,
                        default=None,
                        description="path to gtf file")

        self.add_option(
            'output-fasta-name',
            str,
            optional=False,
            default=None,
            description="name of the output transcriptome fasta file, e.g. myfasta.fa")

    def runs(self, run_ids_connections_files):
        run_id = self.get_option('output-fasta-name')
        # dependency files (r for required)
        rfiles = []
        with self.declare_run(run_id) as run:
            cmd = [self.get_tool('gffread'), '-w']
            cmd.append(self.get_option('output-fasta-name'))

            for __, connection in run_ids_connections_files.items():
                if 'in/fasta' in connection:
                    cmd.append('-g')
                    cmd.append(connection['in/fasta'][0])
                    rfiles.append(connection['in/fasta'][0])
                    continue

            if self.is_option_set_in_config('gtf'):
                cmd.append(os.path.abspath(self.get_option('gtf')))
            else:
                for __, connection in run_ids_connections_files.items():
                    if 'in/anno' in connection:
                        cmd.append(connection['in/anno'][0])
                        rfiles.append(connection['in/anno'][0])
                        continue

            stderr_file = "%s-gffread_extract_transcripts-log_stderr.txt" % (
                run_id)
            log_stderr = run.add_output_file("log_stderr",
                                             stderr_file,
                                             rfiles)
            stdout_file = "%s-gffread_extract_transcripts-log_stdout.txt" % (
                run_id)<|fim▁hole|>
            run.add_output_file("fasta",
                                self.get_option('output-fasta-name'),
                                rfiles)

            exec_group = run.new_exec_group()
            exec_group.add_command(cmd,
                                   stdout_path=log_stdout,
                                   stderr_path=log_stderr)<|fim▁end|>
log_stdout = run.add_output_file("log_stdout", stdout_file, rfiles)
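The step's docstring already gives the exact command line it assembles; the same call made directly with subprocess (file paths are placeholders):

# Equivalent to the documented usage:
#   gffread -w transcripts.fa -g /path/to/genome.fa transcripts.gtf
import subprocess

subprocess.run(
    ["gffread", "-w", "transcripts.fa", "-g", "genome.fa", "transcripts.gtf"],
    check=True,
)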
<|file_name|>annotations.py<|end_file_name|><|fim▁begin|>import csv import json import numpy as np import os import pathlib import navpy from direct.showbase.ShowBase import ShowBase from panda3d.core import CardMaker, LPoint3, NodePath, Texture, TransparencyAttrib from direct.gui.DirectGui import * from tkinter import * class Annotations(): def __init__(self, render, surface, proj, ned_ref, tk_root): self.render = render self.surface = surface self.proj = proj self.ned_ref = ned_ref self.tk_root = tk_root self.icon = loader.loadTexture('explore/marker-icon-2x.png') self.view_size = 100 self.id_prefix = "<edit me>" self.next_id = 0 self.markers = [] self.nodes = [] self.load() def ned2lla(self, n, e, d): lla = navpy.ned2lla( [n, e, d], self.ned_ref[0], self.ned_ref[1], self.ned_ref[2] ) # print(n, e, d, lla) return lla<|fim▁hole|> self.markers.append(marker) def add_marker_dict(self, m): ned = navpy.lla2ned(m['lat_deg'], m['lon_deg'], m['alt_m'], self.ned_ref[0], self.ned_ref[1], self.ned_ref[2]) if m['alt_m'] < 1.0: # estimate surface elevation if needed pos = np.array([ned[1], ned[0]]) # x, y order norm = np.linalg.norm(pos) if norm > 0: v = pos / norm # walk towards the center 1m at a time until we get onto # the interpolation surface while True: z = self.surface.get_elevation(pos[0], pos[1]) print("pos:", pos, "v:", v, "z:", z) if z < -1.0: ned[2] = z break elif np.linalg.norm(pos) < 5: # getting too close to the (ned) ned reference pt, failed break else: pos -= v print("ned updated:", ned) if 'id' in m: id = m['id'] if id >= self.next_id: self.next_id = id + 1 else: id = self.next_id self.next_id += 1 self.add_marker(ned, m['comment'], id) def load(self): oldfile = os.path.join(self.proj.project_dir, 'annotations.json') file = os.path.join(self.proj.analysis_dir, 'annotations.json') if os.path.exists(oldfile): print("Moving annotations file to new location...") os.rename(oldfile, file) oldcsv = os.path.join(self.proj.project_dir, 'annotations.csv') newcsv = os.path.join(self.proj.analysis_dir, 'annotations.csv') if os.path.exists(oldcsv): os.rename(oldcsv, newcsv) if os.path.exists(file): print('Loading saved annotations:', file) f = open(file, 'r') root = json.load(f) if type(root) is dict: if 'id_prefix' in root: self.id_prefix = root['id_prefix'] if 'markers' in root: lla_list = root['markers'] else: lla_list = root f.close() for m in lla_list: if type(m) is dict: #print("m is dict") self.add_marker_dict( m ) elif type(m) is list: #print("m is list") ned = navpy.lla2ned(m[0], m[1], m[2], self.ned_ref[0], self.ned_ref[1], self.ned_ref[2]) # print(m, ned) ned[2] = self.surface.get_elevation(ned[1], ned[0]) if len(m) == 3: self.add_marker( ned, "" ) else: self.add_marker( ned, m[3] ) else: print('No annotations file found.') def save_kml(self): import simplekml import scipy kml = simplekml.Kml() # markers for m in self.markers: ned = m['ned'] lla = self.ned2lla( ned[0], ned[1], ned[2] ) name = "%s%03d" % (self.id_prefix, m['id']) kml.newpoint(name=name, coords=[(lla[1], lla[0], lla[2])], description=m['comment']) # lon, lat, elevation # area points = [] for i in self.proj.image_list: ned, ypr, quat = i.get_camera_pose(opt=True) lla = self.ned2lla( ned[0], ned[1], ned[2] ) points.append([lla[1], lla[0]]) hull = scipy.spatial.ConvexHull(points) poly = hull.points[hull.vertices].tolist() poly.append(poly[0]) # close the loop ls = kml.newlinestring(name=pathlib.Path(self.proj.project_dir).name, coords=poly) ls.style.linestyle.color = simplekml.Color.blue kmlname = 
os.path.join(self.proj.analysis_dir, 'annotations.kml') print('Saving annotations.kml:', kmlname) kml.save(kmlname) def save(self): self.save_kml() filename = os.path.join(self.proj.analysis_dir, 'annotations.json') print('Saving annotations:', filename) lla_list = [] for m in self.markers: ned = m['ned'] lla = self.ned2lla( ned[0], ned[1], ned[2] ) jm = { 'lat_deg': lla[0], 'lon_deg': lla[1], 'alt_m': float("%.2f" % (lla[2])), 'comment': m['comment'], 'id': m['id'] } lla_list.append(jm) f = open(filename, 'w') root = { 'id_prefix': self.id_prefix, 'markers': lla_list } json.dump(root, f, indent=4) f.close() # write out simple csv version filename = os.path.join(self.proj.analysis_dir, 'annotations.csv') with open(filename, 'w') as f: fieldnames = ['id', 'lat_deg', 'lon_deg', 'alt_m', 'comment'] writer = csv.DictWriter(f, fieldnames=fieldnames) writer.writeheader() for jm in lla_list: tmp = dict(jm) # copy tmp['id'] = "%s%03d" % (self.id_prefix, jm['id']) writer.writerow(tmp) def edit(self, id, ned, comment="", exists=False): lla = self.ned2lla(ned[0], ned[1], ned[2]) new = Toplevel(self.tk_root) self.edit_result = "cancel" e = None def on_ok(): new.quit() new.withdraw() print('comment:', e.get()) self.edit_result = "ok" def on_del(): new.quit() new.withdraw() print('comment:', e.get()) self.edit_result = "delete" def on_cancel(): print("on cancel") new.quit() new.withdraw() new.protocol("WM_DELETE_WINDOW", on_cancel) if not exists: new.title("New marker") f = Frame(new) f.pack(side=TOP, fill=X) w = Label(f, text="ID: ") w.pack(side=LEFT) ep = Entry(f) ep.insert(0, self.id_prefix) ep.pack(side=LEFT) w = Label(f, text=" %03d" % self.next_id) w.pack(side=LEFT) else: new.title("Edit marker") f = Frame(new) f.pack(side=TOP, fill=X) w = Label(f, text="ID: ") w.pack(side=LEFT) ep = Entry(f) ep.insert(0, self.id_prefix) ep.pack(side=LEFT) w = Label(f, text=" %03d" % id) w.pack(side=LEFT) f = Frame(new) f.pack(side=TOP, fill=X) w = Label(f, text="Lat: %.8f" % lla[0]) w.pack(side=LEFT) f = Frame(new) f.pack(side=TOP, fill=X) w = Label(f, text="Lon: %.8f" % lla[1]) w.pack(side=LEFT) f = Frame(new) f.pack(side=TOP, fill=X) w = Label(f, text="Alt(m): %.1f" % lla[2]) w.pack(side=LEFT) f = Frame(new) f.pack(side=TOP, fill=X) l = Label(f, text="Comment:") l.pack(side=LEFT) e = Entry(f) e.insert(0, comment) e.pack(side=LEFT) e.focus_set() f = Frame(new) f.pack(fill=X) bok = Button(f, text="OK", command=on_ok) bok.pack(side=LEFT, fill=X) if exists: bdel = Button(f, text="Delete", command=on_del) bdel.pack(side=LEFT, fill=X) bx = Button(f, text="Cancel", command=on_cancel) bx.pack(side=LEFT, fill=X) new.mainloop() if self.edit_result == "ok": self.id_prefix = ep.get() print("after main loop:", self.edit_result, e.get()) return self.edit_result, e.get() def toggle(self, cam_pos): mw = base.mouseWatcherNode if not mw.hasMouse(): return props = base.win.getProperties() y = props.getYSize() pxm = float(y) / self.view_size range = 25 / pxm hsize = 12 / pxm vsize = 40 / pxm mpos = mw.getMouse() print('mpos:', mpos) x = cam_pos[0] + mpos[0] * self.view_size*0.5 * base.getAspectRatio() y = cam_pos[1] + mpos[1] * self.view_size*0.5 dirty = False # check if we clicked on an existing marker found = -1 for i, m in enumerate(self.markers): ned = m['ned'] dx = abs(x - ned[1]) dy = y - ned[0] if dx <= (hsize*0.5)+1 and y >= ned[0]-1 and y <= ned[0]+vsize+1: found = i # del self.markers[i] # break if found >= 0: print("Found existing marker:", found) id = self.markers[found]['id'] ned = self.markers[found]['ned'] comment = 
self.markers[found]['comment'] result, comment = self.edit(id, ned, comment, exists=True) if result == 'ok': self.markers[found]['comment'] = comment dirty = True elif result == 'delete': del self.markers[found] dirty = True else: z = self.surface.get_elevation(x, y) result, comment = self.edit(self.next_id, [y, x, z], exists=False) if result == 'ok': id = self.next_id self.next_id += 1 self.add_marker( [y, x, z], comment, id ) dirty = True if dirty: self.rebuild(self.view_size) self.save() def rebuild(self, view_size): self.view_size = view_size props = base.win.getProperties() y = props.getYSize() pxm = float(y) / self.view_size hsize = 12 / pxm vsize = 40 / pxm #print(hsize, vsize) cm = CardMaker('card') cm.setFrame( LPoint3(-hsize, 0, 0 ), LPoint3( hsize, 0, 0 ), LPoint3( hsize, vsize, 0 ), LPoint3(-hsize, vsize, 0 ) ) for n in self.nodes: n.removeNode() self.nodes = [] for m in self.markers: node = NodePath(cm.generate()) node.setTexture(self.icon, 1) node.setTransparency(TransparencyAttrib.MAlpha) node.setDepthTest(False) node.setDepthWrite(False) node.setBin("unsorted", 1) ned = m['ned'] node.setPos(ned[1], ned[0], 0) node.reparentTo(self.render) self.nodes.append(node)<|fim▁end|>
def add_marker(self, ned, comment, id): marker = { "ned": ned, "comment": comment, "id": id }
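The marker code above converts between geodetic and local NED coordinates with navpy; a standalone round-trip sketch (the reference-point values are arbitrary):

# Round-trip a point through the local NED frame used by Annotations.
import navpy

lat0, lon0, alt0 = 45.0, -93.0, 250.0            # arbitrary NED origin
ned = navpy.lla2ned(45.001, -93.001, 260.0, lat0, lon0, alt0)
lla = navpy.ned2lla(ned, lat0, lon0, alt0)
print(ned, lla)  # lla recovers the input point up to rounding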
<|file_name|>wrkpool.py<|end_file_name|><|fim▁begin|>import Queue import atexit import logging import threading import traceback class WorkerPool(object): """ Pool of worker threads; grows as necessary. """ _lock = threading.Lock() _pool = None # Singleton. def __init__(self): self._idle = [] # Queues of idle workers. self._workers = {} # Maps queue to worker. atexit.register(self.cleanup) @staticmethod def get_instance(): """ Return singleton instance. """ with WorkerPool._lock: if WorkerPool._pool is None: WorkerPool._pool = WorkerPool() return WorkerPool._pool @staticmethod def cleanup(): """ Cleanup resources (worker threads). """ WorkerPool.get_instance()._cleanup() def _cleanup(self): """ Cleanup resources (worker threads). """ with self._lock: for queue in self._workers: queue.put((None, None, None, None)) self._workers[queue].join(1) if self._workers[queue].is_alive(): logging.debug('WorkerPool: worker join timed-out.') try: self._idle.remove(queue) except ValueError: pass # Never released due to some other issue... self._idle = [] self._workers = {} @staticmethod def get(one_shot=False): """ Get a worker queue from the pool. Work requests should be of the form: ``(callable, *args, **kwargs, reply_queue)`` Work replies are of the form: ``(queue, retval, exc, traceback)`` one_shot: bool If True, the worker will self-release after processing one request. """ return WorkerPool.get_instance()._get(one_shot) def _get(self, one_shot): """ Get a worker queue from the pool. """ with self._lock: try: return self._idle.pop() except IndexError: queue = Queue.Queue() worker = threading.Thread(target=self._service_loop, args=(queue, one_shot)) worker.daemon = True worker.start() self._workers[queue] = worker return queue @staticmethod<|fim▁hole|> Release a worker queue back to the pool. queue: Queue Worker queue previously obtained from :meth:`get`. """ return WorkerPool.get_instance()._release(queue) def _release(self, queue): """ Release a worker queue back to the pool. """ with self._lock: self._idle.append(queue) def _service_loop(self, request_q, one_shot): """ Get (callable, args, kwargs) from request_q and queue result. """ while True: callable, args, kwargs, reply_q = request_q.get() if callable is None: request_q.task_done() return # Shutdown. exc = None trace = None retval = None try: retval = callable(*args, **kwargs) except Exception as exc: # Sometimes we have issues at shutdown. try: trace = traceback.format_exc() except Exception: # pragma no cover return request_q.task_done() if reply_q is not None: reply_q.put((request_q, retval, exc, trace)) if one_shot: self._release(request_q)<|fim▁end|>
def release(queue): """
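The docstrings in the row above fix the request/reply tuple protocol; a short driver that follows it (assuming the module is importable as wrkpool; note it targets Python 2, hence the Queue import):

# request = (callable, args, kwargs, reply_queue)
# reply   = (request_queue, retval, exc, traceback)
import Queue
from wrkpool import WorkerPool

work_q = WorkerPool.get()
reply_q = Queue.Queue()
work_q.put((pow, (2, 10), {}, reply_q))
_, retval, exc, trace = reply_q.get()
assert exc is None and retval == 1024
WorkerPool.release(work_q)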
<|file_name|>TagResourceResult.cpp<|end_file_name|><|fim▁begin|>/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/qldb/model/TagResourceResult.h> #include <aws/core/utils/json/JsonSerializer.h> #include <aws/core/AmazonWebServiceResult.h> #include <aws/core/utils/StringUtils.h> #include <aws/core/utils/UnreferencedParam.h> #include <utility> using namespace Aws::QLDB::Model; using namespace Aws::Utils::Json; using namespace Aws::Utils; using namespace Aws;<|fim▁hole|> TagResourceResult::TagResourceResult(const Aws::AmazonWebServiceResult<JsonValue>& result) { *this = result; } TagResourceResult& TagResourceResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result) { AWS_UNREFERENCED_PARAM(result); return *this; }<|fim▁end|>
TagResourceResult::TagResourceResult() { }
<|file_name|>htmlbuttonelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::activation::Activatable; use crate::dom::attr::Attr; use crate::dom::bindings::codegen::Bindings::HTMLButtonElementBinding; use crate::dom::bindings::codegen::Bindings::HTMLButtonElementBinding::HTMLButtonElementMethods; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::root::{DomRoot, MutNullableDom}; use crate::dom::bindings::str::DOMString; use crate::dom::document::Document; use crate::dom::element::{AttributeMutation, Element}; use crate::dom::event::Event; use crate::dom::eventtarget::EventTarget; use crate::dom::htmlelement::HTMLElement; use crate::dom::htmlfieldsetelement::HTMLFieldSetElement; use crate::dom::htmlformelement::HTMLFormElement; use crate::dom::htmlformelement::{FormControl, FormDatum, FormDatumValue}; use crate::dom::htmlformelement::{FormSubmitter, ResetFrom, SubmittedFrom}; use crate::dom::node::{window_from_node, BindContext, Node, UnbindContext}; use crate::dom::nodelist::NodeList; use crate::dom::validation::Validatable; use crate::dom::validitystate::{ValidationFlags, ValidityState}; use crate::dom::virtualmethods::VirtualMethods; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; use std::cell::Cell; use std::default::Default; use style::element_state::ElementState; #[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)] enum ButtonType { Submit, Reset, Button, Menu, } #[dom_struct] pub struct HTMLButtonElement { htmlelement: HTMLElement, button_type: Cell<ButtonType>, form_owner: MutNullableDom<HTMLFormElement>, } impl HTMLButtonElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLButtonElement { HTMLButtonElement { htmlelement: HTMLElement::new_inherited_with_state( ElementState::IN_ENABLED_STATE, local_name, prefix, document, ),<|fim▁hole|> } } #[allow(unrooted_must_root)] pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLButtonElement> { Node::reflect_node( Box::new(HTMLButtonElement::new_inherited( local_name, prefix, document, )), document, HTMLButtonElementBinding::Wrap, ) } } impl HTMLButtonElementMethods for HTMLButtonElement { // https://html.spec.whatwg.org/multipage/#dom-cva-validity fn Validity(&self) -> DomRoot<ValidityState> { let window = window_from_node(self); ValidityState::new(&window, self.upcast()) } // https://html.spec.whatwg.org/multipage/#dom-fe-disabled make_bool_getter!(Disabled, "disabled"); // https://html.spec.whatwg.org/multipage/#dom-fe-disabled make_bool_setter!(SetDisabled, "disabled"); // https://html.spec.whatwg.org/multipage/#dom-fae-form fn GetForm(&self) -> Option<DomRoot<HTMLFormElement>> { self.form_owner() } // https://html.spec.whatwg.org/multipage/#dom-button-type make_enumerated_getter!(Type, "type", "submit", "reset" | "button" | "menu"); // https://html.spec.whatwg.org/multipage/#dom-button-type make_setter!(SetType, "type"); // https://html.spec.whatwg.org/multipage/#dom-fs-formaction make_form_action_getter!(FormAction, "formaction"); // https://html.spec.whatwg.org/multipage/#dom-fs-formaction make_setter!(SetFormAction, "formaction"); // https://html.spec.whatwg.org/multipage/#dom-fs-formenctype make_enumerated_getter!( FormEnctype, "formenctype", "application/x-www-form-urlencoded", 
"text/plain" | "multipart/form-data" ); // https://html.spec.whatwg.org/multipage/#dom-fs-formenctype make_setter!(SetFormEnctype, "formenctype"); // https://html.spec.whatwg.org/multipage/#dom-fs-formmethod make_enumerated_getter!(FormMethod, "formmethod", "get", "post" | "dialog"); // https://html.spec.whatwg.org/multipage/#dom-fs-formmethod make_setter!(SetFormMethod, "formmethod"); // https://html.spec.whatwg.org/multipage/#dom-fs-formtarget make_getter!(FormTarget, "formtarget"); // https://html.spec.whatwg.org/multipage/#dom-fs-formtarget make_setter!(SetFormTarget, "formtarget"); // https://html.spec.whatwg.org/multipage/#attr-fs-formnovalidate make_bool_getter!(FormNoValidate, "formnovalidate"); // https://html.spec.whatwg.org/multipage/#attr-fs-formnovalidate make_bool_setter!(SetFormNoValidate, "formnovalidate"); // https://html.spec.whatwg.org/multipage/#dom-fe-name make_getter!(Name, "name"); // https://html.spec.whatwg.org/multipage/#dom-fe-name make_setter!(SetName, "name"); // https://html.spec.whatwg.org/multipage/#dom-button-value make_getter!(Value, "value"); // https://html.spec.whatwg.org/multipage/#dom-button-value make_setter!(SetValue, "value"); // https://html.spec.whatwg.org/multipage/#dom-lfe-labels fn Labels(&self) -> DomRoot<NodeList> { self.upcast::<HTMLElement>().labels() } } impl HTMLButtonElement { /// <https://html.spec.whatwg.org/multipage/#constructing-the-form-data-set> /// Steps range from 3.1 to 3.7 (specific to HTMLButtonElement) pub fn form_datum(&self, submitter: Option<FormSubmitter>) -> Option<FormDatum> { // Step 3.1: disabled state check is in get_unclean_dataset // Step 3.1: only run steps if this is the submitter if let Some(FormSubmitter::ButtonElement(submitter)) = submitter { if submitter != self { return None; } } else { return None; } // Step 3.2 let ty = self.Type(); // Step 3.4 let name = self.Name(); if name.is_empty() { // Step 3.1: Must have a name return None; } // Step 3.9 Some(FormDatum { ty: ty, name: name, value: FormDatumValue::String(self.Value()), }) } } impl VirtualMethods for HTMLButtonElement { fn super_type(&self) -> Option<&dyn VirtualMethods> { Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods) } fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) { self.super_type().unwrap().attribute_mutated(attr, mutation); match attr.local_name() { &local_name!("disabled") => { let el = self.upcast::<Element>(); match mutation { AttributeMutation::Set(Some(_)) => {}, AttributeMutation::Set(None) => { el.set_disabled_state(true); el.set_enabled_state(false); }, AttributeMutation::Removed => { el.set_disabled_state(false); el.set_enabled_state(true); el.check_ancestors_disabled_state_for_form_control(); }, } }, &local_name!("type") => match mutation { AttributeMutation::Set(_) => { let value = match &**attr.value() { "reset" => ButtonType::Reset, "button" => ButtonType::Button, "menu" => ButtonType::Menu, _ => ButtonType::Submit, }; self.button_type.set(value); }, AttributeMutation::Removed => { self.button_type.set(ButtonType::Submit); }, }, &local_name!("form") => { self.form_attribute_mutated(mutation); }, _ => {}, } } fn bind_to_tree(&self, context: &BindContext) { if let Some(ref s) = self.super_type() { s.bind_to_tree(context); } self.upcast::<Element>() .check_ancestors_disabled_state_for_form_control(); } fn unbind_from_tree(&self, context: &UnbindContext) { self.super_type().unwrap().unbind_from_tree(context); let node = self.upcast::<Node>(); let el = self.upcast::<Element>(); if node .ancestors() 
.any(|ancestor| ancestor.is::<HTMLFieldSetElement>()) { el.check_ancestors_disabled_state_for_form_control(); } else { el.check_disabled_attribute(); } } } impl FormControl for HTMLButtonElement { fn form_owner(&self) -> Option<DomRoot<HTMLFormElement>> { self.form_owner.get() } fn set_form_owner(&self, form: Option<&HTMLFormElement>) { self.form_owner.set(form); } fn to_element<'a>(&'a self) -> &'a Element { self.upcast::<Element>() } } impl Validatable for HTMLButtonElement { fn is_instance_validatable(&self) -> bool { true } fn validate(&self, validate_flags: ValidationFlags) -> bool { if validate_flags.is_empty() {} // Need more flag check for different validation types later true } } impl Activatable for HTMLButtonElement { fn as_element(&self) -> &Element { self.upcast() } fn is_instance_activatable(&self) -> bool { //https://html.spec.whatwg.org/multipage/#the-button-element !self.upcast::<Element>().disabled_state() } // https://html.spec.whatwg.org/multipage/#run-pre-click-activation-steps // https://html.spec.whatwg.org/multipage/#the-button-element:activation-behavior fn pre_click_activation(&self) {} // https://html.spec.whatwg.org/multipage/#run-canceled-activation-steps fn canceled_activation(&self) {} // https://html.spec.whatwg.org/multipage/#run-post-click-activation-steps fn activation_behavior(&self, _event: &Event, _target: &EventTarget) { let ty = self.button_type.get(); match ty { //https://html.spec.whatwg.org/multipage/#attr-button-type-submit-state ButtonType::Submit => { // TODO: is document owner fully active? if let Some(owner) = self.form_owner() { owner.submit( SubmittedFrom::NotFromForm, FormSubmitter::ButtonElement(self.clone()), ); } }, ButtonType::Reset => { // TODO: is document owner fully active? if let Some(owner) = self.form_owner() { owner.reset(ResetFrom::NotFromForm); } }, _ => (), } } }<|fim▁end|>
button_type: Cell::new(ButtonType::Submit), form_owner: Default::default(),
<|file_name|>test_pdfminer_psparser.py<|end_file_name|><|fim▁begin|>import logging from pdfminer.psparser import KWD, LIT, PSBaseParser, PSStackParser, PSEOF logger = logging.getLogger(__name__) class TestPSBaseParser: """Simplistic Test cases""" TESTDATA = rb"""%!PS begin end " @ # /a/BCD /Some_Name /foo#5f#xbaa 0 +1 -2 .5 1.234 (abc) () (abc ( def ) ghi) (def\040\0\0404ghi) (bach\\slask) (foo\nbaa) (this % is not a comment.) (foo baa) (foo\ baa) <> <20> < 40 4020 > <abcd00 12345> func/a/b{(c)do*}def [ 1 (z) ! ] << /foo (bar) >> """ TOKENS = [ (5, KWD(b"begin")), (11, KWD(b"end")), (16, KWD(b'"')), (19, KWD(b"@")), (21, KWD(b"#")), (23, LIT("a")), (25, LIT("BCD")), (30, LIT("Some_Name")), (41, LIT("foo_xbaa")), (54, 0), (56, 1), (59, -2), (62, 0.5), (65, 1.234), (71, b"abc"), (77, b""), (80, b"abc ( def ) ghi"), (98, b"def \x00 4ghi"), (118, b"bach\\slask"), (132, b"foo\nbaa"), (143, b"this % is not a comment."), (170, b"foo\nbaa"), (180, b"foobaa"), (191, b""), (194, b" "), (199, b"@@ "), (211, b"\xab\xcd\x00\x124\x05"), (226, KWD(b"func")), (230, LIT("a")), (232, LIT("b")), (234, KWD(b"{")), (235, b"c"), (238, KWD(b"do*")), (241, KWD(b"}")), (242, KWD(b"def")), (246, KWD(b"[")), (248, 1), (250, b"z"), (254, KWD(b"!")), (256, KWD(b"]")), (258, KWD(b"<<")), (261, LIT("foo")), (266, b"bar"), (272, KWD(b">>")), ] OBJS = [ (23, LIT("a")), (25, LIT("BCD")), (30, LIT("Some_Name")), (41, LIT("foo_xbaa")), (54, 0), (56, 1), (59, -2), (62, 0.5), (65, 1.234), (71, b"abc"), (77, b""), (80, b"abc ( def ) ghi"),<|fim▁hole|> (170, b"foo\nbaa"), (180, b"foobaa"), (191, b""), (194, b" "), (199, b"@@ "), (211, b"\xab\xcd\x00\x124\x05"), (230, LIT("a")), (232, LIT("b")), (234, [b"c"]), (246, [1, b"z"]), (258, {"foo": b"bar"}), ] def get_tokens(self, s): from io import BytesIO class MyParser(PSBaseParser): def flush(self): self.add_results(*self.popall()) parser = MyParser(BytesIO(s)) r = [] try: while True: r.append(parser.nexttoken()) except PSEOF: pass return r def get_objects(self, s): from io import BytesIO class MyParser(PSStackParser): def flush(self): self.add_results(*self.popall()) parser = MyParser(BytesIO(s)) r = [] try: while True: r.append(parser.nextobject()) except PSEOF: pass return r def test_1(self): tokens = self.get_tokens(self.TESTDATA) logger.info(tokens) assert tokens == self.TOKENS return def test_2(self): objs = self.get_objects(self.TESTDATA) logger.info(objs) assert objs == self.OBJS return<|fim▁end|>
(98, b"def \x00 4ghi"), (118, b"bach\\slask"), (132, b"foo\nbaa"), (143, b"this % is not a comment."),
<|file_name|>funding_contributor_v30_rc1.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ ORCID Member No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: Latest Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from orcid_api_v3.models.contributor_email_v30_rc1 import ContributorEmailV30Rc1 # noqa: F401,E501 from orcid_api_v3.models.contributor_orcid_v30_rc1 import ContributorOrcidV30Rc1 # noqa: F401,E501 from orcid_api_v3.models.credit_name_v30_rc1 import CreditNameV30Rc1 # noqa: F401,E501 from orcid_api_v3.models.funding_contributor_attributes_v30_rc1 import FundingContributorAttributesV30Rc1 # noqa: F401,E501 class FundingContributorV30Rc1(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'contributor_orcid': 'ContributorOrcidV30Rc1', 'credit_name': 'CreditNameV30Rc1', 'contributor_email': 'ContributorEmailV30Rc1', 'contributor_attributes': 'FundingContributorAttributesV30Rc1' } attribute_map = { 'contributor_orcid': 'contributor-orcid', 'credit_name': 'credit-name', 'contributor_email': 'contributor-email', 'contributor_attributes': 'contributor-attributes' } def __init__(self, contributor_orcid=None, credit_name=None, contributor_email=None, contributor_attributes=None): # noqa: E501 """FundingContributorV30Rc1 - a model defined in Swagger""" # noqa: E501 self._contributor_orcid = None self._credit_name = None self._contributor_email = None self._contributor_attributes = None self.discriminator = None if contributor_orcid is not None: self.contributor_orcid = contributor_orcid if credit_name is not None: self.credit_name = credit_name if contributor_email is not None: self.contributor_email = contributor_email if contributor_attributes is not None: self.contributor_attributes = contributor_attributes @property def contributor_orcid(self): """Gets the contributor_orcid of this FundingContributorV30Rc1. # noqa: E501 :return: The contributor_orcid of this FundingContributorV30Rc1. # noqa: E501 :rtype: ContributorOrcidV30Rc1 """ return self._contributor_orcid @contributor_orcid.setter def contributor_orcid(self, contributor_orcid): """Sets the contributor_orcid of this FundingContributorV30Rc1. :param contributor_orcid: The contributor_orcid of this FundingContributorV30Rc1. # noqa: E501 :type: ContributorOrcidV30Rc1 """ self._contributor_orcid = contributor_orcid @property def credit_name(self): """Gets the credit_name of this FundingContributorV30Rc1. # noqa: E501 :return: The credit_name of this FundingContributorV30Rc1. # noqa: E501 :rtype: CreditNameV30Rc1 """<|fim▁hole|> """Sets the credit_name of this FundingContributorV30Rc1. :param credit_name: The credit_name of this FundingContributorV30Rc1. # noqa: E501 :type: CreditNameV30Rc1 """ self._credit_name = credit_name @property def contributor_email(self): """Gets the contributor_email of this FundingContributorV30Rc1. # noqa: E501 :return: The contributor_email of this FundingContributorV30Rc1. 
# noqa: E501 :rtype: ContributorEmailV30Rc1 """ return self._contributor_email @contributor_email.setter def contributor_email(self, contributor_email): """Sets the contributor_email of this FundingContributorV30Rc1. :param contributor_email: The contributor_email of this FundingContributorV30Rc1. # noqa: E501 :type: ContributorEmailV30Rc1 """ self._contributor_email = contributor_email @property def contributor_attributes(self): """Gets the contributor_attributes of this FundingContributorV30Rc1. # noqa: E501 :return: The contributor_attributes of this FundingContributorV30Rc1. # noqa: E501 :rtype: FundingContributorAttributesV30Rc1 """ return self._contributor_attributes @contributor_attributes.setter def contributor_attributes(self, contributor_attributes): """Sets the contributor_attributes of this FundingContributorV30Rc1. :param contributor_attributes: The contributor_attributes of this FundingContributorV30Rc1. # noqa: E501 :type: FundingContributorAttributesV30Rc1 """ self._contributor_attributes = contributor_attributes def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(FundingContributorV30Rc1, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, FundingContributorV30Rc1): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other<|fim▁end|>
return self._credit_name @credit_name.setter def credit_name(self, credit_name):
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest from gaphas.painter import BoundingBoxPainter from gaphas.view import GtkView from gaphor.diagram.painter import ItemPainter from gaphor.diagram.selection import Selection from gaphor.diagram.tests.fixtures import diagram, element_factory, event_manager @pytest.fixture def view(diagram): view = GtkView(model=diagram, selection=Selection()) view._qtree.resize((-100, -100, 400, 400))<|fim▁hole|> item_painter = ItemPainter(view.selection) view.painter = item_painter view.bounding_box_painter = BoundingBoxPainter(item_painter) return view<|fim▁end|>
<|file_name|>countdown.js<|end_file_name|><|fim▁begin|>/*global window */
/**
 * @license countdown.js v2.5.2 http://countdownjs.org
 * Copyright (c)2006-2014 Stephen M. McKamey.
 * Licensed under The MIT License.
 */
/*jshint bitwise:false */

/**
 * @public
 * @type {Object|null}
 */
var module;

/**
 * API entry
 * @public
 * @param {function(Object)|Date|number} start the starting date
 * @param {function(Object)|Date|number} end the ending date
 * @param {number} units the units to populate
 * @return {Object|number}
 */
var countdown = (
/**
 * @param {Object} module CommonJS Module
 */
function(module) {
  /*jshint smarttabs:true */
  'use strict';

  /** @private @const @type {number} */
  var MILLISECONDS = 0x001;
  /** @private @const @type {number} */
  var SECONDS = 0x002;
  /** @private @const @type {number} */
  var MINUTES = 0x004;
  /** @private @const @type {number} */
  var HOURS = 0x008;
  /** @private @const @type {number} */
  var DAYS = 0x010;
  /** @private @const @type {number} */
  var WEEKS = 0x020;
  /** @private @const @type {number} */
  var MONTHS = 0x040;
  /** @private @const @type {number} */
  var YEARS = 0x080;
  /** @private @const @type {number} */
  var DECADES = 0x100;
  /** @private @const @type {number} */
  var CENTURIES = 0x200;
  /** @private @const @type {number} */
  var MILLENNIA = 0x400;
  /** @private @const @type {number} */
  var DEFAULTS = YEARS|MONTHS|DAYS|HOURS|MINUTES|SECONDS;
  /** @private @const @type {number} */
  var MILLISECONDS_PER_SECOND = 1000;
  /** @private @const @type {number} */
  var SECONDS_PER_MINUTE = 60;
  /** @private @const @type {number} */
  var MINUTES_PER_HOUR = 60;
  /** @private @const @type {number} */
  var HOURS_PER_DAY = 24;
  /** @private @const @type {number} */
  var MILLISECONDS_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND;
  /** @private @const @type {number} */
  var DAYS_PER_WEEK = 7;
  /** @private @const @type {number} */
  var MONTHS_PER_YEAR = 12;
  /** @private @const @type {number} */
  var YEARS_PER_DECADE = 10;
  /** @private @const @type {number} */
  var DECADES_PER_CENTURY = 10;
  /** @private @const @type {number} */
  var CENTURIES_PER_MILLENNIUM = 10;

  /**
   * @private
   * @param {number} x number
   * @return {number}
   */
  var ceil = Math.ceil;

  /**
   * @private
   * @param {number} x number
   * @return {number}
   */
  var floor = Math.floor;

  /**
   * @private
   * @param {Date} ref reference date
   * @param {number} shift number of months to shift
   * @return {number} number of days shifted
   */
  function borrowMonths(ref, shift) {
    var prevTime = ref.getTime();

    // increment month by shift
    ref.setMonth( ref.getMonth() + shift );

    // this is the trickiest since months vary in length
    return Math.round( (ref.getTime() - prevTime) / MILLISECONDS_PER_DAY );
  }

  /**
   * @private
   * @param {Date} ref reference date
   * @return {number} number of days
   */
  function daysPerMonth(ref) {
    var a = ref.getTime();

    // increment month by 1
    var b = new Date(a);
    b.setMonth( ref.getMonth() + 1 );

    // this is the trickiest since months vary in length
    return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY );
  }

  /**
   * @private
   * @param {Date} ref reference date
   * @return {number} number of days
   */
  function daysPerYear(ref) {
    var a = ref.getTime();

    // increment year by 1
    var b = new Date(a);
    b.setFullYear( ref.getFullYear() + 1 );

    // this is the trickiest since years (periodically) vary in length
    return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY );
  }

  /**
   * Applies the Timespan to the given date.
   *
   * @private
   * @param {Timespan} ts
   * @param {Date=} date
   * @return {Date}
   */
  function addToDate(ts, date) {
    date = (date instanceof Date) || ((date !== null) && isFinite(date)) ? new Date(+date) : new Date();
    if (!ts) {
      return date;
    }

    // if there is a value field, use it directly
    var value = +ts.value || 0;
    if (value) {
      date.setTime(date.getTime() + value);
      return date;
    }

    value = +ts.milliseconds || 0;
    if (value) {
      date.setMilliseconds(date.getMilliseconds() + value);
    }

    value = +ts.seconds || 0;
//  if (value) {
      date.setSeconds(date.getSeconds() + value);
//  }

    value = +ts.minutes || 0;
    if (value) {
      date.setMinutes(date.getMinutes() + value);
    }

    value = +ts.hours || 0;
    if (value) {
      date.setHours(date.getHours() + value);
    }

    value = +ts.weeks || 0;
    if (value) {
      value *= DAYS_PER_WEEK;
    }

    value += +ts.days || 0;
    if (value) {
      date.setDate(date.getDate() + value);
    }

    value = +ts.months || 0;
    if (value) {
      date.setMonth(date.getMonth() + value);
    }

    value = +ts.millennia || 0;
    if (value) {
      value *= CENTURIES_PER_MILLENNIUM;
    }

    value += +ts.centuries || 0;
    if (value) {
      value *= DECADES_PER_CENTURY;
    }

    value += +ts.decades || 0;
    if (value) {
      value *= YEARS_PER_DECADE;
    }

    value += +ts.years || 0;
    if (value) {
      date.setFullYear(date.getFullYear() + value);
    }

    return date;
  }

  /** @private @const @type {number} */
  var LABEL_MILLISECONDS = 0;
  /** @private @const @type {number} */
  var LABEL_SECONDS = 1;
  /** @private @const @type {number} */
  var LABEL_MINUTES = 2;
  /** @private @const @type {number} */
  var LABEL_HOURS = 3;
  /** @private @const @type {number} */
  var LABEL_DAYS = 4;
  /** @private @const @type {number} */
  var LABEL_WEEKS = 5;
  /** @private @const @type {number} */
  var LABEL_MONTHS = 6;
  /** @private @const @type {number} */
  var LABEL_YEARS = 7;
  /** @private @const @type {number} */
  var LABEL_DECADES = 8;
  /** @private @const @type {number} */
  var LABEL_CENTURIES = 9;
  /** @private @const @type {number} */
  var LABEL_MILLENNIA = 10;

  /** @private @type {Array} */
  var LABELS_SINGLUAR;
  /** @private @type {Array} */
  var LABELS_PLURAL;
  /** @private @type {string} */
  var LABEL_LAST;
  /** @private @type {string} */
  var LABEL_DELIM;
  /** @private @type {string} */
  var LABEL_NOW;

  /**
   * Formats a number as a string
   *
   * @private
   * @param {number} value
   * @return {string}
   */
  var formatNumber;

  /**
   * @private
   * @param {number} value
   * @param {number} unit unit index into label list
   * @return {string}
   */
  function plurality(value, unit) {
    return formatNumber(value)+((value === 1) ? LABELS_SINGLUAR[unit] : LABELS_PLURAL[unit]);
  }

  /**
   * Formats the entries with singular or plural labels
   *
   * @private
   * @param {Timespan} ts
   * @return {Array}
   */
  var formatList;

  /**
   * Timespan representation of a duration of time
   *
   * @private
   * @this {Timespan}
   * @constructor
   */
  function Timespan() {}

  /**
   * Formats the Timespan as a sentence
   *
   * @param {string=} emptyLabel the string to use when no values returned
   * @return {string}
   */
  Timespan.prototype.toString = function(emptyLabel) {
    var label = formatList(this);

    var count = label.length;
    if (!count) {
      return emptyLabel ? ''+emptyLabel : LABEL_NOW;
    }
    if (count === 1) {
      return label[0];
    }

    var last = LABEL_LAST+label.pop();
    return label.join(LABEL_DELIM)+last;
  };

  /**
   * Formats the Timespan as a sentence in HTML
   *
   * @param {string=} tag HTML tag name to wrap each value
   * @param {string=} emptyLabel the string to use when no values returned
   * @return {string}
   */
  Timespan.prototype.toHTML = function(tag, emptyLabel) {
    tag = tag || 'span';
    var label = formatList(this);

    var count = label.length;
    if (!count) {
      emptyLabel = emptyLabel || LABEL_NOW;
      return emptyLabel ? '<'+tag+'>'+emptyLabel+'</'+tag+'>' : emptyLabel;
    }
    for (var i=0; i<count; i++) {
      // wrap each unit in tag
      label[i] = '<'+tag+'>'+label[i]+'</'+tag+'>';
    }
    if (count === 1) {
      return label[0];
    }

    var last = LABEL_LAST+label.pop();
    return label.join(LABEL_DELIM)+last;
  };

  /**
   * Applies the Timespan to the given date
   *
   * @param {Date=} date the date to which the timespan is added.
   * @return {Date}
   */
  Timespan.prototype.addTo = function(date) {
    return addToDate(this, date);
  };

  /**
   * Formats the entries as English labels
   *
   * @private
   * @param {Timespan} ts
   * @return {Array}
   */
  formatList = function(ts) {
    var list = [];

    var value = ts.millennia;
    if (value) {
      list.push(plurality(value, LABEL_MILLENNIA));
    }

    value = ts.centuries;
    if (value) {
      list.push(plurality(value, LABEL_CENTURIES));
    }

    value = ts.decades;
    if (value) {
      list.push(plurality(value, LABEL_DECADES));
    }

    value = ts.years;
    if (value) {
      list.push(plurality(value, LABEL_YEARS));
    }

    value = ts.months;
    if (value) {
      list.push(plurality(value, LABEL_MONTHS));
    }

    value = ts.weeks;
    if (value) {
      list.push(plurality(value, LABEL_WEEKS));
    }

    value = ts.days;
    if (value) {
      list.push(plurality(value, LABEL_DAYS));
    }

    value = ts.hours;
    if (value) {
      list.push(plurality(value, LABEL_HOURS));
    }

    value = ts.minutes;
    if (value) {
      list.push(plurality(value, LABEL_MINUTES));
    }

    value = ts.seconds;
//  if (value) {
      list.push(plurality(value, LABEL_SECONDS));
//  }

    value = ts.milliseconds;
    if (value) {
      list.push(plurality(value, LABEL_MILLISECONDS));
    }

    return list;
  };

  /**
   * Borrow any underflow units, carry any overflow units
   *
   * @private
   * @param {Timespan} ts
   * @param {string} toUnit
   */
  function rippleRounded(ts, toUnit) {
    switch (toUnit) {

      case 'seconds':
        if (ts.seconds !== SECONDS_PER_MINUTE || isNaN(ts.minutes)) {
          return;
        }
        // ripple seconds up to minutes
        ts.minutes++;
        ts.seconds = 0;
        /* falls through */

      case 'minutes':
        if (ts.minutes !== MINUTES_PER_HOUR || isNaN(ts.hours)) {
          return;
        }
        // ripple minutes up to hours
        ts.hours++;
        ts.minutes = 0;
        /* falls through */

      case 'hours':
        if (ts.hours !== HOURS_PER_DAY || isNaN(ts.days)) {
          return;
        }
        // ripple hours up to days
        ts.days++;
        ts.hours = 0;
        /* falls through */

      case 'days':
        if (ts.days !== DAYS_PER_WEEK || isNaN(ts.weeks)) {
          return;
        }
        // ripple days up to weeks
        ts.weeks++;
        ts.days = 0;
        /* falls through */

      case 'weeks':
        if (ts.weeks !== daysPerMonth(ts.refMonth)/DAYS_PER_WEEK || isNaN(ts.months)) {
          return;
        }
        // ripple weeks up to months
        ts.months++;
        ts.weeks = 0;
        /* falls through */

      case 'months':
        if (ts.months !== MONTHS_PER_YEAR || isNaN(ts.years)) {
          return;
        }
        // ripple months up to years
        ts.years++;
        ts.months = 0;
        /* falls through */

      case 'years':
        if (ts.years !== YEARS_PER_DECADE || isNaN(ts.decades)) {
          return;
        }
        // ripple years up to decades
        ts.decades++;
        ts.years = 0;
        /* falls through */

      case 'decades':
        if (ts.decades !== DECADES_PER_CENTURY || isNaN(ts.centuries)) {
          return;
        }
        // ripple decades up to centuries
        ts.centuries++;
        ts.decades = 0;
        /* falls through */

      case 'centuries':
        if (ts.centuries !== CENTURIES_PER_MILLENNIUM || isNaN(ts.millennia)) {
          return;
        }
        // ripple centuries up to millennia
        ts.millennia++;
        ts.centuries = 0;
        /* falls through */
    }
  }

  /**
   * Ripple up partial units one place
   *
   * @private
   * @param {Timespan} ts timespan
   * @param {number} frac accumulated fractional value
   * @param {string} fromUnit source unit name
   * @param {string} toUnit target unit name
   * @param {number} conversion multiplier between units
   * @param {number} digits max number of decimal digits to output
   * @return {number} new fractional value
   */
  function fraction(ts, frac, fromUnit, toUnit, conversion, digits) {
    if (ts[fromUnit] >= 0) {
      frac += ts[fromUnit];
      delete ts[fromUnit];
    }

    frac /= conversion;
    if (frac + 1 <= 1) {
      // drop if below machine epsilon
      return 0;
    }

    if (ts[toUnit] >= 0) {
      // ensure does not have more than specified number of digits
      ts[toUnit] = +(ts[toUnit] + frac).toFixed(digits);
      rippleRounded(ts, toUnit);
      return 0;
    }

    return frac;
  }

  /**
   * Ripple up partial units to next existing
   *
   * @private
   * @param {Timespan} ts
   * @param {number} digits max number of decimal digits to output
   */
  function fractional(ts, digits) {
    var frac = fraction(ts, 0, 'milliseconds', 'seconds', MILLISECONDS_PER_SECOND, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'seconds', 'minutes', SECONDS_PER_MINUTE, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'minutes', 'hours', MINUTES_PER_HOUR, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'hours', 'days', HOURS_PER_DAY, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'days', 'weeks', DAYS_PER_WEEK, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'weeks', 'months', daysPerMonth(ts.refMonth)/DAYS_PER_WEEK, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'months', 'years', daysPerYear(ts.refMonth)/daysPerMonth(ts.refMonth), digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'years', 'decades', YEARS_PER_DECADE, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'decades', 'centuries', DECADES_PER_CENTURY, digits);
    if (!frac) { return; }

    frac = fraction(ts, frac, 'centuries', 'millennia', CENTURIES_PER_MILLENNIUM, digits);

    // should never reach this with remaining fractional value
    if (frac) { throw new Error('Fractional unit overflow'); }
  }

  /**
   * Borrow any underflow units, carry any overflow units
   *
   * @private
   * @param {Timespan} ts
   */
  function ripple(ts) {
    var x;

    if (ts.milliseconds < 0) {
      // ripple seconds down to milliseconds
      x = ceil(-ts.milliseconds / MILLISECONDS_PER_SECOND);
      ts.seconds -= x;
      ts.milliseconds += x * MILLISECONDS_PER_SECOND;

    } else if (ts.milliseconds >= MILLISECONDS_PER_SECOND) {
      // ripple milliseconds up to seconds
      ts.seconds += floor(ts.milliseconds / MILLISECONDS_PER_SECOND);
      ts.milliseconds %= MILLISECONDS_PER_SECOND;
    }

    if (ts.seconds < 0) {
      // ripple minutes down to seconds
      x = ceil(-ts.seconds / SECONDS_PER_MINUTE);
      ts.minutes -= x;
      ts.seconds += x * SECONDS_PER_MINUTE;

    } else if (ts.seconds >= SECONDS_PER_MINUTE) {
      // ripple seconds up to minutes
      ts.minutes += floor(ts.seconds / SECONDS_PER_MINUTE);
      ts.seconds %= SECONDS_PER_MINUTE;
    }

    if (ts.minutes < 0) {
      // ripple hours down to minutes
      x = ceil(-ts.minutes / MINUTES_PER_HOUR);
      ts.hours -= x;
      ts.minutes += x * MINUTES_PER_HOUR;

    } else if (ts.minutes >= MINUTES_PER_HOUR) {
      // ripple minutes up to hours
      ts.hours += floor(ts.minutes / MINUTES_PER_HOUR);
      ts.minutes %= MINUTES_PER_HOUR;
    }

    if (ts.hours < 0) {
      // ripple days down to hours
      x = ceil(-ts.hours / HOURS_PER_DAY);
      ts.days -= x;
      ts.hours += x * HOURS_PER_DAY;

    } else if (ts.hours >= HOURS_PER_DAY) {
      // ripple hours up to days
      ts.days += floor(ts.hours / HOURS_PER_DAY);
      ts.hours %= HOURS_PER_DAY;
    }

    while (ts.days < 0) {
      // NOTE: never actually seen this loop more than once
      // ripple months down to days
      ts.months--;
      ts.days += borrowMonths(ts.refMonth, 1);
    }

    // weeks is always zero here

    if (ts.days >= DAYS_PER_WEEK) {
      // ripple days up to weeks
      ts.weeks += floor(ts.days / DAYS_PER_WEEK);
      ts.days %= DAYS_PER_WEEK;
    }

    if (ts.months < 0) {
      // ripple years down to months
      x = ceil(-ts.months / MONTHS_PER_YEAR);
      ts.years -= x;
      ts.months += x * MONTHS_PER_YEAR;

    } else if (ts.months >= MONTHS_PER_YEAR) {
      // ripple months up to years
      ts.years += floor(ts.months / MONTHS_PER_YEAR);
      ts.months %= MONTHS_PER_YEAR;
    }

    // years is always non-negative here
    // decades, centuries and millennia are always zero here

    if (ts.years >= YEARS_PER_DECADE) {
      // ripple years up to decades
      ts.decades += floor(ts.years / YEARS_PER_DECADE);
      ts.years %= YEARS_PER_DECADE;

      if (ts.decades >= DECADES_PER_CENTURY) {
        // ripple decades up to centuries
        ts.centuries += floor(ts.decades / DECADES_PER_CENTURY);
        ts.decades %= DECADES_PER_CENTURY;

        if (ts.centuries >= CENTURIES_PER_MILLENNIUM) {
          // ripple centuries up to millennia
          ts.millennia += floor(ts.centuries / CENTURIES_PER_MILLENNIUM);
          ts.centuries %= CENTURIES_PER_MILLENNIUM;
        }
      }
    }
  }

  /**
   * Remove any units not requested
   *
   * @private
   * @param {Timespan} ts
   * @param {number} units the units to populate
   * @param {number} max number of labels to output
   * @param {number} digits max number of decimal digits to output
   */
  function pruneUnits(ts, units, max, digits) {
    var count = 0;

    // Calc from largest unit to smallest to prevent underflow
    if (!(units & MILLENNIA) || (count >= max)) {
      // ripple millennia down to centuries
      ts.centuries += ts.millennia * CENTURIES_PER_MILLENNIUM;
      delete ts.millennia;

    } else if (ts.millennia) {
      count++;
    }

    if (!(units & CENTURIES) || (count >= max)) {
      // ripple centuries down to decades
      ts.decades += ts.centuries * DECADES_PER_CENTURY;
      delete ts.centuries;

    } else if (ts.centuries) {
      count++;
    }

    if (!(units & DECADES) || (count >= max)) {
      // ripple decades down to years
      ts.years += ts.decades * YEARS_PER_DECADE;
      delete ts.decades;

    } else if (ts.decades) {
      count++;
    }

    if (!(units & YEARS) || (count >= max)) {
      // ripple years down to months
      ts.months += ts.years * MONTHS_PER_YEAR;
      delete ts.years;

    } else if (ts.years) {
      count++;
    }

    if (!(units & MONTHS) || (count >= max)) {
      // ripple months down to days
      if (ts.months) {
        ts.days += borrowMonths(ts.refMonth, ts.months);
      }
      delete ts.months;

      if (ts.days >= DAYS_PER_WEEK) {
        // ripple day overflow back up to weeks
        ts.weeks += floor(ts.days / DAYS_PER_WEEK);
        ts.days %= DAYS_PER_WEEK;
      }

    } else if (ts.months) {
      count++;
    }

    if (!(units & WEEKS) || (count >= max)) {
      // ripple weeks down to days
      ts.days += ts.weeks * DAYS_PER_WEEK;
      delete ts.weeks;

    } else if (ts.weeks) {
      count++;
    }

    if (!(units & DAYS) || (count >= max)) {
      //ripple days down to hours
      ts.hours += ts.days * HOURS_PER_DAY;
      delete ts.days;

    } else if (ts.days) {
      count++;
    }

    if (!(units & HOURS) || (count >= max)) {
      // ripple hours down to minutes
      ts.minutes += ts.hours * MINUTES_PER_HOUR;
      delete ts.hours;

    } else if (ts.hours) {
      count++;
    }

    if (!(units & MINUTES) || (count >= max)) {
      // ripple minutes down to seconds
      ts.seconds += ts.minutes * SECONDS_PER_MINUTE;
      delete ts.minutes;

    } else if (ts.minutes) {
      count++;
    }

    if (!(units & SECONDS) || (count >= max)) {
      // ripple seconds down to milliseconds
      ts.milliseconds += ts.seconds * MILLISECONDS_PER_SECOND;
      delete ts.seconds;

    } else if (ts.seconds) {
      count++;
    }

    // nothing to ripple milliseconds down to
    // so ripple back up to smallest existing unit as a fractional value
    if (!(units & MILLISECONDS) || (count >= max)) {
      fractional(ts, digits);
    }
  }

  /**
   * Populates the Timespan object
   *
   * @private
   * @param {Timespan} ts
   * @param {?Date} start the starting date
   * @param {?Date} end the ending date
   * @param {number} units the units to populate
   * @param {number} max number of labels to output
   * @param {number} digits max number of decimal digits to output
   */
  function populate(ts, start, end, units, max, digits) {
    var now = new Date();

    ts.start = start = start || now;
    ts.end = end = end || now;
    ts.units = units;

    ts.value = end.getTime() - start.getTime();
    if (ts.value < 0) {
      // swap if reversed
      var tmp = end;
      end = start;
      start = tmp;
    }

    // reference month for determining days in month
    ts.refMonth = new Date(start.getFullYear(), start.getMonth(), 15, 12, 0, 0);
    try {
      // reset to initial deltas
      ts.millennia = 0;
      ts.centuries = 0;
      ts.decades = 0;
      ts.years = end.getFullYear() - start.getFullYear();
      ts.months = end.getMonth() - start.getMonth();
      ts.weeks = 0;
      ts.days = end.getDate() - start.getDate();
      ts.hours = end.getHours() - start.getHours();
      ts.minutes = end.getMinutes() - start.getMinutes();
      ts.seconds = end.getSeconds() - start.getSeconds();
      ts.milliseconds = end.getMilliseconds() - start.getMilliseconds();

      ripple(ts);
      pruneUnits(ts, units, max, digits);

    } finally {
      delete ts.refMonth;
    }

    return ts;
  }

  /**
   * Determine an appropriate refresh rate based upon units
   *
   * @private
   * @param {number} units the units to populate
   * @return {number} milliseconds to delay
   */
  function getDelay(units) {
    if (units & MILLISECONDS) {
      // refresh very quickly
      return MILLISECONDS_PER_SECOND / 30; //30Hz
    }

    if (units & SECONDS) {
      // refresh every second
      return MILLISECONDS_PER_SECOND; //1Hz
    }

    if (units & MINUTES) {
      // refresh every minute
      return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE;
    }

    if (units & HOURS) {
      // refresh hourly
      return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR;
    }

    if (units & DAYS) {
      // refresh daily
      return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY;
    }

    // refresh the rest weekly
    return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY * DAYS_PER_WEEK;
  }

  /**
   * API entry point
   *
   * @public
   * @param {Date|number|Timespan|null|function(Timespan,number)} start the starting date
   * @param {Date|number|Timespan|null|function(Timespan,number)} end the ending date
   * @param {number=} units the units to populate
   * @param {number=} max number of labels to output
   * @param {number=} digits max number of decimal digits to output
   * @return {Timespan|number}
   */
  function countdown(start, end, units, max, digits) {
    var callback;

    // ensure some units or use defaults
    units = +units || DEFAULTS;
    // max must be positive
    max = (max > 0) ? max : NaN;
    // clamp digits to an integer between [0, 20]
    digits = (digits > 0) ? (digits < 20) ? Math.round(digits) : 20 : 0;

    // ensure start date
    var startTS = null;
    if ('function' === typeof start) {
      callback = start;
      start = null;

    } else if (!(start instanceof Date)) {
      if ((start !== null) && isFinite(start)) {
        start = new Date(+start);
      } else {
        if ('object' === typeof start) {
          startTS = /** @type{Timespan} */(start);
        }
        start = null;
      }
    }

    // ensure end date
    var endTS = null;
    if ('function' === typeof end) {
      callback = end;
      end = null;

    } else if (!(end instanceof Date)) {
      if ((end !== null) && isFinite(end)) {
        end = new Date(+end);
      } else {
        if ('object' === typeof end) {
          endTS = /** @type{Timespan} */(end);
        }
        end = null;
      }
    }

    // must wait to interpret timespans until after resolving dates
    if (startTS) {
      start = addToDate(startTS, end);
    }
    if (endTS) {
      end = addToDate(endTS, start);
    }

    if (!start && !end) {
      // used for unit testing
      return new Timespan();
    }

    if (!callback) {
      return populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits));
    }

    // base delay off units
    var delay = getDelay(units),
      timerId,
      fn = function() {
        callback(
          populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)),
          timerId
        );
      };

    fn();
    return (timerId = setInterval(fn, delay));
  }

  /** @public @const @type {number} */
  countdown.MILLISECONDS = MILLISECONDS;
  /** @public @const @type {number} */
  countdown.SECONDS = SECONDS;
  /** @public @const @type {number} */
  countdown.MINUTES = MINUTES;
  /** @public @const @type {number} */
  countdown.HOURS = HOURS;
  /** @public @const @type {number} */
  countdown.DAYS = DAYS;
  /** @public @const @type {number} */
  countdown.WEEKS = WEEKS;
  /** @public @const @type {number} */
  countdown.MONTHS = MONTHS;
  /** @public @const @type {number} */
  countdown.YEARS = YEARS;
  /** @public @const @type {number} */
  countdown.DECADES = DECADES;
  /** @public @const @type {number} */
  countdown.CENTURIES = CENTURIES;<|fim▁hole|>
  /** @public @const @type {number} */
  countdown.MILLENNIA = MILLENNIA;
  /** @public @const @type {number} */
  countdown.DEFAULTS = DEFAULTS;
  /** @public @const @type {number} */
  countdown.ALL = MILLENNIA|CENTURIES|DECADES|YEARS|MONTHS|WEEKS|DAYS|HOURS|MINUTES|SECONDS|MILLISECONDS;

  /**
   * Override the unit labels
   * @public
   * @param {string|Array=} singular a pipe ('|') delimited list of singular unit name overrides
   * @param {string|Array=} plural a pipe ('|') delimited list of plural unit name overrides
   * @param {string=} last a delimiter before the last unit (default: ' and ')
   * @param {string=} delim a delimiter to use between all other units (default: ', ')
   * @param {string=} empty a label to use when all units are zero (default: '')
   * @param {function(number):string=} formatter a function which formats numbers as a string
   */
  countdown.setLabels = function(singular, plural, last, delim, empty, formatter) {
    singular = singular || [];
    if (singular.split) {
      singular = singular.split('|');
    }
    plural = plural || [];
    if (plural.split) {
      plural = plural.split('|');
    }

    for (var i=LABEL_MILLISECONDS; i<=LABEL_MILLENNIA; i++) {
      // override any specified units
      LABELS_SINGLUAR[i] = singular[i] || LABELS_SINGLUAR[i];
      LABELS_PLURAL[i] = plural[i] || LABELS_PLURAL[i];
    }

    LABEL_LAST = ('string' === typeof last) ? last : LABEL_LAST;
    LABEL_DELIM = ('string' === typeof delim) ? delim : LABEL_DELIM;
    LABEL_NOW = ('string' === typeof empty) ? empty : LABEL_NOW;
    formatNumber = ('function' === typeof formatter) ? formatter : formatNumber;
  };

  /**
   * Revert to the default unit labels
   * @public
   */
  var resetLabels = countdown.resetLabels = function() {
    LABELS_SINGLUAR = ' millisecond| second| minute| hour| day| week| month| year| decade| century| millennium'.split('|');
    LABELS_PLURAL = ' milliseconds| seconds| minutes| hours| days| weeks| months| years| decades| centuries| millennia'.split('|');
    LABEL_LAST = ' and ';
    LABEL_DELIM = ', ';
    LABEL_NOW = '';
    formatNumber = function(value) { return '<span class="contest_timedelta">' + value + "</span>"; };
  };

  resetLabels();

  if (module && module.exports) {
    module.exports = countdown;

  } else if (typeof window.define === 'function' && typeof window.define.amd !== 'undefined') {
    window.define('countdown', [], function() { return countdown; });
  }

  return countdown;

})(module);<|fim▁end|>
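For orientation, this is a minimal sketch of how the countdown API in the sample above is typically driven. It assumes a browser context; the target date, element id, and unit mask are illustrative and not part of the dataset row.

// Tick roughly once per second until the target date, updating a hypothetical #timer element.
var timerId = countdown(
  new Date(2030, 0, 1),                 // start/target date; end defaults to "now" each tick
  function(ts) {                        // invoked with a fresh Timespan on every tick
    document.getElementById('timer').innerHTML = ts.toHTML('strong');
  },
  countdown.DAYS | countdown.HOURS | countdown.MINUTES | countdown.SECONDS
);
// When given a callback, countdown() returns the setInterval id,
// so updates can later be stopped with window.clearInterval(timerId).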
<|file_name|>lex-bad-fp-base-7.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>

fn main() {
    let g = 0o7.0e7f64; //~ ERROR: octal float literal is not supported
}<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.