<|file_name|>simple-imap.js<|end_file_name|>

var Imap = require('imap'),
    MailParser = require('mailparser').MailParser,
    moment = require('moment'),
    util = require('util'),
    events = require('events');

var SimpleImap = function(options) {
  this.options = options;
  this.imap = null;

  this.start = function() {
    if (this.imap === null) {
      this.imap = new Imap(this.options);
      var selfImap = this.imap,
          self = this;

      selfImap.on('ready', function() {
        self.emit('ready');
        selfImap.openBox(self.options.mailbox, false, function() {
          self.emit('open');
        });
      });

      selfImap.on('mail', function(num) {
        selfImap.search(['UNSEEN'], function(err, result) {
          if (result.length) {
            var f = selfImap.fetch(result, {
              markSeen: true,
              struct: true,
              bodies: ''
            });
            f.on('message', function(msg, seqNo) {
              msg.on('body', function(stream, info) {
                var buffer = '';
                stream.on('data', function(chunk) {
                  buffer += chunk.toString('utf8');
                });
                stream.on('end', function() {
                  var mailParser = new MailParser();
                  mailParser.on('end', function(mailObject) {
                    self.emit('mail', {
                      from: mailObject.from,
                      subject: mailObject.subject,
                      text: mailObject.text,
                      html: mailObject.html,
                      date: moment(mailObject.date).format('YYYY-MM-DD HH:mm:ss')
                    });
                  });
                  mailParser.write(buffer);
                  mailParser.end();
                });
              });
            });
          }
        });
      });

      selfImap.on('end', function() {
        self.emit('end');
      });

      selfImap.on('error', function(err) {
        self.emit('error', err);
      });

      selfImap.on('close', function(hadError) {
        self.emit('close', hadError);
      });
    }

    this.imap.connect();
  };

  this.stop = function() {
    this.imap.destroy();
  };

  this.restart = function() {
    this.stop();
    if (arguments.length >= 1) this.options = arguments[0];
    this.start();
  };

  this.getImap = function() {
    return this.imap;
  };
};

util.inherits(SimpleImap, events.EventEmitter);

module.exports = SimpleImap;

<|file_name|>serv.rs<|end_file_name|>

// Copyright 2021 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::fs::File;
use std::io;
use std::net::{IpAddr, Shutdown, SocketAddr, SocketAddrV4, TcpListener, TcpStream};
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;

use crate::chain;
use crate::chain::txhashset::BitmapChunk;
use crate::core::core;
use crate::core::core::hash::Hash;
use crate::core::core::{OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::core::global;
use crate::core::pow::Difficulty;
use crate::handshake::Handshake;
use crate::peer::Peer;
use crate::peers::Peers;
use crate::store::PeerStore;
use crate::types::{
    Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
    TxHashSetRead,
};
use crate::util::secp::pedersen::RangeProof;
use crate::util::StopState;
use chrono::prelude::{DateTime, Utc};

/// P2P server implementation, handling bootstrapping to find and connect to
/// peers, receiving connections from other peers and keeping track of all of them.
pub struct Server {
    pub config: P2PConfig,
    capabilities: Capabilities,
    handshake: Arc<Handshake>,
    pub peers: Arc<Peers>,
    stop_state: Arc<StopState>,
}

// TODO TLS
impl Server {
    /// Creates a new idle p2p server with no peers
    pub fn new(
        db_root: &str,
        capabilities: Capabilities,
        config: P2PConfig,
        adapter: Arc<dyn ChainAdapter>,
        genesis: Hash,
        stop_state: Arc<StopState>,
    ) -> Result<Server, Error> {
        Ok(Server {
            config: config.clone(),
            capabilities,
            handshake: Arc::new(Handshake::new(genesis, config.clone())),
            peers: Arc::new(Peers::new(PeerStore::new(db_root)?, adapter, config)),
            stop_state,
        })
    }

    /// Starts a new TCP server and listens to incoming connections. This is a
    /// blocking call until the TCP server stops.
    pub fn listen(&self) -> Result<(), Error> {
        // start TCP listener and handle incoming connections
        let addr = SocketAddr::new(self.config.host, self.config.port);
        let listener = TcpListener::bind(addr)?;
        listener.set_nonblocking(true)?;

        let sleep_time = Duration::from_millis(5);
        loop {
            // Pause peer ingress connection request. Only for tests.
            if self.stop_state.is_paused() {
                thread::sleep(Duration::from_secs(1));
                continue;
            }

            match listener.accept() {
                Ok((stream, peer_addr)) => {
                    // We want our TCP stream to be in blocking mode.
                    // The TCP listener is in nonblocking mode so we *must* explicitly
                    // move the accepted TCP stream into blocking mode (or all kinds of
                    // bad things can and will happen).
                    // A nonblocking TCP listener will accept nonblocking TCP streams which
                    // we do not want.
                    stream.set_nonblocking(false)?;

                    let mut peer_addr = PeerAddr(peer_addr);

                    // attempt to see if it is an ipv4-mapped ipv6;
                    // if yes, convert to ipv4
                    if peer_addr.0.is_ipv6() {
                        if let IpAddr::V6(ipv6) = peer_addr.0.ip() {
                            if let Some(ipv4) = ipv6.to_ipv4() {
                                peer_addr = PeerAddr(SocketAddr::V4(SocketAddrV4::new(
                                    ipv4,
                                    peer_addr.0.port(),
                                )))
                            }
                        }
                    }

                    if self.check_undesirable(&stream) {
                        // Shutdown the incoming TCP connection if it is not desired
                        if let Err(e) = stream.shutdown(Shutdown::Both) {
                            debug!("Error shutting down conn: {:?}", e);
                        }
                        continue;
                    }
                    match self.handle_new_peer(stream) {
                        Err(Error::ConnectionClose) => debug!("shutting down, ignoring a new peer"),
                        Err(e) => {
                            debug!("Error accepting peer {}: {:?}", peer_addr.to_string(), e);
                            let _ = self.peers.add_banned(peer_addr, ReasonForBan::BadHandshake);
                        }
                        Ok(_) => {}
                    }
                }
                Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
                    // nothing to do, will retry in next iteration
                }
                Err(e) => {
                    debug!("Couldn't establish new client connection: {:?}", e);
                }
            }
            if self.stop_state.is_stopped() {
                break;
            }
            thread::sleep(sleep_time);
        }
        Ok(())
    }

    /// Asks the server to connect to a new peer. Directly returns the peer if
    /// we're already connected to the provided address.
    pub fn connect(&self, addr: PeerAddr) -> Result<Arc<Peer>, Error> {
        if self.stop_state.is_stopped() {
            return Err(Error::ConnectionClose);
        }

        if Peer::is_denied(&self.config, addr) {
            debug!("connect_peer: peer {} denied, not connecting.", addr);
            return Err(Error::ConnectionClose);
        }

        if global::is_production_mode() {
            let hs = self.handshake.clone();
            let addrs = hs.addrs.read();
            if addrs.contains(&addr) {
                debug!("connect: ignore connecting to PeerWithSelf, addr: {}", addr);
                return Err(Error::PeerWithSelf);
            }
        }

        if let Some(p) = self.peers.get_connected_peer(addr) {
            // if we're already connected to the addr, just return the peer
            trace!("connect_peer: already connected {}", addr);
            return Ok(p);
        }

        trace!(
            "connect_peer: on {}:{}. connecting to {}",
            self.config.host,
            self.config.port,
            addr
        );
        match TcpStream::connect_timeout(&addr.0, Duration::from_secs(10)) {
            Ok(stream) => {
                let addr = SocketAddr::new(self.config.host, self.config.port);
                let total_diff = self.peers.total_difficulty()?;

                let peer = Peer::connect(
                    stream,
                    self.capabilities,
                    total_diff,
                    PeerAddr(addr),
                    &self.handshake,
                    self.peers.clone(),
                )?;
                let peer = Arc::new(peer);
                self.peers.add_connected(peer.clone())?;
                Ok(peer)
            }
            Err(e) => {
                trace!(
                    "connect_peer: on {}:{}. Could not connect to {}: {:?}",
                    self.config.host,
                    self.config.port,
                    addr,
                    e
                );
                Err(Error::Connection(e))
            }
        }
    }

    fn handle_new_peer(&self, stream: TcpStream) -> Result<(), Error> {
        if self.stop_state.is_stopped() {
            return Err(Error::ConnectionClose);
        }
        let total_diff = self.peers.total_difficulty()?;

        // accept the peer and add it to the server map
        let peer = Peer::accept(
            stream,
            self.capabilities,
            total_diff,
            &self.handshake,
            self.peers.clone(),
        )?;
        self.peers.add_connected(Arc::new(peer))?;
        Ok(())
    }

    /// Checks whether there's any reason we don't want to accept an incoming peer
    /// connection. There can be a few of them:
    /// 1. Accepting the peer connection would exceed the configured maximum allowed
    ///    inbound peer count. Note that seed nodes may wish to increase the default
    ///    value for PEER_LISTENER_BUFFER_COUNT to help with network bootstrapping.
    ///    A default buffer of 8 peers is allowed to help with network growth.
    /// 2. The peer has been previously banned and the ban period hasn't
    ///    expired yet.
    /// 3. We're already connected to a peer at the same IP. While there are
    ///    many reasons multiple peers can legitimately share identical IP
    ///    addresses (NAT), network distribution is improved if they choose
    ///    different sets of peers themselves. In addition, it prevents potential
    ///    duplicate connections, malicious or not.
    fn check_undesirable(&self, stream: &TcpStream) -> bool {
        if self.peers.iter().inbound().connected().count() as u32
            >= self.config.peer_max_inbound_count() + self.config.peer_listener_buffer_count()
        {
            debug!("Accepting new connection will exceed peer limit, refusing connection.");
            return true;
        }
        if let Ok(peer_addr) = stream.peer_addr() {
            let peer_addr = PeerAddr(peer_addr);
            if self.peers.is_banned(peer_addr) {
                debug!("Peer {} banned, refusing connection.", peer_addr);
                return true;
            }
            // The call to is_known() can fail due to contention on the peers map.
            // If it fails we want to default to refusing the connection.
            match self.peers.is_known(peer_addr) {
                Ok(true) => {
                    debug!("Peer {} already known, refusing connection.", peer_addr);
                    return true;
                }
                Err(_) => {
                    error!(
                        "Peer {} is_known check failed, refusing connection.",
                        peer_addr
                    );
                    return true;
                }
                _ => (),
            }
        }
        false
    }

    pub fn stop(&self) {
        self.stop_state.stop();
        self.peers.stop();
    }

    /// Pause means: stop all the current peers connection, only for tests.
    /// Note:
    /// 1. must pause the 'seed' thread also, to avoid the new egress peer connection
    /// 2. must pause the 'p2p-server' thread also, to avoid the new ingress peer connection.
    pub fn pause(&self) {
        self.peers.stop();
    }
}

/// A no-op network adapter used for testing.
pub struct DummyAdapter {}

impl ChainAdapter for DummyAdapter {
    fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
        Ok(Difficulty::min_dma())
    }
    fn total_height(&self) -> Result<u64, chain::Error> {
        Ok(0)
    }
    fn get_transaction(&self, _h: Hash) -> Option<core::Transaction> {
        None
    }
    fn tx_kernel_received(&self, _h: Hash, _peer_info: &PeerInfo) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn transaction_received(
        &self,
        _: core::Transaction,
        _stem: bool,
    ) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn compact_block_received(
        &self,
        _cb: core::CompactBlock,
        _peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn header_received(
        &self,
        _bh: core::BlockHeader,
        _peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn block_received(
        &self,
        _: core::Block,
        _: &PeerInfo,
        _: chain::Options,
    ) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn headers_received(
        &self,
        _: &[core::BlockHeader],
        _: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        Ok(true)
    }
    fn locate_headers(&self, _: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
        Ok(vec![])
    }
    fn get_block(&self, _: Hash, _: &PeerInfo) -> Option<core::Block> {
        None
    }
    fn txhashset_read(&self, _h: Hash) -> Option<TxHashSetRead> {
        unimplemented!()
    }
    fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
        unimplemented!()
    }
    fn txhashset_receive_ready(&self) -> bool {
        false
    }
    fn txhashset_write(
        &self,
        _h: Hash,
        _txhashset_data: File,
        _peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        Ok(false)
    }
    fn txhashset_download_update(
        &self,
        _start_time: DateTime<Utc>,
        _downloaded_size: u64,
        _total_size: u64,
    ) -> bool {
        false
    }
    fn get_tmp_dir(&self) -> PathBuf {
        unimplemented!()
    }
    fn get_tmpfile_pathname(&self, _tmpfile_name: String) -> PathBuf {
        unimplemented!()
    }
    fn get_kernel_segment(
        &self,
        _hash: Hash,
        _id: SegmentIdentifier,
    ) -> Result<Segment<TxKernel>, chain::Error> {
        unimplemented!()
    }
    fn get_bitmap_segment(
        &self,
        _hash: Hash,
        _id: SegmentIdentifier,
    ) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
        unimplemented!()
    }
    fn get_output_segment(
        &self,
        _hash: Hash,
        _id: SegmentIdentifier,
    ) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
        unimplemented!()
    }
    fn get_rangeproof_segment(
        &self,
        _hash: Hash,
        _id: SegmentIdentifier,
    ) -> Result<Segment<RangeProof>, chain::Error> {
        unimplemented!()
    }
}

impl NetAdapter for DummyAdapter {
    fn find_peer_addrs(&self, _: Capabilities) -> Vec<PeerAddr> {
        vec![]
    }
    fn peer_addrs_received(&self, _: Vec<PeerAddr>) {}
    fn peer_difficulty(&self, _: PeerAddr, _: Difficulty, _: u64) {}
    fn is_banned(&self, _: PeerAddr) -> bool {
        false
    }
}

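The accept loop above relies on a subtlety worth calling out: a nonblocking `TcpListener` hands out streams that are themselves nonblocking, so each accepted stream must be switched back to blocking mode before use. A minimal, self-contained sketch of that pattern using only the standard library (this is not Grin code; the bind address and sleep interval are arbitrary):

```rust
use std::io;
use std::net::TcpListener;
use std::thread;
use std::time::Duration;

fn main() -> io::Result<()> {
    let listener = TcpListener::bind("127.0.0.1:3414")?;
    // Poll accept() instead of blocking in it, so the loop can also
    // check shutdown/pause flags between attempts.
    listener.set_nonblocking(true)?;
    loop {
        match listener.accept() {
            Ok((stream, addr)) => {
                // Streams accepted from a nonblocking listener start out
                // nonblocking; move them back to blocking mode before any
                // blocking reads or writes.
                stream.set_nonblocking(false)?;
                println!("accepted connection from {}", addr);
            }
            Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
                // No pending connection yet; back off briefly and retry.
                thread::sleep(Duration::from_millis(5));
            }
            Err(e) => return Err(e),
        }
    }
}
```
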
<|file_name|>choices.py<|end_file_name|>

from model_utils import Choices

SPONSOR_TYPES = Choices(
    ('diamond', 'DIAMOND', 'Diamond Sponsor'),
    ('lanyard', 'LANYARD', 'Lanyard Sponsor'),
    ('track', 'TRACK', 'Track Sponsor'),
    ('foodanddrinks', 'FOOD_AND_DRINKS', 'Food & Drinks Sponsor'),
    ('lounge', 'LOUNGE', 'Lounge Sponsor'),
    ('standard', 'STANDARD', 'Standard Sponsor'),
    ('supporter', 'SUPPORTER', 'Supporter Sponsor'),
    ('mainmedia', 'MAIN_MEDIA', 'Main Media Sponsor'),
    ('media', 'MEDIA', 'Media sponsors'),
    ('video', 'VIDEO', 'Video sponsors'),
)

<|file_name|>index.js<|end_file_name|>

export { default } from './RubymineOriginal'

<|file_name|>Feature.java<|end_file_name|>

package com.lgvalle.beaufitulphotos.fivehundredpxs.model;

import com.lgvalle.beaufitulphotos.R;

/**
 * Created by luis.gonzalez on 23/07/14.
 * Enum to represent service features
 */
public enum Feature {
    Popular("popular", R.string.feature_popular),
    HighestRated("highest_rated", R.string.feature_highest_rated);

    private final String param;
    private final int title;

    Feature(String param, int title) {
        this.param = param;
        this.title = title;
    }

    public String getParam() {
        return param;
    }

    public int getTitle() {
        return title;
    }
}

<|file_name|>IntType.java<|end_file_name|>

/*
 * Copyright (C) 2010---2013 星星(wuweixing)<[email protected]>
 *
 * This file is part of Wabacus
 *
 * Wabacus is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package com.wabacus.system.datatype;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.wabacus.config.database.type.AbsDatabaseType;

public class IntType extends AbsNumberType
{
    private final static Log log = LogFactory.getLog(IntType.class);

    private final static Map<String, AbsNumberType> mIntTypeObjects = new HashMap<String, AbsNumberType>();

    public Object getColumnValue(ResultSet rs, String column, AbsDatabaseType dbtype) throws SQLException
    {
        return Integer.valueOf(rs.getInt(column));
    }

    public Object getColumnValue(ResultSet rs, int iindex, AbsDatabaseType dbtype) throws SQLException
    {
        return Integer.valueOf(rs.getInt(iindex));
    }

    public void setPreparedStatementValue(int iindex, String value, PreparedStatement pstmt,
            AbsDatabaseType dbtype) throws SQLException
    {
        log.debug("setInt(" + iindex + "," + value + ")");
        Object objTmp = label2value(value);
        if (objTmp == null)
        {
            pstmt.setObject(iindex, null, java.sql.Types.INTEGER);
        } else
        {
            pstmt.setInt(iindex, (Integer) objTmp);
        }
    }

    public Class getJavaTypeClass()
    {
        return Integer.class;
    }

    public Object label2value(String label)
    {
        if (label == null || label.trim().equals("")) return null;
        if (this.numberformat != null && !this.numberformat.trim().equals(""))
        {
            return Integer.valueOf(this.getNumber(label.trim()).intValue());
        } else
        {
            int idxdot = label.indexOf(".");
            if (idxdot == 0)
            {
                label = "0";
            } else if (idxdot > 0)
            {
                label = label.substring(0, idxdot).trim();
                if (label.equals("")) label = "0";
            }
            return Integer.valueOf(label.trim());
        }
    }

    public String value2label(Object value)
    {
        if (value == null) return "";
        if (!(value instanceof Integer)) return String.valueOf(value);
        if (this.numberformat != null && !this.numberformat.trim().equals(""))
        {
            DecimalFormat df = new DecimalFormat(this.numberformat);
            return df.format((Integer) value);
        } else
        {
            return String.valueOf(value);
        }
    }

    protected Map<String, AbsNumberType> getAllMNumberTypeObjects()
    {
        return mIntTypeObjects;
    }
}

<|file_name|>mod.rs<|end_file_name|>

// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.

/*!
This module provides an implementation of an mpsc channel based on
crossbeam_channel. Compared to crossbeam_channel, this implementation
supports closed detection and try operations.
*/

pub mod batch;

use crossbeam::channel::{
    self, RecvError, RecvTimeoutError, SendError, TryRecvError, TrySendError,
};
use std::cell::Cell;
use std::sync::atomic::{AtomicBool, AtomicIsize, Ordering};
use std::sync::Arc;
use std::time::Duration;

struct State {
    sender_cnt: AtomicIsize,
    connected: AtomicBool,
}

impl State {
    fn new() -> State {
        State {
            sender_cnt: AtomicIsize::new(1),
            connected: AtomicBool::new(true),
        }
    }

    #[inline]
    fn is_sender_connected(&self) -> bool {
        self.connected.load(Ordering::Acquire)
    }
}

/// A sender that can be closed.
///
/// Closed means that the sender can no longer send out any messages after closing.
/// However, the receiver may still block at receiving.
///
/// Note that a receiver should report an error in such a case.
/// However, fully implementing a close mechanism, like waking up waiting
/// receivers, comes at a performance cost, and the mechanism is unnecessary
/// for current usage.
///
/// TODO: use builtin close when crossbeam-rs/crossbeam#236 is resolved.
pub struct Sender<T> {
    sender: channel::Sender<T>,
    state: Arc<State>,
}

impl<T> Clone for Sender<T> {
    #[inline]
    fn clone(&self) -> Sender<T> {
        self.state.sender_cnt.fetch_add(1, Ordering::AcqRel);
        Sender {
            sender: self.sender.clone(),
            state: self.state.clone(),
        }
    }
}

impl<T> Drop for Sender<T> {
    #[inline]
    fn drop(&mut self) {
        let res = self.state.sender_cnt.fetch_add(-1, Ordering::AcqRel);
        if res == 1 {
            self.close_sender();
        }
    }
}

/// The receive end of a channel.
pub struct Receiver<T> {
    receiver: channel::Receiver<T>,
    state: Arc<State>,
}

impl<T> Sender<T> {
    /// Returns the number of messages in the channel.
    #[inline]
    pub fn len(&self) -> usize {
        self.sender.len()
    }

    /// Returns true if the channel is empty.
    ///
    /// Note: Zero-capacity channels are always empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.sender.is_empty()
    }

    /// Blocks the current thread until a message is sent or the channel is disconnected.
    #[inline]
    pub fn send(&self, t: T) -> Result<(), SendError<T>> {
        if self.state.is_sender_connected() {
            self.sender.send(t)
        } else {
            Err(SendError(t))
        }
    }

    /// Attempts to send a message into the channel without blocking.
    #[inline]
    pub fn try_send(&self, t: T) -> Result<(), TrySendError<T>> {
        if self.state.is_sender_connected() {
            self.sender.try_send(t)
        } else {
            Err(TrySendError::Disconnected(t))
        }
    }

    /// Stop the sender from sending any further messages.
    #[inline]
    pub fn close_sender(&self) {
        self.state.connected.store(false, Ordering::Release);
    }

    /// Check if the sender is still connected.
    #[inline]
    pub fn is_sender_connected(&self) -> bool {
        self.state.is_sender_connected()
    }
}

impl<T> Receiver<T> {
    /// Returns the number of messages in the channel.
    #[inline]
    pub fn len(&self) -> usize {
        self.receiver.len()
    }

    /// Returns true if the channel is empty.
    ///
    /// Note: Zero-capacity channels are always empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.receiver.is_empty()
    }

    /// Blocks the current thread until a message is received or
    /// the channel is empty and disconnected.
    #[inline]
    pub fn recv(&self) -> Result<T, RecvError> {
        self.receiver.recv()
    }

    /// Attempts to receive a message from the channel without blocking.
    #[inline]
    pub fn try_recv(&self) -> Result<T, TryRecvError> {
        self.receiver.try_recv()
    }

    /// Waits for a message to be received from the channel,
    /// but only for a limited time.
    #[inline]
    pub fn recv_timeout(&self, timeout: Duration) -> Result<T, RecvTimeoutError> {
        self.receiver.recv_timeout(timeout)
    }
}

impl<T> Drop for Receiver<T> {
    #[inline]
    fn drop(&mut self) {
        self.state.connected.store(false, Ordering::Release);
    }
}

/// Create an unbounded channel.
#[inline]
pub fn unbounded<T>() -> (Sender<T>, Receiver<T>) {
    let state = Arc::new(State::new());
    let (sender, receiver) = channel::unbounded();
    (
        Sender {
            sender,
            state: state.clone(),
        },
        Receiver { receiver, state },
    )
}

/// Create a bounded channel.
#[inline]
pub fn bounded<T>(cap: usize) -> (Sender<T>, Receiver<T>) {
    let state = Arc::new(State::new());
    let (sender, receiver) = channel::bounded(cap);
    (
        Sender {
            sender,
            state: state.clone(),
        },
        Receiver { receiver, state },
    )
}

const CHECK_INTERVAL: usize = 8;

/// A channel sender that loosely limits the maximum number of pending messages.
pub struct LooseBoundedSender<T> {
    sender: Sender<T>,
    tried_cnt: Cell<usize>,
    limit: usize,
}

impl<T> LooseBoundedSender<T> {
    /// Returns the number of messages in the channel.
    #[inline]
    pub fn len(&self) -> usize {
        self.sender.len()
    }

    /// Returns true if the channel is empty.
    ///
    /// Note: Zero-capacity channels are always empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.sender.is_empty()
    }

    /// Send a message regardless of its capacity limit.
    #[inline]
    pub fn force_send(&self, t: T) -> Result<(), SendError<T>> {
        let cnt = self.tried_cnt.get();
        self.tried_cnt.set(cnt + 1);
        self.sender.send(t)
    }

    /// Attempts to send a message into the channel without blocking.
    #[inline]
    pub fn try_send(&self, t: T) -> Result<(), TrySendError<T>> {
        let cnt = self.tried_cnt.get();
        if cnt < CHECK_INTERVAL {
            self.tried_cnt.set(cnt + 1);
        } else if self.len() < self.limit {
            self.tried_cnt.set(1);
        } else {
            return Err(TrySendError::Full(t));
        }

        match self.sender.send(t) {
            Ok(()) => Ok(()),
            Err(SendError(t)) => Err(TrySendError::Disconnected(t)),
        }
    }

    /// Stop the sender from sending any further messages.
    #[inline]
    pub fn close_sender(&self) {
        self.sender.close_sender();
    }

    /// Check if the sender is still connected.
    #[inline]
    pub fn is_sender_connected(&self) -> bool {
        self.sender.state.is_sender_connected()
    }
}

impl<T> Clone for LooseBoundedSender<T> {
    #[inline]
    fn clone(&self) -> LooseBoundedSender<T> {
        LooseBoundedSender {
            sender: self.sender.clone(),
            tried_cnt: self.tried_cnt.clone(),
            limit: self.limit,
        }
    }
}

/// Create a loosely bounded channel with the given capacity.
pub fn loose_bounded<T>(cap: usize) -> (LooseBoundedSender<T>, Receiver<T>) {
    let (sender, receiver) = unbounded();
    (
        LooseBoundedSender {
            sender,
            tried_cnt: Cell::new(0),
            limit: cap,
        },
        receiver,
    )
}

#[cfg(test)]
mod tests {
    use crossbeam::channel::*;
    use std::thread;
    use std::time::*;

    #[test]
    fn test_bounded() {
        let (tx, rx) = super::bounded::<u64>(10);
        tx.try_send(1).unwrap();
        for i in 2..11 {
            tx.clone().send(i).unwrap();
        }
        assert_eq!(tx.try_send(11), Err(TrySendError::Full(11)));

        assert_eq!(rx.try_recv(), Ok(1));
        for i in 2..11 {
            assert_eq!(rx.recv(), Ok(i));
        }
        assert_eq!(rx.try_recv(), Err(TryRecvError::Empty));
        let timer = Instant::now();
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Timeout)
        );
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);

        drop(rx);
        assert_eq!(tx.send(2), Err(SendError(2)));
        assert_eq!(tx.try_send(2), Err(TrySendError::Disconnected(2)));
        assert!(!tx.is_sender_connected());

        let (tx, rx) = super::bounded::<u64>(10);
        tx.send(2).unwrap();
        tx.send(3).unwrap();
        drop(tx);
        assert_eq!(rx.try_recv(), Ok(2));
        assert_eq!(rx.recv(), Ok(3));
        assert_eq!(rx.recv(), Err(RecvError));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected));
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Disconnected)
        );

        let (tx, rx) = super::bounded::<u64>(10);
        assert!(tx.is_empty());
        assert!(tx.is_sender_connected());
        assert_eq!(tx.len(), 0);
        assert!(rx.is_empty());
        assert_eq!(rx.len(), 0);
        tx.send(2).unwrap();
        tx.send(3).unwrap();
        assert_eq!(tx.len(), 2);
        assert_eq!(rx.len(), 2);
        tx.close_sender();
        assert_eq!(tx.send(3), Err(SendError(3)));
        assert_eq!(tx.try_send(3), Err(TrySendError::Disconnected(3)));
        assert!(!tx.is_sender_connected());
        assert_eq!(rx.try_recv(), Ok(2));
        assert_eq!(rx.recv(), Ok(3));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Empty));
        // assert_eq!(rx.recv(), Err(RecvError));

        let (tx1, rx1) = super::bounded::<u64>(10);
        let (tx2, rx2) = super::bounded::<u64>(0);
        thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));
            tx1.send(10).unwrap();
            thread::sleep(Duration::from_millis(100));
            assert_eq!(rx2.recv(), Ok(2));
        });
        let timer = Instant::now();
        assert_eq!(rx1.recv(), Ok(10));
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);
        let timer = Instant::now();
        tx2.send(2).unwrap();
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(50), "{:?}", elapsed);
    }

    #[test]
    fn test_unbounded() {
        let (tx, rx) = super::unbounded::<u64>();
        tx.try_send(1).unwrap();
        tx.send(2).unwrap();

        assert_eq!(rx.try_recv(), Ok(1));
        assert_eq!(rx.recv(), Ok(2));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Empty));
        let timer = Instant::now();
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Timeout)
        );
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);

        drop(rx);
        assert_eq!(tx.send(2), Err(SendError(2)));
        assert_eq!(tx.try_send(2), Err(TrySendError::Disconnected(2)));
        assert!(!tx.is_sender_connected());

        let (tx, rx) = super::unbounded::<u64>();
        drop(tx);
        assert_eq!(rx.recv(), Err(RecvError));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected));
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Disconnected)
        );

        let (tx, rx) = super::unbounded::<u64>();
        thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));
            tx.send(10).unwrap();
        });
        let timer = Instant::now();
        assert_eq!(rx.recv(), Ok(10));
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);

        let (tx, rx) = super::unbounded::<u64>();
        assert!(tx.is_empty());
        assert!(tx.is_sender_connected());
        assert_eq!(tx.len(), 0);
        assert!(rx.is_empty());
        assert_eq!(rx.len(), 0);
        tx.send(2).unwrap();
        tx.send(3).unwrap();
        assert_eq!(tx.len(), 2);
        assert_eq!(rx.len(), 2);
        tx.close_sender();
        assert_eq!(tx.send(3), Err(SendError(3)));
        assert_eq!(tx.try_send(3), Err(TrySendError::Disconnected(3)));
        assert!(!tx.is_sender_connected());
        assert_eq!(rx.try_recv(), Ok(2));
        assert_eq!(rx.recv(), Ok(3));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Empty));
        // assert_eq!(rx.recv(), Err(RecvError));
    }

    #[test]
    fn test_loose() {
        let (tx, rx) = super::loose_bounded(10);
        tx.try_send(1).unwrap();
        for i in 2..11 {
            tx.clone().try_send(i).unwrap();
        }
        for i in 1..super::CHECK_INTERVAL {
            tx.force_send(i).unwrap();
        }
        assert_eq!(tx.try_send(4), Err(TrySendError::Full(4)));
        tx.force_send(5).unwrap();
        assert_eq!(tx.try_send(6), Err(TrySendError::Full(6)));

        assert_eq!(rx.try_recv(), Ok(1));
        for i in 2..11 {
            assert_eq!(rx.recv(), Ok(i));
        }
        for i in 1..super::CHECK_INTERVAL {
            assert_eq!(rx.try_recv(), Ok(i));
        }
        assert_eq!(rx.try_recv(), Ok(5));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Empty));
        let timer = Instant::now();
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Timeout)
        );
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);

        tx.force_send(1).unwrap();
        drop(rx);
        assert_eq!(tx.force_send(2), Err(SendError(2)));
        assert_eq!(tx.try_send(2), Err(TrySendError::Disconnected(2)));
        for _ in 0..super::CHECK_INTERVAL {
            assert_eq!(tx.try_send(2), Err(TrySendError::Disconnected(2)));
        }

        let (tx, rx) = super::loose_bounded(10);
        tx.try_send(2).unwrap();
        drop(tx);
        assert_eq!(rx.recv(), Ok(2));
        assert_eq!(rx.recv(), Err(RecvError));
        assert_eq!(rx.try_recv(), Err(TryRecvError::Disconnected));
        assert_eq!(
            rx.recv_timeout(Duration::from_millis(100)),
            Err(RecvTimeoutError::Disconnected)
        );

        let (tx, rx) = super::loose_bounded(10);
        thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));
            tx.try_send(10).unwrap();
        });
        let timer = Instant::now();
        assert_eq!(rx.recv(), Ok(10));
        let elapsed = timer.elapsed();
        assert!(elapsed >= Duration::from_millis(100), "{:?}", elapsed);
    }
}

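To make the close-detection semantics concrete, here is a small usage sketch of this module. The `crate::mpsc` import path is an assumption; point it at wherever this module lives in your crate:

```rust
// Hypothetical path to this module; adjust for your crate layout.
use crate::mpsc;

fn demo() {
    let (tx, rx) = mpsc::loose_bounded::<u64>(4);

    // try_send only re-checks the limit every CHECK_INTERVAL calls, so a
    // short burst may overshoot the cap slightly; hence "loosely" bounded.
    for i in 0..4 {
        tx.try_send(i).unwrap();
    }

    // force_send bypasses the capacity limit entirely.
    tx.force_send(99).unwrap();

    // Closing the sender makes subsequent sends fail fast...
    tx.close_sender();
    assert!(tx.try_send(5).is_err());

    // ...but messages already in the queue can still be drained.
    assert_eq!(rx.recv().unwrap(), 0);
}
```
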
<|file_name|>extractor.rs<|end_file_name|>

// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with
// this file, You can obtain one at https://mozilla.org/MPL/2.0/.

use message::Message;
use error::{Error, Result};
use common::{find_string, escape_string};

use std::collections::HashMap;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::io::Write;

use regex::Regex;
use walkdir::WalkDir;

/// Struct that extracts all messages from source code and can print them
/// to a `.pot` file.
///
/// This file can then be used as a starting point to begin translation.
/// It should be relatively similar to `gettext` generated files.
///
/// # Example
///
/// ```
/// use crowbook_intl::Extractor;
/// let mut extractor = Extractor::new();
/// extractor.add_messages_from_dir("src/").unwrap();
/// println!("{}", extractor.generate_pot_file());
/// ```
///
/// # Note
///
/// This struct only adds messages that are considered as needing localization,
/// that is, the first argument of calls to the `lformat!` macro.
#[derive(Debug, Clone)]
pub struct Extractor {
    messages: HashMap<String, Message>,
    // Maps the format string (as used by `lformat!`) to the actual escaped
    // string given to the potfile
    orig_strings: HashMap<String, String>,
}

impl Extractor {
    /// Create a new, empty extractor
    pub fn new() -> Extractor {
        Extractor {
            messages: HashMap::new(),
            orig_strings: HashMap::new(),
        }
    }

    /// Returns a hashmap mapping the original strings (as used by `lformat!`)
    /// to escaped strings. Only contains strings that are different and
    /// must thus be handled.
    pub fn original_strings<'a>(&'a self) -> &'a HashMap<String, String> {
        &self.orig_strings
    }

    /// Add all the messages contained in a source file
    pub fn add_messages_from_file<P: AsRef<Path>>(&mut self, file: P) -> Result<()> {
        lazy_static! {
            static ref REMOVE_COMMS: Regex = Regex::new(r#"//[^\n]*"#).unwrap();
            static ref FIND_MSGS: Regex = Regex::new(r#"lformat!\("#).unwrap();
        }

        let filename = format!("{}", file.as_ref().display());
        let mut f = try!(File::open(file)
            .map_err(|e| Error::parse(format!("could not open file {}: {}", &filename, e))));
        let mut content = String::new();
        try!(f.read_to_string(&mut content)
            .map_err(|e| Error::parse(format!("could not read file {}: {}", &filename, e))));
        content = REMOVE_COMMS.replace_all(&content, "").into_owned();

        for caps in FIND_MSGS.captures_iter(&content) {
            let pos = caps.get(0).unwrap().end();
            let line = 1 + &content[..pos].bytes().filter(|b| b == &b'\n').count();
            let bytes = content[pos..].as_bytes();
            let orig_msg: String = try!(find_string(bytes)
                .map_err(|_| Error::parse(format!("{}:{}: could not parse as string",
                                                  &filename,
                                                  line))));
            let msg = escape_string(orig_msg.as_str()).into_owned();
            if msg != orig_msg {
                self.orig_strings.insert(orig_msg, msg.clone());
            }
            if self.messages.contains_key(msg.as_str()) {
                self.messages.get_mut(&msg).unwrap().add_source(filename.as_str(), line);
            } else {
                let mut message = Message::new(msg.as_str());
                message.add_source(filename.as_str(), line);
                self.messages.insert(msg, message);
            }
        }
        Ok(())
    }

    /// Add messages from all `.rs` files contained in a directory
    /// (walks through subdirectories)
    pub fn add_messages_from_dir<P: AsRef<Path>>(&mut self, dir: P) -> Result<()> {
        let filtered = WalkDir::new(dir)
            .into_iter()
            .filter_map(|e| e.ok())
            .map(|e| e.path().to_string_lossy().into_owned())
            .filter(|s| s.ends_with(".rs"));
        for filename in filtered {
            try!(self.add_messages_from_file(&filename));
        }
        Ok(())
    }

    /// Generate a pot-like file from the strings extracted from all files (if any)
    pub fn generate_pot_file(&self) -> String {
        let mut output = String::from(POT_HEADER);
        let mut values = self.messages
            .values()
            .collect::<Vec<_>>();
        values.sort();
        for value in values {
            output.push_str(&format!("{}", value));
        }
        output
    }

    /// Write a pot-like file to the specified location
    pub fn write_pot_file(&mut self, file: &str) -> Result<()> {
        let mut f = try!(File::create(file)
            .map_err(|e| Error::new(format!("Could not create file {}: {}", file, e))));
        let content = self.generate_pot_file();
        try!(f.write_all(content.as_bytes())
            .map_err(|e| Error::new(format!("Could not write to file {}: {}", file, e))));
        Ok(())
    }
}

const POT_HEADER: &'static str = r#"# SOME DESCRIPTIVE TITLE
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# LICENSE
# AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"#;

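`Extractor` is typically driven from a build script. A sketch of such a `build.rs` using only the methods defined above; the `lang/messages.pot` output path is an arbitrary choice, and `crowbook-intl` is assumed to be declared as a build dependency:

```rust
// build.rs (sketch)
extern crate crowbook_intl;

use crowbook_intl::Extractor;

fn main() {
    let mut extractor = Extractor::new();
    // Walk src/ and collect the first argument of every lformat!() call.
    extractor.add_messages_from_dir("src/").unwrap();
    // Write a gettext-style template that translators can start from.
    extractor.write_pot_file("lang/messages.pot").unwrap();
}
```
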
<|file_name|>generatePlots.py<|end_file_name|>

import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plot
import matplotlib.pylab
from matplotlib.backends.backend_pdf import PdfPages
import re


def drawPlots(data, plotObj, name, yLabel, position):
    drawing = plotObj.add_subplot(position, 1, position)
    drawing.set_ylabel(yLabel, fontsize=16)
    drawing.set_xlabel("Sample", fontsize=18)
    drawing.plot(data[name], label=name)
    drawing.legend(loc='upper center', bbox_to_anchor=(0.9, 1.128))
    # drawing.legend(loc='upper center')


def drawXtremIOCharts():
    xenvData = np.genfromtxt('xenvPerfStats.csv', dtype=float, delimiter=',', names=True)
    xmsData = np.genfromtxt('xmsPerfStats.csv', dtype=float, delimiter=',', names=True)
    plot.ioff()
    iops = plot.figure(figsize=(20, 15))
    iops.suptitle("IOPs", fontsize=20)
    iopsInit = len(iops.axes)
    bw = plot.figure(figsize=(20, 15))
    bw.suptitle("Bandwidth MB/s", fontsize=20)
    bwInit = len(bw.axes)
    latency = plot.figure(figsize=(20, 15))
    latency.suptitle("Latency, MicroSec.", fontsize=20)
    latencyInit = len(latency.axes)
    xCpu = plot.figure(figsize=(20, 15))
    xCpu.suptitle("X-ENV Utilization", fontsize=20)
    xCpuInit = len(xCpu.axes)
    for name in xmsData.dtype.names:
        if re.search('iops', name):
            drawPlots(xmsData, iops, name, "IOPs", iopsInit + 1)
        if re.search('bandwidth', name):
            drawPlots(xmsData, bw, name, "Bandwidth, MB/s", bwInit + 1)
        if re.search('latency', name):
            drawPlots(xmsData, latency, name, "Latency, MicroSec", latencyInit + 1)
    for name in xenvData.dtype.names:
        drawPlots(xenvData, xCpu, name, "% CPU Utilization", xCpuInit + 1)
    pdfDoc = PdfPages('XtremPerfcharts.pdf')
    pdfDoc.savefig(iops)
    pdfDoc.savefig(bw)
    pdfDoc.savefig(latency)
    pdfDoc.savefig(xCpu)
    pdfDoc.close()
    plot.close(iops)
    plot.close(bw)
    plot.close(latency)
    plot.close(xCpu)
    # plot.show()


def drawVolPerfCharts(vol):
    volData = np.genfromtxt('%s.csv' % (vol), dtype=float, delimiter=',', names=True)
    plot.ioff()
    iops = plot.figure(figsize=(20, 15))
    iops.suptitle("IOPs", fontsize=20)
    iopsInit = len(iops.axes)
    bw = plot.figure(figsize=(20, 15))
    bw.suptitle("Bandwidth MB/s", fontsize=20)
    bwInit = len(bw.axes)
    latency = plot.figure(figsize=(20, 15))
    latency.suptitle("Latency, MicroSec.", fontsize=20)
    latencyInit = len(latency.axes)
    for name in volData.dtype.names:
        if re.search('iops', name):
            drawPlots(volData, iops, name, "IOPs", iopsInit + 1)
        if re.search('bandwidth', name):
            drawPlots(volData, bw, name, "Bandwidth, MB/s", bwInit + 1)
        if re.search('latency', name):
            drawPlots(volData, latency, name, "Latency, MicroSec", latencyInit + 1)
    pdfDoc = PdfPages('%s.pdf' % (vol))
    pdfDoc.savefig(iops)
    pdfDoc.savefig(bw)
    pdfDoc.savefig(latency)
    pdfDoc.close()
    plot.close(iops)
    plot.close(bw)
    plot.close(latency)


def drawEsxCharts(hostname, storageHba):
    pdfDoc = PdfPages('host_%s.pdf' % (hostname))
    data = np.genfromtxt('%s.csv' % (hostname), dtype=float, delimiter=',', names=True)
    # print data.dtype.names
    cpu = plot.figure(figsize=(20, 15))
    cpu.suptitle("% CPU-Utilization", fontsize=20)
    cpuInit = len(cpu.axes)
    memory = plot.figure(figsize=(20, 15))
    memory.suptitle("% Memory Usage", fontsize=20)
    memoryInit = len(memory.axes)
    for name in data.dtype.names:
        if re.match('CPU_Utilization', name):
            plotName = '% CPU Util'
            drawPlots(data, cpu, name, "% CPU Util", cpuInit + 1)
        if re.match('Memory_Usage', name):
            plotName = '% Usage'
            drawPlots(data, memory, name, "% Memory Usage", memoryInit + 1)
    for hba in storageHba:
        hba_iops = plot.figure(figsize=(20, 15))
        hba_iops.suptitle("%s IOPs" % (hba), fontsize=20)
        hbaIopsInit = len(hba_iops.axes)
        hba_bw = plot.figure(figsize=(20, 15))
        hba_bw.suptitle("%s Bandwidth" % (hba), fontsize=20)
        hbaBwInit = len(hba_bw.axes)
        hba_latency = plot.figure(figsize=(20, 15))
        hba_latency.suptitle("%s Latency" % (hba), fontsize=20)
        hbaLatencyInit = len(hba_latency.axes)
        for name in data.dtype.names:
            if re.search('Storage_adapter%s' % (hba), name) and re.search('requests_per_second', name):
                plotName = '%s IOPs' % (hba)
                drawPlots(data, hba_iops, name, "IOPs", hbaIopsInit + 1)
            if re.search('Storage_adapter%s' % (hba), name) and re.search(r'_rate_average', name):
                plotName = 'Bandwidth Utilization'
                drawPlots(data, hba_bw, name, "Bandwidth Utilization", hbaBwInit + 1)
            if re.search('Storage_adapter%s' % (hba), name) and re.search(r'_latency_average', name):
                plotName = 'Latency'
                drawPlots(data, hba_latency, name, "Latency (msec)", hbaLatencyInit + 1)
        pdfDoc.savefig(hba_latency)
        pdfDoc.savefig(hba_iops)
        pdfDoc.savefig(hba_bw)
    pdfDoc.savefig(cpu)
    pdfDoc.savefig(memory)
    pdfDoc.close()
    plot.close(hba_iops)
    plot.close(hba_bw)
    plot.close(hba_latency)
    plot.close(cpu)
    plot.close(memory)
    # plot.show()


def main():
    drawXtremIOCharts()
    # data = np.genfromtxt('xtremPerfStats.csv', dtype=float, delimiter=',', names=True)
    # print data.dtype.names
    # iops = plot.figure()
    # iopsInit = len(iops.axes)
    # bw = plot.figure()
    # bwInit = len(bw.axes)
    # latency = plot.figure()
    # latencyInit = len(latency.axes)
    # xCpu = plot.figure()
    # xCpuInit = len(xCpu.axes)
    # for name in data.dtype.names:
    #     if re.search('iops', name):
    #         drawPlots(data, iops, name, "IOPs", iopsInit + 1)
    #     if re.search('bandwidth', name):
    #         drawPlots(data, bw, name, "Bandwidth, MB/s", bwInit + 1)
    #     if re.search('latency', name):
    #         drawPlots(data, latency, name, "Latency, MicroSec", latencyInit + 1)
    #     if re.search('SC', name):
    #         drawPlots(data, xCpu, name, "% CPU Utilization", xCpuInit + 1)
    # plot.show()


if __name__ == '__main__':
    main()

<|file_name|>SignInteractEvent.java<|end_file_name|>

package net.ess3.api.events;

import com.earth2me.essentials.signs.EssentialsSign;
import net.ess3.api.IUser;
import org.bukkit.event.HandlerList;

/**
 * Fired when an Essentials sign is interacted with.
 *
 * This is primarily intended for use with EssentialsX's sign abstraction - external plugins should not listen on this event.
 */
public class SignInteractEvent extends SignEvent {
    private static final HandlerList handlers = new HandlerList();

    public SignInteractEvent(final EssentialsSign.ISign sign, final EssentialsSign essSign, final IUser user) {
        super(sign, essSign, user);
    }

    @Override
    public HandlerList getHandlers() {
        return handlers;
    }

    public static HandlerList getHandlerList() {
        return handlers;
    }
}

<|file_name|>movement.js<|end_file_name|>

import settings from './../settings'
import {findDistance, limitPositions, chooseOne, randomInt, getAvgPostion} from './general'

module.exports = {
  applyLimbForces: (Eves) => {
    for (var i = 0; i < Eves.length; i++) {
      var eve = Eves[i];
      for (var j = 0; j < eve.limbs.length; j++) {
        var limb = eve.limbs[j];
        var b0 = eve.bodyParts[limb.connections[0]];
        var b1 = eve.bodyParts[limb.connections[1]];
        var displacement, force;
        limb.currentLength = findDistance(b0.pos, b1.pos)
        if (limb.growing) {
          displacement = limb.maxLength - limb.currentLength;
          force = displacement * 0.1 - 1.5;
          if (limb.currentLength >= limb.maxLength) {
            limb.growing = false;
          }
        } else {
          displacement = limb.initialLength - limb.currentLength;
          force = displacement * 0.1 + 1.5;
          if (limb.currentLength <= limb.initialLength) {
            limb.growing = true;
          }
        }
        var xPosDiff = b1.pos.x - b0.pos.x;
        var yPosDiff = b1.pos.y - b0.pos.y;
        if (xPosDiff === 0) {
          var theta = Math.PI;
        } else {
          var theta = Math.atan(yPosDiff / xPosDiff);
        }
        if (xPosDiff >= 0) {
          force *= -1;
        }
        var movementFactor = 1;
        if (limb.growing) {
          movementFactor = 0.5;
        }
        var dVx0 = force / b0.mass * Math.cos(theta);
        var dVy0 = force / b0.mass * Math.sin(theta);
        var dVx1 = -force / b1.mass * Math.cos(theta) * movementFactor;
        var dVy1 = -force / b1.mass * Math.sin(theta) * movementFactor;
        b0.vel.x = Math.min(20, Math.max(b0.vel.x + dVx0, -20));
        b0.vel.y = Math.min(20, Math.max(b0.vel.y + dVy0, -20));
        b1.vel.x = Math.min(20, Math.max(b1.vel.x + dVx1, -20));
        b1.vel.y = Math.min(20, Math.max(b1.vel.y + dVy1, -20));
      }
    }
  },
  updateBodyPartPositions: (Eves) => {
    for (var i = 0; i < Eves.length; i++) {
      var eve = Eves[i];
      for (var j = 0; j < eve.bodyParts.length; j++) {
        var bodyPart = eve.bodyParts[j];
        bodyPart.pos.x += bodyPart.vel.x;
        //check if offscreen
        if (bodyPart.pos.x <= bodyPart.mass || bodyPart.pos.x >= settings.width - bodyPart.mass) {
          bodyPart.pos.x = limitPositions(bodyPart.pos.x, 1, bodyPart.mass)[0];
          bodyPart.vel.x = -1 * bodyPart.vel.x;
        }
        bodyPart.pos.y += bodyPart.vel.y;
        if (bodyPart.pos.y <= bodyPart.mass || bodyPart.pos.y >= settings.height - bodyPart.mass) {
          bodyPart.pos.y = limitPositions(1, bodyPart.pos.y, bodyPart.mass)[1];
          bodyPart.vel.y = -1 * bodyPart.vel.y;
        }
        //check if offscreen
        //NEEDS TO GO ON CLIENT ONLY??
        // d3.select('#' + eve.id + 'b' + j)
        //   .attr('cx', bodyPart.pos.x).attr('cy', bodyPart.pos.y);
      }
      for (var k = 0; k < eve.limbs.length; k++) {
        var b0 = eve.bodyParts[eve.limbs[k].connections[0]];
        var b1 = eve.bodyParts[eve.limbs[k].connections[1]];
        //NEEDS TO GO ON CLIENT ONLY??
        // d3.select('#' + eve.id + 'l' + k)
        //   .attr('x1', b0.pos.x).attr('y1', b0.pos.y)
        //   .attr('x2', b1.pos.x).attr('y2', b1.pos.y);
      }
    }
  }
}

<|file_name|>fragment.rs<|end_file_name|>

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

//! The `Fragment` type, which represents the leaves of the layout tree.

#![deny(unsafe_code)]

use app_units::Au;
use canvas_traits::CanvasMsg;
use context::LayoutContext;
use euclid::{Point2D, Rect, Size2D};
use floats::ClearType;
use flow::{self, Flow};
use flow_ref::{self, FlowRef};
use gfx;
use gfx::display_list::{BLUR_INFLATION_FACTOR, OpaqueNode};
use gfx::text::glyph::CharIndex;
use gfx::text::text_run::{TextRun, TextRunSlice};
use gfx_traits::LayerId;
use incremental::{self, RECONSTRUCT_FLOW, RestyleDamage};
use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFragmentContext, InlineFragmentNodeInfo};
use inline::{InlineMetrics, LAST_FRAGMENT_OF_ELEMENT};
use ipc_channel::ipc::IpcSender;
use layout_debug;
use model::{self, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, specified};
use msg::compositor_msg::LayerType;
use msg::constellation_msg::PipelineId;
use net_traits::image::base::Image;
use net_traits::image_cache_task::UsePlaceholder;
use rustc_serialize::{Encodable, Encoder};
use script::dom::htmlcanvaselement::HTMLCanvasData;
use std::borrow::ToOwned;
use std::cmp::{max, min};
use std::collections::LinkedList;
use std::fmt;
use std::sync::{Arc, Mutex};
use string_cache::Atom;
use style::computed_values::content::ContentItem;
use style::computed_values::{border_collapse, clear, display, mix_blend_mode, overflow_wrap};
use style::computed_values::{overflow_x, position, text_decoration, transform_style};
use style::computed_values::{white_space, word_break, z_index};
use style::properties::ComputedValues;
use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrAuto};
use style::values::computed::{LengthOrPercentageOrNone};
use text;
use text::TextRunScanner;
use url::Url;
use util;
use util::logical_geometry::{LogicalMargin, LogicalRect, LogicalSize, WritingMode};
use util::range::*;
use util::str::slice_chars;
use wrapper::{PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode};

/// Fragments (`struct Fragment`) are the leaves of the layout tree. They cannot position
/// themselves. In general, fragments do not have a simple correspondence with CSS fragments in the
/// specification:
///
/// * Several fragments may correspond to the same CSS box or DOM node. For example, a CSS text box
///   broken across two lines is represented by two fragments.
///
/// * Some CSS fragments are not created at all, such as some anonymous block fragments induced by
///   inline fragments with block-level sibling fragments. In that case, Servo uses an `InlineFlow`
///   with `BlockFlow` siblings; the `InlineFlow` is block-level, but not a block container. It is
///   positioned as if it were a block fragment, but its children are positioned according to
///   inline flow.
///
/// A `SpecificFragmentInfo::Generic` is an empty fragment that contributes only borders, margins,
/// padding, and backgrounds. It is analogous to a CSS nonreplaced content box.
///
/// A fragment's type influences how its styles are interpreted during layout. For example,
/// replaced content such as images are resized differently from tables, text, or other content.
/// Different types of fragments may also contain custom data; for example, text fragments contain
/// text.
///
/// Do not add fields to this structure unless they're really really mega necessary! Fragments get
/// moved around a lot and thus their size impacts performance of layout quite a bit.
///
/// FIXME(#2260, pcwalton): This can be slimmed down some by (at least) moving `inline_context`
/// to be on `InlineFlow` only.
#[derive(Clone)]
pub struct Fragment {
    /// An opaque reference to the DOM node that this `Fragment` originates from.
    pub node: OpaqueNode,

    /// The CSS style of this fragment.
    pub style: Arc<ComputedValues>,

    /// The position of this fragment relative to its owning flow. The size includes padding and
    /// border, but not margin.
    ///
    /// NB: This does not account for relative positioning.
    /// NB: Collapsed borders are not included in this.
    pub border_box: LogicalRect<Au>,

    /// The sum of border and padding; i.e. the distance from the edge of the border box to the
    /// content edge of the fragment.
    pub border_padding: LogicalMargin<Au>,

    /// The margin of the content box.
    pub margin: LogicalMargin<Au>,

    /// Info specific to the kind of fragment. Keep this enum small.
    pub specific: SpecificFragmentInfo,

    /// Holds the style context information for fragments that are part of an inline formatting
    /// context.
    pub inline_context: Option<InlineFragmentContext>,

    /// How damaged this fragment is since last reflow.
    pub restyle_damage: RestyleDamage,

    /// The pseudo-element that this fragment represents.
    pub pseudo: PseudoElementType<()>,

    /// Various flags for this fragment.
    pub flags: FragmentFlags,

    /// A debug ID that is consistent for the life of this fragment (via transform etc).
    pub debug_id: u16,
}

impl Encodable for Fragment {
    fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
        e.emit_struct("fragment", 0, |e| {
            try!(e.emit_struct_field("id", 0, |e| self.debug_id().encode(e)));
            try!(e.emit_struct_field("border_box", 1, |e| self.border_box.encode(e)));
            e.emit_struct_field("margin", 2, |e| self.margin.encode(e))
        })
    }
}

/// Info specific to the kind of fragment.
///
/// Keep this enum small. As in, no more than one word. Or pcwalton will yell at you.
#[derive(Clone)]
pub enum SpecificFragmentInfo {
    Generic,

    /// A piece of generated content that cannot be resolved into `ScannedText` until the generated
    /// content resolution phase (e.g. an ordered list item marker).
    GeneratedContent(Box<GeneratedContentInfo>),

    Iframe(Box<IframeFragmentInfo>),
    Image(Box<ImageFragmentInfo>),
    Canvas(Box<CanvasFragmentInfo>),

    /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was
    /// declared with `display: inline;`.
    InlineAbsoluteHypothetical(InlineAbsoluteHypotheticalFragmentInfo),

    InlineBlock(InlineBlockFragmentInfo),

    /// An inline fragment that establishes an absolute containing block for its descendants (i.e.
    /// a positioned inline fragment).
    InlineAbsolute(InlineAbsoluteFragmentInfo),

    ScannedText(Box<ScannedTextFragmentInfo>),
    Table,
    TableCell,
    TableColumn(TableColumnFragmentInfo),
    TableRow,
    TableWrapper,
    UnscannedText(UnscannedTextFragmentInfo),
}

impl SpecificFragmentInfo {
    fn restyle_damage(&self) -> RestyleDamage {
        let flow = match *self {
            SpecificFragmentInfo::Canvas(_) |
            SpecificFragmentInfo::GeneratedContent(_) |
            SpecificFragmentInfo::Iframe(_) |
            SpecificFragmentInfo::Image(_) |
            SpecificFragmentInfo::ScannedText(_) |
            SpecificFragmentInfo::Table |
            SpecificFragmentInfo::TableCell |
            SpecificFragmentInfo::TableColumn(_) |
            SpecificFragmentInfo::TableRow |
            SpecificFragmentInfo::TableWrapper |
            SpecificFragmentInfo::UnscannedText(_) |
            SpecificFragmentInfo::Generic => return RestyleDamage::empty(),
            SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => &info.flow_ref,
            SpecificFragmentInfo::InlineAbsolute(ref info) => &info.flow_ref,
            SpecificFragmentInfo::InlineBlock(ref info) => &info.flow_ref,
        };

        flow::base(&**flow).restyle_damage
    }

    pub fn get_type(&self) -> &'static str {
        match *self {
            SpecificFragmentInfo::Canvas(_) => "SpecificFragmentInfo::Canvas",
            SpecificFragmentInfo::Generic => "SpecificFragmentInfo::Generic",
            SpecificFragmentInfo::GeneratedContent(_) => "SpecificFragmentInfo::GeneratedContent",
            SpecificFragmentInfo::Iframe(_) => "SpecificFragmentInfo::Iframe",
            SpecificFragmentInfo::Image(_) => "SpecificFragmentInfo::Image",
            SpecificFragmentInfo::InlineAbsolute(_) => "SpecificFragmentInfo::InlineAbsolute",
            SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {
                "SpecificFragmentInfo::InlineAbsoluteHypothetical"
            }
            SpecificFragmentInfo::InlineBlock(_) => "SpecificFragmentInfo::InlineBlock",
            SpecificFragmentInfo::ScannedText(_) => "SpecificFragmentInfo::ScannedText",
            SpecificFragmentInfo::Table => "SpecificFragmentInfo::Table",
            SpecificFragmentInfo::TableCell => "SpecificFragmentInfo::TableCell",
            SpecificFragmentInfo::TableColumn(_) => "SpecificFragmentInfo::TableColumn",
            SpecificFragmentInfo::TableRow => "SpecificFragmentInfo::TableRow",
            SpecificFragmentInfo::TableWrapper => "SpecificFragmentInfo::TableWrapper",
            SpecificFragmentInfo::UnscannedText(_) => "SpecificFragmentInfo::UnscannedText",
        }
    }
}

impl fmt::Debug for SpecificFragmentInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            SpecificFragmentInfo::ScannedText(ref info) => {
                write!(f,
                       "\"{}\"",
                       slice_chars(&*info.run.text,
                                   info.range.begin().get() as usize,
                                   info.range.end().get() as usize))
            }
            SpecificFragmentInfo::UnscannedText(ref info) => {
                write!(f, "\"{}\"", info.text)
            }
            _ => Ok(())
        }
    }
}

/// Clamp a value obtained from style_length, based on min / max lengths.
fn clamp_size(size: Au,
              min_size: LengthOrPercentage,
              max_size: LengthOrPercentageOrNone,
              container_size: Au)
              -> Au {
    let min_size = model::specified(min_size, container_size);
    let max_size = model::specified_or_none(max_size, container_size);
    max(min_size, match max_size {
        None => size,
        Some(max_size) => min(size, max_size),
    })
}

/// Information for generated content.
#[derive(Clone)]
pub enum GeneratedContentInfo {
    ListItem,
    ContentItem(ContentItem),
}

/// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was declared
/// with `display: inline;`.
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineAbsoluteHypotheticalFragmentInfo {
    pub flow_ref: FlowRef,
}

impl InlineAbsoluteHypotheticalFragmentInfo {
    pub fn new(flow_ref: FlowRef) -> InlineAbsoluteHypotheticalFragmentInfo {
        InlineAbsoluteHypotheticalFragmentInfo {
            flow_ref: flow_ref,
        }
    }
}

/// A fragment that represents an inline-block element.
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineBlockFragmentInfo {
    pub flow_ref: FlowRef,
}

impl InlineBlockFragmentInfo {
    pub fn new(flow_ref: FlowRef) -> InlineBlockFragmentInfo {
        InlineBlockFragmentInfo {
            flow_ref: flow_ref,
        }
    }
}

/// An inline fragment that establishes an absolute containing block for its descendants (i.e.
/// a positioned inline fragment).
///
/// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout
/// can clone it.
#[derive(Clone)]
pub struct InlineAbsoluteFragmentInfo {
    pub flow_ref: FlowRef,
}

impl InlineAbsoluteFragmentInfo {
    pub fn new(flow_ref: FlowRef) -> InlineAbsoluteFragmentInfo {
        InlineAbsoluteFragmentInfo {
            flow_ref: flow_ref,
        }
    }
}

#[derive(Clone)]
pub struct CanvasFragmentInfo {
    pub replaced_image_fragment_info: ReplacedImageFragmentInfo,
    pub renderer_id: Option<usize>,
    pub ipc_renderer: Option<Arc<Mutex<IpcSender<CanvasMsg>>>>,
}

impl CanvasFragmentInfo {
    pub fn new<'ln, N: ThreadSafeLayoutNode<'ln>>(node: &N, data: HTMLCanvasData)
                                                  -> CanvasFragmentInfo {
        CanvasFragmentInfo {
            replaced_image_fragment_info:
                ReplacedImageFragmentInfo::new(node,
                                               Some(Au::from_px(data.width as i32)),
                                               Some(Au::from_px(data.height as i32))),
            renderer_id: data.renderer_id,
            ipc_renderer: data.ipc_renderer
                              .map(|renderer| Arc::new(Mutex::new(renderer))),
        }
    }

    /// Returns the original inline-size of the canvas.
    pub fn canvas_inline_size(&self) -> Au {
        self.replaced_image_fragment_info.dom_inline_size.unwrap_or(Au(0))
    }

    /// Returns the original block-size of the canvas.
    pub fn canvas_block_size(&self) -> Au {
        self.replaced_image_fragment_info.dom_block_size.unwrap_or(Au(0))
    }
}

/// A fragment that represents a replaced content image and its accompanying borders, shadows, etc.
#[derive(Clone)]
pub struct ImageFragmentInfo {
    /// The image held within this fragment.
    pub replaced_image_fragment_info: ReplacedImageFragmentInfo,
    pub image: Option<Arc<Image>>,
}

impl ImageFragmentInfo {
    /// Creates a new image fragment from the given URL and local image cache.
    ///
    /// FIXME(pcwalton): The fact that image fragments store the cache in the fragment makes little
    /// sense to me.
    pub fn new<'ln, N: ThreadSafeLayoutNode<'ln>>(node: &N,
                                                  url: Option<Url>,
                                                  layout_context: &LayoutContext)
                                                  -> ImageFragmentInfo {
        fn convert_length<'ln, N>(node: &N, name: &Atom) -> Option<Au>
                where N: ThreadSafeLayoutNode<'ln> {
            let element = node.as_element();
            element.get_attr(&ns!(), name)
                   .and_then(|string| string.parse().ok())
                   .map(Au::from_px)
        }

        let image = url.and_then(|url| {
            layout_context.get_or_request_image(url, UsePlaceholder::Yes)
        });

        ImageFragmentInfo {
            replaced_image_fragment_info:
                ReplacedImageFragmentInfo::new(node,
                                               convert_length(node, &atom!("width")),
                                               convert_length(node, &atom!("height"))),
            image: image,
        }
    }

    /// Returns the original inline-size of the image.
    pub fn image_inline_size(&mut self) -> Au {
        match self.image {
            Some(ref image) => {
                Au::from_px(if self.replaced_image_fragment_info.writing_mode_is_vertical {
                    image.height
                } else {
                    image.width
                } as i32)
            }
            None => Au(0)
        }
    }

    /// Returns the original block-size of the image.
    pub fn image_block_size(&mut self) -> Au {
        match self.image {
            Some(ref image) => {
                Au::from_px(if self.replaced_image_fragment_info.writing_mode_is_vertical {
                    image.width
                } else {
                    image.height
                } as i32)
            }
            None => Au(0)
        }
    }

    /// Tile an image
    pub fn tile_image(position: &mut Au, size: &mut Au, virtual_position: Au, image_size: u32) {
        // Avoid division by zero below!
        let image_size = image_size as i32;
        if image_size == 0 {
            return
        }

        let delta_pixels = (virtual_position - *position).to_px();
        let tile_count = (delta_pixels + image_size - 1) / image_size;
        let offset = Au::from_px(image_size * tile_count);
        let new_position = virtual_position - offset;
        *size = *position - new_position + *size;
        *position = new_position;
    }
}

#[derive(Clone)]
pub struct ReplacedImageFragmentInfo {
    pub computed_inline_size: Option<Au>,
    pub computed_block_size: Option<Au>,
    pub dom_inline_size: Option<Au>,
    pub dom_block_size: Option<Au>,
    pub writing_mode_is_vertical: bool,
}

impl ReplacedImageFragmentInfo {
    pub fn new<'ln, N>(node: &N,
                       dom_width: Option<Au>,
                       dom_height: Option<Au>)
                       -> ReplacedImageFragmentInfo
            where N: ThreadSafeLayoutNode<'ln> {
        let is_vertical = node.style().writing_mode.is_vertical();
        ReplacedImageFragmentInfo {
            computed_inline_size: None,
            computed_block_size: None,
            dom_inline_size: if is_vertical {
                dom_height
            } else {
                dom_width
            },
            dom_block_size: if is_vertical {
                dom_width
            } else {
                dom_height
            },
            writing_mode_is_vertical: is_vertical,
        }
    }

    /// Returns the calculated inline-size of the image, accounting for the inline-size attribute.
    pub fn computed_inline_size(&self) -> Au {
        self.computed_inline_size.expect("image inline_size is not computed yet!")
    }

    /// Returns the calculated block-size of the image, accounting for the block-size attribute.
    pub fn computed_block_size(&self) -> Au {
        self.computed_block_size.expect("image block_size is not computed yet!")
    }

    // Return used value for inline-size or block-size.
    //
    // `dom_length`: inline-size or block-size as specified in the `img` tag.
    // `style_length`: inline-size as given in the CSS
    pub fn style_length(style_length: LengthOrPercentageOrAuto,
                        dom_length: Option<Au>,
                        container_size: Option<Au>)
                        -> MaybeAuto {
        match (style_length, dom_length, container_size) {
            (LengthOrPercentageOrAuto::Length(length), _, _) => MaybeAuto::Specified(length),
            (LengthOrPercentageOrAuto::Percentage(pc), _, Some(container_size)) => {
                MaybeAuto::Specified(container_size.scale_by(pc))
            }
            (LengthOrPercentageOrAuto::Percentage(_), _, None) => MaybeAuto::Auto,
            (LengthOrPercentageOrAuto::Calc(calc), _, Some(container_size)) => {
                MaybeAuto::Specified(calc.length() + container_size.scale_by(calc.percentage()))
            }
            (LengthOrPercentageOrAuto::Calc(_), _, None) => MaybeAuto::Auto,
            (LengthOrPercentageOrAuto::Auto, Some(dom_length), _) => {
                MaybeAuto::Specified(dom_length)
            }
            (LengthOrPercentageOrAuto::Auto, None, _) => MaybeAuto::Auto,
        }
    }

    pub fn calculate_replaced_inline_size(&mut self,
                                          style: &ComputedValues,
                                          noncontent_inline_size: Au,
                                          container_inline_size: Au,
                                          fragment_inline_size: Au,
                                          fragment_block_size: Au)
                                          -> Au {
        let style_inline_size = style.content_inline_size();
        let style_block_size = style.content_block_size();
        let style_min_inline_size = style.min_inline_size();
        let style_max_inline_size = style.max_inline_size();
        let style_min_block_size = style.min_block_size();
        let style_max_block_size = style.max_block_size();

        // TODO(ksh8281): compute border,margin
        let inline_size = ReplacedImageFragmentInfo::style_length(
            style_inline_size,
            self.dom_inline_size,
            Some(container_inline_size));

        let inline_size = match inline_size {
            MaybeAuto::Auto => {
                let intrinsic_width = fragment_inline_size;
                let intrinsic_height = fragment_block_size;
                if intrinsic_height == Au(0) {
                    intrinsic_width
                } else {
                    let ratio = intrinsic_width.to_f32_px() / intrinsic_height.to_f32_px();

                    let specified_height = ReplacedImageFragmentInfo::style_length(
                        style_block_size,
                        self.dom_block_size,
                        None);
                    let specified_height = match specified_height {
                        MaybeAuto::Auto => intrinsic_height,
                        MaybeAuto::Specified(h) => h,
                    };
                    let specified_height = clamp_size(specified_height,
                                                      style_min_block_size,
                                                      style_max_block_size,
                                                      Au(0));
                    Au::from_f32_px(specified_height.to_f32_px() * ratio)
                }
            },
            MaybeAuto::Specified(w) => w,
        };

        let inline_size = clamp_size(inline_size,
                                     style_min_inline_size,
                                     style_max_inline_size,
                                     container_inline_size);

        self.computed_inline_size = Some(inline_size);
        inline_size + noncontent_inline_size
    }

    pub fn calculate_replaced_block_size(&mut self,
                                         style: &ComputedValues,
                                         noncontent_block_size: Au,
                                         containing_block_block_size: Option<Au>,
                                         fragment_inline_size: Au,
                                         fragment_block_size: Au)
                                         -> Au {
        // TODO(ksh8281): compute border,margin,padding
        let style_block_size = style.content_block_size();
        let style_min_block_size = style.min_block_size();
        let style_max_block_size = style.max_block_size();

        let inline_size = self.computed_inline_size();
        let block_size = ReplacedImageFragmentInfo::style_length(
            style_block_size,
            self.dom_block_size,
            containing_block_block_size);

        let block_size = match block_size {
            MaybeAuto::Auto => {
                let intrinsic_width = fragment_inline_size;
                let intrinsic_height = fragment_block_size;
                let scale = intrinsic_width.to_f32_px() / inline_size.to_f32_px();
                Au::from_f32_px(intrinsic_height.to_f32_px() / scale)
            },
            MaybeAuto::Specified(h) => {
                h
            }
        };

        let block_size = clamp_size(block_size,
                                    style_min_block_size,
                                    style_max_block_size,
                                    Au(0));

        self.computed_block_size = Some(block_size);
        block_size + noncontent_block_size
    }
}

/// A fragment that represents an inline frame (iframe). This stores the pipeline ID so that the
/// size of this iframe can be communicated via the constellation to the iframe's own layout task.
#[derive(Clone)]
pub struct IframeFragmentInfo {
    /// The pipeline ID of this iframe.
    pub pipeline_id: PipelineId,
}

impl IframeFragmentInfo {
    /// Creates the information specific to an iframe fragment.
    pub fn new<'ln, N: ThreadSafeLayoutNode<'ln>>(node: &N) -> IframeFragmentInfo {
        let pipeline_id = node.iframe_pipeline_id();
        IframeFragmentInfo {
            pipeline_id: pipeline_id,
        }
    }

    #[inline]
    pub fn calculate_replaced_inline_size(&self, style: &ComputedValues, containing_size: Au)
                                          -> Au {
        // Calculate the replaced inline size (or default) as per CSS 2.1 § 10.3.2
        IframeFragmentInfo::calculate_replaced_size(style.content_inline_size(),
                                                    style.min_inline_size(),
                                                    style.max_inline_size(),
                                                    Some(containing_size),
                                                    Au::from_px(300))
    }

    #[inline]
    pub fn calculate_replaced_block_size(&self, style: &ComputedValues, containing_size: Option<Au>)
                                         -> Au {
        // Calculate the replaced block size (or default) as per CSS 2.1 § 10.3.2
        IframeFragmentInfo::calculate_replaced_size(style.content_block_size(),
                                                    style.min_block_size(),
                                                    style.max_block_size(),
                                                    containing_size,
                                                    Au::from_px(150))
    }

    fn calculate_replaced_size(content_size: LengthOrPercentageOrAuto,
                               style_min_size: LengthOrPercentage,
                               style_max_size: LengthOrPercentageOrNone,
                               containing_size: Option<Au>,
                               default_size: Au) -> Au {
        let computed_size = match (content_size, containing_size) {
            (LengthOrPercentageOrAuto::Length(length), _) => length,
            (LengthOrPercentageOrAuto::Percentage(pc), Some(container_size)) => {
                container_size.scale_by(pc)
            }
            (LengthOrPercentageOrAuto::Calc(calc), Some(container_size)) => {
                container_size.scale_by(calc.percentage()) + calc.length()
            },
            (LengthOrPercentageOrAuto::Calc(calc), None) => calc.length(),
            (LengthOrPercentageOrAuto::Percentage(_), None) => default_size,
            (LengthOrPercentageOrAuto::Auto, _) => default_size,
        };

        let containing_size = containing_size.unwrap_or(Au(0));
        clamp_size(computed_size,
                   style_min_size,
                   style_max_size,
                   containing_size)
    }
}

/// A scanned text fragment represents a single run of text with a distinct style. A `TextFragment`
/// may be split into two or more fragments across line breaks. Several `TextFragment`s may
/// correspond to a single DOM text node. Split text fragments are implemented by referring to
/// subsets of a single `TextRun` object.
#[derive(Clone)]
pub struct ScannedTextFragmentInfo {
    /// The text run that this represents.
    pub run: Arc<TextRun>,

    /// The intrinsic size of the text fragment.
    pub content_size: LogicalSize<Au>,

    /// The position of the insertion point in characters, if any.
    ///
    /// TODO(pcwalton): Make this a range.
    pub insertion_point: Option<CharIndex>,

    /// The range within the above text run that this represents.
    pub range: Range<CharIndex>,

    /// The endpoint of the above range, including whitespace that was stripped out. This exists
    /// so that we can restore the range to its original value (before line breaking occurred) when
    /// performing incremental reflow.
    pub range_end_including_stripped_whitespace: CharIndex,

    /// Whether a line break is required after this fragment if wrapping on newlines (e.g. if
    /// `white-space: pre` is in effect).
    pub requires_line_break_afterward_if_wrapping_on_newlines: bool,
}

impl ScannedTextFragmentInfo {
    /// Creates the information specific to a scanned text fragment from a range and a text run.
    pub fn new(run: Arc<TextRun>,
               range: Range<CharIndex>,
               content_size: LogicalSize<Au>,
               insertion_point: &Option<CharIndex>,
               requires_line_break_afterward_if_wrapping_on_newlines: bool)
               -> ScannedTextFragmentInfo {
        ScannedTextFragmentInfo {
            run: run,
            range: range,
            insertion_point: *insertion_point,
            content_size: content_size,
            range_end_including_stripped_whitespace: range.end(),
            requires_line_break_afterward_if_wrapping_on_newlines:
                requires_line_break_afterward_if_wrapping_on_newlines,
        }
    }
}

/// Describes how to split a fragment. This is used during line breaking as part of the return
/// value of `find_split_info_for_inline_size()`.
#[derive(Debug, Clone)]
pub struct SplitInfo {
    // TODO(bjz): this should only need to be a single character index, but both values are
    // currently needed for splitting in the `inline::try_append_*` functions.
    pub range: Range<CharIndex>,
    pub inline_size: Au,
}

impl SplitInfo {
    fn new(range: Range<CharIndex>, info: &ScannedTextFragmentInfo) -> SplitInfo {
        let inline_size = info.run.advance_for_range(&range);
        SplitInfo {
            range: range,
            inline_size: inline_size,
        }
    }
}

/// Describes how to split a fragment into two. This contains up to two `SplitInfo`s.
pub struct SplitResult {
    /// The part of the fragment that goes on the first line.
    pub inline_start: Option<SplitInfo>,
    /// The part of the fragment that goes on the second line.
    pub inline_end: Option<SplitInfo>,
    /// The text run which is being split.
    pub text_run: Arc<TextRun>,
}

/// Describes how a fragment should be truncated.
pub struct TruncationResult {
    /// The part of the fragment remaining after truncation.
    pub split: SplitInfo,
    /// The text run which is being truncated.
    pub text_run: Arc<TextRun>,
}

/// Data for an unscanned text fragment. Unscanned text fragments are the results of flow
/// construction that have not yet had their inline-size determined.
#[derive(Clone)]
pub struct UnscannedTextFragmentInfo {
    /// The text inside the fragment.
    pub text: Box<str>,

    /// The position of the insertion point, if any.
    ///
    /// TODO(pcwalton): Make this a range.
    pub insertion_point: Option<CharIndex>,
}

impl UnscannedTextFragmentInfo {
    /// Creates a new instance of `UnscannedTextFragmentInfo` from the given text.
    #[inline]
    pub fn new(text: String, insertion_point: Option<CharIndex>) -> UnscannedTextFragmentInfo {
        UnscannedTextFragmentInfo {
            text: text.into_boxed_str(),
            insertion_point: insertion_point,
        }
    }
}

/// A fragment that represents a table column.
#[derive(Copy, Clone)]
pub struct TableColumnFragmentInfo {
    /// the number of columns a <col> element should span
    pub span: u32,
}

impl TableColumnFragmentInfo {
    /// Create the information specific to a table column fragment.
    pub fn new<'ln, N: ThreadSafeLayoutNode<'ln>>(node: &N) -> TableColumnFragmentInfo {
        let element = node.as_element();
        let span = element.get_attr(&ns!(), &atom!("span"))
                          .and_then(|string| string.parse().ok())
                          .unwrap_or(0);
        TableColumnFragmentInfo {
            span: span,
        }
    }
}

impl Fragment {
    /// Constructs a new `Fragment` instance.
pub fn new<'ln, N: ThreadSafeLayoutNode<'ln>>(node: &N, specific: SpecificFragmentInfo) -> Fragment { let style = node.style().clone(); let writing_mode = style.writing_mode; let mut restyle_damage = node.restyle_damage(); restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: node.opaque(), style: style, restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: node.get_pseudo_element_type().strip(), flags: FragmentFlags::empty(), debug_id: layout_debug::generate_unique_debug_id(), } } /// Constructs a new `Fragment` instance from an opaque node. pub fn from_opaque_node_and_style(node: OpaqueNode, pseudo: PseudoElementType<()>, style: Arc<ComputedValues>, mut restyle_damage: RestyleDamage, specific: SpecificFragmentInfo) -> Fragment { let writing_mode = style.writing_mode; restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: node, style: style, restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: pseudo, flags: FragmentFlags::empty(), debug_id: layout_debug::generate_unique_debug_id(), } } /// Returns a debug ID of this fragment. This ID should not be considered stable across /// multiple layouts or fragment manipulations. pub fn debug_id(&self) -> u16 { self.debug_id } /// Transforms this fragment into another fragment of the given type, with the given size, /// preserving all the other data. pub fn transform(&self, size: LogicalSize<Au>, info: SpecificFragmentInfo) -> Fragment { let new_border_box = LogicalRect::from_point_size(self.style.writing_mode, self.border_box.start, size); let mut restyle_damage = incremental::rebuild_and_reflow(); restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: self.node, style: self.style.clone(), restyle_damage: restyle_damage, border_box: new_border_box, border_padding: self.border_padding, margin: self.margin, specific: info, inline_context: self.inline_context.clone(), pseudo: self.pseudo.clone(), flags: FragmentFlags::empty(), debug_id: self.debug_id, } } /// Transforms this fragment using the given `SplitInfo`, preserving all the other data. pub fn transform_with_split_info(&self, split: &SplitInfo, text_run: Arc<TextRun>) -> Fragment { let size = LogicalSize::new(self.style.writing_mode, split.inline_size, self.border_box.size.block); let requires_line_break_afterward_if_wrapping_on_newlines = self.requires_line_break_afterward_if_wrapping_on_newlines(); // FIXME(pcwalton): This should modify the insertion point as necessary. let info = box ScannedTextFragmentInfo::new( text_run, split.range, size, &None, requires_line_break_afterward_if_wrapping_on_newlines); self.transform(size, SpecificFragmentInfo::ScannedText(info)) } /// Transforms this fragment into an ellipsis fragment, preserving all the other data. 
pub fn transform_into_ellipsis(&self, layout_context: &LayoutContext) -> Fragment { let mut unscanned_ellipsis_fragments = LinkedList::new(); unscanned_ellipsis_fragments.push_back(self.transform( self.border_box.size, SpecificFragmentInfo::UnscannedText(UnscannedTextFragmentInfo::new("…".to_owned(), None)))); let ellipsis_fragments = TextRunScanner::new().scan_for_runs(&mut layout_context.font_context(), unscanned_ellipsis_fragments); debug_assert!(ellipsis_fragments.len() == 1); ellipsis_fragments.fragments.into_iter().next().unwrap() } pub fn restyle_damage(&self) -> RestyleDamage { self.restyle_damage | self.specific.restyle_damage() } pub fn contains_node(&self, node_address: OpaqueNode) -> bool { node_address == self.node || self.inline_context.as_ref().map_or(false, |ctx| { ctx.contains_node(node_address) }) } /// Adds a style to the inline context for this fragment. If the inline context doesn't exist /// yet, it will be created. pub fn add_inline_context_style(&mut self, node_info: InlineFragmentNodeInfo) { if self.inline_context.is_none() { self.inline_context = Some(InlineFragmentContext::new()); } self.inline_context.as_mut().unwrap().nodes.push(node_info); } /// Determines which quantities (border/padding/margin/specified) should be included in the /// intrinsic inline size of this fragment. fn quantities_included_in_intrinsic_inline_size(&self) -> QuantitiesIncludedInIntrinsicInlineSizes { match self.specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineAbsolute(_) => { QuantitiesIncludedInIntrinsicInlineSizes::all() } SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_PADDING | INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableWrapper => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS | INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableRow => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::UnscannedText(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) => { QuantitiesIncludedInIntrinsicInlineSizes::empty() } } } /// Returns the portion of the intrinsic inline-size that consists of borders, padding, and/or /// margins. /// /// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK? pub fn surrounding_intrinsic_inline_size(&self) -> Au { let flags = self.quantities_included_in_intrinsic_inline_size(); let style = self.style(); // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING. // This will likely need to be done by pushing down definite sizes during selector // cascading. 
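        // A minimal illustrative sketch of the arithmetic below (hypothetical numbers,
        // not taken from this file): for a fragment whose style resolves to
        // `margin: 8px`, `padding: 4px` and `border-width: 1px`, with all three
        // INTRINSIC_INLINE_SIZE_INCLUDES_* flags set, the surrounding contribution is:
        //
        //     let margin = Au::from_px(8 + 8);    // inline-start + inline-end
        //     let padding = Au::from_px(4 + 4);
        //     let border = Au::from_px(1 + 1);
        //     assert_eq!(margin + padding + border, Au::from_px(26));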
let margin = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS) { let margin = style.logical_margin(); (MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() + MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero()) } else { Au(0) }; // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING. // This will likely need to be done by pushing down definite sizes during selector // cascading. let padding = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_PADDING) { let padding = style.logical_padding(); (model::specified(padding.inline_start, Au(0)) + model::specified(padding.inline_end, Au(0))) } else { Au(0) }; let border = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_BORDER) { self.border_width().inline_start_end() } else { Au(0) }; margin + padding + border } /// Uses the style only to estimate the intrinsic inline-sizes. These may be modified for text /// or replaced elements. fn style_specified_intrinsic_inline_size(&self) -> IntrinsicISizesContribution { let flags = self.quantities_included_in_intrinsic_inline_size(); let style = self.style(); let specified = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED) { max(model::specified(style.min_inline_size(), Au(0)), MaybeAuto::from_style(style.content_inline_size(), Au(0)).specified_or_zero()) } else { Au(0) }; // FIXME(#2261, pcwalton): This won't work well for inlines: is this OK? let surrounding_inline_size = self.surrounding_intrinsic_inline_size(); IntrinsicISizesContribution { content_intrinsic_sizes: IntrinsicISizes { minimum_inline_size: specified, preferred_inline_size: specified, }, surrounding_size: surrounding_inline_size, } } pub fn calculate_line_height(&self, layout_context: &LayoutContext) -> Au { let font_style = self.style.get_font_arc(); let font_metrics = text::font_metrics_for_style(&mut layout_context.font_context(), font_style); text::line_height_from_style(&*self.style, &font_metrics) } /// Returns the sum of the inline-sizes of all the borders of this fragment. Note that this /// can be expensive to compute, so if possible use the `border_padding` field instead. #[inline] fn border_width(&self) -> LogicalMargin<Au> { let style_border_width = match self.specific { SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::InlineBlock(_) => LogicalMargin::zero(self.style.writing_mode), _ => self.style().logical_border_width(), }; match self.inline_context { None => style_border_width, Some(ref inline_fragment_context) => { inline_fragment_context.nodes.iter().fold(style_border_width, |accumulator, node| { let mut this_border_width = node.style.logical_border_width(); if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_start = Au(0) } if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_end = Au(0) } accumulator + this_border_width }) } } } /// Computes the margins in the inline direction from the containing block inline-size and the /// style. After this call, the inline direction of the `margin` field will be correct. /// /// Do not use this method if the inline direction margins are to be computed some other way /// (for example, via constraint solving for blocks). 
pub fn compute_inline_direction_margins(&mut self, containing_block_inline_size: Au) { match self.specific { SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableColumn(_) => { self.margin.inline_start = Au(0); self.margin.inline_end = Au(0); return } SpecificFragmentInfo::InlineBlock(_) => { // Inline-blocks do not take self margins into account but do account for margins // from outer inline contexts. self.margin.inline_start = Au(0); self.margin.inline_end = Au(0); } _ => { let margin = self.style().logical_margin(); self.margin.inline_start = MaybeAuto::from_style(margin.inline_start, containing_block_inline_size).specified_or_zero(); self.margin.inline_end = MaybeAuto::from_style(margin.inline_end, containing_block_inline_size).specified_or_zero(); } } if let Some(ref inline_context) = self.inline_context { for node in &inline_context.nodes { let margin = node.style.logical_margin(); let this_inline_start_margin = if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { Au(0) } else { MaybeAuto::from_style(margin.inline_start, containing_block_inline_size).specified_or_zero() }; let this_inline_end_margin = if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { Au(0) } else { MaybeAuto::from_style(margin.inline_end, containing_block_inline_size).specified_or_zero() }; self.margin.inline_start = self.margin.inline_start + this_inline_start_margin; self.margin.inline_end = self.margin.inline_end + this_inline_end_margin; } } } /// Computes the margins in the block direction from the containing block inline-size and the /// style. After this call, the block direction of the `margin` field will be correct. /// /// Do not use this method if the block direction margins are to be computed some other way /// (for example, via constraint solving for absolutely-positioned flows). pub fn compute_block_direction_margins(&mut self, containing_block_inline_size: Au) { match self.specific { SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableColumn(_) => { self.margin.block_start = Au(0); self.margin.block_end = Au(0) } _ => { // NB: Percentages are relative to containing block inline-size (not block-size) // per CSS 2.1. let margin = self.style().logical_margin(); self.margin.block_start = MaybeAuto::from_style(margin.block_start, containing_block_inline_size) .specified_or_zero(); self.margin.block_end = MaybeAuto::from_style(margin.block_end, containing_block_inline_size) .specified_or_zero(); } } } /// Computes the border and padding in both inline and block directions from the containing /// block inline-size and the style. After this call, the `border_padding` field will be /// correct. /// /// TODO(pcwalton): Remove `border_collapse`; we can figure it out from our style and specific /// fragment info. pub fn compute_border_and_padding(&mut self, containing_block_inline_size: Au, border_collapse: border_collapse::T) { // Compute border. let border = match border_collapse { border_collapse::T::separate => self.border_width(), border_collapse::T::collapse => LogicalMargin::zero(self.style.writing_mode), }; // Compute padding from the fragment's style. // // This is zero in the case of `inline-block` because that padding is applied to the // wrapped block, not the fragment. 
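        // Illustrative note (hypothetical numbers): `model::padding_from_style` resolves
        // percentage padding against the containing block's *inline* size on all four
        // sides, per CSS 2.1 § 8.4. So with a 500px containing block, `padding: 10%`
        // would resolve to roughly:
        //
        //     // LogicalMargin { inline_start: 50px, inline_end: 50px,
        //     //                 block_start: 50px, block_end: 50px }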
        let padding_from_style = match self.specific {
            SpecificFragmentInfo::TableColumn(_) |
            SpecificFragmentInfo::TableRow |
            SpecificFragmentInfo::TableWrapper |
            SpecificFragmentInfo::InlineBlock(_) => LogicalMargin::zero(self.style.writing_mode),
            _ => model::padding_from_style(self.style(), containing_block_inline_size),
        };

        // Compute padding from the inline fragment context.
        let padding_from_inline_fragment_context = match (&self.specific, &self.inline_context) {
            (_, &None) |
            (&SpecificFragmentInfo::TableColumn(_), _) |
            (&SpecificFragmentInfo::TableRow, _) |
            (&SpecificFragmentInfo::TableWrapper, _) => {
                LogicalMargin::zero(self.style.writing_mode)
            }
            (_, &Some(ref inline_fragment_context)) => {
                let zero_padding = LogicalMargin::zero(self.style.writing_mode);
                inline_fragment_context.nodes.iter().fold(zero_padding, |accumulator, node| {
                    let mut padding = model::padding_from_style(&*node.style, Au(0));
                    if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) {
                        padding.inline_start = Au(0)
                    }
                    if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
                        padding.inline_end = Au(0)
                    }
                    accumulator + padding
                })
            }
        };

        self.border_padding = border + padding_from_style + padding_from_inline_fragment_context
    }

    // Return offset from original position because of `position: relative`.
    pub fn relative_position(&self, containing_block_size: &LogicalSize<Au>) -> LogicalSize<Au> {
        fn from_style(style: &ComputedValues, container_size: &LogicalSize<Au>)
                      -> LogicalSize<Au> {
            let offsets = style.logical_position();
            let offset_i = if offsets.inline_start != LengthOrPercentageOrAuto::Auto {
                MaybeAuto::from_style(offsets.inline_start,
                                      container_size.inline).specified_or_zero()
            } else {
                -MaybeAuto::from_style(offsets.inline_end,
                                       container_size.inline).specified_or_zero()
            };
            // NB: Block-direction offsets (`top`/`bottom` in horizontal writing modes)
            // resolve percentages against the containing block's *block* size per
            // CSS 2.1 § 9.3.2.
            let offset_b = if offsets.block_start != LengthOrPercentageOrAuto::Auto {
                MaybeAuto::from_style(offsets.block_start,
                                      container_size.block).specified_or_zero()
            } else {
                -MaybeAuto::from_style(offsets.block_end,
                                       container_size.block).specified_or_zero()
            };
            LogicalSize::new(style.writing_mode, offset_i, offset_b)
        }

        // Go over the ancestor fragments and add all relative offsets (if any).
        let mut rel_pos = if self.style().get_box().position == position::T::relative {
            from_style(self.style(), containing_block_size)
        } else {
            LogicalSize::zero(self.style.writing_mode)
        };

        if let Some(ref inline_fragment_context) = self.inline_context {
            for node in &inline_fragment_context.nodes {
                if node.style.get_box().position == position::T::relative {
                    rel_pos = rel_pos + from_style(&*node.style, containing_block_size);
                }
            }
        }

        rel_pos
    }

    /// Always inline for SCCP.
    ///
    /// FIXME(pcwalton): Just replace with the clear type from the style module for speed?
    #[inline(always)]
    pub fn clear(&self) -> Option<ClearType> {
        let style = self.style();
        match style.get_box().clear {
            clear::T::none => None,
            clear::T::left => Some(ClearType::Left),
            clear::T::right => Some(ClearType::Right),
            clear::T::both => Some(ClearType::Both),
        }
    }

    #[inline(always)]
    pub fn style(&self) -> &ComputedValues {
        &*self.style
    }

    pub fn white_space(&self) -> white_space::T {
        self.style().get_inheritedtext().white_space
    }

    /// Returns the text decoration of this fragment, according to the style of the nearest
    /// ancestor element.
    ///
    /// NB: This may not be the actual text decoration, because of the override rules specified in
    /// CSS 2.1 § 16.3.1. Unfortunately, computing this properly doesn't really fit into Servo's
    /// model.
Therefore, this is a best lower bound approximation, but the end result may actually /// have the various decoration flags turned on afterward. pub fn text_decoration(&self) -> text_decoration::T { self.style().get_text().text_decoration } /// Returns the inline-start offset from margin edge to content edge. /// /// FIXME(#2262, pcwalton): I think this method is pretty bogus, because it won't work for /// inlines. pub fn inline_start_offset(&self) -> Au { match self.specific { SpecificFragmentInfo::TableWrapper => self.margin.inline_start, SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow => self.border_padding.inline_start, SpecificFragmentInfo::TableColumn(_) => Au(0), _ => self.margin.inline_start + self.border_padding.inline_start, } } /// Returns true if this element can be split. This is true for text fragments, unless /// `white-space: pre` or `white-space: nowrap` is set. pub fn can_split(&self) -> bool { self.is_scanned_text_fragment() && self.white_space().allow_wrap() } /// Returns true if and only if this fragment is a generated content fragment. pub fn is_generated_content(&self) -> bool { match self.specific { SpecificFragmentInfo::GeneratedContent(..) => true, _ => false, } } /// Returns true if and only if this is a scanned text fragment. pub fn is_scanned_text_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::ScannedText(..) => true, _ => false, } } /// Computes the intrinsic inline-sizes of this fragment. pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution { let mut result = self.style_specified_intrinsic_inline_size(); match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {} SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::Image(ref mut image_fragment_info) => { // FIXME(pcwalton): Shouldn't `width` and `height` be preshints? let image_inline_size = match (image_fragment_info.replaced_image_fragment_info .dom_inline_size, self.style.content_inline_size()) { (None, LengthOrPercentageOrAuto::Auto) | (None, LengthOrPercentageOrAuto::Percentage(_)) => { image_fragment_info.image_inline_size() } (Some(dom_inline_size), _) => dom_inline_size, (None, LengthOrPercentageOrAuto::Length(length)) => length, (None, LengthOrPercentageOrAuto::Calc(calc)) => calc.length(), }; result.union_block(&IntrinsicISizes { minimum_inline_size: image_inline_size, preferred_inline_size: image_inline_size, }); } SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => { let canvas_inline_size = canvas_fragment_info.canvas_inline_size(); result.union_block(&IntrinsicISizes { minimum_inline_size: canvas_inline_size, preferred_inline_size: canvas_inline_size, }) } SpecificFragmentInfo::ScannedText(ref text_fragment_info) => { let range = &text_fragment_info.range; // See http://dev.w3.org/csswg/css-sizing/#max-content-inline-size. // TODO: Account for soft wrap opportunities. 
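                // Worked example (hypothetical metrics, not from this file): for a run
                // "hello world" where "hello" advances 50px, the space 10px and "world"
                // 55px, the max-content inline size is 115px (the whole run on one line)
                // and the min-content inline size is 55px (the widest unbreakable
                // piece). When wrapping is disallowed (e.g. `white-space: nowrap`), the
                // two collapse to the same 115px value, which is what the fallback
                // below implements.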
let max_line_inline_size = text_fragment_info.run .metrics_for_range(range) .advance_width; let min_line_inline_size = if self.white_space().allow_wrap() { text_fragment_info.run.min_width_for_range(range) } else { max_line_inline_size }; result.union_block(&IntrinsicISizes { minimum_inline_size: min_line_inline_size, preferred_inline_size: max_line_inline_size, }) } SpecificFragmentInfo::UnscannedText(..) => { panic!("Unscanned text fragments should have been scanned by now!") } }; // Take borders and padding for parent inline fragments into account, if necessary. if self.is_primary_fragment() { if let Some(ref context) = self.inline_context { for node in &context.nodes { let mut border_width = node.style.logical_border_width(); let mut padding = model::padding_from_style(&*node.style, Au(0)); let mut margin = model::specified_margin_from_style(&*node.style); if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { border_width.inline_start = Au(0); padding.inline_start = Au(0); margin.inline_start = Au(0); } if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { border_width.inline_end = Au(0); padding.inline_end = Au(0); margin.inline_end = Au(0); } result.surrounding_size = result.surrounding_size + border_width.inline_start_end() + padding.inline_start_end() + margin.inline_start_end(); } } } result } /// TODO: What exactly does this function return? Why is it Au(0) for /// `SpecificFragmentInfo::Generic`? pub fn content_inline_size(&self) -> Au { match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) => Au(0), SpecificFragmentInfo::Canvas(ref canvas_fragment_info) => { canvas_fragment_info.replaced_image_fragment_info.computed_inline_size() } SpecificFragmentInfo::Image(ref image_fragment_info) => { image_fragment_info.replaced_image_fragment_info.computed_inline_size() } SpecificFragmentInfo::ScannedText(ref text_fragment_info) => { let (range, run) = (&text_fragment_info.range, &text_fragment_info.run); let text_bounds = run.metrics_for_range(range).bounding_box; text_bounds.size.width } SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have inline_size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } } } /// Returns the dimensions of the content box. /// /// This is marked `#[inline]` because it is frequently called when only one or two of the /// values are needed and that will save computation. #[inline] pub fn content_box(&self) -> LogicalRect<Au> { self.border_box - self.border_padding } /// Attempts to find the split positions of a text fragment so that its inline-size is no more /// than `max_inline_size`. /// /// A return value of `None` indicates that the fragment could not be split. Otherwise the /// information pertaining to the split is returned. The inline-start and inline-end split /// information are both optional due to the possibility of them being whitespace. 
pub fn calculate_split_position(&self, max_inline_size: Au, starts_line: bool) -> Option<SplitResult> { let text_fragment_info = if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific { text_fragment_info } else { return None }; let mut flags = SplitOptions::empty(); if starts_line { flags.insert(STARTS_LINE); if self.style().get_inheritedtext().overflow_wrap == overflow_wrap::T::break_word { flags.insert(RETRY_AT_CHARACTER_BOUNDARIES) } } match self.style().get_inheritedtext().word_break { word_break::T::normal => { // Break at normal word boundaries. let natural_word_breaking_strategy = text_fragment_info.run.natural_word_slices_in_range(&text_fragment_info.range); self.calculate_split_position_using_breaking_strategy( natural_word_breaking_strategy, max_inline_size, flags) } word_break::T::break_all => { // Break at character boundaries. let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); flags.remove(RETRY_AT_CHARACTER_BOUNDARIES); self.calculate_split_position_using_breaking_strategy( character_breaking_strategy, max_inline_size, flags) } } } /// Truncates this fragment to the given `max_inline_size`, using a character-based breaking /// strategy. If no characters could fit, returns `None`. pub fn truncate_to_inline_size(&self, max_inline_size: Au) -> Option<TruncationResult> { let text_fragment_info = if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific { text_fragment_info } else { return None }; let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); match self.calculate_split_position_using_breaking_strategy(character_breaking_strategy, max_inline_size, SplitOptions::empty()) { None => None, Some(split_info) => { match split_info.inline_start { None => None, Some(split) => { Some(TruncationResult { split: split, text_run: split_info.text_run.clone(), }) } } } } } /// A helper method that uses the breaking strategy described by `slice_iterator` (at present, /// either natural word breaking or character breaking) to split this fragment. fn calculate_split_position_using_breaking_strategy<'a, I>( &self, slice_iterator: I, max_inline_size: Au, flags: SplitOptions) -> Option<SplitResult> where I: Iterator<Item=TextRunSlice<'a>> { let text_fragment_info = if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific { text_fragment_info } else { return None }; let mut remaining_inline_size = max_inline_size; let mut inline_start_range = Range::new(text_fragment_info.range.begin(), CharIndex(0)); let mut inline_end_range = None; let mut overflowing = false; debug!("calculate_split_position_using_breaking_strategy: splitting text fragment \ (strlen={}, range={:?}, max_inline_size={:?})", text_fragment_info.run.text.len(), text_fragment_info.range, max_inline_size); for slice in slice_iterator { debug!("calculate_split_position_using_breaking_strategy: considering slice \ (offset={:?}, slice range={:?}, remaining_inline_size={:?})", slice.offset, slice.range, remaining_inline_size); // Use the `remaining_inline_size` to find a split point if possible. If not, go around // the loop again with the next slice. let metrics = text_fragment_info.run.metrics_for_slice(slice.glyphs, &slice.range); let advance = metrics.advance_width; // Have we found the split point? if advance <= remaining_inline_size || slice.glyphs.is_whitespace() { // Keep going; we haven't found the split point yet. 
debug!("calculate_split_position_using_breaking_strategy: enlarging span"); remaining_inline_size = remaining_inline_size - advance; inline_start_range.extend_by(slice.range.length()); continue } // The advance is more than the remaining inline-size, so split here. First, check to // see if we're going to overflow the line. If so, perform a best-effort split. let mut remaining_range = slice.text_run_range(); let split_is_empty = inline_start_range.is_empty() && !self.requires_line_break_afterward_if_wrapping_on_newlines(); if split_is_empty { // We're going to overflow the line. overflowing = true; inline_start_range = slice.text_run_range(); remaining_range = Range::new(slice.text_run_range().end(), CharIndex(0)); remaining_range.extend_to(text_fragment_info.range.end()); } // Check to see if we need to create an inline-end chunk. let slice_begin = remaining_range.begin(); if slice_begin < text_fragment_info.range.end() { // There still some things left over at the end of the line, so create the // inline-end chunk. let mut inline_end = remaining_range; inline_end.extend_to(text_fragment_info.range.end()); inline_end_range = Some(inline_end); debug!("calculate_split_position: splitting remainder with inline-end range={:?}", inline_end); } // If we failed to find a suitable split point, we're on the verge of overflowing the // line. if split_is_empty || overflowing { // If we've been instructed to retry at character boundaries (probably via // `overflow-wrap: break-word`), do so. if flags.contains(RETRY_AT_CHARACTER_BOUNDARIES) { let character_breaking_strategy = text_fragment_info.run .character_slices_in_range(&text_fragment_info.range); let mut flags = flags; flags.remove(RETRY_AT_CHARACTER_BOUNDARIES); return self.calculate_split_position_using_breaking_strategy( character_breaking_strategy, max_inline_size, flags) } // We aren't at the start of the line, so don't overflow. Let inline layout wrap to // the next line instead. if !flags.contains(STARTS_LINE) { return None } } break } let split_is_empty = inline_start_range.is_empty() && !self.requires_line_break_afterward_if_wrapping_on_newlines(); let inline_start = if !split_is_empty { Some(SplitInfo::new(inline_start_range, &**text_fragment_info)) } else { None }; let inline_end = inline_end_range.map(|inline_end_range| { SplitInfo::new(inline_end_range, &**text_fragment_info) }); Some(SplitResult { inline_start: inline_start, inline_end: inline_end, text_run: text_fragment_info.run.clone(), }) } /// The opposite of `calculate_split_position_using_breaking_strategy`: merges this fragment /// with the next one. 
pub fn merge_with(&mut self, next_fragment: Fragment) { match (&mut self.specific, &next_fragment.specific) { (&mut SpecificFragmentInfo::ScannedText(ref mut this_info), &SpecificFragmentInfo::ScannedText(ref other_info)) => { debug_assert!(util::arc_ptr_eq(&this_info.run, &other_info.run)); this_info.range.extend_to(other_info.range_end_including_stripped_whitespace); this_info.content_size.inline = this_info.run.metrics_for_range(&this_info.range).advance_width; this_info.requires_line_break_afterward_if_wrapping_on_newlines = this_info.requires_line_break_afterward_if_wrapping_on_newlines || other_info.requires_line_break_afterward_if_wrapping_on_newlines; self.border_padding.inline_end = next_fragment.border_padding.inline_end; self.border_box.size.inline = this_info.content_size.inline + self.border_padding.inline_start_end(); } _ => panic!("Can only merge two scanned-text fragments!"), } self.meld_with_next_inline_fragment(&next_fragment); } /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced /// content per CSS 2.1 § 10.3.2. pub fn assign_replaced_inline_size_if_necessary(&mut self, container_inline_size: Au) { match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have inline size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) => {} }; let style = &*self.style; let noncontent_inline_size = self.border_padding.inline_start_end(); match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block(); block_flow.base.position.size.inline = block_flow.base.intrinsic_inline_sizes.preferred_inline_size; // This is a hypothetical box, so it takes up no space. self.border_box.size.inline = Au(0); } SpecificFragmentInfo::InlineBlock(ref mut info) => { let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } SpecificFragmentInfo::ScannedText(ref info) => { // Scanned text fragments will have already had their content inline-sizes assigned // by this point. 
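                // For example (hypothetical numbers): a run measuring 120px of content
                // with 5px of border+padding on each inline side yields a 130px inline
                // border-box size here.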
self.border_box.size.inline = info.content_size.inline + noncontent_inline_size } SpecificFragmentInfo::Image(ref mut image_fragment_info) => { let fragment_inline_size = image_fragment_info.image_inline_size(); let fragment_block_size = image_fragment_info.image_block_size(); self.border_box.size.inline = image_fragment_info.replaced_image_fragment_info .calculate_replaced_inline_size(style, noncontent_inline_size, container_inline_size, fragment_inline_size, fragment_block_size); } SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => { let fragment_inline_size = canvas_fragment_info.canvas_inline_size(); let fragment_block_size = canvas_fragment_info.canvas_block_size(); self.border_box.size.inline = canvas_fragment_info.replaced_image_fragment_info .calculate_replaced_inline_size(style, noncontent_inline_size, container_inline_size, fragment_inline_size, fragment_block_size); } SpecificFragmentInfo::Iframe(ref iframe_fragment_info) => { self.border_box.size.inline = iframe_fragment_info.calculate_replaced_inline_size(style, container_inline_size) + noncontent_inline_size; } _ => panic!("this case should have been handled above"), } } /// Assign block-size for this fragment if it is replaced content. The inline-size must have /// been assigned first. /// /// Ideally, this should follow CSS 2.1 § 10.6.2. pub fn assign_replaced_block_size_if_necessary(&mut self, containing_block_block_size: Option<Au>) { match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have block size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) => {} } let style = &*self.style; let noncontent_block_size = self.border_padding.block_start_end(); match self.specific { SpecificFragmentInfo::Image(ref mut image_fragment_info) => { let fragment_inline_size = image_fragment_info.image_inline_size(); let fragment_block_size = image_fragment_info.image_block_size(); self.border_box.size.block = image_fragment_info.replaced_image_fragment_info .calculate_replaced_block_size(style, noncontent_block_size, containing_block_block_size, fragment_inline_size, fragment_block_size); } SpecificFragmentInfo::Canvas(ref mut canvas_fragment_info) => { let fragment_inline_size = canvas_fragment_info.canvas_inline_size(); let fragment_block_size = canvas_fragment_info.canvas_block_size(); self.border_box.size.block = canvas_fragment_info.replaced_image_fragment_info .calculate_replaced_block_size(style, noncontent_block_size, containing_block_block_size, fragment_inline_size, fragment_block_size); } SpecificFragmentInfo::ScannedText(ref info) => { // Scanned text fragments' content block-sizes are calculated by the text run // scanner during flow construction. self.border_box.size.block = info.content_size.block + noncontent_block_size } SpecificFragmentInfo::InlineBlock(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. 
                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
                self.border_box.size.block = block_flow.base.position.size.block +
                    block_flow.fragment.margin.block_start_end()
            }
            SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
                // Not the primary fragment, so we do not take the noncontent size into account.
                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
                self.border_box.size.block = block_flow.base.position.size.block;
            }
            SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
                // Not the primary fragment, so we do not take the noncontent size into account.
                let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
                self.border_box.size.block = block_flow.base.position.size.block +
                    block_flow.fragment.margin.block_start_end()
            }
            SpecificFragmentInfo::Iframe(ref info) => {
                self.border_box.size.block =
                    info.calculate_replaced_block_size(style, containing_block_block_size) +
                    noncontent_block_size;
            }
            _ => panic!("should have been handled above"),
        }
    }

    /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment
    /// when used in an inline formatting context. See CSS 2.1 § 10.8.1.
    pub fn inline_metrics(&self, layout_context: &LayoutContext) -> InlineMetrics {
        match self.specific {
            SpecificFragmentInfo::Image(ref image_fragment_info) => {
                let computed_block_size = image_fragment_info.replaced_image_fragment_info
                                                             .computed_block_size();
                InlineMetrics {
                    block_size_above_baseline: computed_block_size +
                                               self.border_padding.block_start,
                    depth_below_baseline: self.border_padding.block_end,
                    ascent: computed_block_size + self.border_padding.block_start,
                }
            }
            SpecificFragmentInfo::ScannedText(ref text_fragment) => {
                // See CSS 2.1 § 10.8.1.
                let line_height = self.calculate_line_height(layout_context);
                let font_derived_metrics =
                    InlineMetrics::from_font_metrics(&text_fragment.run.font_metrics, line_height);
                InlineMetrics {
                    block_size_above_baseline: font_derived_metrics.block_size_above_baseline +
                        self.border_padding.block_start,
                    depth_below_baseline: font_derived_metrics.depth_below_baseline +
                        self.border_padding.block_end,
                    ascent: font_derived_metrics.ascent + self.border_padding.block_start,
                }
            }
            SpecificFragmentInfo::InlineBlock(ref info) => {
                // See CSS 2.1 § 10.8.1.
                let block_flow = info.flow_ref.as_block();
                let font_style = self.style.get_font_arc();
                let font_metrics = text::font_metrics_for_style(&mut layout_context.font_context(),
                                                                font_style);
                InlineMetrics::from_block_height(&font_metrics,
                                                 block_flow.base.position.size.block,
                                                 block_flow.fragment.margin.block_start,
                                                 block_flow.fragment.margin.block_end)
            }
            SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
            SpecificFragmentInfo::InlineAbsolute(_) => {
                // Hypothetical boxes take up no space.
                InlineMetrics {
                    block_size_above_baseline: Au(0),
                    depth_below_baseline: Au(0),
                    ascent: Au(0),
                }
            }
            _ => {
                InlineMetrics {
                    block_size_above_baseline: self.border_box.size.block,
                    depth_below_baseline: Au(0),
                    ascent: self.border_box.size.block,
                }
            }
        }
    }

    /// Returns true if this fragment is a hypothetical box. See CSS 2.1 § 10.3.7.
    pub fn is_hypothetical(&self) -> bool {
        match self.specific {
            SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => true,
            _ => false,
        }
    }

    /// Returns true if this fragment can merge with another immediately-following fragment or
    /// false otherwise.
pub fn can_merge_with_fragment(&self, other: &Fragment) -> bool { match (&self.specific, &other.specific) { (&SpecificFragmentInfo::UnscannedText(ref first_unscanned_text), &SpecificFragmentInfo::UnscannedText(_)) => { // FIXME: Should probably use a whitelist of styles that can safely differ (#3165) if self.style().get_font() != other.style().get_font() || self.text_decoration() != other.text_decoration() || self.white_space() != other.white_space() { return false } let length = first_unscanned_text.text.len(); if length != 0 && first_unscanned_text.text.char_at_reverse(length) == '\n' { return false } // If this node has any styles that have border/padding/margins on the following // side, then we can't merge with the next fragment. if let Some(ref inline_context) = self.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_end != LengthOrPercentageOrAuto::Length(Au(0)) { return false } if inline_context_node.style.logical_padding().inline_end != LengthOrPercentage::Length(Au(0)) { return false } if inline_context_node.style.logical_border_width().inline_end != Au(0) { return false } } } // If the next fragment has any styles that have border/padding/margins on the // preceding side, then it can't merge with us. if let Some(ref inline_context) = other.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_start != LengthOrPercentageOrAuto::Length(Au(0)) { return false } if inline_context_node.style.logical_padding().inline_start != LengthOrPercentage::Length(Au(0)) { return false } if inline_context_node.style.logical_border_width().inline_start != Au(0) { return false } } } true } _ => false, } } /// Returns true if and only if this is the *primary fragment* for the fragment's style object /// (conceptually, though style sharing makes this not really true, of course). The primary /// fragment is the one that draws backgrounds, borders, etc., and takes borders, padding and /// margins into account. Every style object has at most one primary fragment. /// /// At present, all fragments are primary fragments except for inline-block and table wrapper /// fragments. Inline-block fragments are not primary fragments because the corresponding block /// flow is the primary fragment, while table wrapper fragments are not primary fragments /// because the corresponding table flow is the primary fragment. pub fn is_primary_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::TableWrapper => false, SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::UnscannedText(_) => true, } } /// Determines the inline sizes of inline-block fragments. These cannot be fully computed until /// inline size assignment has run for the child flow: thus it is computed "late", during /// block size assignment. 
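    // A hedged sketch of the late computation (hypothetical numbers, not from this
    // file): if the child block flow settled on a 200px inline border box and the
    // wrapped block's style is `margin-left: 10px; margin-right: auto`, the
    // inline-block fragment's border box becomes 200px + 10px + 0px = 210px, since
    // `auto` margins resolve to zero via `MaybeAuto::from_style(..).specified_or_zero()`.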
pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific { let block_flow = flow_ref::deref_mut(&mut inline_block_info.flow_ref).as_block(); let margin = block_flow.fragment.style.logical_margin(); self.border_box.size.inline = block_flow.fragment.border_box.size.inline + MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() + MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero() } } pub fn update_late_computed_inline_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.i; flow_ref::deref_mut(&mut info.flow_ref) .update_late_computed_inline_position_if_necessary(position) } _ => {} } } pub fn update_late_computed_block_position_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let position = self.border_box.start.b; flow_ref::deref_mut(&mut info.flow_ref) .update_late_computed_block_position_if_necessary(position) } _ => {} } } pub fn repair_style(&mut self, new_style: &Arc<ComputedValues>) { self.style = (*new_style).clone() } /// Given the stacking-context-relative position of the containing flow, returns the border box /// of this fragment relative to the parent stacking context. This takes `position: relative` /// into account. /// /// If `coordinate_system` is `Parent`, this returns the border box in the parent stacking /// context's coordinate system. Otherwise, if `coordinate_system` is `Own` and this fragment /// establishes a stacking context itself, this returns a border box anchored at (0, 0). (If /// this fragment does not establish a stacking context, then it always belongs to its parent /// stacking context and thus `coordinate_system` is ignored.) /// /// This is the method you should use for display list construction as well as /// `getBoundingClientRect()` and so forth. pub fn stacking_relative_border_box(&self, stacking_relative_flow_origin: &Point2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, coordinate_system: CoordinateSystem) -> Rect<Au> { let container_size = relative_containing_block_size.to_physical(relative_containing_block_mode); let border_box = self.border_box.to_physical(self.style.writing_mode, container_size); if coordinate_system == CoordinateSystem::Own && self.establishes_stacking_context() { return Rect::new(Point2D::zero(), border_box.size) } // FIXME(pcwalton): This can double-count relative position sometimes for inlines (e.g. // `<div style="position:relative">x</div>`, because the `position:relative` trickles down // to the inline flow. Possibly we should extend the notion of "primary fragment" to fix // this. let relative_position = self.relative_position(relative_containing_block_size); border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode)) .translate(stacking_relative_flow_origin) } /// Given the stacking-context-relative border box, returns the stacking-context-relative /// content box. 
    pub fn stacking_relative_content_box(&self, stacking_relative_border_box: &Rect<Au>)
                                         -> Rect<Au> {
        let border_padding = self.border_padding.to_physical(self.style.writing_mode);
        Rect::new(Point2D::new(stacking_relative_border_box.origin.x + border_padding.left,
                               stacking_relative_border_box.origin.y + border_padding.top),
                  Size2D::new(stacking_relative_border_box.size.width -
                                  border_padding.horizontal(),
                              stacking_relative_border_box.size.height -
                                  border_padding.vertical()))
    }

    /// Returns true if this fragment establishes a new stacking context and false otherwise.
    pub fn establishes_stacking_context(&self) -> bool {
        if self.flags.contains(HAS_LAYER) {
            return true
        }
        if self.style().get_effects().opacity != 1.0 {
            return true
        }
        if !self.style().get_effects().filter.is_empty() {
            return true
        }
        if self.style().get_effects().mix_blend_mode != mix_blend_mode::T::normal {
            return true
        }
        if self.style().get_effects().transform.0.is_some() {
            return true
        }
        match self.style().get_used_transform_style() {
            transform_style::T::flat | transform_style::T::preserve_3d => {
                return true
            }
            transform_style::T::auto => {}
        }

        // FIXME(pcwalton): Don't unconditionally form stacking contexts for `overflow_x: scroll`
        // and `overflow_y: scroll`. This needs multiple layers per stacking context.
        match (self.style().get_box().position,
               self.style().get_box().z_index,
               self.style().get_box().overflow_x,
               self.style().get_box().overflow_y.0) {
            (position::T::absolute,
             z_index::T::Auto,
             overflow_x::T::visible,
             overflow_x::T::visible) |
            (position::T::fixed,
             z_index::T::Auto,
             overflow_x::T::visible,
             overflow_x::T::visible) |
            (position::T::relative,
             z_index::T::Auto,
             overflow_x::T::visible,
             overflow_x::T::visible) => false,
            (position::T::absolute, _, _, _) |
            (position::T::fixed, _, _, _) |
            (position::T::relative, _, _, _) => true,
            (position::T::static_, _, _, _) => {
                false
            }
        }
    }

    // Get the effective z-index of this fragment. Z-indices only apply to positioned elements
    // per CSS 2 9.9.1 (http://www.w3.org/TR/CSS2/visuren.html#z-index), so this value may differ
    // from the value specified in the style.
    pub fn effective_z_index(&self) -> i32 {
        match self.style().get_box().position {
            position::T::static_ => {},
            _ => return self.style().get_box().z_index.number_or_zero(),
        }

        if self.style().get_effects().transform.0.is_some() {
            return self.style().get_box().z_index.number_or_zero();
        }

        match self.style().get_box().display {
            display::T::flex => self.style().get_box().z_index.number_or_zero(),
            _ => 0,
        }
    }

    /// Computes the overflow rect of this fragment relative to the start of the flow.
    pub fn compute_overflow(&self,
                            flow_size: &Size2D<Au>,
                            relative_containing_block_size: &LogicalSize<Au>)
                            -> Rect<Au> {
        let mut border_box = self.border_box.to_physical(self.style.writing_mode, *flow_size);

        // Relative position can cause us to draw outside our border box.
        //
        // FIXME(pcwalton): I'm not a fan of the way this makes us crawl through so many styles
        // all the time. Can't we handle relative positioning by just adjusting `border_box`?
        let relative_position = self.relative_position(relative_containing_block_size);
        border_box =
            border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode));
        let mut overflow = border_box;

        // Box shadows cause us to draw outside our border box.
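        // Illustrative numbers (not from this source): a shadow with offset (4px, 4px),
        // blur-radius 6px and spread-radius 2px translates the border box by the offset
        // and inflates it by `2px + 6px * BLUR_INFLATION_FACTOR` on each edge before it
        // is unioned into the overflow rect below.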
for box_shadow in &self.style().get_effects().box_shadow.0 { let offset = Point2D::new(box_shadow.offset_x, box_shadow.offset_y); let inflation = box_shadow.spread_radius + box_shadow.blur_radius * BLUR_INFLATION_FACTOR; overflow = overflow.union(&border_box.translate(&offset).inflate(inflation, inflation)) } // Outlines cause us to draw outside our border box. let outline_width = self.style.get_outline().outline_width; if outline_width != Au(0) { overflow = overflow.union(&border_box.inflate(outline_width, outline_width)) } // Include the overflow of the block flow, if any. match self.specific { SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); overflow = overflow.union(&flow::base(block_flow).overflow); } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); overflow = overflow.union(&flow::base(block_flow).overflow); } _ => (), } // FIXME(pcwalton): Sometimes excessively fancy glyphs can make us draw outside our border // box too. overflow } pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool { match self.specific { SpecificFragmentInfo::ScannedText(ref scanned_text) => { scanned_text.requires_line_break_afterward_if_wrapping_on_newlines } _ => false, } } pub fn strip_leading_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } match self.specific { SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => { let mut leading_whitespace_character_count = 0; { let text = slice_chars( &*scanned_text_fragment_info.run.text, scanned_text_fragment_info.range.begin().to_usize(), scanned_text_fragment_info.range.end().to_usize()); for character in text.chars() { if util::str::char_is_whitespace(character) { leading_whitespace_character_count += 1 } else { break } } } let whitespace_range = Range::new(scanned_text_fragment_info.range.begin(), CharIndex(leading_whitespace_character_count)); let text_bounds = scanned_text_fragment_info.run.metrics_for_range(&whitespace_range).bounding_box; self.border_box.size.inline = self.border_box.size.inline - text_bounds.size.width; scanned_text_fragment_info.content_size.inline = scanned_text_fragment_info.content_size.inline - text_bounds.size.width; scanned_text_fragment_info.range.adjust_by( CharIndex(leading_whitespace_character_count), -CharIndex(leading_whitespace_character_count)); WhitespaceStrippingResult::RetainFragment } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut new_text_string = String::new(); let mut modified = false; for (i, character) in unscanned_text_fragment_info.text.char_indices() { if gfx::text::util::is_bidi_control(character) { new_text_string.push(character); continue } if util::str::char_is_whitespace(character) { modified = true; continue } new_text_string.push_str(&unscanned_text_fragment_info.text[i..]); break } if modified { unscanned_text_fragment_info.text = new_text_string.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, } } /// Returns true if the entire fragment was stripped. 
pub fn strip_trailing_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } match self.specific { SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => { // FIXME(pcwalton): Is there a more clever (i.e. faster) way to do this? debug!("stripping trailing whitespace: range={:?}, len={}", scanned_text_fragment_info.range, scanned_text_fragment_info.run.text.chars().count()); let mut trailing_whitespace_character_count = 0; let text_bounds; { let text = slice_chars(&*scanned_text_fragment_info.run.text, scanned_text_fragment_info.range.begin().to_usize(), scanned_text_fragment_info.range.end().to_usize()); for ch in text.chars().rev() { if util::str::char_is_whitespace(ch) { trailing_whitespace_character_count += 1 } else { break } } let whitespace_range = Range::new(scanned_text_fragment_info.range.end() - CharIndex(trailing_whitespace_character_count), CharIndex(trailing_whitespace_character_count)); text_bounds = scanned_text_fragment_info.run .metrics_for_range(&whitespace_range) .bounding_box; self.border_box.size.inline = self.border_box.size.inline - text_bounds.size.width; } scanned_text_fragment_info.content_size.inline = scanned_text_fragment_info.content_size.inline - text_bounds.size.width; if trailing_whitespace_character_count != 0 { scanned_text_fragment_info.range.extend_by( CharIndex(-trailing_whitespace_character_count)); } WhitespaceStrippingResult::RetainFragment } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut trailing_bidi_control_characters_to_retain = Vec::new(); let (mut modified, mut last_character_index) = (true, 0); for (i, character) in unscanned_text_fragment_info.text.char_indices().rev() { if gfx::text::util::is_bidi_control(character) { trailing_bidi_control_characters_to_retain.push(character); continue } if util::str::char_is_whitespace(character) { modified = true; continue } last_character_index = i + character.len_utf8(); break } if modified { let mut text = unscanned_text_fragment_info.text.to_string(); text.truncate(last_character_index); for character in trailing_bidi_control_characters_to_retain.iter().rev() { text.push(*character); } unscanned_text_fragment_info.text = text.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, } } pub fn inline_styles(&self) -> InlineStyleIterator { InlineStyleIterator::new(self) } /// Returns the inline-size of this fragment's margin box. pub fn margin_box_inline_size(&self) -> Au { self.border_box.size.inline + self.margin.inline_start_end() } /// Returns true if this node *or any of the nodes within its inline fragment context* have /// non-`static` `position`. pub fn is_positioned(&self) -> bool { if self.style.get_box().position != position::T::static_ { return true } if let Some(ref inline_context) = self.inline_context { for node in inline_context.nodes.iter() { if node.style.get_box().position != position::T::static_ { return true } } } false } /// Returns true if this node is absolutely positioned. pub fn is_absolutely_positioned(&self) -> bool { self.style.get_box().position == position::T::absolute } pub fn is_inline_absolute(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsolute(..) 
=> true, _ => false, } } pub fn meld_with_next_inline_fragment(&mut self, next_fragment: &Fragment) { if let Some(ref mut inline_context_of_this_fragment) = self.inline_context { if let Some(ref inline_context_of_next_fragment) = next_fragment.inline_context { for (i, inline_context_node_from_next_fragment) in inline_context_of_next_fragment.nodes.iter().enumerate() { if i >= inline_context_of_this_fragment.nodes.len() { continue } if !inline_context_node_from_next_fragment.flags.contains( LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node_from_next_fragment.address != inline_context_of_this_fragment.nodes[i].address { continue } inline_context_of_this_fragment.nodes[i].flags.insert( LAST_FRAGMENT_OF_ELEMENT); } } } } pub fn layer_id(&self) -> LayerId { let layer_type = match self.pseudo { PseudoElementType::Normal => LayerType::FragmentBody, PseudoElementType::Before(_) => LayerType::BeforePseudoContent, PseudoElementType::After(_) => LayerType::AfterPseudoContent }; LayerId::new_of_type(layer_type, self.node.id() as usize) } pub fn layer_id_for_overflow_scroll(&self) -> LayerId { LayerId::new_of_type(LayerType::OverflowScroll, self.node.id() as usize) } } impl fmt::Debug for Fragment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let border_padding_string = if !self.border_padding.is_zero() { format!(" border_padding={:?}", self.border_padding) } else { "".to_owned() }; let margin_string = if !self.margin.is_zero() { format!(" margin={:?}", self.margin) } else { "".to_owned() }; let damage_string = if self.restyle_damage != RestyleDamage::empty() { format!(" damage={:?}", self.restyle_damage) } else { "".to_owned() }; write!(f, "{}({}) [{:?}] border_box={:?}{}{}{}", self.specific.get_type(), self.debug_id(), self.specific, self.border_box, border_padding_string, margin_string, damage_string) } } bitflags! { flags QuantitiesIncludedInIntrinsicInlineSizes: u8 { const INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS = 0x01, const INTRINSIC_INLINE_SIZE_INCLUDES_PADDING = 0x02, const INTRINSIC_INLINE_SIZE_INCLUDES_BORDER = 0x04, const INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED = 0x08, } } bitflags! { // Various flags we can use when splitting fragments. See // `calculate_split_position_using_breaking_strategy()`. flags SplitOptions: u8 { #[doc = "True if this is the first fragment on the line."] const STARTS_LINE = 0x01, #[doc = "True if we should attempt to split at character boundaries if this split fails. \ This is used to implement `overflow-wrap: break-word`."] const RETRY_AT_CHARACTER_BOUNDARIES = 0x02, } } /// A top-down fragment border box iteration handler. pub trait FragmentBorderBoxIterator { /// The operation to perform. fn process(&mut self, fragment: &Fragment, level: i32, overflow: &Rect<Au>); /// Returns true if this fragment must be processed in-order. If this returns false, /// we skip the operation for this fragment, but continue processing siblings. fn should_process(&mut self, fragment: &Fragment) -> bool; } /// The coordinate system used in `stacking_relative_border_box()`. See the documentation of that /// method for details. #[derive(Clone, PartialEq, Debug)] pub enum CoordinateSystem { /// The border box returned is relative to the fragment's parent stacking context. Parent, /// The border box returned is relative to the fragment's own stacking context, if applicable. 
Own, } pub struct InlineStyleIterator<'a> { fragment: &'a Fragment, inline_style_index: usize, primary_style_yielded: bool, } impl<'a> Iterator for InlineStyleIterator<'a> { type Item = &'a ComputedValues; fn next(&mut self) -> Option<&'a ComputedValues> { if !self.primary_style_yielded { self.primary_style_yielded = true; return Some(&*self.fragment.style) } let inline_context = match self.fragment.inline_context { None => return None, Some(ref inline_context) => inline_context, }; let inline_style_index = self.inline_style_index; if inline_style_index == inline_context.nodes.len() { return None } self.inline_style_index += 1; Some(&*inline_context.nodes[inline_style_index].style) } } impl<'a> InlineStyleIterator<'a> { fn new(fragment: &Fragment) -> InlineStyleIterator { InlineStyleIterator { fragment: fragment, inline_style_index: 0, primary_style_yielded: false, } } } #[derive(Copy, Clone, Debug, PartialEq)] pub enum WhitespaceStrippingResult { RetainFragment, FragmentContainedOnlyBidiControlCharacters, FragmentContainedOnlyWhitespace, } impl WhitespaceStrippingResult { fn from_unscanned_text_fragment_info(info: &UnscannedTextFragmentInfo) -> WhitespaceStrippingResult { if info.text.is_empty() { WhitespaceStrippingResult::FragmentContainedOnlyWhitespace } else if info.text.chars().all(gfx::text::util::is_bidi_control) { WhitespaceStrippingResult::FragmentContainedOnlyBidiControlCharacters } else { WhitespaceStrippingResult::RetainFragment } } } bitflags! { flags FragmentFlags: u8 { /// Whether this fragment has a layer. const HAS_LAYER = 0x01, } }<|fim▁end|>
let block_flow = flow_ref::deref_mut(&mut info.flow_ref).as_block();
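A note on how rows like the one above pair up: the prompt concatenates the file-name tag, the code before the hole, a hole token, and the code after it, while the completion holds the removed span. A minimal sketch of that assembly, using the marker strings visible in this dump; the make_fim_row helper and the toy snippet are hypothetical, not part of the dataset tooling:

# Sketch only: rebuild a FIM training pair from its parts.
def make_fim_row(file_name: str, prefix: str, hole: str, suffix: str) -> dict:
    prompt = ("<|file_name|>" + file_name + "<|end_file_name|>"
              + "<|fim▁begin|>" + prefix
              + "<|fim▁hole|>" + suffix + "<|fim▁end|>")
    return {"prompt": prompt, "completion": hole}

# Toy usage: the model is trained to emit the hole given the prompt.
row = make_fim_row("hello.py",
                   prefix="def add(a, b):\n    ",
                   hole="return a + b\n",
                   suffix="")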
<|file_name|>fckstylecommand.js<|end_file_name|><|fim▁begin|><<<<<<< HEAD /* * FCKeditor - The text editor for Internet - http://www.fckeditor.net * Copyright (C) 2003-2010 Frederico Caldeira Knabben * * == BEGIN LICENSE == * * Licensed under the terms of any of the following licenses at your * choice: * * - GNU General Public License Version 2 or later (the "GPL") * http://www.gnu.org/licenses/gpl.html * * - GNU Lesser General Public License Version 2.1 or later (the "LGPL") * http://www.gnu.org/licenses/lgpl.html * * - Mozilla Public License Version 1.1 or later (the "MPL") * http://www.mozilla.org/MPL/MPL-1.1.html * * == END LICENSE == * * FCKStyleCommand Class: represents the "Style" command. */ var FCKStyleCommand = function() {} FCKStyleCommand.prototype = { Name : 'Style', Execute : function( styleName, styleComboItem ) {<|fim▁hole|> if ( styleComboItem.Selected ) FCK.Styles.RemoveStyle( styleComboItem.Style ) ; else FCK.Styles.ApplyStyle( styleComboItem.Style ) ; FCKUndo.SaveUndoStep() ; FCK.Focus() ; FCK.Events.FireEvent( 'OnSelectionChange' ) ; }, GetState : function() { if ( FCK.EditMode != FCK_EDITMODE_WYSIWYG || !FCK.EditorDocument ) return FCK_TRISTATE_DISABLED ; if ( FCKSelection.GetType() == 'Control' ) { var el = FCKSelection.GetSelectedElement() ; if ( !el || !FCKStyles.CheckHasObjectStyle( el.nodeName.toLowerCase() ) ) return FCK_TRISTATE_DISABLED ; } return FCK_TRISTATE_OFF ; } }; ======= /* * FCKeditor - The text editor for Internet - http://www.fckeditor.net * Copyright (C) 2003-2010 Frederico Caldeira Knabben * * == BEGIN LICENSE == * * Licensed under the terms of any of the following licenses at your * choice: * * - GNU General Public License Version 2 or later (the "GPL") * http://www.gnu.org/licenses/gpl.html * * - GNU Lesser General Public License Version 2.1 or later (the "LGPL") * http://www.gnu.org/licenses/lgpl.html * * - Mozilla Public License Version 1.1 or later (the "MPL") * http://www.mozilla.org/MPL/MPL-1.1.html * * == END LICENSE == * * FCKStyleCommand Class: represents the "Style" command. */ var FCKStyleCommand = function() {} FCKStyleCommand.prototype = { Name : 'Style', Execute : function( styleName, styleComboItem ) { FCKUndo.SaveUndoStep() ; if ( styleComboItem.Selected ) FCK.Styles.RemoveStyle( styleComboItem.Style ) ; else FCK.Styles.ApplyStyle( styleComboItem.Style ) ; FCKUndo.SaveUndoStep() ; FCK.Focus() ; FCK.Events.FireEvent( 'OnSelectionChange' ) ; }, GetState : function() { if ( FCK.EditMode != FCK_EDITMODE_WYSIWYG || !FCK.EditorDocument ) return FCK_TRISTATE_DISABLED ; if ( FCKSelection.GetType() == 'Control' ) { var el = FCKSelection.GetSelectedElement() ; if ( !el || !FCKStyles.CheckHasObjectStyle( el.nodeName.toLowerCase() ) ) return FCK_TRISTATE_DISABLED ; } return FCK_TRISTATE_OFF ; } }; >>>>>>> origin/master<|fim▁end|>
FCKUndo.SaveUndoStep() ;
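The prompt above is unusual in that the source file still carries unresolved git conflict markers (<<<<<<< HEAD, =======, >>>>>>> origin/master), so the file body appears twice. A small filter of the kind one might run over a corpus to flag such rows; this is a sketch, not part of any existing tooling:

import re

# Matches the three git conflict marker forms at the start of a line.
CONFLICT_RE = re.compile(r"^(<{7} |={7}$|>{7} )", re.MULTILINE)

def has_conflict_markers(text: str) -> bool:
    return bool(CONFLICT_RE.search(text))

# Usage sketch: flag corpus rows whose code carries conflict debris.
rows = [{"prompt": "<<<<<<< HEAD\nvar a = 1;\n=======\nvar a = 2;\n>>>>>>> origin/master\n"}]
flagged = [r for r in rows if has_conflict_markers(r["prompt"])]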
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for SCSGate switches.""" import logging import voluptuous as vol from homeassistant.components import scsgate from homeassistant.components.switch import SwitchDevice, PLATFORM_SCHEMA from homeassistant.const import ATTR_ENTITY_ID, ATTR_STATE, CONF_NAME, CONF_DEVICES import homeassistant.helpers.config_validation as cv ATTR_SCENARIO_ID = "scenario_id" CONF_TRADITIONAL = "traditional" CONF_SCENARIO = "scenario" CONF_SCS_ID = "scs_id" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_DEVICES): cv.schema_with_slug_keys(scsgate.SCSGATE_SCHEMA)}<|fim▁hole|> def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the SCSGate switches.""" logger = logging.getLogger(__name__) _setup_traditional_switches( logger=logger, config=config, add_entities_callback=add_entities ) _setup_scenario_switches(logger=logger, config=config, hass=hass) def _setup_traditional_switches(logger, config, add_entities_callback): """Add traditional SCSGate switches.""" traditional = config.get(CONF_TRADITIONAL) switches = [] if traditional: for _, entity_info in traditional.items(): if entity_info[scsgate.CONF_SCS_ID] in scsgate.SCSGATE.devices: continue name = entity_info[CONF_NAME] scs_id = entity_info[scsgate.CONF_SCS_ID] logger.info("Adding %s scsgate.traditional_switch", name) switch = SCSGateSwitch(name=name, scs_id=scs_id, logger=logger) switches.append(switch) add_entities_callback(switches) scsgate.SCSGATE.add_devices_to_register(switches) def _setup_scenario_switches(logger, config, hass): """Add only SCSGate scenario switches.""" scenario = config.get(CONF_SCENARIO) if scenario: for _, entity_info in scenario.items(): if entity_info[scsgate.CONF_SCS_ID] in scsgate.SCSGATE.devices: continue name = entity_info[CONF_NAME] scs_id = entity_info[scsgate.CONF_SCS_ID] logger.info("Adding %s scsgate.scenario_switch", name) switch = SCSGateScenarioSwitch( name=name, scs_id=scs_id, logger=logger, hass=hass ) scsgate.SCSGATE.add_device(switch) class SCSGateSwitch(SwitchDevice): """Representation of a SCSGate switch.""" def __init__(self, scs_id, name, logger): """Initialize the switch.""" self._name = name self._scs_id = scs_id self._toggled = False self._logger = logger @property def scs_id(self): """Return the SCS ID.""" return self._scs_id @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the device if any.""" return self._name @property def is_on(self): """Return true if switch is on.""" return self._toggled def turn_on(self, **kwargs): """Turn the device on.""" from scsgate.tasks import ToggleStatusTask scsgate.SCSGATE.append_task(ToggleStatusTask(target=self._scs_id, toggled=True)) self._toggled = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn the device off.""" from scsgate.tasks import ToggleStatusTask scsgate.SCSGATE.append_task( ToggleStatusTask(target=self._scs_id, toggled=False) ) self._toggled = False self.schedule_update_ha_state() def process_event(self, message): """Handle a SCSGate message related with this switch.""" if self._toggled == message.toggled: self._logger.info( "Switch %s, ignoring message %s because state already active", self._scs_id, message, ) # Nothing changed, ignoring return self._toggled = message.toggled self.schedule_update_ha_state() command = "off" if self._toggled: command = "on" self.hass.bus.fire( "button_pressed", {ATTR_ENTITY_ID: self._scs_id, ATTR_STATE: command} ) class 
SCSGateScenarioSwitch: """Provides a SCSGate scenario switch. This switch is always in an 'off' state; when toggled it is used to trigger events. """ def __init__(self, scs_id, name, logger, hass): """Initialize the scenario.""" self._name = name self._scs_id = scs_id self._logger = logger self._hass = hass @property def scs_id(self): """Return the SCS ID.""" return self._scs_id @property def name(self): """Return the name of the device if any.""" return self._name def process_event(self, message): """Handle a SCSGate message related to this switch.""" from scsgate.messages import StateMessage, ScenarioTriggeredMessage if isinstance(message, StateMessage): scenario_id = message.bytes[4] elif isinstance(message, ScenarioTriggeredMessage): scenario_id = message.scenario else: self._logger.warning("Scenario switch: received unknown message %s", message) return self._hass.bus.fire( "scenario_switch_triggered", {ATTR_ENTITY_ID: int(self._scs_id), ATTR_SCENARIO_ID: int(scenario_id, 16)}, )<|fim▁end|>
)
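For reference, the PLATFORM_SCHEMA.extend(...) call that the completion above closes builds a voluptuous validator keyed by slug. A standalone sketch of the same idea; the real scsgate.SCSGATE_SCHEMA is not shown in the row, so a simplified per-device schema is assumed here:

import voluptuous as vol

# Assumed stand-in for scsgate.SCSGATE_SCHEMA: each device needs a
# display name and an SCS bus identifier.
DEVICE_SCHEMA = vol.Schema({
    vol.Required("name"): str,
    vol.Required("scs_id"): str,
})

# Slug-keyed mapping of device configs, loosely mirroring
# cv.schema_with_slug_keys(...) used in the platform schema above.
PLATFORM_SCHEMA = vol.Schema({
    vol.Required("devices"): {str: DEVICE_SCHEMA},
})

# Usage sketch: validation returns the (possibly coerced) config.
config = PLATFORM_SCHEMA({
    "devices": {"living_room": {"name": "Living room", "scs_id": "0A"}}
})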
<|file_name|>broken.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. use capnp::{any_pointer}; use capnp::Error; use capnp::private::capability::{ClientHook, ParamsHook, PipelineHook, PipelineOp, RequestHook, ResultsHook}; use capnp::capability::{Promise, RemotePromise}; use std::rc::{Rc}; pub struct Pipeline { error: Error, } impl Pipeline { pub fn new(error: Error) -> Pipeline { Pipeline { error: error } } } impl PipelineHook for Pipeline { fn add_ref(&self) -> Box<dyn PipelineHook> { Box::new(Pipeline::new(self.error.clone())) } fn get_pipelined_cap(&self, _ops: &[PipelineOp]) -> Box<dyn ClientHook> { new_cap(self.error.clone()) } } pub struct Request { error: Error, message: ::capnp::message::Builder<::capnp::message::HeapAllocator>, } impl Request { pub fn new(error: Error, _size_hint: Option<::capnp::MessageSize>) -> Request { Request { error: error, message: ::capnp::message::Builder::new_default(), } } } impl RequestHook for Request { fn get<'a>(&'a mut self) -> any_pointer::Builder<'a> { self.message.get_root().unwrap() } fn get_brand(&self) -> usize { 0 } fn send<'a>(self: Box<Self>) -> RemotePromise<any_pointer::Owned> { let pipeline = Pipeline::new(self.error.clone()); RemotePromise { promise: Promise::err(self.error), pipeline: any_pointer::Pipeline::new(Box::new(pipeline)), } } fn tail_send(self: Box<Self>) -> Option<(u32, Promise<(), Error>, Box<dyn PipelineHook>)> { None } } struct ClientInner { error: Error, _resolved: bool, brand: usize, } pub struct Client { inner: Rc<ClientInner>, } impl Client { pub fn new(error: Error, resolved: bool, brand: usize) -> Client { Client {<|fim▁hole|> inner: Rc::new(ClientInner { error: error, _resolved: resolved, brand: brand, }), } } } impl ClientHook for Client { fn add_ref(&self) -> Box<dyn ClientHook> { Box::new(Client { inner: self.inner.clone() } ) } fn new_call(&self, _interface_id: u64, _method_id: u16, size_hint: Option<::capnp::MessageSize>) -> ::capnp::capability::Request<any_pointer::Owned, any_pointer::Owned> { ::capnp::capability::Request::new( Box::new(Request::new(self.inner.error.clone(), size_hint))) } fn call(&self, _interface_id: u64, _method_id: u16, _params: Box<dyn ParamsHook>, _results: Box<dyn ResultsHook>) -> Promise<(), Error> { Promise::err(self.inner.error.clone()) } fn get_ptr(&self) -> usize { (self.inner.as_ref()) as * const _ as usize } fn get_brand(&self) 
-> usize { self.inner.brand } fn get_resolved(&self) -> Option<Box<dyn ClientHook>> { None } fn when_more_resolved(&self) -> Option<Promise<Box<dyn ClientHook>, Error>> { None } } pub fn new_cap(exception: Error) -> Box<dyn ClientHook> { Box::new(Client::new(exception, false, 0)) }<|fim▁end|>
<|file_name|>StringReferenceProvider.java<|end_file_name|><|fim▁begin|>package de.espend.idea.shopware.reference.provider; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiPolyVariantReferenceBase; import com.intellij.psi.ResolveResult; import com.jetbrains.php.lang.psi.elements.StringLiteralExpression; import de.espend.idea.shopware.ShopwarePluginIcons; import de.espend.idea.shopware.util.ShopwareUtil; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import java.util.List; /** * @author Daniel Espendiller <[email protected]> */ public class StringReferenceProvider extends PsiPolyVariantReferenceBase<PsiElement> { final private String[] values; public StringReferenceProvider(StringLiteralExpression stringLiteralExpression, String... values) { super(stringLiteralExpression);<|fim▁hole|> this.values = values; } @NotNull @Override public ResolveResult[] multiResolve(boolean b) { return new ResolveResult[0]; } @NotNull @Override public Object[] getVariants() { final List<LookupElement> lookupElements = new ArrayList<>(); for(String value: values) { lookupElements.add(LookupElementBuilder.create(ShopwareUtil.toCamelCase(value, true)) .withIcon(ShopwarePluginIcons.SHOPWARE) ); } return lookupElements.toArray(); } }<|fim▁end|>
<|file_name|>async_await.py<|end_file_name|><|fim▁begin|>import threading import asyncio async def hello(): print('Hello world! (%s)' % threading.currentThread()) await asyncio.sleep(1) print('Hello again! (%s)' % threading.currentThread()) loop = asyncio.get_event_loop() <|fim▁hole|><|fim▁end|>
tasks = [hello(), hello()] loop.run_until_complete(asyncio.wait(tasks)) loop.close()
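The asyncio example above targets the pre-3.7 event-loop API (get_event_loop, run_until_complete, explicit close). On current Python the same two coroutines are usually driven with asyncio.run and gather; an equivalent sketch:

import asyncio
import threading

async def hello():
    print('Hello world! (%s)' % threading.current_thread())
    await asyncio.sleep(1)
    print('Hello again! (%s)' % threading.current_thread())

async def main():
    # Run both coroutines concurrently, as asyncio.wait(tasks) does above.
    await asyncio.gather(hello(), hello())

asyncio.run(main())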
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright 2017 Huawei Technologies Co.,LTD. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may<|fim▁hole|> # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import inspect import pecan import wsme from wsme import types as wtypes from pecan import rest class APIBase(wtypes.Base): created_at = wsme.wsattr(datetime.datetime, readonly=True) """The time in UTC at which the object is created""" updated_at = wsme.wsattr(datetime.datetime, readonly=True) """The time in UTC at which the object is updated""" def as_dict(self): """Render this object as a dict of its fields.""" return dict((k, getattr(self, k)) for k in self.fields if hasattr(self, k) and getattr(self, k) != wsme.Unset) class CyborgController(rest.RestController): def _handle_patch(self, method, remainder, request=None): """Routes ``PATCH`` _custom_actions.""" # route to a patch_all or get if no additional parts are available if not remainder or remainder == ['']: controller = self._find_controller('patch_all', 'patch') if controller: return controller, [] pecan.abort(404) controller = getattr(self, remainder[0], None) if controller and not inspect.ismethod(controller): return pecan.routing.lookup_controller(controller, remainder[1:]) # route to custom_action match = self._handle_custom_action(method, remainder, request) if match: return match # finally, check for the regular patch_one/patch requests controller = self._find_controller('patch_one', 'patch') if controller: return controller, remainder pecan.abort(405)<|fim▁end|>
# not use this file except in compliance with the License. You may obtain # a copy of the License at
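The APIBase.as_dict method in the row above keeps only attributes that WSME actually assigned, dropping anything still equal to wsme.Unset. The same pattern in plain Python, with a sentinel standing in for the WSME unset marker; the class and field names below are illustrative, not from the file:

UNSET = object()  # stand-in for wsme.Unset

class Record:
    fields = ("created_at", "updated_at")

    def __init__(self, **kwargs):
        for name in self.fields:
            setattr(self, name, kwargs.get(name, UNSET))

    def as_dict(self):
        # Keep only fields that exist and were actually assigned.
        return {k: getattr(self, k) for k in self.fields
                if hasattr(self, k) and getattr(self, k) is not UNSET}

print(Record(created_at="2017-01-01").as_dict())  # {'created_at': '2017-01-01'}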
<|file_name|>copy_training_service.py<|end_file_name|><|fim▁begin|>############################################################################## # # OSIS stands for Open Student Information System. It's an application # designed to manage the core business of higher education institutions, # such as universities, faculties, institutes and professional schools. # The core business involves the administration of students, teachers, # courses, programs and so on. # # Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be) #<|fim▁hole|># # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # A copy of this license - GNU General Public License - is available # at the root of the source code of this program. If not, # see http://www.gnu.org/licenses/. # ############################################################################## from django.db import transaction from education_group.ddd import command from education_group.ddd.domain import exception from education_group.ddd.domain.training import TrainingIdentity from ddd.logic.formation_catalogue.builder.training_builder import TrainingBuilder from education_group.ddd.repository import training as training_repository @transaction.atomic() def copy_training_to_next_year(copy_cmd: command.CopyTrainingToNextYearCommand) -> 'TrainingIdentity': # GIVEN repository = training_repository.TrainingRepository() existing_training = repository.get( entity_id=TrainingIdentity(acronym=copy_cmd.acronym, year=copy_cmd.postpone_from_year) ) # WHEN training_next_year = TrainingBuilder().copy_to_next_year(existing_training, repository) # THEN try: with transaction.atomic(): identity = repository.create(training_next_year) except exception.TrainingAcronymAlreadyExistException: identity = repository.update(training_next_year) return identity<|fim▁end|>
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version.
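copy_training_to_next_year above builds the copied training, tries to create it, and falls back to an update when the acronym already exists for that year. The same create-or-update shape reduced to a dependency-free sketch; the exception and repository classes here are invented for illustration:

class DuplicateKeyError(Exception):
    """Raised by the toy repository when the identity already exists."""

class Repository:
    def __init__(self):
        self._rows = {}

    def create(self, key, value):
        if key in self._rows:
            raise DuplicateKeyError(key)
        self._rows[key] = value
        return key

    def update(self, key, value):
        self._rows[key] = value
        return key

def copy_to_next_year(repo, key, value):
    # Mirrors the try-create / on-duplicate-update flow above.
    try:
        return repo.create(key, value)
    except DuplicateKeyError:
        return repo.update(key, value)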
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Package' db.create_table(u'api_package', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)), ('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)), ('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)), )) db.send_create_signal(u'api', ['Package']) # Adding unique constraint on 'Package', fields ['name', 'url'] db.create_unique(u'api_package', ['name', 'url']) def backwards(self, orm): # Removing unique constraint on 'Package', fields ['name', 'url'] db.delete_unique(u'api_package', ['name', 'url']) # Deleting model 'Package' db.delete_table(u'api_package') models = { u'api.package': {<|fim▁hole|> 'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'}) } } complete_apps = ['api']<|fim▁end|>
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'}, 'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
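The South migration above (create table, then a composite unique constraint on name and url) corresponds to roughly this model definition; an approximation, since only the migration is shown in the row:

from django.db import models

class Package(models.Model):
    name = models.CharField(max_length=500, unique=True, db_index=True)
    url = models.CharField(max_length=500, unique=True)
    created_at = models.DateField(auto_now_add=True)

    class Meta:
        unique_together = (("name", "url"),)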
<|file_name|>wechat.js<|end_file_name|><|fim▁begin|>/** * WeChat in-page JS helpers * @author dodge * @contact [email protected] * @link http://blog.4wer.com/wechat-timeline-share * @version 1.1 * * Custom share usage: * WeixinJS.hideOptionMenu() hide the top-right menu button * WeixinJS.showOptionMenu() show the top-right menu button * WeixinJS.hideToolbar() hide the toolbar * WeixinJS.showToolbar() show the toolbar * WeixinJS.getNetworkType() get the network status * WeixinJS.closeWindow() close the window * WeixinJS.scanQRCode() scan a QR code * WeixinJS.openUrlByExtBrowser(url) open a URL in the external browser * WeixinJS.jumpToBizProfile(username) jump to the given official-account page * WeixinJS.sendEmail(title,content) send an email * WeixinJS.openProductView(latitude,longitude,name,address,scale,infoUrl) view a map location * WeixinJS.addContact(username) add a WeChat account * WeixinJS.imagePreview(urls,current) open WeChat's built-in image preview * WeixinJS.payCallback(appId,package,timeStamp,nonceStr,signType,paySign,callback) WeChat JsApi payment interface * WeixinJS.editAddress(appId,addrSign,timeStamp,nonceStr,callback) WeChat JsApi address-edit interface * Custom share-content data format: * var dataForWeixin={ appId:"", MsgImg:"message image path", TLImg:"timeline image path", url:"share URL", title:"title", desc:"description", fakeid:"", prepare:function(argv){ if (typeof argv.shareTo!='undefined') switch(argv.shareTo) { case 'friend': //send to a friend alert(argv.scene); //friend break; case 'timeline': //share to the timeline break; case 'weibo': //share to Weibo alert(argv.url); break; case 'favorite': //favorite alert(argv.scene);//favorite break; case 'connector': //share to a third-party app alert(argv.scene);//connector break; default: } }, callback:function(res){ //sent to a friend or an app if (res.err_msg=='send_app_msg:confirm') { //todo:func1(); alert(res.err_desc); } if (res.err_msg=='send_app_msg:cancel') { //todo:func2(); alert(res.err_desc); } //shared to Moments (timeline) if (res.err_msg=='share_timeline:confirm') { //todo:func1(); alert(res.err_desc); } if (res.err_msg=='share_timeline:cancel') { //todo:func1(); alert(res.err_desc); } //shared to Weibo if (res.err_msg=='share_weibo:confirm') { //todo:func1(); alert(res.err_desc); } if (res.err_msg=='share_weibo:cancel') { //todo:func1(); alert(res.err_desc); } //favorited or shared to an app if (res.err_msg=='send_app_msg:ok') { //todo:func1(); alert(res.err_desc); } } }; */ WeixinJS = typeof WeixinJS!='undefined' ? WeixinJS : {}; //Hide the top-right menu button WeixinJS.hideOptionMenu = function() { document.addEventListener('WeixinJSBridgeReady', function onBridgeReady() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.call('hideOptionMenu'); }); }; //Show the top-right menu button WeixinJS.showOptionMenu = function() { document.addEventListener('WeixinJSBridgeReady', function onBridgeReady() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.call('showOptionMenu'); }); }; //Hide the bottom toolbar WeixinJS.hideToolbar = function() { document.addEventListener('WeixinJSBridgeReady', function onBridgeReady() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.call('hideToolbar'); }); }; //Show the bottom toolbar WeixinJS.showToolbar = function() { document.addEventListener('WeixinJSBridgeReady', function onBridgeReady() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.call('showToolbar'); }); }; //Get the user's network status from the page netType={"network_type:wifi":"wifi network","network_type:edge":"non-wifi, includes 3G/2G","network_type:fail":"network disconnected","network_type:wwan":"2g or 3g"}; WeixinJS.getNetworkType = function(callback) { document.addEventListener('WeixinJSBridgeReady', function onBridgeReady() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke('getNetworkType',{}, function(res){ //result: network_type:wifi,network_type:edge,network_type:fail,network_type:wwan //netType[e.err_msg] callback(res.err_msg); }); }); }; //Close the window WeixinJS.closeWindow = function() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("closeWindow", {}); };<|fim▁hole|>}; 
//Open a URL in the external browser WeixinJS.openUrlByExtBrowser=function(url){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("openUrlByExtBrowser",{"url" : url}); }; //Jump to the given official-account profile page WeixinJS.jumpToBizProfile=function(username){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("jumpToBizProfile",{"tousername" : username}); }; //Send an email WeixinJS.sendEmail=function(title,content){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("sendEmail",{ "title" : title, "content" : content }); }; //View a map location WeixinJS.openProductView=function(latitude,longitude,name,address,scale,infoUrl){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("openProductView",{ "latitude" : latitude, //latitude "longitude" : longitude, //longitude "name" : name, //name "address" : address, //address "scale" : scale, //map zoom level "infoUrl" : infoUrl, //link shown at the bottom of the location view }); }; //Add a WeChat account WeixinJS.addContact=function weixinAddContact(username){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("addContact", { "webtype": "1", "username": username }, function(e) { WeixinJSBridge.log(e.err_msg); //e.err_msg:add_contact:added already added //e.err_msg:add_contact:cancel adding cancelled //e.err_msg:add_contact:ok added successfully if(e.err_msg == 'add_contact:added' || e.err_msg == 'add_contact:ok'){ //follow succeeded, or already following } }); }; /** * Open WeChat's built-in image preview scrollview * @param array urls array of image URLs * @param string current URL of the current image */ WeixinJS.imagePreview = function(urls,current) { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("imagePreview", { current: current, urls: urls }); }; WeixinJS.payCallback = function(appId,package,timeStamp,nonceStr,signType,paySign,callback){ if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke('getBrandWCPayRequest',{ "appId" : appId.toString(), "timeStamp" : timeStamp.toString(), "nonceStr" : nonceStr.toString(), "package" : package.toString(), "signType" : signType.toString(), "paySign" : paySign.toString() },function(res){ // res.err_msg == "get_brand_wcpay_request:ok" return true; // res.err_msg == "get_brand_wcpay_request:cancel" return false; callback(res); }); }; //Edit the shipping address WeixinJS.editAddress = function(appId,addrSign,timeStamp,nonceStr,callback){ var postdata = { "appId" : appId.toString(), "scope" : "jsapi_address", "signType" : "sha1", "addrSign" : addrSign.toString(), "timeStamp" : timeStamp.toString(), "nonceStr" : nonceStr.toString() }; if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke('editAddress',postdata, function(res){ //return res.proviceFirstStageName,res.addressCitySecondStageName,res.addressCountiesThirdStageName,res.addressDetailInfo,res.userName,res.addressPostalCode,res.telNumber //error return res.err_msg callback(res); }); }; (function(){ if (typeof dataForWeixin=="undefined") return; var onBridgeReady=function(){ WeixinJSBridge.on('menu:share:appmessage', function(argv){ (dataForWeixin.prepare)(argv); WeixinJSBridge.invoke('sendAppMessage',{ "appid":dataForWeixin.appId, "img_url":dataForWeixin.MsgImg, "img_width":"120", "img_height":"120", "link":dataForWeixin.url, "desc":dataForWeixin.desc, "title":dataForWeixin.title }, function(res){(dataForWeixin.callback)(res);}); }); WeixinJSBridge.on('menu:share:timeline', function(argv){ (dataForWeixin.prepare)(argv); WeixinJSBridge.invoke('shareTimeline',{ "img_url":dataForWeixin.TLImg, "img_width":"120", "img_height":"120", "link":dataForWeixin.url, "desc":dataForWeixin.desc, "title":dataForWeixin.title }, function(res){(dataForWeixin.callback)(res);}); }); WeixinJSBridge.on('menu:share:weibo', function(argv){ (dataForWeixin.prepare)(argv); 
WeixinJSBridge.invoke('shareWeibo',{ "content":dataForWeixin.title, "url":dataForWeixin.url }, function(res){(dataForWeixin.callback)(res);}); }); WeixinJSBridge.on('menu:share:facebook', function(argv){ (dataForWeixin.prepare)(argv); WeixinJSBridge.invoke('shareFB',{ "img_url":dataForWeixin.TLImg, "img_width":"120", "img_height":"120", "link":dataForWeixin.url, "desc":dataForWeixin.desc, "title":dataForWeixin.title }, function(res){(dataForWeixin.callback)(res);}); }); }; if(document.addEventListener){ document.addEventListener('WeixinJSBridgeReady', onBridgeReady, false); }else if(document.attachEvent){ document.attachEvent('WeixinJSBridgeReady' , onBridgeReady); document.attachEvent('onWeixinJSBridgeReady' , onBridgeReady); } })();<|fim▁end|>
//Scan a QR code WeixinJS.scanQRCode = function() { if (typeof WeixinJSBridge!='undefined') WeixinJSBridge.invoke("scanQRCode", {});
<|file_name|>scene_main.go<|end_file_name|><|fim▁begin|>package main /* * CSCI 4229 Assignment 5: Lighting * * Author: Zach Anders * * Some code derived from CSCI 4229 Examples 9, 10, and 13 (ex9.c, ex10.c, ex13.c) * * Code for 2D text display (due to missing windowpos2i) based off some example code * at http://programmingexamples.net/wiki/OpenGL/Text */ <|fim▁hole|>import ( "actor" "entity" "math/rand" "os" "runtime" "time" "glutil" "util" "world" "github.com/go-gl/gl" "github.com/ianremmler/ode" "github.com/rhencke/glut" ) var renderQueue util.RenderQueue = util.NewEmptyRenderQueue() //var currentCamera = CreateDefaultViewport() var my_world = world.NewWorld() var player = actor.NewPlayer(&my_world, glutil.Point3D{0, 0, 0}, 5) var light actor.OrbitActor var currentMouse = glutil.CreateMouseState() // Normal order: //Translate -> Rotate -> Scale // Creates the display function func DisplayFunc() { gl.Clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT) gl.Enable(gl.DEPTH_TEST) gl.LoadIdentity() // currentCamera.PositionSelf() player.PositionSelf() my_world.Tick() renderQueue.RenderAll() gl.Disable(gl.DEPTH_TEST) gl.Disable(gl.LIGHTING) gl.Color3f(.9, .9, .9) glutil.Print2d(5, 35, "Keys: W,S,A,D : Move Around | Q / E : Move Light left/right | L : Toggle light rotation") glutil.Print2d(5, 20, "Click and Drag: Rotate | Arrow keys: Rotate") glutil.Print2d(5, 5, "%s", player.ToString()) gl.Flush() glut.SwapBuffers() } // Creates the key handler function func KeyDownFunc(ch byte, x int, y int) { switch ch { case 27: os.Exit(0) break case 's': player.Translate(-2.5, 0.0, 0.0) break case 'w': player.Translate(2.5, 0.0, 0.0) break case 'd': player.Translate(0.0, 0.0, -2.5) break case 'a': player.Translate(0.0, 0.0, 2.5) break case 'q': light.AdjustAngle(-5) break case 'e': light.AdjustAngle(5) break case 'l': light.Toggle() break } glut.PostRedisplay() } // Creates the special key handler function func SpecialFunc(key int, x int, y int) { // Phi: Elevation, Theta: Azimuth if key == glut.KEY_RIGHT { player.Rotate(10, 0) } else if key == glut.KEY_LEFT { player.Rotate(-10, 0) } else if key == glut.KEY_UP { player.Rotate(0, 10) } else if key == glut.KEY_DOWN { player.Rotate(0, -10) } // Tell GLUT it is necessary to redisplay the scene glut.PostRedisplay() } func MouseMotion(x, y int) { if currentMouse.LeftDown { horiz_delta := currentMouse.X - x vert_delta := currentMouse.Y - y player.Rotate(int32(horiz_delta)/-5, int32(vert_delta)/5) player.ImmediateLook() } currentMouse.X, currentMouse.Y = x, y } func MouseDown(button, state, x, y int) { currentMouse.X, currentMouse.Y = x, y switch button { case glut.LEFT_BUTTON: currentMouse.LeftDown = (state == glut.DOWN) break case glut.MIDDLE_BUTTON: currentMouse.MiddleDown = (state == glut.DOWN) break case glut.RIGHT_BUTTON: currentMouse.RightDown = (state == glut.DOWN) break } } /* * GLUT calls this routine when the window is resized */ func Reshape(width int, height int) { asp := float64(1) // Ratio of the width to the height of the window if height > 0 { asp = float64(width) / float64(height) } // Set the viewport to the entire window gl.Viewport(0, 0, width, height) player.GetProjection().AspectRatio = asp player.GetProjection().SetProjectionMatrix() } // Idler function. Called whenever GLUT is idle. 
func IdleFunc() { s_time := glut.Get(glut.ELAPSED_TIME) glut.PostRedisplay() e_time := glut.Get(glut.ELAPSED_TIME) - s_time if e_time < 33 { time.Sleep(time.Duration(33-e_time) * time.Millisecond) } } func setup() { lightsource := actor.NewBallLight(&my_world, glutil.Point3D{15, 15, 15}) renderQueue.AddNamed(&lightsource, "light") light = actor.NewOrbitActor(&my_world, &lightsource, glutil.Point3D{0, 15, 0}, 50) my_world.AddActor(&light) rand.Seed(0x12345 + 8) my_world.AddActor(&player) block := actor.NewBlock(&my_world, glutil.Point3D{22, 0, 0}, glutil.Point3D{10, 20.5, 20}, glutil.Color4D{.5, .3, .1, 1}) my_world.AddActor(&block) renderQueue.Add(&block) tmpList := []actor.OrbitActor{} for i := 0; i < 250; i++ { speed := rand.Intn(3) //radius := rand.Intn(50) + 125 radius := 100 x, y, z := float64(rand.Intn(10)-5), float64(rand.Intn(10)-5), float64(rand.Intn(150)-75) angle := float64(rand.Intn(360)) //cylinder := NewCylinder(&my_world, glutil.Point3D{x, y + 30, z}, 1, 4) cylinder := actor.NewBlock(&my_world, glutil.Point3D{x, y + 30, z}, glutil.Point3D{.1, .1, .1}, glutil.Color4D{.8, .8, .8, .05}) tmpList = append(tmpList, actor.NewOrbitActor(&my_world, &cylinder, glutil.Point3D{x, y + 0, z}, float64(radius))) tmpList[i].SetSpeed(int32(speed)) tmpList[i].SetAngle(int32(angle)) tmpList[i].SetAxis(glutil.Point3D{0, 0, 1}) renderQueue.Add(&cylinder) my_world.AddActor(&tmpList[i]) } // Create castle //myCastle := NewCastle() //renderQueue.AddNamed(&myCastle, "castle") // Create ground ground := entity.NewXYPlane(glutil.Point3D{100, 0, 100}, glutil.Point3D{-100, 0, -100}, 0) ground.SetColor(glutil.Color4D{0.4313725490 * .7, 0.24705882 * .7, 0.098039215 * .7, 1}) ground.SetPolygonOffset(1.0) renderQueue.Add(&ground) my_world.AddEntity(&ground.Box) stairs := actor.NewStairs(&my_world, glutil.Point3D{0, 0, 0}) renderQueue.Add(&stairs) scale := float64(200) for i := 0; i < 100; i++ { x := (rand.Float64() * scale) - (scale / 2) z := (rand.Float64() * scale) - (scale / 2) x_scale, z_scale := (rand.Float64()/2)+.75, (rand.Float64()/2)+.75 y_scale := (rand.Float64() * 1) + .75 if (x < -5 || x > 20) && (z < -5 || z > 20) { tree1 := entity.NewTree(glutil.Point3D{x, 0, z}, x_scale, y_scale, z_scale) renderQueue.Add(&tree1) } } // Put camera somewhere //currentCamera.Translate(5, -5, 28) player.Translate(10, 0, -20) player.ImmediateJump() // Skip interpolation player.Rotate(-120, 0) player.ImmediateLook() // Skip interpolation } func main() { // Let go use 2 threads runtime.GOMAXPROCS(2) // Init glut.InitDisplayMode(glut.RGB | glut.DOUBLE | glut.DEPTH) glut.InitWindowSize(750, 750) glut.CreateWindow("Zach Anders - Assignment 5") setup() // Display Callbacks glut.DisplayFunc(DisplayFunc) glut.IdleFunc(IdleFunc) glut.ReshapeFunc(Reshape) // Input Callbacks glut.SpecialFunc(SpecialFunc) glut.KeyboardFunc(KeyDownFunc) glut.MotionFunc(MouseMotion) glut.MouseFunc(MouseDown) glut.MainLoop() }<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from django.contrib.auth import authenticate, login, get_user_model from django.utils import timezone from mailin import Mailin from string import punctuation def authorize(request): email = request.POST.get('Email') password = request.POST.get('Password') if len(email) < 6 or len(password) < 10: return {'ERROR' : 'Too short'} else: user = authenticate(username = email, password = password) if user is not None: login(request,user) return {'VALID' : 'Logged in successfully'} else: return {'ERROR' : 'Username or password incorrect!'} return {'ERROR' : 'An unknown error occurred'} def check_password(password, confirm): if password != confirm: return {'ERROR' : 'The two passwords do not match.'} elif len(password) < 10: return {'ERROR' : 'The password is too short.'} security_combo = [0,0,0] for c in password: if c.isupper(): security_combo[0] = 1 elif c.isalpha(): security_combo[1] = 1<|fim▁hole|>
elif c.isdigit(): security_combo[2] = 1
<|file_name|>issue-10031.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// except according to those terms. // aux-build:issue_10031_aux.rs // pretty-expanded FIXME #23616 extern crate issue_10031_aux; pub fn main() { let _ = issue_10031_aux::Wrap(()); }<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
<|file_name|>Tools.js<|end_file_name|><|fim▁begin|>define([], function() {<|fim▁hole|> 'use strict'; /** * Prints a debugging console message for a shortcut * * @param {Shortcut} shortcut - shortcut object */ var printDebugConsoleMessage = function(shortcut) { console.log('Shortcut "' + shortcut.name + '" triggered with key "' + shortcut.key + '"', shortcut); }; return { printMessage: printDebugConsoleMessage }; });<|fim▁end|>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># This file is part of authapi. # Copyright (C) 2014-2020 Agora Voting SL <[email protected]> # authapi is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License. # authapi is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with authapi. If not, see <http://www.gnu.org/licenses/>. from django.conf.urls import url from .decorators import captcha_required from captcha import views<|fim▁hole|> urlpatterns = [ url(r'^new/', views.new_captcha, name='new_captcha'), ]<|fim▁end|>
<|file_name|>GameObject.cpp<|end_file_name|><|fim▁begin|>#include <iostream> #include "GameObject.h" using namespace Physics; void GameObject::init(string name, string particle, string entity) { m_name = name; if (mp_PhysicsManager->hasParticle(particle)) m_particleName = particle; if (mp_GraphicsManager->hasEntity(entity)) m_entityName = entity; m_position = mp_PhysicsManager->getParticlePosition(m_particleName); } void GameObject::update() { // get data from physics engine m_position = mp_PhysicsManager->getParticlePosition(m_particleName); //std::cout << "GAMEOBJECT:: update(): Attempting to update particle " << m_particleName << " with physics postion: " << newPos.ToString() << std::endl;<|fim▁hole|> //std::cout << "GAMEOBJECT:: update(): " << m_particleName << "'s position after update: " << newPos.ToString() << std::endl; }<|fim▁end|>
// give data to graphics engine mp_GraphicsManager->updateEntityPosition(m_entityName, m_position.GLM());
<|file_name|>XeroCredentials.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/** * User: thomas Date: 18/02/14 */ public interface XeroCredentials { String getXeroConsumerKey(); String getXeroConsumerSecret(); String getPrivateKeyPath(); }<|fim▁end|>
package com.pi.xerosync.dbconnect;
<|file_name|>EventDetailPanel.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2010-2012 Matthias Klass, Johannes Leimer, * Rico Lieback, Sebastian Gabriel, Lothar Gesslein, * Alexander Rampp, Kai Weidner * * This file is part of the Physalix Enrollment System * * Foobar is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Foobar is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Foobar. If not, see <http://www.gnu.org/licenses/>. */ package hsa.awp.usergui; import hsa.awp.event.model.Event; import hsa.awp.event.model.Occurrence; import hsa.awp.user.model.SingleUser; import hsa.awp.user.model.User; import hsa.awp.usergui.controller.IUserGuiController; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.link.ExternalLink; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.list.ListView; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.Model; import org.apache.wicket.spring.injection.annot.SpringBean; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Collections; import java.util.Comparator; import java.util.LinkedList; import java.util.List; /** * Panel showing detailed information about an {@link Event}. * * @author klassm */ public class EventDetailPanel extends Panel { /** * unique serialization id. */ private static final long serialVersionUID = 9180564827437598145L; /** * GuiController which feeds the Gui with Data. */ @SpringBean(name = "usergui.controller") private transient IUserGuiController controller; /** * Constructor. * * @param id wicket:id. * @param event event to show. 
*/ public EventDetailPanel(String id, Event event) { super(id); List<SingleUser> teachers = new LinkedList<SingleUser>(); event = controller.getEventById(event.getId()); for (Long teacherId : event.getTeachers()) { User user = controller.getUserById(teacherId); if (user != null && user instanceof SingleUser) { teachers.add((SingleUser) user); } } Collections.sort(teachers, new Comparator<SingleUser>() { @Override public int compare(SingleUser o1, SingleUser o2) { return o1.getName().compareTo(o2.getName()); } }); StringBuffer teachersList = new StringBuffer(); if (teachers.size() == 0) { teachersList.append("keine"); } else { boolean first = true; for (SingleUser teacher : teachers) { if (first) { first = false; } else { teachersList.append(", "); } teachersList.append(teacher.getName()); } } WebMarkupContainer eventGeneral = new WebMarkupContainer("event.general"); add(eventGeneral); eventGeneral.add(new Label("event.general.caption", "Allgemeines")); eventGeneral.add(new Label("event.general.eventId", new Model<Integer>(event.getEventId()))); eventGeneral.add(new Label("event.general.subjectName", new Model<String>(event.getSubject().getName()))); eventGeneral.add(new Label("event.general.maxParticipants", new Model<Integer>(event.getMaxParticipants()))); Label teacherLabel = new Label("event.general.teachers", new Model<String>(teachersList.toString())); eventGeneral.add(teacherLabel); eventGeneral.add(new Label("event.general.eventDescription", new Model<String>(event.getDetailInformation()))); ExternalLink detailLink = new ExternalLink("event.general.link", event.getSubject().getLink()); eventGeneral.add(detailLink); detailLink.add(new Label("event.general.linkDesc", event.getSubject().getLink())); String description = event.getSubject().getDescription(); if (description == null || ((description = description.trim().replace("\n", "<br>")).equals(""))) { description = "keine"; } Label subjectDescription = new Label("event.general.subjectDescription", new Model<String>(description)); subjectDescription.setEscapeModelStrings(false); eventGeneral.add(subjectDescription); WebMarkupContainer eventTimetable = new WebMarkupContainer("event.timetable"); add(eventTimetable); eventTimetable.add(new Label("event.timetable.caption", "Stundenplan")); List<Occurrence> occurences; if (event.getTimetable() == null) { occurences = new LinkedList<Occurrence>(); } else { occurences = new LinkedList<Occurrence>(event.getTimetable().getOccurrences()); } eventTimetable.add(new ListView<Occurrence>("event.timetable.list", occurences) { /** * unique serialization id. */ private static final long serialVersionUID = -1041971433878928045L; @Override protected void populateItem(ListItem<Occurrence> item) { DateFormat singleFormat = new SimpleDateFormat("dd.MM.yyyy HH:mm"); DateFormat dayFormat = new SimpleDateFormat("EEEE"); DateFormat timeFormat = new SimpleDateFormat("HH:mm"); String s; switch (item.getModelObject().getType()) { case SINGLE: s = "Einzeltermin vom " + singleFormat.format(item.getModelObject().getStartDate().getTime()); s += " bis " + singleFormat.format(item.getModelObject().getEndDate().getTime());<|fim▁hole|> s += " von " + timeFormat.format(item.getModelObject().getStartDate().getTime()) + " bis " + timeFormat.format(item.getModelObject().getEndDate().getTime()); break; default: s = ""; } item.add(new Label("event.timetable.list.occurrence", s)); } }); if (occurences.size() == 0) { eventTimetable.setVisible(false); } } }<|fim▁end|>
break; case PERIODICAL: s = "Wöchentlich am " + dayFormat.format(item.getModelObject().getStartDate().getTime());
<|file_name|>usage-builder.js<|end_file_name|><|fim▁begin|>'use strict'; const { extend } = require('underscore'); <|fim▁hole|> testResourceType, testAccountID, testMeteringPlanID, testRatingPlanID, testPricingPlanID } = require('./fixtures/usageDocumentFieldsConstants'); const _commonBlueprint = { collected_usage_id: testCollectedUsageID, resource_id: testResourceID, organization_id: testOrganizationID, space_id: testSpaceID, consumer_id: testConsumerID, plan_id: testPlanID, resource_type: testResourceType, account_id: testAccountID, metering_plan_id: testMeteringPlanID, rating_plan_id: testRatingPlanID, pricing_plan_id: testPricingPlanID }; const buildUsage = (...builders) => { const usage = {}; for(let builder of builders) builder(usage); return extend(usage, { id: dbclient.kturi(usage.resource_instance_id, usage.processed) }); }; const withEndTimestamp = (timestamp) => (usage) => usage.end = timestamp; const withStartTimestamp = (timestamp) => (usage) => usage.start = timestamp; const withProcessedTimestamp = (timestamp) => (usage) => usage.processed = timestamp; const withBlueprint = (blueprint) => (usage) => extend(usage, blueprint); const withDefaultBlueprint = () => (usage) => extend(usage, _commonBlueprint); const withResourceInstanceId = (resourceInstanceId) => (usage) => usage.resource_instance_id = resourceInstanceId; const withAccumulatedUsage = (accumulatedUsage) => (usage) => usage.accumulated_usage = accumulatedUsage; const buildAccumulatedUsage = (...builders) => { const accumulatedUsage = { windows: [[null], [null], [null], [null, null, null, null, null, null], [null, null]] }; for(let builder of builders) builder(accumulatedUsage); return accumulatedUsage; }; const withMetricName = (metricName) => (accumulatedUsage) => accumulatedUsage.metric = metricName; const withCurrentDayQuantity = (quantity) => (accumulatedUsage) => accumulatedUsage.windows[3][0] = { quantity: quantity }; const withPreviousDayQuantity = (quantity) => (accumulatedUsage) => accumulatedUsage.windows[3][1] = { quantity: quantity }; const withCurrentMonthQuantity = (quantity) => (accumulatedUsage) => accumulatedUsage.windows[4][0] = { quantity: quantity }; module.exports = { buildUsage, withEndTimestamp, withStartTimestamp, withProcessedTimestamp, withBlueprint, withDefaultBlueprint, withResourceInstanceId, withAccumulatedUsage, buildAccumulatedUsage, withMetricName, withCurrentDayQuantity, withCurrentMonthQuantity, withPreviousDayQuantity };<|fim▁end|>
const dbclient = require('abacus-dbclient'); const { testCollectedUsageID, testResourceID, testOrganizationID, testSpaceID, testConsumerID, testPlanID,
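The prompt/completion pair above exercises a small functional-builder fixture: `buildUsage` starts from an empty object and applies each `with*` closure in turn, and the accumulated-usage builders fill slots in a fixed `windows` grid. A minimal sketch of the same composition idea, written here in Python with only the `start`/`end` fields for illustration (helper names are hypothetical):

```python
def build_usage(*builders):
    """Apply each builder closure to a fresh dict, mirroring buildUsage above."""
    usage = {}
    for build in builders:
        build(usage)
    return usage

def with_start_timestamp(ts):
    return lambda usage: usage.update(start=ts)

def with_end_timestamp(ts):
    return lambda usage: usage.update(end=ts)

# Builders are independent closures, so they compose in any order.
doc = build_usage(with_start_timestamp(1420243200000),
                  with_end_timestamp(1420245000000))
assert doc == {"start": 1420243200000, "end": 1420245000000}
```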
<|file_name|>invite_list.js<|end_file_name|><|fim▁begin|>/** A data model representing a list of Invites @class InviteList @extends Discourse.Model @namespace Discourse @module Discourse **/ Discourse.InviteList = Discourse.Model.extend({ empty: (function() { return this.blank('pending') && this.blank('redeemed'); }).property('pending.@each', 'redeemed.@each') }); Discourse.InviteList.reopenClass({ findInvitedBy: function(user) { var promise; promise = new RSVP.Promise(); $.ajax({ url: "/users/" + (user.get('username_lower')) + "/invited.json", success: function(result) { var invitedList; invitedList = result.invited_list; if (invitedList.pending) { invitedList.pending = invitedList.pending.map(function(i) { return Discourse.Invite.create(i); }); } if (invitedList.redeemed) { invitedList.redeemed = invitedList.redeemed.map(function(i) { return Discourse.Invite.create(i); }); } invitedList.user = user; return promise.resolve(Discourse.InviteList.create(invitedList)); }<|fim▁hole|> }); return promise; } });<|fim▁end|>
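`findInvitedBy` above wraps a callback-style `$.ajax` call in a promise that is resolved only after the raw records are mapped onto `Invite` models. The same wrap-then-resolve pattern, sketched in Python against a hypothetical callback API (`fetch_json` is a stand-in, not part of the original):

```python
from concurrent.futures import Future

def find_invited_by(username, fetch_json):
    # fetch_json(url, on_success) plays the role of the jQuery $.ajax call.
    future = Future()

    def on_success(result):
        invited = result.get("invited_list", {})
        # Map raw pending/redeemed records before resolving, as the JS does.
        invited["pending"] = [dict(r) for r in invited.get("pending", [])]
        invited["redeemed"] = [dict(r) for r in invited.get("redeemed", [])]
        future.set_result(invited)

    fetch_json(f"/users/{username.lower()}/invited.json", on_success)
    return future
```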
<|file_name|>cifarnet_preprocessing.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Provides utilities to preprocess images in CIFAR-10. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf _PADDING = 4 slim = tf.contrib.slim def preprocess_for_train(image, output_height, output_width, padding=_PADDING): """Preprocesses the given image for training. Note that the actual resizing scale is sampled from [`resize_size_min`, `resize_size_max`]. Args: image: A `Tensor` representing an image of arbitrary size. output_height: The height of the image after preprocessing. output_width: The width of the image after preprocessing. padding: The amount of padding before and after each dimension of the image. Returns: A preprocessed image. """ tf.summary.image('image', tf.expand_dims(image, 0)) # Transform the image to floats. image = tf.to_float(image) if padding > 0: image = tf.pad(image, [[padding, padding], [padding, padding], [0, 0]]) # image = tf.image.resize_images(image,(output_height,output_width)) # Randomly crop a [height, width] section of the image. distorted_image = tf.random_crop(image, [32, 32, 3]) # Randomly flip the image horizontally. distorted_image = tf.image.random_flip_left_right(distorted_image) tf.summary.image('distorted_image', tf.expand_dims(distorted_image, 0)) # Because these operations are not commutative, consider randomizing # the order of their operation. distorted_image = tf.image.random_brightness(distorted_image, max_delta=63) distorted_image = tf.image.random_contrast(distorted_image, lower=0.2, upper=1.8) # Subtract off the mean and divide by the variance of the pixels. return tf.image.per_image_standardization(distorted_image) def preprocess_for_eval(image, output_height, output_width): """Preprocesses the given image for evaluation. Args: image: A `Tensor` representing an image of arbitrary size. output_height: The height of the image after preprocessing. output_width: The width of the image after preprocessing. Returns: A preprocessed image. """ tf.summary.image('image', tf.expand_dims(image, 0)) # Transform the image to floats. image = tf.to_float(image) # image = tf.image.resize_images(image, (output_height, output_width)) # Resize and crop if needed.<|fim▁hole|>
resized_image = tf.image.resize_image_with_crop_or_pad(image, output_width, output_height) tf.summary.image('resized_image', tf.expand_dims(resized_image, 0)) # Subtract off the mean and divide by the variance of the pixels. return tf.image.per_image_standardization(resized_image) def preprocess_image(image, output_height, output_width, is_training=False): """Preprocesses the given image. Args: image: A `Tensor` representing an image of arbitrary size. output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing. is_training: `True` if we're preprocessing the image for training and `False` otherwise. Returns: A preprocessed image. """ if is_training: return preprocess_for_train(image, output_height, output_width) else: return preprocess_for_eval(image, output_height, output_width)<|fim▁end|>
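In the training path above, the image is padded by `_PADDING` pixels on each side and a random 32×32 window is cropped back out, so each epoch sees slightly shifted views of the same example. A framework-free sketch of that pad-then-crop step (NumPy stand-in for `tf.pad` plus `tf.random_crop`; the helper name is hypothetical):

```python
import numpy as np

def pad_and_random_crop(image, padding=4, crop=32, rng=None):
    """Pad H and W by `padding` on each side, then cut out a random crop x crop window."""
    rng = rng or np.random.default_rng()
    padded = np.pad(image, ((padding, padding), (padding, padding), (0, 0)))
    y = rng.integers(0, padded.shape[0] - crop + 1)
    x = rng.integers(0, padded.shape[1] - crop + 1)
    return padded[y:y + crop, x:x + crop, :]

# A 32x32 input padded to 40x40 admits 9 x 9 = 81 distinct crop offsets.
out = pad_and_random_crop(np.zeros((32, 32, 3), dtype=np.float32))
assert out.shape == (32, 32, 3)
```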
<|file_name|>recreate_webassets.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError from django.db.models import Q from django.template.defaultfilters import filesizeformat from hav.apps.media.models import Media from hav.apps.hav_collections.models import Collection from hav.apps.archive.models import ArchiveFile from ...tasks import create from ...models import WebAsset class Command(BaseCommand): help = "Forces the recreation of webassets." def add_arguments(self, parser): # Named (optional) arguments parser.add_argument( "--dry-run", action="store_true", default=False, help="Only display which files would be affected.", ) parser.add_argument( "--media", type=int, default=[], action="append", help="Limit to media with given pk", ) parser.add_argument( "--collection", type=str, default=[], action="append", help="Limit to media in specific collection", ) parser.add_argument( "--extension", type=str, action="append", default=[], help="Filter by file extension (archived file)", ) def get_queryset(self, media_ids, collection_slugs, extensions): # start by filtering media media = Media.objects.all() if len(media_ids): media = Media.objects.filter(pk__in=media_ids) if len(collection_slugs): collections = Collection.objects.filter(slug__in=collection_slugs) media = media.filter(collection__in=collections) # now move down to the archived files archived_files = ( ArchiveFile.objects.filter(media__in=media) .prefetch_related("media_set", "media_set__collection") .order_by("media__set__id") ) if len(extensions): q = Q() for ext in extensions: q |= Q(original_filename__iendswith=ext) | Q(file__endswith=ext) archived_files = archived_files.filter(q) return archived_files def process_file(self, archived_file): archived_file.webasset_set.all().delete() create.delay(archived_file.pk) def handle(self, *args, **options): # gather all options to limit the resulting queryset media_ids = options.get("media", []) collection_slugs = options.get("collection", []) extensions = options.get("extension", []) archived_files = self.get_queryset(media_ids, collection_slugs, extensions) af_count = archived_files.count() self.stdout.write(f"Operating {af_count} files.") dry_run = options.get("dry_run") <|fim▁hole|> for af in archived_files: self.stdout.write( f"Processing file {af.file} (original name: {af.original_filename}, media: {af.media_set.get().id}, size: {filesizeformat(af.size)}, collection: {af.media_set.get().collection.slug})" ) if not dry_run: self.process_file(af) self.stdout.write(f"Processed {af_count} files.")<|fim▁end|>
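The `--extension` handling in the management command above folds the given suffixes into a single OR-chain of `Q` objects, so one query matches any extension against either the original filename or the stored file path. That composition step in isolation (same fields as the source; the helper name is illustrative):

```python
from django.db.models import Q

def extension_filter(extensions):
    # An empty Q() adds no constraint; OR-ing appends one clause per suffix.
    q = Q()
    for ext in extensions:
        q |= Q(original_filename__iendswith=ext) | Q(file__endswith=ext)
    return q

# e.g. archived_files.filter(extension_filter([".tif", ".wav"]))
```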
<|file_name|>str.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Unicode string manipulation (`str` type) # Basic Usage Rust's string type is one of the core primitive types of the language. While represented by the name `str`, the name `str` is not actually a valid type in Rust. Each string must also be decorated with a pointer. `String` is used for an owned string, so there is only one commonly-used `str` type in Rust: `&str`. `&str` is the borrowed string type. This type of string can only be created from other strings, unless it is a static string (see below). As the word "borrowed" implies, this type of string is owned elsewhere, and this string cannot be moved out of. As an example, here's some code that uses a string. ```rust fn main() { let borrowed_string = "This string is borrowed with the 'static lifetime"; } ``` From the example above, you can see that Rust's string literals have the `'static` lifetime. This is akin to C's concept of a static string. String literals are allocated statically in the rodata of the executable/library. The string then has the type `&'static str` meaning that the string is valid for the `'static` lifetime, otherwise known as the lifetime of the entire program. As can be inferred from the type, these static strings are not mutable. # Mutability Many languages have immutable strings by default, and Rust has a particular flavor on this idea. As with the rest of Rust types, strings are immutable by default. If a string is declared as `mut`, however, it may be mutated. This works the same way as the rest of Rust's type system in the sense that if there's a mutable reference to a string, there may only be one mutable reference to that string. With these guarantees, strings can easily transition between being mutable/immutable with the same benefits of having mutable strings in other languages. # Representation Rust's string type, `str`, is a sequence of unicode codepoints encoded as a stream of UTF-8 bytes. All safely-created strings are guaranteed to be validly encoded UTF-8 sequences. Additionally, strings are not null-terminated and can contain null codepoints. The actual representation of strings has a direct mapping to vectors: `&str` is the same as `&[u8]`. */ #![doc(primitive = "str")] use core::prelude::*; use core::char; use core::default::Default; use core::fmt; use core::cmp; use core::iter::AdditiveIterator; use core::mem; use Collection; use hash; use string::String; use vec::Vec; pub use core::str::{from_utf8, CharEq, Chars, CharOffsets}; pub use core::str::{Bytes, CharSplits}; pub use core::str::{CharSplitsN, Words, AnyLines, MatchIndices, StrSplits}; pub use core::str::{eq_slice, is_utf8, is_utf16, Utf16Items}; pub use core::str::{Utf16Item, ScalarValue, LoneSurrogate, utf16_items}; pub use core::str::{truncate_utf16_at_nul, utf8_char_width, CharRange}; pub use core::str::{Str, StrSlice}; /* Section: Creating a string */ /// Consumes a vector of bytes to create a new utf-8 string. /// /// Returns `Err` with the original vector if the vector contains invalid /// UTF-8.
pub fn from_utf8_owned(vv: Vec<u8>) -> Result<String, Vec<u8>> { String::from_utf8(vv) } /// Convert a byte to a UTF-8 string /// /// # Failure /// /// Fails if invalid UTF-8 /// /// # Example /// /// ```rust /// use std::str; /// let string = str::from_byte(66u8); /// assert_eq!(string.as_slice(), "B"); /// ``` pub fn from_byte(b: u8) -> String { assert!(b < 128u8); String::from_char(1, b as char) } /// Convert a char to a string pub fn from_char(ch: char) -> String { let mut buf = String::new(); buf.push_char(ch); buf } /// Convert a vector of chars to a string pub fn from_chars(chs: &[char]) -> String { chs.iter().map(|c| *c).collect() } /// Methods for vectors of strings pub trait StrVector { /// Concatenate a vector of strings. fn concat(&self) -> String; /// Concatenate a vector of strings, placing a given separator between each. fn connect(&self, sep: &str) -> String; } impl<'a, S: Str> StrVector for &'a [S] { fn concat(&self) -> String { if self.is_empty() { return String::new(); } // `len` calculation may overflow, but push_str will check boundaries let len = self.iter().map(|s| s.as_slice().len()).sum(); let mut result = String::with_capacity(len); for s in self.iter() { result.push_str(s.as_slice()) } result } fn connect(&self, sep: &str) -> String { if self.is_empty() { return String::new(); } // concat is faster if sep.is_empty() { return self.concat(); } // this is wrong without the guarantee that `self` is non-empty // `len` calculation may overflow, but push_str will check boundaries let len = sep.len() * (self.len() - 1) + self.iter().map(|s| s.as_slice().len()).sum(); let mut result = String::with_capacity(len); let mut first = true; for s in self.iter() { if first { first = false; } else { result.push_str(sep); } result.push_str(s.as_slice()); } result } } impl<'a, S: Str> StrVector for Vec<S> { #[inline] fn concat(&self) -> String { self.as_slice().concat() } #[inline] fn connect(&self, sep: &str) -> String { self.as_slice().connect(sep) } } /* Section: Iterators */ // Helper functions used for Unicode normalization fn canonical_sort(comb: &mut [(char, u8)]) { let len = comb.len(); for i in range(0, len) { let mut swapped = false; for j in range(1, len-i) { let class_a = *comb[j-1].ref1(); let class_b = *comb[j].ref1(); if class_a != 0 && class_b != 0 && class_a > class_b { comb.swap(j-1, j); swapped = true; } } if !swapped { break; } } } #[deriving(Clone)] enum DecompositionType { Canonical, Compatible } /// External iterator for a string's decomposition's characters. /// Use with the `std::iter` module.
#[deriving(Clone)] pub struct Decompositions<'a> { kind: DecompositionType, iter: Chars<'a>, buffer: Vec<(char, u8)>, sorted: bool } impl<'a> Iterator<char> for Decompositions<'a> { #[inline] fn next(&mut self) -> Option<char> { use unicode::normalization::canonical_combining_class; match self.buffer.as_slice().head() { Some(&(c, 0)) => { self.sorted = false; self.buffer.shift(); return Some(c); } Some(&(c, _)) if self.sorted => { self.buffer.shift(); return Some(c); } _ => self.sorted = false } let decomposer = match self.kind { Canonical => char::decompose_canonical, Compatible => char::decompose_compatible }; if !self.sorted { for ch in self.iter { let buffer = &mut self.buffer; let sorted = &mut self.sorted; decomposer(ch, |d| { let class = canonical_combining_class(d); if class == 0 && !*sorted { canonical_sort(buffer.as_mut_slice()); *sorted = true; } buffer.push((d, class)); }); if *sorted { break } } } if !self.sorted { canonical_sort(self.buffer.as_mut_slice()); self.sorted = true; } match self.buffer.shift() { Some((c, 0)) => { self.sorted = false; Some(c) } Some((c, _)) => Some(c), None => None } } fn size_hint(&self) -> (uint, Option<uint>) { let (lower, _) = self.iter.size_hint(); (lower, None) } } /// Replace all occurrences of one string with another /// /// # Arguments /// /// * s - The string containing substrings to replace /// * from - The string to replace /// * to - The replacement string /// /// # Return value /// /// The original string with all occurrences of `from` replaced with `to` pub fn replace(s: &str, from: &str, to: &str) -> String { let mut result = String::new(); let mut last_end = 0; for (start, end) in s.match_indices(from) { result.push_str(unsafe{raw::slice_bytes(s, last_end, start)}); result.push_str(to); last_end = end; } result.push_str(unsafe{raw::slice_bytes(s, last_end, s.len())}); result } /* Section: Misc */ /// Decode a UTF-16 encoded vector `v` into a string, returning `None` /// if `v` contains any invalid data. /// /// # Example /// /// ```rust /// use std::str; /// /// // 𝄞music /// let mut v = [0xD834, 0xDD1E, 0x006d, 0x0075, /// 0x0073, 0x0069, 0x0063]; /// assert_eq!(str::from_utf16(v), Some("𝄞music".to_string())); /// /// // 𝄞mu<invalid>ic /// v[4] = 0xD800; /// assert_eq!(str::from_utf16(v), None); /// ``` pub fn from_utf16(v: &[u16]) -> Option<String> { let mut s = String::with_capacity(v.len() / 2); for c in utf16_items(v) { match c { ScalarValue(c) => s.push_char(c), LoneSurrogate(_) => return None } } Some(s) } /// Decode a UTF-16 encoded vector `v` into a string, replacing /// invalid data with the replacement character (U+FFFD). /// /// # Example /// ```rust /// use std::str; /// /// // 𝄞mus<invalid>ic<invalid> /// let v = [0xD834, 0xDD1E, 0x006d, 0x0075, /// 0x0073, 0xDD1E, 0x0069, 0x0063, /// 0xD834]; /// /// assert_eq!(str::from_utf16_lossy(v), /// "𝄞mus\uFFFDic\uFFFD".to_string()); /// ``` pub fn from_utf16_lossy(v: &[u16]) -> String { utf16_items(v).map(|c| c.to_char_lossy()).collect() } // Return the initial codepoint accumulator for the first byte. // The first byte is special, only want bottom 5 bits for width 2, 4 bits // for width 3, and 3 bits for width 4 macro_rules! utf8_first_byte( ($byte:expr, $width:expr) => (($byte & (0x7F >> $width)) as u32) ) // return the value of $ch updated with continuation byte $byte macro_rules! utf8_acc_cont_byte( ($ch:expr, $byte:expr) => (($ch << 6) | ($byte & 63u8) as u32) ) static TAG_CONT_U8: u8 = 128u8; /// Converts a vector of bytes to a new utf-8 string. 
/// Any invalid utf-8 sequences are replaced with U+FFFD REPLACEMENT CHARACTER. /// /// # Example /// /// ```rust /// let input = b"Hello \xF0\x90\x80World"; /// let output = std::str::from_utf8_lossy(input); /// assert_eq!(output.as_slice(), "Hello \uFFFDWorld"); /// ``` pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> MaybeOwned<'a> { if is_utf8(v) { return Slice(unsafe { mem::transmute(v) }) } static REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8 let mut i = 0; let total = v.len(); fn unsafe_get(xs: &[u8], i: uint) -> u8 { unsafe { *xs.unsafe_ref(i) } } fn safe_get(xs: &[u8], i: uint, total: uint) -> u8 { if i >= total { 0 } else { unsafe_get(xs, i) } } let mut res = String::with_capacity(total); if i > 0 { unsafe { res.push_bytes(v.slice_to(i)) }; } // subseqidx is the index of the first byte of the subsequence we're looking at. // It's used to copy a bunch of contiguous good codepoints at once instead of copying // them one by one. let mut subseqidx = 0; while i < total { let i_ = i; let byte = unsafe_get(v, i); i += 1; macro_rules! error(() => ({ unsafe { if subseqidx != i_ { res.push_bytes(v.slice(subseqidx, i_)); } subseqidx = i; res.push_bytes(REPLACEMENT); } })) if byte < 128u8 { // subseqidx handles this } else { let w = utf8_char_width(byte); match w { 2 => { if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } 3 => { match (byte, safe_get(v, i, total)) { (0xE0 , 0xA0 .. 0xBF) => (), (0xE1 .. 0xEC, 0x80 .. 0xBF) => (), (0xED , 0x80 .. 0x9F) => (), (0xEE .. 0xEF, 0x80 .. 0xBF) => (), _ => { error!(); continue; } } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } 4 => { match (byte, safe_get(v, i, total)) { (0xF0 , 0x90 .. 0xBF) => (), (0xF1 .. 0xF3, 0x80 .. 0xBF) => (), (0xF4 , 0x80 .. 0x8F) => (), _ => { error!(); continue; } } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 { error!(); continue; } i += 1; } _ => { error!(); continue; } } } } if subseqidx < total { unsafe { res.push_bytes(v.slice(subseqidx, total)) }; } Owned(res.into_string()) } /* Section: MaybeOwned */ /// A `MaybeOwned` is a string that can hold either a `String` or a `&str`. /// This can be useful as an optimization when an allocation is sometimes /// needed but not always. 
pub enum MaybeOwned<'a> { /// A borrowed string Slice(&'a str), /// An owned string Owned(String) } /// `SendStr` is a specialization of `MaybeOwned` to be sendable pub type SendStr = MaybeOwned<'static>; impl<'a> MaybeOwned<'a> { /// Returns `true` if this `MaybeOwned` wraps an owned string #[inline] pub fn is_owned(&self) -> bool { match *self { Slice(_) => false, Owned(_) => true } } /// Returns `true` if this `MaybeOwned` wraps a borrowed string #[inline] pub fn is_slice(&self) -> bool { match *self { Slice(_) => true, Owned(_) => false } } } /// Trait for moving into a `MaybeOwned` pub trait IntoMaybeOwned<'a> { /// Moves self into a `MaybeOwned` fn into_maybe_owned(self) -> MaybeOwned<'a>; } impl<'a> IntoMaybeOwned<'a> for String { #[inline] fn into_maybe_owned(self) -> MaybeOwned<'a> { Owned(self) } } impl<'a> IntoMaybeOwned<'a> for &'a str { #[inline] fn into_maybe_owned(self) -> MaybeOwned<'a> { Slice(self) } } impl<'a> IntoMaybeOwned<'a> for MaybeOwned<'a> { #[inline] fn into_maybe_owned(self) -> MaybeOwned<'a> { self } } impl<'a> PartialEq for MaybeOwned<'a> { #[inline] fn eq(&self, other: &MaybeOwned) -> bool { self.as_slice() == other.as_slice() } } impl<'a> Eq for MaybeOwned<'a> {} impl<'a> PartialOrd for MaybeOwned<'a> { #[inline] fn partial_cmp(&self, other: &MaybeOwned) -> Option<Ordering> { Some(self.cmp(other)) } } impl<'a> Ord for MaybeOwned<'a> { #[inline] fn cmp(&self, other: &MaybeOwned) -> Ordering { self.as_slice().cmp(&other.as_slice()) } } impl<'a, S: Str> Equiv<S> for MaybeOwned<'a> { #[inline] fn equiv(&self, other: &S) -> bool { self.as_slice() == other.as_slice() } } impl<'a> Str for MaybeOwned<'a> { #[inline] fn as_slice<'b>(&'b self) -> &'b str { match *self { Slice(s) => s, Owned(ref s) => s.as_slice() } } } impl<'a> StrAllocating for MaybeOwned<'a> { #[inline] fn into_string(self) -> String { match self { Slice(s) => s.to_string(), Owned(s) => s } } } impl<'a> Collection for MaybeOwned<'a> { #[inline] fn len(&self) -> uint { self.as_slice().len() } } impl<'a> Clone for MaybeOwned<'a> { #[inline] fn clone(&self) -> MaybeOwned<'a> { match *self { Slice(s) => Slice(s), Owned(ref s) => Owned(s.to_string()) } } } impl<'a> Default for MaybeOwned<'a> { #[inline] fn default() -> MaybeOwned<'a> { Slice("") } } impl<'a, H: hash::Writer> hash::Hash<H> for MaybeOwned<'a> { #[inline] fn hash(&self, hasher: &mut H) { self.as_slice().hash(hasher) } } impl<'a> fmt::Show for MaybeOwned<'a> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Slice(ref s) => s.fmt(f), Owned(ref s) => s.fmt(f) } } } /// Unsafe operations pub mod raw { use core::prelude::*; use core::mem; use core::raw::Slice; use string::String; use vec::Vec; pub use core::str::raw::{from_utf8, c_str_to_static_slice, slice_bytes}; pub use core::str::raw::{slice_unchecked}; /// Create a Rust string from a *u8 buffer of the given length pub unsafe fn from_buf_len(buf: *const u8, len: uint) -> String { let mut result = String::new(); result.push_bytes(mem::transmute(Slice { data: buf, len: len, })); result } /// Create a Rust string from a null-terminated C string pub unsafe fn from_c_str(c_string: *const i8) -> String { let mut buf = String::new(); let mut len = 0; while *c_string.offset(len) != 0 { len += 1; } buf.push_bytes(mem::transmute(Slice { data: c_string, len: len as uint, })); buf } /// Converts an owned vector of bytes to a new owned string. 
This assumes /// that the utf-8-ness of the vector has already been validated #[inline] pub unsafe fn from_utf8_owned(v: Vec<u8>) -> String { mem::transmute(v) } /// Converts a byte to a string. pub unsafe fn from_byte(u: u8) -> String { from_utf8_owned(vec![u]) } /// Sets the length of a string /// /// This will explicitly set the size of the string, without actually /// modifying its buffers, so it is up to the caller to ensure that /// the string is actually the specified size. #[test] fn test_from_buf_len() { use slice::ImmutableVector; use str::StrAllocating; unsafe { let a = vec![65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]; let b = a.as_ptr(); let c = from_buf_len(b, 3u); assert_eq!(c, "AAA".to_string()); } } } /* Section: Trait implementations */ /// Any string that can be represented as a slice pub trait StrAllocating: Str { /// Convert `self` into a `String`, not making a copy if possible. fn into_string(self) -> String; /// Convert `self` into a `String`. #[inline] fn to_string(&self) -> String { String::from_str(self.as_slice()) } #[allow(missing_doc)] #[deprecated = "replaced by .into_string()"] fn into_owned(self) -> String { self.into_string() } /// Escape each char in `s` with `char::escape_default`. fn escape_default(&self) -> String { let me = self.as_slice(); let mut out = String::with_capacity(me.len()); for c in me.chars() { c.escape_default(|c| out.push_char(c)); } out } /// Escape each char in `s` with `char::escape_unicode`. fn escape_unicode(&self) -> String { let me = self.as_slice(); let mut out = String::with_capacity(me.len()); for c in me.chars() { c.escape_unicode(|c| out.push_char(c)); } out } /// Replace all occurrences of one string with another. /// /// # Arguments /// /// * `from` - The string to replace /// * `to` - The replacement string /// /// # Return value /// /// The original string with all occurrences of `from` replaced with `to`. /// /// # Example /// /// ```rust /// let s = "Do you know the muffin man, /// The muffin man, the muffin man, ...".to_string(); /// /// assert_eq!(s.replace("muffin man", "little lamb"), /// "Do you know the little lamb, /// The little lamb, the little lamb, ...".to_string()); /// /// // not found, so no change. /// assert_eq!(s.replace("cookie monster", "little lamb"), s); /// ``` fn replace(&self, from: &str, to: &str) -> String { let me = self.as_slice(); let mut result = String::new(); let mut last_end = 0; for (start, end) in me.match_indices(from) { result.push_str(unsafe{raw::slice_bytes(me, last_end, start)}); result.push_str(to); last_end = end; } result.push_str(unsafe{raw::slice_bytes(me, last_end, me.len())}); result } #[allow(missing_doc)] #[deprecated = "obsolete, use `to_string`"] #[inline] fn to_owned(&self) -> String { unsafe { mem::transmute(Vec::from_slice(self.as_slice().as_bytes())) } } /// Converts to a vector of `u16` encoded as UTF-16. #[deprecated = "use `utf16_units` instead"] fn to_utf16(&self) -> Vec<u16> { self.as_slice().utf16_units().collect::<Vec<u16>>() } /// Given a string, make a new string with repeated copies of it. fn repeat(&self, nn: uint) -> String { let me = self.as_slice(); let mut ret = String::with_capacity(nn * me.len()); for _ in range(0, nn) { ret.push_str(me); } ret } /// Levenshtein Distance between two strings. 
fn lev_distance(&self, t: &str) -> uint { let me = self.as_slice(); let slen = me.len(); let tlen = t.len(); if slen == 0 { return tlen; } if tlen == 0 { return slen; } let mut dcol = Vec::from_fn(tlen + 1, |x| x); for (i, sc) in me.chars().enumerate() { let mut current = i; *dcol.get_mut(0) = current + 1; for (j, tc) in t.chars().enumerate() { let next = *dcol.get(j + 1); if sc == tc { *dcol.get_mut(j + 1) = current; } else { *dcol.get_mut(j + 1) = cmp::min(current, next); *dcol.get_mut(j + 1) = cmp::min(*dcol.get(j + 1), *dcol.get(j)) + 1; } current = next; } } return *dcol.get(tlen); } /// An Iterator over the string in Unicode Normalization Form D /// (canonical decomposition). #[inline] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { iter: self.as_slice().chars(), buffer: Vec::new(), sorted: false, kind: Canonical } } /// An Iterator over the string in Unicode Normalization Form KD /// (compatibility decomposition). #[inline] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { iter: self.as_slice().chars(), buffer: Vec::new(), sorted: false, kind: Compatible } } } impl<'a> StrAllocating for &'a str { #[inline] fn into_string(self) -> String { self.to_string() } } /// Methods for owned strings pub trait OwnedStr { /// Consumes the string, returning the underlying byte buffer. /// /// The buffer does not have a null terminator. fn into_bytes(self) -> Vec<u8>; /// Pushes the given string onto this string, returning the concatenation of the two strings. fn append(self, rhs: &str) -> String; } impl OwnedStr for String { #[inline] fn into_bytes(self) -> Vec<u8> { unsafe { mem::transmute(self) } } #[inline] fn append(mut self, rhs: &str) -> String { self.push_str(rhs); self } } #[cfg(test)] mod tests { use std::prelude::*; use std::iter::AdditiveIterator; use std::default::Default; use str::*; use string::String; use vec::Vec; #[test] fn test_eq_slice() { assert!((eq_slice("foobar".slice(0, 3), "foo"))); assert!((eq_slice("barfoo".slice(3, 6), "foo"))); assert!((!eq_slice("foo1", "foo2"))); } #[test] fn test_le() { assert!("" <= ""); assert!("" <= "foo"); assert!("foo" <= "foo"); assert!("foo" != "bar"); } #[test] fn test_len() { assert_eq!("".len(), 0u); assert_eq!("hello world".len(), 11u); assert_eq!("\x63".len(), 1u); assert_eq!("\xa2".len(), 2u); assert_eq!("\u03c0".len(), 2u); assert_eq!("\u2620".len(), 3u); assert_eq!("\U0001d11e".len(), 4u); assert_eq!("".char_len(), 0u); assert_eq!("hello world".char_len(), 11u); assert_eq!("\x63".char_len(), 1u); assert_eq!("\xa2".char_len(), 1u); assert_eq!("\u03c0".char_len(), 1u); assert_eq!("\u2620".char_len(), 1u); assert_eq!("\U0001d11e".char_len(), 1u); assert_eq!("ประเทศไทย中华Việt Nam".char_len(), 19u); } #[test] fn test_find() { assert_eq!("hello".find('l'), Some(2u)); assert_eq!("hello".find(|c:char| c == 'o'), Some(4u)); assert!("hello".find('x').is_none()); assert!("hello".find(|c:char| c == 'x').is_none()); assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30u)); assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30u)); } #[test] fn test_rfind() { assert_eq!("hello".rfind('l'), Some(3u)); assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4u)); assert!("hello".rfind('x').is_none()); assert!("hello".rfind(|c:char| c == 'x').is_none()); assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30u)); assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30u)); } #[test] fn test_collect() { let empty = "".to_string(); let s: String = empty.as_slice().chars().collect(); assert_eq!(empty, 
s); let data = "ประเทศไทย中".to_string(); let s: String = data.as_slice().chars().collect(); assert_eq!(data, s); } #[test] fn test_into_bytes() { let data = "asdf".to_string(); let buf = data.into_bytes(); assert_eq!(b"asdf", buf.as_slice()); } #[test] fn test_find_str() { // byte positions assert_eq!("".find_str(""), Some(0u)); assert!("banana".find_str("apple pie").is_none()); let data = "abcabc"; assert_eq!(data.slice(0u, 6u).find_str("ab"), Some(0u)); assert_eq!(data.slice(2u, 6u).find_str("ab"), Some(3u - 2u)); assert!(data.slice(2u, 4u).find_str("ab").is_none()); let string = "ประเทศไทย中华Việt Nam"; let mut data = string.to_string(); data.push_str(string); assert!(data.as_slice().find_str("ไท华").is_none()); assert_eq!(data.as_slice().slice(0u, 43u).find_str(""), Some(0u)); assert_eq!(data.as_slice().slice(6u, 43u).find_str(""), Some(6u - 6u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str("ประ"), Some( 0u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str("ทศไ"), Some(12u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str("ย中"), Some(24u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str("iệt"), Some(34u)); assert_eq!(data.as_slice().slice(0u, 43u).find_str("Nam"), Some(40u)); assert_eq!(data.as_slice().slice(43u, 86u).find_str("ประ"), Some(43u - 43u)); assert_eq!(data.as_slice().slice(43u, 86u).find_str("ทศไ"), Some(55u - 43u)); assert_eq!(data.as_slice().slice(43u, 86u).find_str("ย中"), Some(67u - 43u)); assert_eq!(data.as_slice().slice(43u, 86u).find_str("iệt"), Some(77u - 43u)); assert_eq!(data.as_slice().slice(43u, 86u).find_str("Nam"), Some(83u - 43u)); } #[test] fn test_slice_chars() { fn t(a: &str, b: &str, start: uint) { assert_eq!(a.slice_chars(start, start + b.char_len()), b); } t("", "", 0); t("hello", "llo", 2); t("hello", "el", 1); t("αβλ", "β", 1); t("αβλ", "", 3); assert_eq!("ะเทศไท", "ประเทศไทย中华Việt Nam".slice_chars(2, 8)); } #[test] fn test_concat() { fn t(v: &[String], s: &str) { assert_eq!(v.concat().as_slice(), s); } t(["you".to_string(), "know".to_string(), "I'm".to_string(), "no".to_string(), "good".to_string()], "youknowI'mnogood"); let v: &[String] = []; t(v, ""); t(["hi".to_string()], "hi"); } #[test] fn test_connect() { fn t(v: &[String], sep: &str, s: &str) { assert_eq!(v.connect(sep).as_slice(), s); } t(["you".to_string(), "know".to_string(), "I'm".to_string(), "no".to_string(), "good".to_string()], " ", "you know I'm no good"); let v: &[String] = []; t(v, " ", ""); t(["hi".to_string()], " ", "hi"); } #[test] fn test_concat_slices() { fn t(v: &[&str], s: &str) { assert_eq!(v.concat().as_slice(), s); } t(["you", "know", "I'm", "no", "good"], "youknowI'mnogood"); let v: &[&str] = []; t(v, ""); t(["hi"], "hi"); } #[test] fn test_connect_slices() { fn t(v: &[&str], sep: &str, s: &str) { assert_eq!(v.connect(sep).as_slice(), s); } t(["you", "know", "I'm", "no", "good"], " ", "you know I'm no good"); t([], " ", ""); t(["hi"], " ", "hi"); } #[test] fn test_repeat() { assert_eq!("x".repeat(4), "xxxx".to_string()); assert_eq!("hi".repeat(4), "hihihihi".to_string()); assert_eq!("ไท华".repeat(3), "ไท华ไท华ไท华".to_string()); assert_eq!("".repeat(4), "".to_string()); assert_eq!("hi".repeat(0), "".to_string()); } #[test] fn test_unsafe_slice() { assert_eq!("ab", unsafe {raw::slice_bytes("abc", 0, 2)}); assert_eq!("bc", unsafe {raw::slice_bytes("abc", 1, 3)}); assert_eq!("", unsafe {raw::slice_bytes("abc", 1, 1)}); fn a_million_letter_a() -> String { let mut i = 0u; let mut rs = String::new(); while i < 100000 { rs.push_str("aaaaaaaaaa"); i += 1; } rs } fn 
half_a_million_letter_a() -> String { let mut i = 0u; let mut rs = String::new(); while i < 100000 { rs.push_str("aaaaa"); i += 1; } rs } let letters = a_million_letter_a(); assert!(half_a_million_letter_a() == unsafe {raw::slice_bytes(letters.as_slice(), 0u, 500000)}.to_string()); } #[test] fn test_starts_with() { assert!(("".starts_with(""))); assert!(("abc".starts_with(""))); assert!(("abc".starts_with("a"))); assert!((!"a".starts_with("abc"))); assert!((!"".starts_with("abc"))); assert!((!"ödd".starts_with("-"))); assert!(("ödd".starts_with("öd"))); } #[test] fn test_ends_with() { assert!(("".ends_with(""))); assert!(("abc".ends_with(""))); assert!(("abc".ends_with("c"))); assert!((!"a".ends_with("abc"))); assert!((!"".ends_with("abc"))); assert!((!"ddö".ends_with("-"))); assert!(("ddö".ends_with("dö"))); } #[test] fn test_is_empty() { assert!("".is_empty()); assert!(!"a".is_empty()); } #[test] fn test_replace() { let a = "a"; assert_eq!("".replace(a, "b"), "".to_string()); assert_eq!("a".replace(a, "b"), "b".to_string()); assert_eq!("ab".replace(a, "b"), "bb".to_string()); let test = "test"; assert!(" test test ".replace(test, "toast") == " toast toast ".to_string()); assert_eq!(" test test ".replace(test, ""), " ".to_string()); } #[test] fn test_replace_2a() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let a = "ประเ"; let a2 = "دولة الكويتทศไทย中华"; assert_eq!(data.replace(a, repl).as_slice(), a2); } #[test] fn test_replace_2b() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let b = "ะเ"; let b2 = "ปรدولة الكويتทศไทย中华"; assert_eq!(data.replace(b, repl).as_slice(), b2); } #[test] fn test_replace_2c() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let c = "中华"; let c2 = "ประเทศไทยدولة الكويت"; assert_eq!(data.replace(c, repl).as_slice(), c2); } #[test] fn test_replace_2d() { let data = "ประเทศไทย中华"; let repl = "دولة الكويت"; let d = "ไท华"; assert_eq!(data.replace(d, repl).as_slice(), data); } #[test] fn test_slice() { assert_eq!("ab", "abc".slice(0, 2)); assert_eq!("bc", "abc".slice(1, 3)); assert_eq!("", "abc".slice(1, 1)); assert_eq!("\u65e5", "\u65e5\u672c".slice(0, 3)); let data = "ประเทศไทย中华"; assert_eq!("ป", data.slice(0, 3)); assert_eq!("ร", data.slice(3, 6)); assert_eq!("", data.slice(3, 3)); assert_eq!("华", data.slice(30, 33)); fn a_million_letter_x() -> String { let mut i = 0u; let mut rs = String::new(); while i < 100000 { rs.push_str("华华华华华华华华华华"); i += 1; } rs } fn half_a_million_letter_x() -> String { let mut i = 0u; let mut rs = String::new(); while i < 100000 { rs.push_str("华华华华华"); i += 1; } rs } let letters = a_million_letter_x(); assert!(half_a_million_letter_x() == letters.as_slice().slice(0u, 3u * 500000u).to_string()); } #[test] fn test_slice_2() { let ss = "中华Việt Nam"; assert_eq!("华", ss.slice(3u, 6u)); assert_eq!("Việt Nam", ss.slice(6u, 16u)); assert_eq!("ab", "abc".slice(0u, 2u)); assert_eq!("bc", "abc".slice(1u, 3u)); assert_eq!("", "abc".slice(1u, 1u)); assert_eq!("中", ss.slice(0u, 3u)); assert_eq!("华V", ss.slice(3u, 7u)); assert_eq!("", ss.slice(3u, 3u)); /*0: 中 3: 华 6: V 7: i 8: ệ 11: t 12: 13: N 14: a 15: m */ } #[test] #[should_fail] fn test_slice_fail() { "中华Việt Nam".slice(0u, 2u); } #[test] fn test_slice_from() { assert_eq!("abcd".slice_from(0), "abcd"); assert_eq!("abcd".slice_from(2), "cd"); assert_eq!("abcd".slice_from(4), ""); } #[test] fn test_slice_to() { assert_eq!("abcd".slice_to(0), ""); assert_eq!("abcd".slice_to(2), "ab"); assert_eq!("abcd".slice_to(4), "abcd"); } #[test] fn test_trim_left_chars() { let v: &[char] 
= &[]; assert_eq!(" *** foo *** ".trim_left_chars(v), " *** foo *** "); assert_eq!(" *** foo *** ".trim_left_chars(&['*', ' ']), "foo *** "); assert_eq!(" *** *** ".trim_left_chars(&['*', ' ']), ""); assert_eq!("foo *** ".trim_left_chars(&['*', ' ']), "foo *** "); assert_eq!("11foo1bar11".trim_left_chars('1'), "foo1bar11"); assert_eq!("12foo1bar12".trim_left_chars(&['1', '2']), "foo1bar12"); assert_eq!("123foo1bar123".trim_left_chars(|c: char| c.is_digit()), "foo1bar123"); } #[test] fn test_trim_right_chars() { let v: &[char] = &[]; assert_eq!(" *** foo *** ".trim_right_chars(v), " *** foo *** "); assert_eq!(" *** foo *** ".trim_right_chars(&['*', ' ']), " *** foo"); assert_eq!(" *** *** ".trim_right_chars(&['*', ' ']), ""); assert_eq!(" *** foo".trim_right_chars(&['*', ' ']), " *** foo"); assert_eq!("11foo1bar11".trim_right_chars('1'), "11foo1bar"); assert_eq!("12foo1bar12".trim_right_chars(&['1', '2']), "12foo1bar"); assert_eq!("123foo1bar123".trim_right_chars(|c: char| c.is_digit()), "123foo1bar"); } #[test] fn test_trim_chars() { let v: &[char] = &[]; assert_eq!(" *** foo *** ".trim_chars(v), " *** foo *** "); assert_eq!(" *** foo *** ".trim_chars(&['*', ' ']), "foo"); assert_eq!(" *** *** ".trim_chars(&['*', ' ']), ""); assert_eq!("foo".trim_chars(&['*', ' ']), "foo"); assert_eq!("11foo1bar11".trim_chars('1'), "foo1bar"); assert_eq!("12foo1bar12".trim_chars(&['1', '2']), "foo1bar"); assert_eq!("123foo1bar123".trim_chars(|c: char| c.is_digit()), "foo1bar"); } #[test] fn test_trim_left() { assert_eq!("".trim_left(), ""); assert_eq!("a".trim_left(), "a"); assert_eq!(" ".trim_left(), ""); assert_eq!(" blah".trim_left(), "blah"); assert_eq!(" \u3000 wut".trim_left(), "wut"); assert_eq!("hey ".trim_left(), "hey "); } #[test] fn test_trim_right() { assert_eq!("".trim_right(), ""); assert_eq!("a".trim_right(), "a"); assert_eq!(" ".trim_right(), ""); assert_eq!("blah ".trim_right(), "blah"); assert_eq!("wut \u3000 ".trim_right(), "wut"); assert_eq!(" hey".trim_right(), " hey"); } #[test] fn test_trim() { assert_eq!("".trim(), ""); assert_eq!("a".trim(), "a"); assert_eq!(" ".trim(), ""); assert_eq!(" blah ".trim(), "blah"); assert_eq!("\nwut \u3000 ".trim(), "wut"); assert_eq!(" hey dude ".trim(), "hey dude"); } #[test] fn test_is_whitespace() { assert!("".is_whitespace()); assert!(" ".is_whitespace()); assert!("\u2009".is_whitespace()); // Thin space assert!(" \n\t ".is_whitespace()); assert!(!" _ ".is_whitespace()); } #[test] fn test_slice_shift_char() { let data = "ประเทศไทย中"; assert_eq!(data.slice_shift_char(), (Some('ป'), "ระเทศไทย中")); } #[test] fn test_slice_shift_char_2() { let empty = ""; assert_eq!(empty.slice_shift_char(), (None, "")); } #[test] fn test_is_utf8() { // deny overlong encodings assert!(!is_utf8([0xc0, 0x80])); assert!(!is_utf8([0xc0, 0xae])); assert!(!is_utf8([0xe0, 0x80, 0x80])); assert!(!is_utf8([0xe0, 0x80, 0xaf])); assert!(!is_utf8([0xe0, 0x81, 0x81])); assert!(!is_utf8([0xf0, 0x82, 0x82, 0xac])); assert!(!is_utf8([0xf4, 0x90, 0x80, 0x80])); // deny surrogates assert!(!is_utf8([0xED, 0xA0, 0x80])); assert!(!is_utf8([0xED, 0xBF, 0xBF])); assert!(is_utf8([0xC2, 0x80])); assert!(is_utf8([0xDF, 0xBF])); assert!(is_utf8([0xE0, 0xA0, 0x80])); assert!(is_utf8([0xED, 0x9F, 0xBF])); assert!(is_utf8([0xEE, 0x80, 0x80])); assert!(is_utf8([0xEF, 0xBF, 0xBF])); assert!(is_utf8([0xF0, 0x90, 0x80, 0x80])); assert!(is_utf8([0xF4, 0x8F, 0xBF, 0xBF])); } #[test] fn test_is_utf16() { macro_rules! 
pos ( ($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }); // non-surrogates pos!([0x0000], [0x0001, 0x0002], [0xD7FF], [0xE000]); // surrogate pairs (randomly generated with Python 3's // .encode('utf-16be')) pos!([0xdb54, 0xdf16, 0xd880, 0xdee0, 0xdb6a, 0xdd45], [0xd91f, 0xdeb1, 0xdb31, 0xdd84, 0xd8e2, 0xde14], [0xdb9f, 0xdc26, 0xdb6f, 0xde58, 0xd850, 0xdfae]); // mixtures (also random) pos!([0xd921, 0xdcc2, 0x002d, 0x004d, 0xdb32, 0xdf65], [0xdb45, 0xdd2d, 0x006a, 0xdacd, 0xddfe, 0x0006], [0x0067, 0xd8ff, 0xddb7, 0x000f, 0xd900, 0xdc80]); // negative tests macro_rules! neg ( ($($e:expr),*) => { { $(assert!(!is_utf16($e));)* } }); neg!( // surrogate + regular unit [0xdb45, 0x0000], // surrogate + lead surrogate [0xd900, 0xd900], // unterminated surrogate [0xd8ff], // trail surrogate without a lead [0xddb7]); // random byte sequences that Python 3's .decode('utf-16be') // failed on neg!([0x5b3d, 0x0141, 0xde9e, 0x8fdc, 0xc6e7], [0xdf5a, 0x82a5, 0x62b9, 0xb447, 0x92f3], [0xda4e, 0x42bc, 0x4462, 0xee98, 0xc2ca], [0xbe00, 0xb04a, 0x6ecb, 0xdd89, 0xe278], [0x0465, 0xab56, 0xdbb6, 0xa893, 0x665e], [0x6b7f, 0x0a19, 0x40f4, 0xa657, 0xdcc5], [0x9b50, 0xda5e, 0x24ec, 0x03ad, 0x6dee], [0x8d17, 0xcaa7, 0xf4ae, 0xdf6e, 0xbed7], [0xdaee, 0x2584, 0x7d30, 0xa626, 0x121a], [0xd956, 0x4b43, 0x7570, 0xccd6, 0x4f4a], [0x9dcf, 0x1b49, 0x4ba5, 0xfce9, 0xdffe], [0x6572, 0xce53, 0xb05a, 0xf6af, 0xdacf], [0x1b90, 0x728c, 0x9906, 0xdb68, 0xf46e], [0x1606, 0xbeca, 0xbe76, 0x860f, 0xdfa5], [0x8b4f, 0xde7a, 0xd220, 0x9fac, 0x2b6f], [0xb8fe, 0xebbe, 0xda32, 0x1a5f, 0x8b8b], [0x934b, 0x8956, 0xc434, 0x1881, 0xddf7], [0x5a95, 0x13fc, 0xf116, 0xd89b, 0x93f9], [0xd640, 0x71f1, 0xdd7d, 0x77eb, 0x1cd8], [0x348b, 0xaef0, 0xdb2c, 0xebf1, 0x1282], [0x50d7, 0xd824, 0x5010, 0xb369, 0x22ea]); } #[test] fn test_raw_from_c_str() { unsafe { let a = vec![65, 65, 65, 65, 65, 65, 65, 0]; let b = a.as_ptr(); let c = raw::from_c_str(b); assert_eq!(c, "AAAAAAA".to_string()); } } #[test] fn test_as_bytes() { // no null let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109 ]; assert_eq!("".as_bytes(), &[]); assert_eq!("abc".as_bytes(), &['a' as u8, 'b' as u8, 'c' as u8]); assert_eq!("ศไทย中华Việt Nam".as_bytes(), v.as_slice()); } #[test] #[should_fail] fn test_as_bytes_fail() { // Don't double free. (I'm not sure if this exercises the // original problem code path anymore.) 
let s = "".to_string(); let _bytes = s.as_bytes(); fail!(); } #[test] fn test_as_ptr() { let buf = "hello".as_ptr(); unsafe { assert_eq!(*buf.offset(0), 'h' as u8); assert_eq!(*buf.offset(1), 'e' as u8); assert_eq!(*buf.offset(2), 'l' as u8); assert_eq!(*buf.offset(3), 'l' as u8); assert_eq!(*buf.offset(4), 'o' as u8); } } #[test] fn test_subslice_offset() { let a = "kernelsprite"; let b = a.slice(7, a.len()); let c = a.slice(0, a.len() - 6); assert_eq!(a.subslice_offset(b), 7); assert_eq!(a.subslice_offset(c), 0); let string = "a\nb\nc"; let lines: Vec<&str> = string.lines().collect(); let lines = lines.as_slice(); assert_eq!(string.subslice_offset(lines[0]), 0); assert_eq!(string.subslice_offset(lines[1]), 2); assert_eq!(string.subslice_offset(lines[2]), 4); } #[test] #[should_fail] fn test_subslice_offset_2() { let a = "alchemiter"; let b = "cruxtruder"; a.subslice_offset(b); } #[test] fn vec_str_conversions() { let s1: String = "All mimsy were the borogoves".to_string(); let v: Vec<u8> = Vec::from_slice(s1.as_bytes()); let s2: String = from_utf8(v.as_slice()).unwrap().to_string(); let mut i: uint = 0u; let n1: uint = s1.len(); let n2: uint = v.len(); assert_eq!(n1, n2); while i < n1 { let a: u8 = s1.as_slice()[i]; let b: u8 = s2.as_slice()[i]; debug!("{}", a); debug!("{}", b); assert_eq!(a, b); i += 1u; } } #[test] fn test_contains() { assert!("abcde".contains("bcd")); assert!("abcde".contains("abcd")); assert!("abcde".contains("bcde")); assert!("abcde".contains("")); assert!("".contains("")); assert!(!"abcde".contains("def")); assert!(!"".contains("a")); let data = "ประเทศไทย中华Việt Nam"; assert!(data.contains("ประเ")); assert!(data.contains("ะเ")); assert!(data.contains("中华")); assert!(!data.contains("ไท华")); } #[test] fn test_contains_char() { assert!("abc".contains_char('b')); assert!("a".contains_char('a')); assert!(!"abc".contains_char('d')); assert!(!"".contains_char('a')); } #[test] fn test_utf16() { let pairs = [("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n".to_string(), vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16, 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf30_u16, 0x000a_u16]), ("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n".to_string(), vec![0xd801_u16, 0xdc12_u16, 0xd801_u16, 0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16, 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16,<|fim▁hole|> vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16, 0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16, 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]), ("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n".to_string(), vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16, 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16, 0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16, 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc93_u16, 0x0020_u16, 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc9a_u16, 0xd801_u16, 0xdc8d_u16, 0x0020_u16, 0xd801_u16, 0xdc8f_u16, 0xd801_u16, 0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16, 0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16, 0x000a_u16 ]), // Issue #12318, even-numbered non-BMP planes ("\U00020000".to_string(), vec![0xD840, 0xDC00])]; for p in pairs.iter() { let (s, u) = (*p).clone(); let s_as_utf16 = s.as_slice().utf16_units().collect::<Vec<u16>>(); let 
u_as_string = from_utf16(u.as_slice()).unwrap(); assert!(is_utf16(u.as_slice())); assert_eq!(s_as_utf16, u); assert_eq!(u_as_string, s); assert_eq!(from_utf16_lossy(u.as_slice()), s); assert_eq!(from_utf16(s_as_utf16.as_slice()).unwrap(), s); assert_eq!(u_as_string.as_slice().utf16_units().collect::<Vec<u16>>(), u); } } #[test] fn test_utf16_invalid() { // completely positive cases tested above. // lead + eof assert_eq!(from_utf16([0xD800]), None); // lead + lead assert_eq!(from_utf16([0xD800, 0xD800]), None); // isolated trail assert_eq!(from_utf16([0x0061, 0xDC00]), None); // general assert_eq!(from_utf16([0xD800, 0xd801, 0xdc8b, 0xD800]), None); } #[test] fn test_utf16_lossy() { // completely positive cases tested above. // lead + eof assert_eq!(from_utf16_lossy([0xD800]), "\uFFFD".to_string()); // lead + lead assert_eq!(from_utf16_lossy([0xD800, 0xD800]), "\uFFFD\uFFFD".to_string()); // isolated trail assert_eq!(from_utf16_lossy([0x0061, 0xDC00]), "a\uFFFD".to_string()); // general assert_eq!(from_utf16_lossy([0xD800, 0xd801, 0xdc8b, 0xD800]), "\uFFFD𐒋\uFFFD".to_string()); } #[test] fn test_truncate_utf16_at_nul() { let v = []; assert_eq!(truncate_utf16_at_nul(v), &[]); let v = [0, 2, 3]; assert_eq!(truncate_utf16_at_nul(v), &[]); let v = [1, 0, 3]; assert_eq!(truncate_utf16_at_nul(v), &[1]); let v = [1, 2, 0]; assert_eq!(truncate_utf16_at_nul(v), &[1, 2]); let v = [1, 2, 3]; assert_eq!(truncate_utf16_at_nul(v), &[1, 2, 3]); } #[test] fn test_char_at() { let s = "ศไทย中华Việt Nam"; let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; let mut pos = 0; for ch in v.iter() { assert!(s.char_at(pos) == *ch); pos += from_char(*ch).len(); } } #[test] fn test_char_at_reverse() { let s = "ศไทย中华Việt Nam"; let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; let mut pos = s.len(); for ch in v.iter().rev() { assert!(s.char_at_reverse(pos) == *ch); pos -= from_char(*ch).len(); } } #[test] fn test_escape_unicode() { assert_eq!("abc".escape_unicode(), "\\x61\\x62\\x63".to_string()); assert_eq!("a c".escape_unicode(), "\\x61\\x20\\x63".to_string()); assert_eq!("\r\n\t".escape_unicode(), "\\x0d\\x0a\\x09".to_string()); assert_eq!("'\"\\".escape_unicode(), "\\x27\\x22\\x5c".to_string()); assert_eq!("\x00\x01\xfe\xff".escape_unicode(), "\\x00\\x01\\xfe\\xff".to_string()); assert_eq!("\u0100\uffff".escape_unicode(), "\\u0100\\uffff".to_string()); assert_eq!("\U00010000\U0010ffff".escape_unicode(), "\\U00010000\\U0010ffff".to_string()); assert_eq!("ab\ufb00".escape_unicode(), "\\x61\\x62\\ufb00".to_string()); assert_eq!("\U0001d4ea\r".escape_unicode(), "\\U0001d4ea\\x0d".to_string()); } #[test] fn test_escape_default() { assert_eq!("abc".escape_default(), "abc".to_string()); assert_eq!("a c".escape_default(), "a c".to_string()); assert_eq!("\r\n\t".escape_default(), "\\r\\n\\t".to_string()); assert_eq!("'\"\\".escape_default(), "\\'\\\"\\\\".to_string()); assert_eq!("\u0100\uffff".escape_default(), "\\u0100\\uffff".to_string()); assert_eq!("\U00010000\U0010ffff".escape_default(), "\\U00010000\\U0010ffff".to_string()); assert_eq!("ab\ufb00".escape_default(), "ab\\ufb00".to_string()); assert_eq!("\U0001d4ea\r".escape_default(), "\\U0001d4ea\\r".to_string()); } #[test] fn test_total_ord() { "1234".cmp(&("123")) == Greater; "123".cmp(&("1234")) == Less; "1234".cmp(&("1234")) == Equal; "12345555".cmp(&("123456")) == Less; "22".cmp(&("1234")) == Greater; } #[test] fn test_char_range_at() { let data = "b¢€𤭢𤭢€¢b"; assert_eq!('b', data.char_range_at(0).ch); assert_eq!('¢', 
data.char_range_at(1).ch); assert_eq!('€', data.char_range_at(3).ch); assert_eq!('𤭢', data.char_range_at(6).ch); assert_eq!('𤭢', data.char_range_at(10).ch); assert_eq!('€', data.char_range_at(14).ch); assert_eq!('¢', data.char_range_at(17).ch); assert_eq!('b', data.char_range_at(19).ch); } #[test] fn test_char_range_at_reverse_underflow() { assert_eq!("abc".char_range_at_reverse(0).next, 0); } #[test] fn test_iterator() { let s = "ศไทย中华Việt Nam"; let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; let mut pos = 0; let mut it = s.chars(); for c in it { assert_eq!(c, v[pos]); pos += 1; } assert_eq!(pos, v.len()); } #[test] fn test_rev_iterator() { let s = "ศไทย中华Việt Nam"; let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ']; let mut pos = 0; let mut it = s.chars().rev(); for c in it { assert_eq!(c, v[pos]); pos += 1; } assert_eq!(pos, v.len()); } #[test] fn test_iterator_clone() { let s = "ศไทย中华Việt Nam"; let mut it = s.chars(); it.next(); assert!(it.zip(it.clone()).all(|(x,y)| x == y)); } #[test] fn test_bytesator() { let s = "ศไทย中华Việt Nam"; let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109 ]; let mut pos = 0; for b in s.bytes() { assert_eq!(b, v[pos]); pos += 1; } } #[test] fn test_bytes_revator() { let s = "ศไทย中华Việt Nam"; let v = [ 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, 109 ]; let mut pos = v.len(); for b in s.bytes().rev() { pos -= 1; assert_eq!(b, v[pos]); } } #[test] fn test_char_indicesator() { let s = "ศไทย中华Việt Nam"; let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27]; let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; let mut pos = 0; let mut it = s.char_indices(); for c in it { assert_eq!(c, (p[pos], v[pos])); pos += 1; } assert_eq!(pos, v.len()); assert_eq!(pos, p.len()); } #[test] fn test_char_indices_revator() { let s = "ศไทย中华Việt Nam"; let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0]; let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ']; let mut pos = 0; let mut it = s.char_indices().rev(); for c in it { assert_eq!(c, (p[pos], v[pos])); pos += 1; } assert_eq!(pos, v.len()); assert_eq!(pos, p.len()); } #[test] fn test_split_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split(' ').collect(); assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let split: Vec<&str> = data.split(|c: char| c == ' ').collect(); assert_eq!( split, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let split: Vec<&str> = data.split('ä').collect(); assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split('ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let split: Vec<&str> = data.split(|c: char| c == 'ä').collect(); assert_eq!( split, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); let mut rsplit: Vec<&str> = data.split(|c: 
char| c == 'ä').rev().collect(); rsplit.reverse(); assert_eq!(rsplit, vec!["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn test_splitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.splitn(' ', 3).collect(); assert_eq!(split, vec!["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]); let split: Vec<&str> = data.splitn(|c: char| c == ' ', 3).collect(); assert_eq!(split, vec!["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]); // Unicode let split: Vec<&str> = data.splitn('ä', 3).collect(); assert_eq!(split, vec!["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); let split: Vec<&str> = data.splitn(|c: char| c == 'ä', 3).collect(); assert_eq!(split, vec!["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); } #[test] fn test_rsplitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let mut split: Vec<&str> = data.rsplitn(' ', 3).collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); let mut split: Vec<&str> = data.rsplitn(|c: char| c == ' ', 3).collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); // Unicode let mut split: Vec<&str> = data.rsplitn('ä', 3).collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); let mut split: Vec<&str> = data.rsplitn(|c: char| c == 'ä', 3).collect(); split.reverse(); assert_eq!(split, vec!["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); } #[test] fn test_split_char_iterator_no_trailing() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split('\n').collect(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]); let split: Vec<&str> = data.split_terminator('\n').collect(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]); } #[test] fn test_rev_split_char_iterator_no_trailing() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let mut split: Vec<&str> = data.split('\n').rev().collect(); split.reverse(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb", ""]); let mut split: Vec<&str> = data.split_terminator('\n').rev().collect(); split.reverse(); assert_eq!(split, vec!["", "Märy häd ä little lämb", "Little lämb"]); } #[test] fn test_words() { let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n"; let words: Vec<&str> = data.words().collect(); assert_eq!(words, vec!["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"]) } #[test] fn test_nfd_chars() { assert_eq!("abc".nfd_chars().collect::<String>(), "abc".to_string()); assert_eq!("\u1e0b\u01c4".nfd_chars().collect::<String>(), "d\u0307\u01c4".to_string()); assert_eq!("\u2026".nfd_chars().collect::<String>(), "\u2026".to_string()); assert_eq!("\u2126".nfd_chars().collect::<String>(), "\u03a9".to_string()); assert_eq!("\u1e0b\u0323".nfd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u1e0d\u0307".nfd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("a\u0301".nfd_chars().collect::<String>(), "a\u0301".to_string()); assert_eq!("\u0301a".nfd_chars().collect::<String>(), "\u0301a".to_string()); assert_eq!("\ud4db".nfd_chars().collect::<String>(), "\u1111\u1171\u11b6".to_string()); assert_eq!("\uac1c".nfd_chars().collect::<String>(), "\u1100\u1162".to_string()); } #[test] fn test_nfkd_chars() { assert_eq!("abc".nfkd_chars().collect::<String>(), "abc".to_string()); 
assert_eq!("\u1e0b\u01c4".nfkd_chars().collect::<String>(), "d\u0307DZ\u030c".to_string()); assert_eq!("\u2026".nfkd_chars().collect::<String>(), "...".to_string()); assert_eq!("\u2126".nfkd_chars().collect::<String>(), "\u03a9".to_string()); assert_eq!("\u1e0b\u0323".nfkd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("\u1e0d\u0307".nfkd_chars().collect::<String>(), "d\u0323\u0307".to_string()); assert_eq!("a\u0301".nfkd_chars().collect::<String>(), "a\u0301".to_string()); assert_eq!("\u0301a".nfkd_chars().collect::<String>(), "\u0301a".to_string()); assert_eq!("\ud4db".nfkd_chars().collect::<String>(), "\u1111\u1171\u11b6".to_string()); assert_eq!("\uac1c".nfkd_chars().collect::<String>(), "\u1100\u1162".to_string()); } #[test] fn test_lines() { let data = "\nMäry häd ä little lämb\n\nLittle lämb\n"; let lines: Vec<&str> = data.lines().collect(); assert_eq!(lines, vec!["", "Märy häd ä little lämb", "", "Little lämb"]); let data = "\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n let lines: Vec<&str> = data.lines().collect(); assert_eq!(lines, vec!["", "Märy häd ä little lämb", "", "Little lämb"]); } #[test] fn test_split_strator() { fn t(s: &str, sep: &str, u: &[&str]) { let v: Vec<&str> = s.split_str(sep).collect(); assert_eq!(v.as_slice(), u.as_slice()); } t("--1233345--", "12345", ["--1233345--"]); t("abc::hello::there", "::", ["abc", "hello", "there"]); t("::hello::there", "::", ["", "hello", "there"]); t("hello::there::", "::", ["hello", "there", ""]); t("::hello::there::", "::", ["", "hello", "there", ""]); t("ประเทศไทย中华Việt Nam", "中华", ["ประเทศไทย", "Việt Nam"]); t("zzXXXzzYYYzz", "zz", ["", "XXX", "YYY", ""]); t("zzXXXzYYYz", "XXX", ["zz", "zYYYz"]); t(".XXX.YYY.", ".", ["", "XXX", "YYY", ""]); t("", ".", [""]); t("zz", "zz", ["",""]); t("ok", "z", ["ok"]); t("zzz", "zz", ["","z"]); t("zzzzz", "zz", ["","","z"]); } #[test] fn test_str_default() { use std::default::Default; fn t<S: Default + Str>() { let s: S = Default::default(); assert_eq!(s.as_slice(), ""); } t::<&str>(); t::<String>(); } #[test] fn test_str_container() { fn sum_len<S: Collection>(v: &[S]) -> uint { v.iter().map(|x| x.len()).sum() } let s = "01234".to_string(); assert_eq!(5, sum_len(["012", "", "34"])); assert_eq!(5, sum_len(["01".to_string(), "2".to_string(), "34".to_string(), "".to_string()])); assert_eq!(5, sum_len([s.as_slice()])); } #[test] fn test_str_from_utf8() { let xs = b"hello"; assert_eq!(from_utf8(xs), Some("hello")); let xs = "ศไทย中华Việt Nam".as_bytes(); assert_eq!(from_utf8(xs), Some("ศไทย中华Việt Nam")); let xs = b"hello\xFF"; assert_eq!(from_utf8(xs), None); } #[test] fn test_str_from_utf8_owned() { let xs = Vec::from_slice(b"hello"); assert_eq!(from_utf8_owned(xs), Ok("hello".to_string())); let xs = Vec::from_slice("ศไทย中华Việt Nam".as_bytes()); assert_eq!(from_utf8_owned(xs), Ok("ศไทย中华Việt Nam".to_string())); let xs = Vec::from_slice(b"hello\xFF"); assert_eq!(from_utf8_owned(xs), Err(Vec::from_slice(b"hello\xFF"))); } #[test] fn test_str_from_utf8_lossy() { let xs = b"hello"; assert_eq!(from_utf8_lossy(xs), Slice("hello")); let xs = "ศไทย中华Việt Nam".as_bytes(); assert_eq!(from_utf8_lossy(xs), Slice("ศไทย中华Việt Nam")); let xs = b"Hello\xC2 There\xFF Goodbye"; assert_eq!(from_utf8_lossy(xs), Owned("Hello\uFFFD There\uFFFD Goodbye".to_string())); let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; assert_eq!(from_utf8_lossy(xs), Owned("Hello\uFFFD\uFFFD There\uFFFD Goodbye".to_string())); let xs = b"\xF5foo\xF5\x80bar"; assert_eq!(from_utf8_lossy(xs), 
Owned("\uFFFDfoo\uFFFD\uFFFDbar".to_string())); let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz"; assert_eq!(from_utf8_lossy(xs), Owned("\uFFFDfoo\uFFFDbar\uFFFDbaz".to_string())); let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz"; assert_eq!(from_utf8_lossy(xs), Owned("\uFFFDfoo\uFFFDbar\uFFFD\uFFFDbaz".to_string())); let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar"; assert_eq!(from_utf8_lossy(xs), Owned("\uFFFD\uFFFD\uFFFD\uFFFD\ foo\U00010000bar".to_string())); // surrogates let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar"; assert_eq!(from_utf8_lossy(xs), Owned("\uFFFD\uFFFD\uFFFDfoo\ \uFFFD\uFFFD\uFFFDbar".to_string())); } #[test] fn test_from_str() { let owned: Option<::std::string::String> = from_str("string"); assert_eq!(owned.as_ref().map(|s| s.as_slice()), Some("string")); } #[test] fn test_maybe_owned_traits() { let s = Slice("abcde"); assert_eq!(s.len(), 5); assert_eq!(s.as_slice(), "abcde"); assert_eq!(s.to_str().as_slice(), "abcde"); assert_eq!(format!("{}", s).as_slice(), "abcde"); assert!(s.lt(&Owned("bcdef".to_string()))); assert_eq!(Slice(""), Default::default()); let o = Owned("abcde".to_string()); assert_eq!(o.len(), 5); assert_eq!(o.as_slice(), "abcde"); assert_eq!(o.to_str().as_slice(), "abcde"); assert_eq!(format!("{}", o).as_slice(), "abcde"); assert!(o.lt(&Slice("bcdef"))); assert_eq!(Owned("".to_string()), Default::default()); assert!(s.cmp(&o) == Equal); assert!(s.equiv(&o)); assert!(o.cmp(&s) == Equal); assert!(o.equiv(&s)); } #[test] fn test_maybe_owned_methods() { let s = Slice("abcde"); assert!(s.is_slice()); assert!(!s.is_owned()); let o = Owned("abcde".to_string()); assert!(!o.is_slice()); assert!(o.is_owned()); } #[test] fn test_maybe_owned_clone() { assert_eq!(Owned("abcde".to_string()), Slice("abcde").clone()); assert_eq!(Owned("abcde".to_string()), Owned("abcde".to_string()).clone()); assert_eq!(Slice("abcde"), Slice("abcde").clone()); assert_eq!(Slice("abcde"), Owned("abcde".to_string()).clone()); } #[test] fn test_maybe_owned_into_string() { assert_eq!(Slice("abcde").into_string(), "abcde".to_string()); assert_eq!(Owned("abcde".to_string()).into_string(), "abcde".to_string()); } #[test] fn test_into_maybe_owned() { assert_eq!("abcde".into_maybe_owned(), Slice("abcde")); assert_eq!(("abcde".to_string()).into_maybe_owned(), Slice("abcde")); assert_eq!("abcde".into_maybe_owned(), Owned("abcde".to_string())); assert_eq!(("abcde".to_string()).into_maybe_owned(), Owned("abcde".to_string())); } } #[cfg(test)] mod bench { use test::Bencher; use super::*; use std::prelude::*; #[bench] fn char_iterator(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let len = s.char_len(); b.iter(|| assert_eq!(s.chars().count(), len)); } #[bench] fn char_iterator_ascii(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb Mary had a little lamb, Little lamb"; let len = s.char_len(); b.iter(|| assert_eq!(s.chars().count(), len)); } #[bench] fn char_iterator_rev(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let len = s.char_len(); b.iter(|| assert_eq!(s.chars().rev().count(), len)); } #[bench] fn char_indicesator(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let len = s.char_len(); b.iter(|| assert_eq!(s.char_indices().count(), len)); } #[bench] fn char_indicesator_rev(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, 
Little lamb"; let len = s.char_len(); b.iter(|| assert_eq!(s.char_indices().rev().count(), len)); } #[bench] fn split_unicode_ascii(b: &mut Bencher) { let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam"; b.iter(|| assert_eq!(s.split('V').count(), 3)); } #[bench] fn split_unicode_not_ascii(b: &mut Bencher) { struct NotAscii(char); impl CharEq for NotAscii { fn matches(&mut self, c: char) -> bool { let NotAscii(cc) = *self; cc == c } fn only_ascii(&self) -> bool { false } } let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam"; b.iter(|| assert_eq!(s.split(NotAscii('V')).count(), 3)); } #[bench] fn split_ascii(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| assert_eq!(s.split(' ').count(), len)); } #[bench] fn split_not_ascii(b: &mut Bencher) { struct NotAscii(char); impl CharEq for NotAscii { #[inline] fn matches(&mut self, c: char) -> bool { let NotAscii(cc) = *self; cc == c } fn only_ascii(&self) -> bool { false } } let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| assert_eq!(s.split(NotAscii(' ')).count(), len)); } #[bench] fn split_extern_fn(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); fn pred(c: char) -> bool { c == ' ' } b.iter(|| assert_eq!(s.split(pred).count(), len)); } #[bench] fn split_closure(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| assert_eq!(s.split(|c: char| c == ' ').count(), len)); } #[bench] fn split_slice(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); b.iter(|| assert_eq!(s.split(&[' ']).count(), len)); } #[bench] fn is_utf8_100_ascii(b: &mut Bencher) { let s = b"Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; assert_eq!(100, s.len()); b.iter(|| { is_utf8(s) }); } #[bench] fn is_utf8_100_multibyte(b: &mut Bencher) { let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes(); assert_eq!(100, s.len()); b.iter(|| { is_utf8(s) }); } #[bench] fn from_utf8_lossy_100_ascii(b: &mut Bencher) { let s = b"Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; assert_eq!(100, s.len()); b.iter(|| { let _ = from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_multibyte(b: &mut Bencher) { let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes(); assert_eq!(100, s.len()); b.iter(|| { let _ = from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_invalid(b: &mut Bencher) { let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; b.iter(|| { let _ = from_utf8_lossy(s); }); } #[bench] fn from_utf8_lossy_100_invalid(b: &mut Bencher) { let s = Vec::from_elem(100, 0xF5u8); b.iter(|| { let _ = from_utf8_lossy(s.as_slice()); }); } #[bench] fn bench_connect(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; let sep = "→"; let v = [s, s, s, s, s, s, s, s, s, s]; b.iter(|| { assert_eq!(v.connect(sep).len(), s.len() * 10 + sep.len() * 9); }) } #[bench] fn bench_contains_short_short(b: &mut Bencher) { let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; let needle = "sit"; b.iter(|| { assert!(haystack.contains(needle)); }) } #[bench] fn bench_contains_short_long(b: &mut Bencher) { let haystack = "\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \ ultricies condimentum. 
Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \ eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \ sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \ tempus vel, gravida nec quam. In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \ sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \ diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \ lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \ eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \ interdum. Curabitur ut nisi justo. Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \ mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \ lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \ est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \ felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \ ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \ feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \ Aliquam sit amet placerat lorem. Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \ mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \ Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \ lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \ suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \ cursus accumsan. Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \ feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \ vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \ leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \ malesuada sollicitudin quam eu fermentum."; let needle = "english"; b.iter(|| { assert!(!haystack.contains(needle)); }) } #[bench] fn bench_contains_bad_naive(b: &mut Bencher) { let haystack = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; let needle = "aaaaaaaab"; b.iter(|| { assert!(!haystack.contains(needle)); }) } #[bench] fn bench_contains_equal(b: &mut Bencher) { let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; let needle = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; b.iter(|| { assert!(haystack.contains(needle)); }) } }<|fim▁end|>
0x000a_u16]), ("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n".to_string(),
<|file_name|>onbuild.go<|end_file_name|><|fim▁begin|>package onbuild import ( "bytes" "fmt" "io" "os" "path/filepath" "github.com/golang/glog" "github.com/openshift/source-to-image/pkg/api" "github.com/openshift/source-to-image/pkg/build" "github.com/openshift/source-to-image/pkg/build/strategies/sti" "github.com/openshift/source-to-image/pkg/docker" "github.com/openshift/source-to-image/pkg/ignore" "github.com/openshift/source-to-image/pkg/scm" "github.com/openshift/source-to-image/pkg/scm/git" "github.com/openshift/source-to-image/pkg/scripts" "github.com/openshift/source-to-image/pkg/tar" "github.com/openshift/source-to-image/pkg/util" ) // OnBuild strategy executes the simple Docker build in case the image does not // support STI scripts but has ONBUILD instructions recorded. type OnBuild struct { docker docker.Docker git git.Git fs util.FileSystem tar tar.Tar source build.SourceHandler garbage build.Cleaner }
<|fim▁hole|>type onBuildSourceHandler struct { build.Downloader build.Preparer build.Ignorer } // New returns a new instance of the OnBuild builder func New(config *api.Config, overrides build.Overrides) (*OnBuild, error) { dockerHandler, err := docker.New(config.DockerConfig, config.PullAuthentication) if err != nil { return nil, err } b := &OnBuild{ docker: dockerHandler, git: git.New(), fs: util.NewFileSystem(), tar: tar.New(), } // Use STI Prepare() and download the 'run' script optionally. s, err := sti.New(config, overrides) if err != nil { return nil, err } s.SetScripts([]string{}, []string{api.Assemble, api.Run}) downloader := overrides.Downloader if downloader == nil { d, sourceURL, err := scm.DownloaderForSource(config.Source, config.ForceCopy) if err != nil { return nil, err } downloader = d config.Source = sourceURL } b.source = onBuildSourceHandler{ Downloader: downloader, Preparer: s, Ignorer: &ignore.DockerIgnorer{}, } b.garbage = &build.DefaultCleaner{b.fs, b.docker} return b, nil } // SourceTar produces a tar archive containing the application source and streams it func (b *OnBuild) SourceTar(config *api.Config) (io.ReadCloser, error) { uploadDir := filepath.Join(config.WorkingDir, "upload", "src") tarFileName, err := b.tar.CreateTarFile(config.WorkingDir, uploadDir) if err != nil { return nil, err } return b.fs.Open(tarFileName) } // Build executes the ONBUILD kind of build func (b *OnBuild) Build(config *api.Config) (*api.Result, error) { glog.V(2).Info("Preparing the source code for build") // Change the installation directory for this config to store scripts inside // the application root directory.
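// A reading of the flow below, inferred from this file rather than from the s2i docs:
// Prepare is expected to download the source and lay it out under
// <WorkingDir>/upload/src, the same layout that SourceTar above and CreateDockerfile
// below both assume.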
if err := b.source.Prepare(config); err != nil { return nil, err } // If necessary, copy the STI scripts into the application root directory b.copySTIScripts(config) glog.V(2).Info("Creating application Dockerfile") if err := b.CreateDockerfile(config); err != nil { return nil, err } glog.V(2).Info("Creating application source code image") tarStream, err := b.SourceTar(config) if err != nil { return nil, err } defer tarStream.Close() opts := docker.BuildImageOptions{ Name: config.Tag, Stdin: tarStream, Stdout: os.Stdout, CGroupLimits: config.CGroupLimits, } glog.V(2).Info("Building the application source") if err := b.docker.BuildImage(opts); err != nil { return nil, err } glog.V(2).Info("Cleaning up temporary containers") b.garbage.Cleanup(config) var imageID string if len(opts.Name) > 0 { if imageID, err = b.docker.GetImageID(opts.Name); err != nil { return nil, err } } return &api.Result{ Success: true, WorkingDir: config.WorkingDir, ImageID: imageID, }, nil } // CreateDockerfile creates the ONBUILD Dockerfile func (b *OnBuild) CreateDockerfile(config *api.Config) error { buffer := bytes.Buffer{} uploadDir := filepath.Join(config.WorkingDir, "upload", "src") buffer.WriteString(fmt.Sprintf("FROM %s\n", config.BuilderImage)) entrypoint, err := GuessEntrypoint(b.fs, uploadDir) if err != nil { return err } env, err := scripts.GetEnvironment(config) if err != nil { glog.V(1).Infof("Environment: %v", err) } else { buffer.WriteString(scripts.ConvertEnvironmentToDocker(env)) } // If there is an assemble script present, run it as part of the build process // as the last thing. if b.hasAssembleScript(config) { buffer.WriteString("RUN sh assemble\n") } // FIXME: This assumes that the WORKDIR is set to the application source root // directory. buffer.WriteString(fmt.Sprintf(`ENTRYPOINT ["./%s"]`+"\n", entrypoint)) return b.fs.WriteFile(filepath.Join(uploadDir, "Dockerfile"), buffer.Bytes()) } func (b *OnBuild) copySTIScripts(config *api.Config) { scriptsPath := filepath.Join(config.WorkingDir, "upload", "scripts") sourcePath := filepath.Join(config.WorkingDir, "upload", "src") if _, err := b.fs.Stat(filepath.Join(scriptsPath, api.Run)); err == nil { glog.V(3).Infof("Found S2I 'run' script, copying to application source dir") b.fs.Copy(filepath.Join(scriptsPath, api.Run), filepath.Join(sourcePath, api.Run)) } if _, err := b.fs.Stat(filepath.Join(scriptsPath, api.Assemble)); err == nil { glog.V(3).Infof("Found S2I 'assemble' script, copying to application source dir") b.fs.Copy(filepath.Join(scriptsPath, api.Assemble), filepath.Join(sourcePath, api.Assemble)) } } // hasAssembleScript checks if the assemble script is available func (b *OnBuild) hasAssembleScript(config *api.Config) bool { assemblePath := filepath.Join(config.WorkingDir, "upload", "src", "assemble") _, err := b.fs.Stat(assemblePath) return err == nil }<|fim▁end|>
<|file_name|>DVVideoRTPSource.cpp<|end_file_name|><|fim▁begin|>/********** This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.) This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA **********/ // "liveMedia" // Copyright (c) 1996-2021 Live Networks, Inc. All rights reserved. // DV Video RTP Sources // Implementation #include "DVVideoRTPSource.hh" DVVideoRTPSource* DVVideoRTPSource::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, unsigned rtpTimestampFrequency) { return new DVVideoRTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency); } DVVideoRTPSource::DVVideoRTPSource(UsageEnvironment& env, Groupsock* rtpGS, unsigned char rtpPayloadFormat, unsigned rtpTimestampFrequency) : MultiFramedRTPSource(env, rtpGS, rtpPayloadFormat, rtpTimestampFrequency) { } DVVideoRTPSource::~DVVideoRTPSource() { } #define DV_DIF_BLOCK_SIZE 80 #define DV_SECTION_HEADER 0x1F Boolean DVVideoRTPSource ::processSpecialHeader(BufferedPacket* packet, unsigned& resultSpecialHeaderSize) { unsigned const packetSize = packet->dataSize(); if (packetSize < DV_DIF_BLOCK_SIZE) return False; // TARFU! u_int8_t const* data = packet->data(); fCurrentPacketBeginsFrame = data[0] == DV_SECTION_HEADER && (data[1]&0xf8) == 0 && data[2] == 0; // thanks to Ben Hutchings // The RTP "M" (marker) bit indicates the last fragment of a frame: fCurrentPacketCompletesFrame = packet->rtpMarkerBit(); // There is no special header resultSpecialHeaderSize = 0;<|fim▁hole|> return "video/DV"; }<|fim▁end|>
return True; } char const* DVVideoRTPSource::MIMEtype() const {
<|file_name|>d3d11_renderstate.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************** * The MIT License (MIT) * * Copyright (c) 2015-2018 Baldur Karlsson * Copyright (c) 2014 Crytek * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. ******************************************************************************/ #include "driver/d3d11/d3d11_renderstate.h" #include "driver/d3d11/d3d11_context.h" #include "driver/d3d11/d3d11_device.h" #include "driver/d3d11/d3d11_resources.h" D3D11RenderState::D3D11RenderState(D3D11RenderState::EmptyInit) { RDCEraseEl(IA); RDCEraseEl(VS); RDCEraseEl(HS); RDCEraseEl(DS); RDCEraseEl(GS); RDCEraseEl(SO); RDCEraseEl(RS); RDCEraseEl(PS); RDCEraseEl(OM); RDCEraseEl(CS); RDCEraseEl(CSUAVs); Predicate = NULL; PredicateValue = FALSE; Clear(); m_ImmediatePipeline = false; m_ViewportScissorPartial = true; m_pDevice = NULL; } D3D11RenderState::D3D11RenderState(const D3D11RenderState &other) { RDCEraseEl(IA); RDCEraseEl(VS); RDCEraseEl(HS); RDCEraseEl(DS); RDCEraseEl(GS); RDCEraseEl(SO); RDCEraseEl(RS); RDCEraseEl(PS); RDCEraseEl(OM); RDCEraseEl(CS); RDCEraseEl(CSUAVs); Predicate = NULL; PredicateValue = FALSE; m_ImmediatePipeline = false; m_pDevice = NULL; CopyState(other); } void D3D11RenderState::CopyState(const D3D11RenderState &other) { ReleaseRefs(); memcpy(&IA, &other.IA, sizeof(IA)); memcpy(&VS, &other.VS, sizeof(VS)); memcpy(&HS, &other.HS, sizeof(HS)); memcpy(&DS, &other.DS, sizeof(DS)); memcpy(&GS, &other.GS, sizeof(GS)); memcpy(&SO, &other.SO, sizeof(SO)); memcpy(&RS, &other.RS, sizeof(RS)); memcpy(&PS, &other.PS, sizeof(PS)); memcpy(&OM, &other.OM, sizeof(OM)); memcpy(&CS, &other.CS, sizeof(CS)); memcpy(&CSUAVs, &other.CSUAVs, sizeof(CSUAVs)); Predicate = other.Predicate; PredicateValue = other.PredicateValue; m_ViewportScissorPartial = other.m_ViewportScissorPartial; AddRefs(); } D3D11RenderState::~D3D11RenderState() { ReleaseRefs(); } void D3D11RenderState::ReleaseRefs() { ReleaseRef(IA.IndexBuffer); ReleaseRef(IA.Layout); for(UINT i = 0; i < D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT; i++) ReleaseRef(IA.VBs[i]); Shader *stages[] = {&VS, &HS, &DS, &GS, &PS, &CS}; for(int s = 0; s < 6; s++) { Shader *sh = stages[s]; ReleaseRef(sh->Object); for(UINT i = 0; i < D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT; i++) ReleaseRef(sh->ConstantBuffers[i]); for(UINT i = 0; i < D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT; i++) ReleaseRef(sh->Samplers[i]); for(UINT i = 0; i < D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT; i++) 
ReleaseRef(sh->SRVs[i]); for(UINT i = 0; i < D3D11_SHADER_MAX_INTERFACES; i++) ReleaseRef(sh->Instances[i]); sh++; } for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) ReleaseRef(CSUAVs[i]); for(UINT i = 0; i < D3D11_SO_BUFFER_SLOT_COUNT; i++) ReleaseRef(SO.Buffers[i]); ReleaseRef(RS.State); ReleaseRef(OM.BlendState); ReleaseRef(OM.DepthStencilState); for(UINT i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) ReleaseRef(OM.RenderTargets[i]); for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) ReleaseRef(OM.UAVs[i]); ReleaseRef(OM.DepthView); ReleaseRef(Predicate); RDCEraseEl(IA); RDCEraseEl(VS); RDCEraseEl(HS); RDCEraseEl(DS); RDCEraseEl(GS); RDCEraseEl(SO); RDCEraseEl(RS); RDCEraseEl(PS); RDCEraseEl(OM); RDCEraseEl(CS); RDCEraseEl(CSUAVs); Predicate = NULL; } void D3D11RenderState::MarkReferenced(WrappedID3D11DeviceContext *ctx, bool initial) const { ctx->MarkResourceReferenced(GetIDForResource(IA.Layout), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetIDForResource(IA.IndexBuffer), initial ? eFrameRef_Unknown : eFrameRef_Read); for(UINT i = 0; i < D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT; i++) ctx->MarkResourceReferenced(GetIDForResource(IA.VBs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); const Shader *stages[] = {&VS, &HS, &DS, &GS, &PS, &CS}; for(int s = 0; s < 6; s++) { const Shader *sh = stages[s]; ctx->MarkResourceReferenced(GetIDForResource(sh->Object), initial ? eFrameRef_Unknown : eFrameRef_Read); for(UINT i = 0; i < D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT; i++) ctx->MarkResourceReferenced(GetIDForResource(sh->ConstantBuffers[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); for(UINT i = 0; i < D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT; i++) { if(sh->SRVs[i]) { ctx->MarkResourceReferenced(GetIDForResource(sh->SRVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetViewResourceResID(sh->SRVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); } } sh++; } for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) { if(CSUAVs[i]) { // UAVs we always assume to be partial updates ctx->MarkResourceReferenced(GetIDForResource(CSUAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetIDForResource(CSUAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Write); ctx->MarkResourceReferenced(GetViewResourceResID(CSUAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetViewResourceResID(CSUAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Write); } } for(UINT i = 0; i < D3D11_SO_BUFFER_SLOT_COUNT; i++) ctx->MarkResourceReferenced(GetIDForResource(SO.Buffers[i]), initial ? eFrameRef_Unknown : eFrameRef_Write); for(UINT i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(OM.RenderTargets[i]) { ctx->MarkResourceReferenced(GetIDForResource(OM.RenderTargets[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); if(m_ViewportScissorPartial) ctx->MarkResourceReferenced(GetViewResourceResID(OM.RenderTargets[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetViewResourceResID(OM.RenderTargets[i]), initial ? eFrameRef_Unknown : eFrameRef_Write); } } for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) { if(OM.UAVs[i]) { // UAVs we always assume to be partial updates ctx->MarkResourceReferenced(GetIDForResource(OM.UAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetIDForResource(OM.UAVs[i]), initial ? 
eFrameRef_Unknown : eFrameRef_Write); ctx->MarkResourceReferenced(GetViewResourceResID(OM.UAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetViewResourceResID(OM.UAVs[i]), initial ? eFrameRef_Unknown : eFrameRef_Write); } } if(OM.DepthView) { ctx->MarkResourceReferenced(GetIDForResource(OM.DepthView), initial ? eFrameRef_Unknown : eFrameRef_Read); if(m_ViewportScissorPartial) ctx->MarkResourceReferenced(GetViewResourceResID(OM.DepthView), initial ? eFrameRef_Unknown : eFrameRef_Read); ctx->MarkResourceReferenced(GetViewResourceResID(OM.DepthView), initial ? eFrameRef_Unknown : eFrameRef_Write); } if(Predicate) { ctx->MarkResourceReferenced(GetIDForResource(Predicate), initial ? eFrameRef_Unknown : eFrameRef_Read); } } void D3D11RenderState::CacheViewportPartial() { // tracks the min region of the enabled viewports plus scissors, to see if we could potentially // partially-update a render target (ie. we know for sure that we are only // writing to a region in one of the viewports). In this case we mark the // RT/DSV as read-write instead of just write, for initial state tracking. D3D11_RECT viewportScissorMin = {0, 0, 0xfffffff, 0xfffffff}; D3D11_RASTERIZER_DESC rsdesc; RDCEraseEl(rsdesc); rsdesc.ScissorEnable = FALSE; if(RS.State) RS.State->GetDesc(&rsdesc); for(UINT v = 0; v < RS.NumViews; v++) { D3D11_RECT scissor = {(LONG)RS.Viewports[v].TopLeftX, (LONG)RS.Viewports[v].TopLeftY, (LONG)RS.Viewports[v].Width, (LONG)RS.Viewports[v].Height}; // scissor (if set) is relative to matching viewport) if(v < RS.NumScissors && rsdesc.ScissorEnable) { scissor.left += RS.Scissors[v].left; scissor.top += RS.Scissors[v].top; scissor.right = RDCMIN(scissor.right, RS.Scissors[v].right - RS.Scissors[v].left); scissor.bottom = RDCMIN(scissor.bottom, RS.Scissors[v].bottom - RS.Scissors[v].top); } viewportScissorMin.left = RDCMAX(viewportScissorMin.left, scissor.left); viewportScissorMin.top = RDCMAX(viewportScissorMin.top, scissor.top); viewportScissorMin.right = RDCMIN(viewportScissorMin.right, scissor.right); viewportScissorMin.bottom = RDCMIN(viewportScissorMin.bottom, scissor.bottom); } m_ViewportScissorPartial = false; if(viewportScissorMin.left > 0 || viewportScissorMin.top > 0) { m_ViewportScissorPartial = true; } else { ID3D11Resource *res = NULL; if(OM.RenderTargets[0]) OM.RenderTargets[0]->GetResource(&res); else if(OM.DepthView) OM.DepthView->GetResource(&res); if(res) { D3D11_RESOURCE_DIMENSION dim; res->GetType(&dim); if(dim == D3D11_RESOURCE_DIMENSION_BUFFER) { // assume partial m_ViewportScissorPartial = true; } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE1D) { D3D11_TEXTURE1D_DESC desc; ((ID3D11Texture1D *)res)->GetDesc(&desc); if(viewportScissorMin.right < (LONG)desc.Width) m_ViewportScissorPartial = true; } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE2D) { D3D11_TEXTURE2D_DESC desc; ((ID3D11Texture2D *)res)->GetDesc(&desc); if(viewportScissorMin.right < (LONG)desc.Width || viewportScissorMin.bottom < (LONG)desc.Height) m_ViewportScissorPartial = true; } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE3D) { D3D11_TEXTURE3D_DESC desc; ((ID3D11Texture3D *)res)->GetDesc(&desc); if(viewportScissorMin.right < (LONG)desc.Width || viewportScissorMin.bottom < (LONG)desc.Height) m_ViewportScissorPartial = true; } } SAFE_RELEASE(res); } } void D3D11RenderState::AddRefs() { TakeRef(IA.IndexBuffer); TakeRef(IA.Layout); for(UINT i = 0; i < D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT; i++) TakeRef(IA.VBs[i]); Shader *stages[] = {&VS, &HS, &DS, &GS, &PS, &CS}; 
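// Take a reference on every live object in each shader stage; this mirrors the
// matching loop in ReleaseRefs() above, so the two must stay in sync when new
// pipeline state is added.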
for(int s = 0; s < 6; s++) { Shader *sh = stages[s]; TakeRef(sh->Object); for(UINT i = 0; i < D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT; i++) TakeRef(sh->ConstantBuffers[i]); for(UINT i = 0; i < D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT; i++) TakeRef(sh->Samplers[i]); for(UINT i = 0; i < D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT; i++) TakeRef(sh->SRVs[i]); for(UINT i = 0; i < D3D11_SHADER_MAX_INTERFACES; i++) TakeRef(sh->Instances[i]); sh++; } for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) TakeRef(CSUAVs[i]); for(UINT i = 0; i < D3D11_SO_BUFFER_SLOT_COUNT; i++) TakeRef(SO.Buffers[i]); TakeRef(RS.State); TakeRef(OM.BlendState); TakeRef(OM.DepthStencilState); for(UINT i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) TakeRef(OM.RenderTargets[i]); for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) TakeRef(OM.UAVs[i]); TakeRef(OM.DepthView); TakeRef(Predicate); } D3D11RenderState::D3D11RenderState(WrappedID3D11DeviceContext *context) { RDCEraseMem(this, sizeof(D3D11RenderState)); // IA context->IAGetInputLayout(&IA.Layout); context->IAGetPrimitiveTopology(&IA.Topo); context->IAGetVertexBuffers(0, D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT, IA.VBs, (UINT *)IA.Strides, (UINT *)IA.Offsets); context->IAGetIndexBuffer(&IA.IndexBuffer, &IA.IndexFormat, &IA.IndexOffset); // VS context->VSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, VS.SRVs); context->VSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, VS.Samplers); context->VSGetShader((ID3D11VertexShader **)&VS.Object, VS.Instances, &VS.NumInstances); // DS context->DSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, DS.SRVs); context->DSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, DS.Samplers); context->DSGetShader((ID3D11DomainShader **)&DS.Object, DS.Instances, &DS.NumInstances); // HS context->HSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, HS.SRVs); context->HSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, HS.Samplers); context->HSGetShader((ID3D11HullShader **)&HS.Object, HS.Instances, &HS.NumInstances); // GS context->GSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, GS.SRVs); context->GSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, GS.Samplers); context->GSGetShader((ID3D11GeometryShader **)&GS.Object, GS.Instances, &GS.NumInstances); context->SOGetTargets(D3D11_SO_BUFFER_SLOT_COUNT, SO.Buffers); // RS context->RSGetState(&RS.State); RDCEraseEl(RS.Viewports); RS.NumViews = D3D11_VIEWPORT_AND_SCISSORRECT_OBJECT_COUNT_PER_PIPELINE; context->RSGetViewports(&RS.NumViews, RS.Viewports); RDCEraseEl(RS.Scissors); RS.NumScissors = D3D11_VIEWPORT_AND_SCISSORRECT_OBJECT_COUNT_PER_PIPELINE; context->RSGetScissorRects(&RS.NumScissors, RS.Scissors); // CS context->CSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, CS.SRVs); if(context->IsFL11_1()) context->CSGetUnorderedAccessViews(0, D3D11_1_UAV_SLOT_COUNT, CSUAVs); else context->CSGetUnorderedAccessViews(0, D3D11_PS_CS_UAV_REGISTER_COUNT, CSUAVs); context->CSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, CS.Samplers); context->CSGetShader((ID3D11ComputeShader **)&CS.Object, CS.Instances, &CS.NumInstances); // PS context->PSGetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, PS.SRVs); context->PSGetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, PS.Samplers); context->PSGetShader((ID3D11PixelShader **)&PS.Object, PS.Instances, &PS.NumInstances); context->VSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, 
VS.ConstantBuffers, VS.CBOffsets, VS.CBCounts); context->DSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, DS.ConstantBuffers, DS.CBOffsets, DS.CBCounts); context->HSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, HS.ConstantBuffers, HS.CBOffsets, HS.CBCounts); context->GSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, GS.ConstantBuffers, GS.CBOffsets, GS.CBCounts); context->CSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, CS.ConstantBuffers, CS.CBOffsets, CS.CBCounts); context->PSGetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, PS.ConstantBuffers, PS.CBOffsets, PS.CBCounts); // OM context->OMGetBlendState(&OM.BlendState, OM.BlendFactor, &OM.SampleMask); context->OMGetDepthStencilState(&OM.DepthStencilState, &OM.StencRef); ID3D11RenderTargetView *tmpViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT]; context->OMGetRenderTargets(D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT, tmpViews, NULL); OM.UAVStartSlot = 0; for(int i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(tmpViews[i] != NULL) { OM.UAVStartSlot = i + 1; SAFE_RELEASE(tmpViews[i]); } } if(context->IsFL11_1()) context->OMGetRenderTargetsAndUnorderedAccessViews( OM.UAVStartSlot, OM.RenderTargets, &OM.DepthView, OM.UAVStartSlot, D3D11_1_UAV_SLOT_COUNT - OM.UAVStartSlot, OM.UAVs); else context->OMGetRenderTargetsAndUnorderedAccessViews( OM.UAVStartSlot, OM.RenderTargets, &OM.DepthView, OM.UAVStartSlot, D3D11_PS_CS_UAV_REGISTER_COUNT - OM.UAVStartSlot, OM.UAVs); context->GetPredication(&Predicate, &PredicateValue); } void D3D11RenderState::Clear() { ReleaseRefs(); OM.BlendFactor[0] = OM.BlendFactor[1] = OM.BlendFactor[2] = OM.BlendFactor[3] = 1.0f; OM.SampleMask = 0xffffffff; Predicate = NULL; PredicateValue = FALSE; for(size_t i = 0; i < ARRAY_COUNT(VS.CBCounts); i++) VS.CBCounts[i] = HS.CBCounts[i] = DS.CBCounts[i] = GS.CBCounts[i] = PS.CBCounts[i] = CS.CBCounts[i] = 4096; } bool D3D11RenderState::PredicationWouldPass() { if(Predicate == NULL) return true; BOOL data = TRUE; HRESULT hr = S_FALSE; do { hr = m_pDevice->GetImmediateContext()->GetData(Predicate, &data, sizeof(BOOL), 0); } while(hr == S_FALSE); RDCASSERTEQUAL(hr, S_OK); // From SetPredication for PredicateValue: // // "If TRUE, rendering will be affected by when the predicate's conditions are met. If FALSE, // rendering will be affected when the conditions are not met." // // Which is really confusingly worded. 'rendering will be affected' means 'no rendering will // happen', and 'conditions are met' for e.g. an occlusion query means that it passed. // Thus a passing occlusion query has value TRUE and is 'condition is met', so for a typical "skip // when occlusion query fails" the value will be FALSE. 
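// Worked truth table for the return below, pairing PredicateValue with the query
// result in 'data' (TRUE meaning the condition was met, e.g. an occlusion query passed):
//   PredicateValue=FALSE, data=TRUE  -> TRUE  (query passed, rendering happens)
//   PredicateValue=FALSE, data=FALSE -> FALSE (query failed, rendering skipped)
//   PredicateValue=TRUE,  data=TRUE  -> FALSE (condition met, rendering skipped)
//   PredicateValue=TRUE,  data=FALSE -> TRUE  (condition not met, rendering happens)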
return PredicateValue != data; } void D3D11RenderState::ApplyState(WrappedID3D11DeviceContext *context) { context->ClearState(); // IA context->IASetInputLayout(IA.Layout); context->IASetPrimitiveTopology(IA.Topo); context->IASetIndexBuffer(IA.IndexBuffer, IA.IndexFormat, IA.IndexOffset); context->IASetVertexBuffers(0, D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT, IA.VBs, IA.Strides, IA.Offsets); // VS context->VSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, VS.SRVs); context->VSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, VS.Samplers); context->VSSetShader((ID3D11VertexShader *)VS.Object, VS.Instances, VS.NumInstances); // DS context->DSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, DS.SRVs); context->DSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, DS.Samplers); context->DSSetShader((ID3D11DomainShader *)DS.Object, DS.Instances, DS.NumInstances); // HS context->HSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, HS.SRVs); context->HSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, HS.Samplers); context->HSSetShader((ID3D11HullShader *)HS.Object, HS.Instances, HS.NumInstances); // GS context->GSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, GS.SRVs); context->GSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, GS.Samplers); context->GSSetShader((ID3D11GeometryShader *)GS.Object, GS.Instances, GS.NumInstances); context->SOSetTargets(D3D11_SO_BUFFER_SLOT_COUNT, SO.Buffers, SO.Offsets); // RS context->RSSetState(RS.State); context->RSSetViewports(RS.NumViews, RS.Viewports); context->RSSetScissorRects(RS.NumScissors, RS.Scissors); UINT UAV_keepcounts[D3D11_1_UAV_SLOT_COUNT] = {(UINT)-1, (UINT)-1, (UINT)-1, (UINT)-1, (UINT)-1, (UINT)-1, (UINT)-1, (UINT)-1}; // CS context->CSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, CS.SRVs); context->CSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, CS.Samplers); if(context->IsFL11_1()) context->CSSetUnorderedAccessViews(0, D3D11_1_UAV_SLOT_COUNT, CSUAVs, UAV_keepcounts); else context->CSSetUnorderedAccessViews(0, D3D11_PS_CS_UAV_REGISTER_COUNT, CSUAVs, UAV_keepcounts); context->CSSetShader((ID3D11ComputeShader *)CS.Object, CS.Instances, CS.NumInstances); // PS context->PSSetShaderResources(0, D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT, PS.SRVs); context->PSSetSamplers(0, D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT, PS.Samplers); context->PSSetShader((ID3D11PixelShader *)PS.Object, PS.Instances, PS.NumInstances); context->VSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, VS.ConstantBuffers, VS.CBOffsets, VS.CBCounts); context->DSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, DS.ConstantBuffers, DS.CBOffsets, DS.CBCounts); context->HSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, HS.ConstantBuffers, HS.CBOffsets, HS.CBCounts); context->GSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, GS.ConstantBuffers, GS.CBOffsets, GS.CBCounts); context->CSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, CS.ConstantBuffers, CS.CBOffsets, CS.CBCounts); context->PSSetConstantBuffers1(0, D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT, PS.ConstantBuffers, PS.CBOffsets, PS.CBCounts); // OM context->OMSetBlendState(OM.BlendState, OM.BlendFactor, OM.SampleMask); context->OMSetDepthStencilState(OM.DepthStencilState, OM.StencRef); if(context->IsFL11_1()) context->OMSetRenderTargetsAndUnorderedAccessViews( OM.UAVStartSlot, 
OM.RenderTargets, OM.DepthView, OM.UAVStartSlot, D3D11_1_UAV_SLOT_COUNT - OM.UAVStartSlot, OM.UAVs, UAV_keepcounts); else context->OMSetRenderTargetsAndUnorderedAccessViews( OM.UAVStartSlot, OM.RenderTargets, OM.DepthView, OM.UAVStartSlot, D3D11_PS_CS_UAV_REGISTER_COUNT - OM.UAVStartSlot, OM.UAVs, UAV_keepcounts); context->SetPredication(Predicate, PredicateValue); } void D3D11RenderState::TakeRef(ID3D11DeviceChild *p) { if(p) { p->AddRef(); if(m_ImmediatePipeline) { if(WrappedID3D11RenderTargetView1::IsAlloc(p) || WrappedID3D11ShaderResourceView1::IsAlloc(p) || WrappedID3D11DepthStencilView::IsAlloc(p) || WrappedID3D11UnorderedAccessView1::IsAlloc(p)) m_pDevice->InternalRef(); m_pDevice->InternalRef(); // we can use any specialisation of device child here, as all that is templated // is the nested pointer type. Saves having another class in the inheritance // hierarchy :( ((WrappedDeviceChild11<ID3D11Buffer> *)p)->PipelineAddRef(); } } } void D3D11RenderState::ReleaseRef(ID3D11DeviceChild *p) { if(p) { p->Release(); if(m_ImmediatePipeline) { if(WrappedID3D11RenderTargetView1::IsAlloc(p) || WrappedID3D11ShaderResourceView1::IsAlloc(p) || WrappedID3D11DepthStencilView::IsAlloc(p) || WrappedID3D11UnorderedAccessView1::IsAlloc(p)) m_pDevice->InternalRelease(); m_pDevice->InternalRelease(); // see above ((WrappedDeviceChild11<ID3D11Buffer> *)p)->PipelineRelease(); } } } bool D3D11RenderState::IsRangeBoundForWrite(const ResourceRange &range) { for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) { if(CSUAVs[i] && range.Intersects(GetResourceRange(CSUAVs[i]))) { // RDCDEBUG("Resource was bound on CS UAV %u", i); return true; } } for(UINT i = 0; i < D3D11_SO_BUFFER_SLOT_COUNT; i++) { if(SO.Buffers[i] && range.Intersects(ResourceRange(SO.Buffers[i]))) { // RDCDEBUG("Resource was bound on SO buffer %u", i); return true; } } for(UINT i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(OM.RenderTargets[i] && range.Intersects(GetResourceRange(OM.RenderTargets[i]))) { // RDCDEBUG("Resource was bound on RTV %u", i); return true; } } for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) { if(OM.UAVs[i] && range.Intersects(GetResourceRange(OM.UAVs[i]))) { // RDCDEBUG("Resource was bound on OM UAV %d", i); return true; } } if(OM.DepthView) { const ResourceRange &depthRange = GetResourceRange(OM.DepthView); if(range.Intersects(depthRange)) { // RDCDEBUG("Resource was bound on OM DSV"); if(depthRange.IsDepthReadOnly() && depthRange.IsStencilReadOnly()) { // RDCDEBUG("but it's a readonly DSV, so that's fine"); } else if(depthRange.IsDepthReadOnly() && range.IsDepthReadOnly()) { // RDCDEBUG("but it's a depth readonly DSV and we're only reading depth, so that's fine"); } else if(depthRange.IsStencilReadOnly() && range.IsStencilReadOnly()) { // RDCDEBUG("but it's a stencil readonly DSV and we're only reading stencil, so that's OK"); } else { return true; } } } return false; } void D3D11RenderState::UnbindRangeForWrite(const ResourceRange &range) { for(UINT i = 0; i < D3D11_1_UAV_SLOT_COUNT; i++) { if(CSUAVs[i] && range.Intersects(GetResourceRange(CSUAVs[i]))) { ReleaseRef(CSUAVs[i]); CSUAVs[i] = NULL; } } for(UINT i = 0; i < D3D11_SO_BUFFER_SLOT_COUNT; i++) { if(SO.Buffers[i] && range.Intersects(ResourceRange(SO.Buffers[i]))) { ReleaseRef(SO.Buffers[i]); SO.Buffers[i] = NULL; } } for(UINT i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(OM.RenderTargets[i] && range.Intersects(GetResourceRange(OM.RenderTargets[i]))) { ReleaseRef(OM.RenderTargets[i]); OM.RenderTargets[i] = NULL; } } for(UINT i = 0; i < 
D3D11_1_UAV_SLOT_COUNT; i++) { if(OM.UAVs[i] && range.Intersects(GetResourceRange(OM.UAVs[i]))) { ReleaseRef(OM.UAVs[i]); OM.UAVs[i] = NULL; } } if(OM.DepthView && range.Intersects(GetResourceRange(OM.DepthView))) { ReleaseRef(OM.DepthView); OM.DepthView = NULL; } } void D3D11RenderState::UnbindRangeForRead(const ResourceRange &range) { for(int i = 0; i < D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT; i++) { if(IA.VBs[i] && range.Intersects(ResourceRange(IA.VBs[i]))) { // RDCDEBUG("Resource was bound on IA VB %u", i); ReleaseRef(IA.VBs[i]); IA.VBs[i] = NULL; } } if(IA.IndexBuffer && range.Intersects(ResourceRange(IA.IndexBuffer))) { // RDCDEBUG("Resource was bound on IA IB"); ReleaseRef(IA.IndexBuffer); IA.IndexBuffer = NULL; } // const char *names[] = { "VS", "DS", "HS", "GS", "PS", "CS" }; Shader *stages[] = {&VS, &HS, &DS, &GS, &PS, &CS}; for(int s = 0; s < 6; s++) { Shader *sh = stages[s]; for(UINT i = 0; i < D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT; i++) { if(sh->ConstantBuffers[i] && range.Intersects(ResourceRange(sh->ConstantBuffers[i]))) { // RDCDEBUG("Resource was bound on %s CB %u", names[s], i); ReleaseRef(sh->ConstantBuffers[i]); sh->ConstantBuffers[i] = NULL; } } for(UINT i = 0; i < D3D11_COMMONSHADER_INPUT_RESOURCE_SLOT_COUNT; i++) { if(!sh->SRVs[i]) continue; const ResourceRange &srvRange = GetResourceRange(sh->SRVs[i]); if(range.Intersects(srvRange)) { // RDCDEBUG("Resource was bound on %s SRV %u", names[s], i); if(range.IsDepthReadOnly() && srvRange.IsDepthReadOnly()) { // RDCDEBUG("but it's a depth readonly DSV and we're only reading depth, so that's fine"); } else if(range.IsStencilReadOnly() && srvRange.IsStencilReadOnly()) { // RDCDEBUG("but it's a stencil readonly DSV and we're only reading stenc, so that's OK"); } else { // RDCDEBUG("Unbinding."); ReleaseRef(sh->SRVs[i]); sh->SRVs[i] = NULL; } } } sh++; } } bool D3D11RenderState::ValidOutputMerger(ID3D11RenderTargetView *const RTs[], UINT NumRTs, ID3D11DepthStencilView *depth, ID3D11UnorderedAccessView *const uavs[], UINT NumUAVs) { D3D11_RENDER_TARGET_VIEW_DESC RTDescs[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT]; D3D11_DEPTH_STENCIL_VIEW_DESC DepthDesc; RDCEraseEl(RTDescs); RDCEraseEl(DepthDesc); ID3D11Resource *Resources[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = {0}; ID3D11Resource *DepthResource = NULL; D3D11_RESOURCE_DIMENSION renderdim[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = { D3D11_RESOURCE_DIMENSION_UNKNOWN}; D3D11_RESOURCE_DIMENSION depthdim = D3D11_RESOURCE_DIMENSION_UNKNOWN; for(UINT i = 0; RTs && i < NumRTs; i++) { if(RTs[i]) { RTs[i]->GetDesc(&RTDescs[i]); RTs[i]->GetResource(&Resources[i]); Resources[i]->GetType(&renderdim[i]); } } if(depth) { depth->GetDesc(&DepthDesc); depth->GetResource(&DepthResource); DepthResource->GetType(&depthdim); } bool valid = true; // check for duplicates and mark as invalid { ResourceRange rtvRanges[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = { ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, }; ResourceRange depthRange(depth); ResourceRange uavRanges[D3D11_1_UAV_SLOT_COUNT] = { ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, 
ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, ResourceRange::Null, }; for(UINT i = 0; RTs && i < NumRTs; i++) { if(RTs[i]) rtvRanges[i] = GetResourceRange(RTs[i]); else break; } if(depth) depthRange = GetResourceRange(depth); int numUAVs = 0; for(UINT i = 0; uavs && i < NumUAVs; i++) { if(uavs[i]) { uavRanges[i] = GetResourceRange(uavs[i]); numUAVs = i + 1; } } // since constants are low, just do naive check for any intersecting ranges for(int i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(rtvRanges[i].IsNull()) continue; // does it match any other RTV? for(int j = i + 1; j < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; j++) { if(rtvRanges[i].Intersects(rtvRanges[j])) { valid = false; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, StringFormat::Fmt("Invalid output merger - Render targets %d and %d overlap", i, j)); break; } } // or depth? if(rtvRanges[i].Intersects(depthRange)) { valid = false; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, StringFormat::Fmt("Invalid output merger - Render target %d and depth overlap", i)); break; } // or a UAV? for(int j = 0; j < numUAVs; j++) { if(rtvRanges[i].Intersects(uavRanges[j])) { valid = false; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, StringFormat::Fmt("Invalid output merger - Render target %d and UAV %d overlap", i, j)); break; } } } for(int i = 0; valid && i < numUAVs; i++) { if(uavRanges[i].IsNull()) continue; // don't have to check RTVs, that's the reflection of the above check // does it match depth? if(uavRanges[i].Intersects(depthRange)) { valid = false; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, StringFormat::Fmt("Invalid output merger - UAV %d and depth overlap", i)); break; } // or another UAV? 
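// (j starts at i + 1 so each conflicting UAV pair is reported exactly once;
// UAV-vs-RTV overlaps were already caught in the render target loop above)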
for(int j = i + 1; j < numUAVs; j++) { if(uavRanges[i].Intersects(uavRanges[j])) { valid = false; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, StringFormat::Fmt("Invalid output merger - UAVs %d and %d overlap", i, j)); break; } } } // don't have to check depth - it was checked against all RTs and UAVs above } ////////////////////////////////////////////////////////////////////////// // Resource dimensions of all views must be the same D3D11_RESOURCE_DIMENSION dim = D3D11_RESOURCE_DIMENSION_UNKNOWN; for(int i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(renderdim[i] == D3D11_RESOURCE_DIMENSION_UNKNOWN) continue; if(dim == D3D11_RESOURCE_DIMENSION_UNKNOWN) dim = renderdim[i]; if(renderdim[i] != dim) { valid = false; m_pDevice->AddDebugMessage(MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Invalid output merger - Render targets of different type"); break; } } if(depthdim != D3D11_RESOURCE_DIMENSION_UNKNOWN && dim != D3D11_RESOURCE_DIMENSION_UNKNOWN && depthdim != dim) { m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Invalid output merger - Render target(s) and depth target of different type"); valid = false; } if(!valid) { // RDCDEBUG("Resource dimensions don't match between render targets and/or depth stencil"); } else { // pretend all resources are 3D descs just to make the code simpler // * put arraysize for 1D/2D into the depth for 3D // * use sampledesc from 2d as it will be identical for 1D/3D D3D11_TEXTURE3D_DESC desc = {0}; D3D11_TEXTURE2D_DESC desc2 = {0}; for(int i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) { if(Resources[i] == NULL) continue; D3D11_TEXTURE1D_DESC d1 = {0}; D3D11_TEXTURE2D_DESC d2 = {0}; D3D11_TEXTURE3D_DESC d3 = {0}; if(dim == D3D11_RESOURCE_DIMENSION_BUFFER) { } if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE1D) { ((ID3D11Texture1D *)Resources[i])->GetDesc(&d1); d3.Width = RDCMAX(1U, d1.Width >> RTDescs[i].Texture1D.MipSlice); if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE1D) d3.Depth = 1; else if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE1DARRAY) d3.Depth = RDCMIN(d1.ArraySize, RTDescs[i].Texture1DArray.ArraySize); } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE2D) { ((ID3D11Texture2D *)Resources[i])->GetDesc(&d2); if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE2D) { d3.Width = RDCMAX(1U, d2.Width >> RTDescs[i].Texture2D.MipSlice); d3.Height = RDCMAX(1U, d2.Height >> RTDescs[i].Texture2D.MipSlice); d3.Depth = 1; } else if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE2DMS) { d3.Width = d2.Width; d3.Height = d2.Height; d3.Depth = 1; } else if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE2DARRAY) { d3.Width = RDCMAX(1U, d2.Width >> RTDescs[i].Texture2DArray.MipSlice); d3.Height = RDCMAX(1U, d2.Height >> RTDescs[i].Texture2DArray.MipSlice); d3.Depth = RDCMIN(d2.ArraySize, RTDescs[i].Texture2DArray.ArraySize); } else if(RTDescs[i].ViewDimension == D3D11_RTV_DIMENSION_TEXTURE2DMSARRAY) { d3.Width = d2.Width; d3.Height = d2.Height; d3.Depth = RDCMIN(d2.ArraySize, RTDescs[i].Texture2DMSArray.ArraySize); } } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE3D) { ((ID3D11Texture3D *)Resources[i])->GetDesc(&d3); d3.Width = RDCMAX(1U, d3.Width >> RTDescs[i].Texture3D.MipSlice); d3.Height = RDCMAX(1U, d3.Height >> RTDescs[i].Texture3D.MipSlice); d3.Depth = RDCMAX(1U, d3.Depth >> RTDescs[i].Texture3D.MipSlice); d3.Depth = 
RDCMIN(d3.Depth, RTDescs[i].Texture3D.WSize); } if(desc.Width == 0) { desc = d3; desc2 = d2; continue; } if(desc.Width != d3.Width || desc.Height != d3.Height || desc.Depth != d3.Depth || desc2.SampleDesc.Count != d2.SampleDesc.Count || desc2.SampleDesc.Quality != d2.SampleDesc.Quality) { m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Invalid output merger - Render targets are different dimensions"); valid = false; break; } } if(DepthResource && valid) { D3D11_TEXTURE1D_DESC d1 = {0}; D3D11_TEXTURE2D_DESC d2 = {0}; D3D11_TEXTURE3D_DESC d3 = {0}; if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE1D) { ((ID3D11Texture1D *)DepthResource)->GetDesc(&d1); d3.Width = RDCMAX(1U, d1.Width >> DepthDesc.Texture1D.MipSlice); if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE1D) d3.Depth = 1; else if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE1DARRAY) d3.Depth = RDCMIN(d1.ArraySize, DepthDesc.Texture1DArray.ArraySize); } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE2D) { ((ID3D11Texture2D *)DepthResource)->GetDesc(&d2); if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE2D) { d3.Width = RDCMAX(1U, d2.Width >> DepthDesc.Texture2D.MipSlice); d3.Height = RDCMAX(1U, d2.Height >> DepthDesc.Texture2D.MipSlice); d3.Depth = 1; } else if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE2DARRAY) { d3.Width = RDCMAX(1U, d2.Width >> DepthDesc.Texture2DArray.MipSlice); d3.Height = RDCMAX(1U, d2.Height >> DepthDesc.Texture2DArray.MipSlice); d3.Depth = RDCMIN(d2.ArraySize, DepthDesc.Texture2DArray.ArraySize); } else if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE2DMS) { d3.Width = d2.Width; d3.Height = d2.Height; d3.Depth = 1; } else if(DepthDesc.ViewDimension == D3D11_DSV_DIMENSION_TEXTURE2DMSARRAY) { d3.Width = d2.Width; d3.Height = d2.Height; d3.Depth = RDCMIN(d2.ArraySize, DepthDesc.Texture2DMSArray.ArraySize); } } else if(dim == D3D11_RESOURCE_DIMENSION_TEXTURE3D || dim == D3D11_RESOURCE_DIMENSION_BUFFER) { m_pDevice->AddDebugMessage(MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Invalid output merger - Depth target is Texture3D or Buffer " "(shouldn't be possible! 
How did you create this view?!)"); valid = false; } if(desc.Width != 0 && valid) { if(desc.Width != d3.Width || desc.Height != d3.Height || desc.Depth != d3.Depth || desc2.SampleDesc.Count != d2.SampleDesc.Count || desc2.SampleDesc.Quality != d2.SampleDesc.Quality) { valid = false; // explicitly allow over-sized depth targets if(desc.Width <= d3.Width && desc.Height <= d3.Height && desc.Depth <= d3.Depth && desc2.SampleDesc.Count == d2.SampleDesc.Count && desc2.SampleDesc.Quality == d2.SampleDesc.Quality) { valid = true; m_pDevice->AddDebugMessage( MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Valid but unusual output merger - Depth target is larger than render target(s)"); } else { m_pDevice->AddDebugMessage(MessageCategory::State_Setting, MessageSeverity::High, MessageSource::IncorrectAPIUse, "Invalid output merger - Depth target is different size or " "MS count to render target(s)"); } } } } } for(int i = 0; i < D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT; i++) SAFE_RELEASE(Resources[i]); SAFE_RELEASE(DepthResource); return valid;<|fim▁hole|>bool D3D11RenderState::InputAssembler::Used_VB(WrappedID3D11Device *device, uint32_t slot) const { if(Layout == NULL) return false; const vector<D3D11_INPUT_ELEMENT_DESC> &vec = device->GetLayoutDesc(Layout); for(size_t i = 0; i < vec.size(); i++) if(vec[i].InputSlot == slot) return true; return false; } bool D3D11RenderState::Shader::Used_CB(uint32_t slot) const { if(ConstantBuffers[slot] == NULL) return false; WrappedShader *shad = (WrappedShader *)(WrappedID3D11Shader<ID3D11VertexShader> *)Object; if(shad == NULL) return false; DXBC::DXBCFile *dxbc = shad->GetDXBC(); // have to assume it's used if there's no DXBC if(dxbc == NULL) return true; for(size_t i = 0; i < dxbc->m_CBuffers.size(); i++) if(dxbc->m_CBuffers[i].reg == slot) return true; return false; } bool D3D11RenderState::Shader::Used_SRV(uint32_t slot) const { if(SRVs[slot] == NULL) return false; WrappedShader *shad = (WrappedShader *)(WrappedID3D11Shader<ID3D11VertexShader> *)Object; if(shad == NULL) return false; DXBC::DXBCFile *dxbc = shad->GetDXBC(); // have to assume it's used if there's no DXBC if(dxbc == NULL) return true; for(const DXBC::ShaderInputBind &bind : dxbc->m_SRVs) if(bind.reg == slot) return true; return false; } bool D3D11RenderState::Shader::Used_UAV(uint32_t slot) const { WrappedShader *shad = (WrappedShader *)(WrappedID3D11Shader<ID3D11VertexShader> *)Object; if(shad == NULL) return false; DXBC::DXBCFile *dxbc = shad->GetDXBC(); // have to assume it's used if there's no DXBC if(dxbc == NULL) return true; for(const DXBC::ShaderInputBind &bind : dxbc->m_UAVs) if(bind.reg == slot) return true; return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11InputLayout *resource) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11Predicate *resource) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11ClassInstance *resource) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11DeviceChild *shader) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11SamplerState *resource) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11BlendState *state) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11RasterizerState *state) { return false; } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11DepthStencilState *state) { return false; } template <> bool 
D3D11RenderState::IsBoundForWrite(ID3D11Buffer *buffer) { if(buffer == NULL) return false; return IsRangeBoundForWrite(ResourceRange(buffer)); } template <> bool D3D11RenderState::IsBoundForWrite(ID3D11ShaderResourceView *srv) { if(srv == NULL) return false; return IsRangeBoundForWrite(GetResourceRange(srv)); } template <> void D3D11RenderState::UnbindForRead(ID3D11Buffer *buffer) { if(buffer == NULL) return; UnbindRangeForRead(ResourceRange(buffer)); } template <> void D3D11RenderState::UnbindForRead(ID3D11RenderTargetView *rtv) { if(rtv == NULL) return; UnbindRangeForRead(GetResourceRange(rtv)); } template <> void D3D11RenderState::UnbindForRead(ID3D11DepthStencilView *dsv) { if(dsv == NULL) return; const ResourceRange &dsvRange = GetResourceRange(dsv); if(dsvRange.IsDepthReadOnly() && dsvRange.IsStencilReadOnly()) { // don't need to. } else { UnbindRangeForRead(dsvRange); } } template <> void D3D11RenderState::UnbindForRead(ID3D11UnorderedAccessView *uav) { if(uav == NULL) return; UnbindRangeForRead(GetResourceRange(uav)); } template <> void D3D11RenderState::UnbindForWrite(ID3D11Buffer *buffer) { if(buffer == NULL) return; UnbindRangeForWrite(ResourceRange(buffer)); } template <> void D3D11RenderState::UnbindForWrite(ID3D11RenderTargetView *rtv) { if(rtv == NULL) return; UnbindRangeForWrite(GetResourceRange(rtv)); } template <> void D3D11RenderState::UnbindForWrite(ID3D11DepthStencilView *dsv) { if(dsv == NULL) return; UnbindRangeForWrite(GetResourceRange(dsv)); } template <> void D3D11RenderState::UnbindForWrite(ID3D11UnorderedAccessView *uav) { if(uav == NULL) return; UnbindRangeForWrite(GetResourceRange(uav)); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState::InputAssembler &el) { SERIALISE_MEMBER(Layout); SERIALISE_MEMBER(Topo); SERIALISE_MEMBER(VBs); SERIALISE_MEMBER(Strides); SERIALISE_MEMBER(Offsets); SERIALISE_MEMBER(IndexBuffer); SERIALISE_MEMBER(IndexFormat); SERIALISE_MEMBER(IndexOffset); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState::Shader &el) { SERIALISE_MEMBER(Object); SERIALISE_MEMBER(ConstantBuffers); SERIALISE_MEMBER(CBOffsets); SERIALISE_MEMBER(CBCounts); SERIALISE_MEMBER(SRVs); SERIALISE_MEMBER(Samplers); SERIALISE_MEMBER(Instances); SERIALISE_MEMBER(NumInstances); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState::StreamOut &el) { SERIALISE_MEMBER(Buffers); SERIALISE_MEMBER(Offsets); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState::Rasterizer &el) { SERIALISE_MEMBER(NumViews); SERIALISE_MEMBER(NumScissors); SERIALISE_MEMBER(Viewports); SERIALISE_MEMBER(Scissors); SERIALISE_MEMBER(State); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState::OutputMerger &el) { SERIALISE_MEMBER(DepthStencilState); SERIALISE_MEMBER(StencRef); SERIALISE_MEMBER(BlendState); SERIALISE_MEMBER(BlendFactor); SERIALISE_MEMBER(SampleMask); SERIALISE_MEMBER(DepthView); SERIALISE_MEMBER(RenderTargets); SERIALISE_MEMBER(UAVStartSlot); SERIALISE_MEMBER(UAVs); } template <class SerialiserType> void DoSerialise(SerialiserType &ser, D3D11RenderState &el) { SERIALISE_MEMBER(IA); SERIALISE_MEMBER(VS); SERIALISE_MEMBER(HS); SERIALISE_MEMBER(DS); SERIALISE_MEMBER(GS); SERIALISE_MEMBER(PS); SERIALISE_MEMBER(CS); SERIALISE_MEMBER(CSUAVs); SERIALISE_MEMBER(SO); SERIALISE_MEMBER(RS); SERIALISE_MEMBER(OM); SERIALISE_MEMBER(Predicate); SERIALISE_MEMBER(PredicateValue); if(ser.IsReading()) el.AddRefs(); } 
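// Editorial note: each DoSerialise above is templated on the serialiser type,
// so a single definition covers both the write and the read path;
// INSTANTIATE_SERIALISE_TYPE below stamps out the concrete instantiations.
// On the read path (ser.IsReading()), AddRefs() takes references on the
// freshly deserialised D3D11 objects so the render state keeps them alive,
// much as binding them on a live device context would.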
INSTANTIATE_SERIALISE_TYPE(D3D11RenderState);

D3D11RenderStateTracker::D3D11RenderStateTracker(WrappedID3D11DeviceContext *ctx)
    : m_RS(*ctx->GetCurrentPipelineState())
{
  m_pContext = ctx;
}

D3D11RenderStateTracker::~D3D11RenderStateTracker()
{
  m_RS.ApplyState(m_pContext);
}<|fim▁end|>
}
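// Editorial note on the output-merger validation above: rather than
// special-casing 1D/2D/3D targets, the checker folds every view into a
// pseudo Texture3D desc (array size stored in Depth, mip-adjusted
// Width/Height via RDCMAX(1U, dim >> MipSlice)) and then requires the
// descs to match across all RTVs and the DSV. The one deliberate
// exception is an over-sized depth target, which is reported as
// "valid but unusual" rather than invalid.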
<|file_name|>neon_node.rs<|end_file_name|><|fim▁begin|>use std::cmp; use std::collections::HashMap; use std::collections::{HashSet, VecDeque}; use std::convert::{TryFrom, TryInto}; use std::default::Default; use std::net::SocketAddr; use std::sync::mpsc::{sync_channel, Receiver, SyncSender, TrySendError}; use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, Mutex, }; use std::{thread, thread::JoinHandle}; use stacks::burnchains::{Burnchain, BurnchainParameters, Txid}; use stacks::chainstate::burn::db::sortdb::SortitionDB; use stacks::chainstate::burn::operations::{ leader_block_commit::{RewardSetInfo, BURN_BLOCK_MINED_AT_MODULUS}, BlockstackOperationType, LeaderBlockCommitOp, LeaderKeyRegisterOp, }; use stacks::chainstate::burn::BlockSnapshot; use stacks::chainstate::burn::ConsensusHash; use stacks::chainstate::coordinator::comm::CoordinatorChannels; use stacks::chainstate::coordinator::{get_next_recipients, OnChainRewardSetProvider}; use stacks::chainstate::stacks::db::unconfirmed::UnconfirmedTxMap; use stacks::chainstate::stacks::db::{ ChainStateBootData, ClarityTx, StacksChainState, MINER_REWARD_MATURITY, }; use stacks::chainstate::stacks::Error as ChainstateError; use stacks::chainstate::stacks::StacksPublicKey; use stacks::chainstate::stacks::{ miner::BlockBuilderSettings, miner::StacksMicroblockBuilder, StacksBlockBuilder, }; use stacks::chainstate::stacks::{ CoinbasePayload, StacksBlock, StacksMicroblock, StacksTransaction, StacksTransactionSigner, TransactionAnchorMode, TransactionPayload, TransactionVersion, }; use stacks::codec::StacksMessageCodec; use stacks::core::mempool::MemPoolDB; use stacks::core::FIRST_BURNCHAIN_CONSENSUS_HASH; use stacks::core::STACKS_EPOCH_2_05_MARKER; use stacks::cost_estimates::metrics::UnitMetric; use stacks::cost_estimates::UnitEstimator; use stacks::monitoring::{increment_stx_blocks_mined_counter, update_active_miners_count_gauge}; use stacks::net::{ atlas::{AtlasConfig, AtlasDB, AttachmentInstance}, db::{LocalPeer, PeerDB}, dns::DNSResolver, p2p::PeerNetwork, relay::Relayer, rpc::RPCHandlerArgs, Error as NetError, NetworkResult, PeerAddress, }; use stacks::types::chainstate::{ BlockHeaderHash, BurnchainHeaderHash, SortitionId, StacksAddress, StacksBlockHeader, VRFSeed, }; use stacks::util::get_epoch_time_ms; use stacks::util::get_epoch_time_secs; use stacks::util::hash::{to_hex, Hash160, Sha256Sum}; use stacks::util::secp256k1::Secp256k1PrivateKey; use stacks::util::strings::{UrlString, VecDisplay}; use stacks::util::vrf::VRFPublicKey; use stacks::vm::costs::ExecutionCost; use stacks::{burnchains::BurnchainSigner, chainstate::stacks::db::StacksHeaderInfo}; use crate::burnchains::bitcoin_regtest_controller::BitcoinRegtestController; use crate::run_loop::RegisteredKey; use crate::syncctl::PoxSyncWatchdogComms; use crate::ChainTip; use super::{BurnchainController, BurnchainTip, Config, EventDispatcher, Keychain}; use crate::stacks::vm::database::BurnStateDB; use stacks::monitoring; pub const RELAYER_MAX_BUFFER: usize = 100; struct AssembledAnchorBlock { parent_consensus_hash: ConsensusHash, my_burn_hash: BurnchainHeaderHash, anchored_block: StacksBlock, attempt: u64, } struct MicroblockMinerState { parent_consensus_hash: ConsensusHash, parent_block_hash: BlockHeaderHash, miner_key: Secp256k1PrivateKey, frequency: u64, last_mined: u128, quantity: u64, cost_so_far: ExecutionCost, settings: BlockBuilderSettings, } enum RelayerDirective { HandleNetResult(NetworkResult), ProcessTenure(ConsensusHash, BurnchainHeaderHash, BlockHeaderHash), 
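// Editorial note: directives flow one way, from the p2p thread to the
// relayer thread, over a bounded sync_channel (RELAYER_MAX_BUFFER
// above). ProcessTenure reacts to a sortition that may have picked one
// of our blocks; RunTenure (next variant) asks the relayer to assemble
// and commit a new block.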
RunTenure(RegisteredKey, BlockSnapshot, u128), // (vrf key, chain tip, time of issuance in ms) RegisterKey(BlockSnapshot), RunMicroblockTenure(u128), // time of issuance in ms Exit, } pub struct InitializedNeonNode { config: Config, relay_channel: SyncSender<RelayerDirective>, burnchain_signer: BurnchainSigner, last_burn_block: Option<BlockSnapshot>, is_miner: bool, pub atlas_config: AtlasConfig, leader_key_registration_state: LeaderKeyRegistrationState, pub p2p_thread_handle: JoinHandle<()>, pub relayer_thread_handle: JoinHandle<()>, } pub struct NeonGenesisNode { pub config: Config, keychain: Keychain, event_dispatcher: EventDispatcher, burnchain: Burnchain, } #[cfg(test)] type BlocksProcessedCounter = std::sync::Arc<std::sync::atomic::AtomicU64>; #[cfg(not(test))] type BlocksProcessedCounter = (); #[cfg(test)] fn bump_processed_counter(blocks_processed: &BlocksProcessedCounter) { blocks_processed.fetch_add(1, std::sync::atomic::Ordering::SeqCst); } #[cfg(not(test))] fn bump_processed_counter(_blocks_processed: &BlocksProcessedCounter) {} #[cfg(test)] fn set_processed_counter(blocks_processed: &BlocksProcessedCounter, value: u64) { blocks_processed.store(value, std::sync::atomic::Ordering::SeqCst); } #[cfg(not(test))] fn set_processed_counter(_blocks_processed: &BlocksProcessedCounter, _value: u64) {} enum Error { HeaderNotFoundForChainTip, WinningVtxNotFoundForChainTip, SnapshotNotFoundForChainTip, BurnchainTipChanged, } struct MiningTenureInformation { stacks_parent_header: StacksHeaderInfo, /// the consensus hash of the sortition that selected the Stacks block parent parent_consensus_hash: ConsensusHash, /// the burn block height of the sortition that selected the Stacks block parent parent_block_burn_height: u64, /// the total amount burned in the sortition that selected the Stacks block parent parent_block_total_burn: u64, parent_winning_vtxindex: u16, coinbase_nonce: u64, } /// Process artifacts from the tenure. /// At this point, we're modifying the chainstate, and merging the artifacts from the previous tenure. fn inner_process_tenure( anchored_block: &StacksBlock, consensus_hash: &ConsensusHash, parent_consensus_hash: &ConsensusHash, burn_db: &mut SortitionDB, chain_state: &mut StacksChainState, coord_comms: &CoordinatorChannels, ) -> Result<bool, ChainstateError> { let stacks_blocks_processed = coord_comms.get_stacks_blocks_processed(); if StacksChainState::has_stored_block( &chain_state.db(), &chain_state.blocks_path, consensus_hash, &anchored_block.block_hash(), )? 
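// editorial note: has_stored_block answers "did we already accept this
// block locally?", which keeps tenure processing idempotent if the
// same directive is ever delivered twice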
{ // already processed my tenure return Ok(true); } let ic = burn_db.index_conn(); // Preprocess the anchored block chain_state.preprocess_anchored_block( &ic, consensus_hash, &anchored_block, &parent_consensus_hash, 0, )?; if !coord_comms.announce_new_stacks_block() { return Ok(false); } if !coord_comms.wait_for_stacks_blocks_processed(stacks_blocks_processed, 15000) { warn!("ChainsCoordinator timed out while waiting for new stacks block to be processed"); } Ok(true) } fn inner_generate_coinbase_tx( keychain: &mut Keychain, nonce: u64, is_mainnet: bool, chain_id: u32, ) -> StacksTransaction { let mut tx_auth = keychain.get_transaction_auth().unwrap(); tx_auth.set_origin_nonce(nonce); let version = if is_mainnet { TransactionVersion::Mainnet } else { TransactionVersion::Testnet }; let mut tx = StacksTransaction::new( version, tx_auth, TransactionPayload::Coinbase(CoinbasePayload([0u8; 32])), ); tx.chain_id = chain_id; tx.anchor_mode = TransactionAnchorMode::OnChainOnly; let mut tx_signer = StacksTransactionSigner::new(&tx); keychain.sign_as_origin(&mut tx_signer); tx_signer.get_tx().unwrap() } fn inner_generate_poison_microblock_tx( keychain: &mut Keychain, nonce: u64, poison_payload: TransactionPayload, is_mainnet: bool, chain_id: u32, ) -> StacksTransaction { let mut tx_auth = keychain.get_transaction_auth().unwrap(); tx_auth.set_origin_nonce(nonce); let version = if is_mainnet { TransactionVersion::Mainnet } else { TransactionVersion::Testnet }; let mut tx = StacksTransaction::new(version, tx_auth, poison_payload); tx.chain_id = chain_id; tx.anchor_mode = TransactionAnchorMode::OnChainOnly; let mut tx_signer = StacksTransactionSigner::new(&tx); keychain.sign_as_origin(&mut tx_signer); tx_signer.get_tx().unwrap() } /// Constructs and returns a LeaderKeyRegisterOp out of the provided params fn inner_generate_leader_key_register_op( address: StacksAddress, vrf_public_key: VRFPublicKey, consensus_hash: &ConsensusHash, ) -> BlockstackOperationType { BlockstackOperationType::LeaderKeyRegister(LeaderKeyRegisterOp { public_key: vrf_public_key, memo: vec![], address, consensus_hash: consensus_hash.clone(), vtxindex: 0, txid: Txid([0u8; 32]), block_height: 0, burn_header_hash: BurnchainHeaderHash::zero(), }) } fn rotate_vrf_and_register( is_mainnet: bool, keychain: &mut Keychain, burn_block: &BlockSnapshot, btc_controller: &mut BitcoinRegtestController, ) -> bool { let vrf_pk = keychain.rotate_vrf_keypair(burn_block.block_height); let burnchain_tip_consensus_hash = &burn_block.consensus_hash; let op = inner_generate_leader_key_register_op( keychain.get_address(is_mainnet), vrf_pk, burnchain_tip_consensus_hash, ); let mut one_off_signer = keychain.generate_op_signer(); btc_controller.submit_operation(op, &mut one_off_signer, 1) } /// Constructs and returns a LeaderBlockCommitOp out of the provided params fn inner_generate_block_commit_op( sender: BurnchainSigner, block_header_hash: BlockHeaderHash, burn_fee: u64, key: &RegisteredKey, parent_burnchain_height: u32, parent_winning_vtx: u16, vrf_seed: VRFSeed, commit_outs: Vec<StacksAddress>, sunset_burn: u64, current_burn_height: u64, ) -> BlockstackOperationType { let (parent_block_ptr, parent_vtxindex) = (parent_burnchain_height, parent_winning_vtx); let burn_parent_modulus = (current_burn_height % BURN_BLOCK_MINED_AT_MODULUS) as u8; BlockstackOperationType::LeaderBlockCommit(LeaderBlockCommitOp { sunset_burn, block_header_hash, burn_fee, input: (Txid([0; 32]), 0), apparent_sender: sender, key_block_ptr: key.block_height as u32, key_vtxindex: 
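// (editorial note: key_block_ptr / key_vtxindex point back at the
//  LeaderKeyRegister op on the burnchain -- a commit is only valid if
//  it references a previously registered VRF key)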
key.op_vtxindex as u16, memo: vec![STACKS_EPOCH_2_05_MARKER], new_seed: vrf_seed, parent_block_ptr, parent_vtxindex, vtxindex: 0, txid: Txid([0u8; 32]), block_height: 0, burn_header_hash: BurnchainHeaderHash::zero(), burn_parent_modulus, commit_outs, }) } /// Mine and broadcast a single microblock, unconditionally. fn mine_one_microblock( microblock_state: &mut MicroblockMinerState, sortdb: &SortitionDB, chainstate: &mut StacksChainState, mempool: &mut MemPoolDB, ) -> Result<StacksMicroblock, ChainstateError> { debug!( "Try to mine one microblock off of {}/{} (total: {})", &microblock_state.parent_consensus_hash, &microblock_state.parent_block_hash, chainstate .unconfirmed_state .as_ref() .map(|us| us.num_microblocks()) .unwrap_or(0) ); let mint_result = { let ic = sortdb.index_conn(); let mut microblock_miner = match StacksMicroblockBuilder::resume_unconfirmed( chainstate, &ic, &microblock_state.cost_so_far, microblock_state.settings.clone(), ) { Ok(x) => x, Err(e) => { let msg = format!( "Failed to create a microblock miner at chaintip {}/{}: {:?}", &microblock_state.parent_consensus_hash, &microblock_state.parent_block_hash, &e ); error!("{}", msg); return Err(e); } }; let t1 = get_epoch_time_ms(); let mblock = microblock_miner.mine_next_microblock(mempool, &microblock_state.miner_key)?; let new_cost_so_far = microblock_miner.get_cost_so_far().expect("BUG: cannot read cost so far from miner -- indicates that the underlying Clarity Tx is somehow in use still."); let t2 = get_epoch_time_ms(); info!( "Mined microblock {} ({}) with {} transactions in {}ms", mblock.block_hash(), mblock.header.sequence, mblock.txs.len(), t2.saturating_sub(t1) ); Ok((mblock, new_cost_so_far)) }; let (mined_microblock, new_cost) = match mint_result { Ok(x) => x, Err(e) => { warn!("Failed to mine microblock: {}", e); return Err(e); } }; // preprocess the microblock locally chainstate.preprocess_streamed_microblock( &microblock_state.parent_consensus_hash, &microblock_state.parent_block_hash, &mined_microblock, )?; // update unconfirmed state cost microblock_state.cost_so_far = new_cost; microblock_state.quantity += 1; return Ok(mined_microblock); } fn try_mine_microblock( config: &Config, microblock_miner_state: &mut Option<MicroblockMinerState>, chainstate: &mut StacksChainState, sortdb: &SortitionDB, mem_pool: &mut MemPoolDB, winning_tip: (ConsensusHash, BlockHeaderHash, Secp256k1PrivateKey), ) -> Result<Option<StacksMicroblock>, NetError> { let ch = winning_tip.0; let bhh = winning_tip.1; let microblock_privkey = winning_tip.2; let mut next_microblock = None; if microblock_miner_state.is_none() { debug!( "Instantiate microblock mining state off of {}/{}", &ch, &bhh ); // we won a block! proceed to build a microblock tail if we've stored it match StacksChainState::get_anchored_block_header_info(chainstate.db(), &ch, &bhh) { Ok(Some(_)) => { let parent_index_hash = StacksBlockHeader::make_index_block_hash(&ch, &bhh); let cost_so_far = StacksChainState::get_stacks_block_anchored_cost( chainstate.db(), &parent_index_hash, )? .ok_or(NetError::NotFoundError)?; microblock_miner_state.replace(MicroblockMinerState { parent_consensus_hash: ch.clone(), parent_block_hash: bhh.clone(), miner_key: microblock_privkey.clone(), frequency: config.node.microblock_frequency, last_mined: 0, quantity: 0, cost_so_far: cost_so_far, settings: config.make_block_builder_settings(2), }); } Ok(None) => { warn!( "No such anchored block: {}/{}. 
Cannot mine microblocks", ch, bhh ); } Err(e) => { warn!( "Failed to get anchored block cost for {}/{}: {:?}", ch, bhh, &e ); } } } if let Some(mut microblock_miner) = microblock_miner_state.take() { if microblock_miner.parent_consensus_hash == ch && microblock_miner.parent_block_hash == bhh { if microblock_miner.last_mined + (microblock_miner.frequency as u128) < get_epoch_time_ms() { // opportunistically try and mine, but only if there are no attachable blocks in // recent history (i.e. in the last 10 minutes) let num_attachable = StacksChainState::count_attachable_staging_blocks( chainstate.db(), 1, get_epoch_time_secs() - 600, )?; if num_attachable == 0 { match mine_one_microblock(&mut microblock_miner, sortdb, chainstate, mem_pool) { Ok(microblock) => { // will need to relay this next_microblock = Some(microblock); } Err(ChainstateError::NoTransactionsToMine) => { info!("Will keep polling mempool for transactions to include in a microblock"); } Err(e) => { warn!("Failed to mine one microblock: {:?}", &e); } } } else { debug!("Will not mine microblocks yet -- have {} attachable blocks that arrived in the last 10 minutes", num_attachable); } } microblock_miner.last_mined = get_epoch_time_ms(); microblock_miner_state.replace(microblock_miner); } // otherwise, we're not the sortition winner, and the microblock miner state can be // discarded. } Ok(next_microblock) } fn run_microblock_tenure( config: &Config, microblock_miner_state: &mut Option<MicroblockMinerState>, chainstate: &mut StacksChainState, sortdb: &mut SortitionDB, mem_pool: &mut MemPoolDB, relayer: &mut Relayer, miner_tip: (ConsensusHash, BlockHeaderHash, Secp256k1PrivateKey), microblocks_processed: BlocksProcessedCounter, event_dispatcher: &EventDispatcher, ) { // TODO: this is sensitive to poll latency -- can we call this on a fixed // schedule, regardless of network activity? let parent_consensus_hash = &miner_tip.0; let parent_block_hash = &miner_tip.1; debug!( "Run microblock tenure for {}/{}", parent_consensus_hash, parent_block_hash ); // Mine microblocks, if we're active let next_microblock_opt = match try_mine_microblock( &config, microblock_miner_state, chainstate, sortdb, mem_pool, miner_tip.clone(), ) { Ok(x) => x, Err(e) => { warn!("Failed to mine next microblock: {:?}", &e); None } }; // did we mine anything? if let Some(next_microblock) = next_microblock_opt { // apply it let microblock_hash = next_microblock.block_hash(); let processed_unconfirmed_state = Relayer::refresh_unconfirmed(chainstate, sortdb); let num_mblocks = chainstate .unconfirmed_state .as_ref() .map(|ref unconfirmed| unconfirmed.num_microblocks()) .unwrap_or(0); info!( "Mined one microblock: {} seq {} (total processed: {})", &microblock_hash, next_microblock.header.sequence, num_mblocks ); set_processed_counter(&microblocks_processed, num_mblocks); let parent_index_block_hash = StacksBlockHeader::make_index_block_hash(parent_consensus_hash, parent_block_hash); event_dispatcher .process_new_microblocks(parent_index_block_hash, processed_unconfirmed_state); // send it off if let Err(e) = relayer.broadcast_microblock(parent_consensus_hash, parent_block_hash, next_microblock) { error!( "Failure trying to broadcast microblock {}: {}", microblock_hash, e ); } } } /// Grant the p2p thread a copy of the unconfirmed microblock transaction list, so it can serve it /// out via the unconfirmed transaction API. /// Not the prettiest way to do this, but the least disruptive way to do this. 
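// Editorial note: the Arc<Mutex<UnconfirmedTxMap>> threaded through the
// two functions below is the only state shared between the relayer
// thread (which mines, refreshes unconfirmed state, then sends) and the
// p2p thread (which receives before serving RPC reads).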
fn send_unconfirmed_txs( chainstate: &StacksChainState, unconfirmed_txs: Arc<Mutex<UnconfirmedTxMap>>, ) { if let Some(ref unconfirmed) = chainstate.unconfirmed_state { match unconfirmed_txs.lock() { Ok(mut txs) => { txs.clear(); txs.extend(unconfirmed.mined_txs.clone()); } Err(e) => { // can only happen due to a thread panic in the relayer error!("FATAL: unconfirmed tx arc mutex is poisoned: {:?}", &e); panic!(); } }; } } /// Have the p2p thread receive unconfirmed txs fn recv_unconfirmed_txs( chainstate: &mut StacksChainState, unconfirmed_txs: Arc<Mutex<UnconfirmedTxMap>>, ) { if let Some(ref mut unconfirmed) = chainstate.unconfirmed_state { match unconfirmed_txs.lock() { Ok(txs) => { unconfirmed.mined_txs.clear(); unconfirmed.mined_txs.extend(txs.clone()); } Err(e) => { // can only happen due to a thread panic in the relayer error!("FATAL: unconfirmed arc mutex is poisoned: {:?}", &e); panic!(); } }; } } fn spawn_peer( is_mainnet: bool, mut this: PeerNetwork, p2p_sock: &SocketAddr, rpc_sock: &SocketAddr, config: Config, poll_timeout: u64, relay_channel: SyncSender<RelayerDirective>, mut sync_comms: PoxSyncWatchdogComms, attachments_rx: Receiver<HashSet<AttachmentInstance>>, unconfirmed_txs: Arc<Mutex<UnconfirmedTxMap>>, event_observer: EventDispatcher, should_keep_running: Arc<AtomicBool>, ) -> Result<JoinHandle<()>, NetError> { let burn_db_path = config.get_burn_db_file_path(); let stacks_chainstate_path = config.get_chainstate_path_str(); let exit_at_block_height = config.burnchain.process_exit_at_block_height; this.bind(p2p_sock, rpc_sock).unwrap(); let (mut dns_resolver, mut dns_client) = DNSResolver::new(10); let sortdb = SortitionDB::open(&burn_db_path, false).map_err(NetError::DBError)?; let (mut chainstate, _) = StacksChainState::open( is_mainnet, config.burnchain.chain_id, &stacks_chainstate_path, ) .map_err(|e| NetError::ChainstateError(e.to_string()))?; // buffer up blocks to store without stalling the p2p thread let mut results_with_data = VecDeque::new(); let server_thread = thread::Builder::new() .name("p2p".to_string()) .spawn(move || { let cost_estimator = config .make_cost_estimator() .unwrap_or_else(|| Box::new(UnitEstimator)); let metric = config .make_cost_metric() .unwrap_or_else(|| Box::new(UnitMetric)); let mut mem_pool = MemPoolDB::open( is_mainnet, config.burnchain.chain_id, &stacks_chainstate_path, cost_estimator, metric, ) .expect("Database failure opening mempool"); // create estimators, metric instances for RPC handler let cost_estimator = config .make_cost_estimator() .unwrap_or_else(|| Box::new(UnitEstimator)); let metric = config .make_cost_metric() .unwrap_or_else(|| Box::new(UnitMetric)); let fee_estimator = config.make_fee_estimator(); let handler_args = RPCHandlerArgs { exit_at_block_height: exit_at_block_height.as_ref(), genesis_chainstate_hash: Sha256Sum::from_hex(stx_genesis::GENESIS_CHAINSTATE_HASH) .unwrap(), event_observer: Some(&event_observer), cost_estimator: Some(cost_estimator.as_ref()), cost_metric: Some(metric.as_ref()), fee_estimator: fee_estimator.as_ref().map(|x| x.as_ref()), ..RPCHandlerArgs::default() }; let mut num_p2p_state_machine_passes = 0; let mut num_inv_sync_passes = 0; let mut num_download_passes = 0; let mut mblock_deadline = 0; while should_keep_running.load(Ordering::SeqCst) { // initial block download? 
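// (editorial aside: further down, this loop drains `results_with_data`
//  into the bounded relayer channel with try_send, re-queueing on Full
//  and dropping only the periodic microblock directive. A minimal
//  standalone sketch of that pattern, with a hypothetical Directive
//  type in place of the node's RelayerDirective:)
fn _relay_backpressure_sketch() {
    use std::collections::VecDeque;
    use std::sync::mpsc::{sync_channel, TrySendError};

    enum Directive {
        HandleResult(u64),   // stand-in for blocks/microblocks/txs -- must not be lost
        RunMicroblockTenure, // periodic; safe to drop, it will be rescheduled
    }

    let (tx, _rx) = sync_channel::<Directive>(2); // tiny bound, to force Full
    let mut buffered: VecDeque<Directive> = VecDeque::new();
    buffered.push_back(Directive::HandleResult(1));
    buffered.push_back(Directive::RunMicroblockTenure);
    buffered.push_back(Directive::HandleResult(2));

    while let Some(next) = buffered.pop_front() {
        if let Err(e) = tx.try_send(next) {
            match e {
                // channel full: the droppable directive is simply discarded
                TrySendError::Full(Directive::RunMicroblockTenure) => break,
                TrySendError::Full(other) => {
                    buffered.push_front(other); // keep the data, retry next pass
                    break;
                }
                // receiver hung up: stop forwarding entirely
                TrySendError::Disconnected(_) => break,
            }
        }
    }
}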
let ibd = sync_comms.get_ibd(); let download_backpressure = results_with_data.len() > 0; let poll_ms = if !download_backpressure && this.has_more_downloads() { // keep getting those blocks -- drive the downloader state-machine debug!( "P2P: backpressure: {}, more downloads: {}", download_backpressure, this.has_more_downloads() ); 1 } else { cmp::min(poll_timeout, config.node.microblock_frequency) }; <|fim▁hole|> Ok(expected_attachments) => expected_attachments, _ => { debug!("Atlas: attachment channel is empty"); HashSet::new() } }; let _ = Relayer::setup_unconfirmed_state_readonly(&mut chainstate, &sortdb); recv_unconfirmed_txs(&mut chainstate, unconfirmed_txs.clone()); match this.run( &sortdb, &mut chainstate, &mut mem_pool, Some(&mut dns_client), download_backpressure, ibd, poll_ms, &handler_args, &mut expected_attachments, ) { Ok(network_result) => { if num_p2p_state_machine_passes < network_result.num_state_machine_passes { // p2p state-machine did a full pass. Notify anyone listening. sync_comms.notify_p2p_state_pass(); num_p2p_state_machine_passes = network_result.num_state_machine_passes; } if num_inv_sync_passes < network_result.num_inv_sync_passes { // inv-sync state-machine did a full pass. Notify anyone listening. sync_comms.notify_inv_sync_pass(); num_inv_sync_passes = network_result.num_inv_sync_passes; } if num_download_passes < network_result.num_download_passes { // download state-machine did a full pass. Notify anyone listening. sync_comms.notify_download_pass(); num_download_passes = network_result.num_download_passes; } if network_result.has_data_to_store() { results_with_data .push_back(RelayerDirective::HandleNetResult(network_result)); } // only do this on the Ok() path, even if we're mining, because an error in // network dispatching is likely due to resource exhaustion if mblock_deadline < get_epoch_time_ms() { debug!("P2P: schedule microblock tenure"); results_with_data.push_back(RelayerDirective::RunMicroblockTenure( get_epoch_time_ms(), )); mblock_deadline = get_epoch_time_ms() + (config.node.microblock_frequency as u128); } } Err(e) => { error!("P2P: Failed to process network dispatch: {:?}", &e); if config.is_node_event_driven() { panic!(); } } }; while let Some(next_result) = results_with_data.pop_front() { // have blocks, microblocks, and/or transactions (don't care about anything else), // or a directive to mine microblocks if let Err(e) = relay_channel.try_send(next_result) { debug!( "P2P: {:?}: download backpressure detected", &this.local_peer ); match e { TrySendError::Full(directive) => { if let RelayerDirective::RunMicroblockTenure(_) = directive { // can drop this } else { // don't lose this data -- just try it again results_with_data.push_front(directive); } break; } TrySendError::Disconnected(_) => { info!("P2P: Relayer hang up with p2p channel"); should_keep_running.store(false, Ordering::SeqCst); break; } } } else { debug!("P2P: Dispatched result to Relayer!"); } } } relay_channel.try_send(RelayerDirective::Exit).unwrap(); debug!("P2P thread exit!"); }) .unwrap(); let _jh = thread::Builder::new() .name("dns-resolver".to_string()) .spawn(move || { dns_resolver.thread_main(); }) .unwrap(); Ok(server_thread) } fn spawn_miner_relayer( is_mainnet: bool, chain_id: u32, mut relayer: Relayer, local_peer: LocalPeer, config: Config, mut keychain: Keychain, burn_db_path: String, stacks_chainstate_path: String, relay_channel: Receiver<RelayerDirective>, event_dispatcher: EventDispatcher, blocks_processed: BlocksProcessedCounter, microblocks_processed: 
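// (test-only counter: an AtomicU64 under cfg(test), the unit type
//  otherwise -- see BlocksProcessedCounter above)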
BlocksProcessedCounter, burnchain: Burnchain, coord_comms: CoordinatorChannels, unconfirmed_txs: Arc<Mutex<UnconfirmedTxMap>>, ) -> Result<JoinHandle<()>, NetError> { // Note: the chainstate coordinator is *the* block processor, it is responsible for writes to // the chainstate -- eventually, no other codepaths should be writing to it. // // the relayer _should not_ be modifying the sortdb, // however, it needs a mut reference to create read TXs. // should address via #1449 let mut sortdb = SortitionDB::open(&burn_db_path, true).map_err(NetError::DBError)?; let (mut chainstate, _) = StacksChainState::open(is_mainnet, chain_id, &stacks_chainstate_path) .map_err(|e| NetError::ChainstateError(e.to_string()))?; let mut last_mined_blocks: HashMap< BurnchainHeaderHash, Vec<(AssembledAnchorBlock, Secp256k1PrivateKey)>, > = HashMap::new(); let burn_fee_cap = config.burnchain.burn_fee_cap; let mut bitcoin_controller = BitcoinRegtestController::new_dummy(config.clone()); let mut microblock_miner_state: Option<MicroblockMinerState> = None; let mut miner_tip = None; // only set if we won the last sortition let mut last_microblock_tenure_time = 0; let mut last_tenure_issue_time = 0; let relayer_handle = thread::Builder::new().name("relayer".to_string()).spawn(move || { let cost_estimator = config.make_cost_estimator() .unwrap_or_else(|| Box::new(UnitEstimator)); let metric = config.make_cost_metric() .unwrap_or_else(|| Box::new(UnitMetric)); let mut mem_pool = MemPoolDB::open(is_mainnet, chain_id, &stacks_chainstate_path, cost_estimator, metric) .expect("Database failure opening mempool"); while let Ok(mut directive) = relay_channel.recv() { match directive { RelayerDirective::HandleNetResult(ref mut net_result) => { debug!("Relayer: Handle network result"); let net_receipts = relayer .process_network_result( &local_peer, net_result, &mut sortdb, &mut chainstate, &mut mem_pool, Some(&coord_comms), Some(&event_dispatcher), ) .expect("BUG: failure processing network results"); let mempool_txs_added = net_receipts.mempool_txs_added.len(); if mempool_txs_added > 0 { event_dispatcher.process_new_mempool_txs(net_receipts.mempool_txs_added); } let num_unconfirmed_microblock_tx_receipts = net_receipts.processed_unconfirmed_state.receipts.len(); if num_unconfirmed_microblock_tx_receipts > 0 { if let Some(unconfirmed_state) = chainstate.unconfirmed_state.as_ref() { let canonical_tip = unconfirmed_state.confirmed_chain_tip.clone(); event_dispatcher.process_new_microblocks(canonical_tip, net_receipts.processed_unconfirmed_state); } else { warn!("Relayer: oops, unconfirmed state is uninitialized but there are microblock events"); } } // Dispatch retrieved attachments, if any. if net_result.has_attachments() { event_dispatcher.process_new_attachments(&net_result.attachments); } // synchronize unconfirmed tx index to p2p thread send_unconfirmed_txs(&chainstate, unconfirmed_txs.clone()); } RelayerDirective::ProcessTenure(consensus_hash, burn_hash, block_header_hash) => { debug!( "Relayer: Process tenure {}/{} in {}", &consensus_hash, &block_header_hash, &burn_hash ); if let Some(last_mined_blocks_at_burn_hash) = last_mined_blocks.remove(&burn_hash) { for (last_mined_block, microblock_privkey) in last_mined_blocks_at_burn_hash.into_iter() { let AssembledAnchorBlock { parent_consensus_hash, anchored_block: mined_block, my_burn_hash: mined_burn_hash, attempt: _, } = last_mined_block; if mined_block.block_hash() == block_header_hash && burn_hash == mined_burn_hash { // we won! 
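// editorial note: "won" is established by matching both halves --
// the sortition's winning Stacks block hash against a block this
// node assembled, and the burn block hash against the tip that
// block was mined for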
let reward_block_height = mined_block.header.total_work.work + MINER_REWARD_MATURITY; info!("Won sortition! Mining reward will be received in {} blocks (block #{})", MINER_REWARD_MATURITY, reward_block_height); debug!("Won sortition!"; "stacks_header" => %block_header_hash, "burn_hash" => %mined_burn_hash, ); increment_stx_blocks_mined_counter(); match inner_process_tenure( &mined_block, &consensus_hash, &parent_consensus_hash, &mut sortdb, &mut chainstate, &coord_comms, ) { Ok(coordinator_running) => { if !coordinator_running { warn!( "Coordinator stopped, stopping relayer thread..." ); return; } } Err(e) => { warn!( "Error processing my tenure, bad block produced: {}", e ); warn!( "Bad block"; "stacks_header" => %block_header_hash, "data" => %to_hex(&mined_block.serialize_to_vec()), ); continue; } }; // advertize _and_ push blocks for now let blocks_available = Relayer::load_blocks_available_data( &sortdb, vec![consensus_hash.clone()], ) .expect("Failed to obtain block information for a block we mined."); if let Err(e) = relayer.advertize_blocks(blocks_available) { warn!("Failed to advertise new block: {}", e); } let snapshot = SortitionDB::get_block_snapshot_consensus( sortdb.conn(), &consensus_hash, ) .expect("Failed to obtain snapshot for block") .expect("Failed to obtain snapshot for block"); if !snapshot.pox_valid { warn!( "Snapshot for {} is no longer valid; discarding {}...", &consensus_hash, &mined_block.block_hash() ); miner_tip = None; } else { let ch = snapshot.consensus_hash.clone(); let bh = mined_block.block_hash(); if let Err(e) = relayer .broadcast_block(snapshot.consensus_hash, mined_block) { warn!("Failed to push new block: {}", e); } // proceed to mine microblocks debug!( "Microblock miner tip is now {}/{} ({})", &consensus_hash, &block_header_hash, StacksBlockHeader::make_index_block_hash(&consensus_hash, &block_header_hash) ); miner_tip = Some((ch, bh, microblock_privkey)); Relayer::refresh_unconfirmed(&mut chainstate, &mut sortdb); send_unconfirmed_txs(&chainstate, unconfirmed_txs.clone()); } } else { debug!("Did not win sortition, my blocks [burn_hash= {}, block_hash= {}], their blocks [parent_consenus_hash= {}, burn_hash= {}, block_hash ={}]", mined_burn_hash, mined_block.block_hash(), parent_consensus_hash, burn_hash, block_header_hash); miner_tip = None; } } } } RelayerDirective::RunTenure(registered_key, last_burn_block, issue_timestamp_ms) => { if last_tenure_issue_time > issue_timestamp_ms { // coalesce -- stale continue; } let burn_header_hash = last_burn_block.burn_header_hash.clone(); let burn_chain_sn = SortitionDB::get_canonical_burn_chain_tip(sortdb.conn()) .expect("FATAL: failed to query sortition DB for canonical burn chain tip"); let burn_chain_tip = burn_chain_sn .burn_header_hash .clone(); let mut burn_tenure_snapshot = last_burn_block.clone(); if burn_chain_tip == burn_header_hash { // no burnchain change, so only re-run block tenure every so often in order // to give microblocks a chance to collect if issue_timestamp_ms < last_tenure_issue_time + (config.node.wait_time_for_microblocks as u128) { debug!("Relayer: will NOT run tenure since issuance is too fresh"); continue; } } else { // burnchain has changed since this directive was sent, so mine immediately burn_tenure_snapshot = burn_chain_sn; if issue_timestamp_ms + (config.node.wait_time_for_microblocks as u128) < get_epoch_time_ms() { // still waiting for microblocks to arrive continue; } debug!("Relayer: burnchain has advanced from {} to {}", &burn_header_hash, &burn_chain_tip); } debug!( 
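// editorial note: the two branches above throttle tenure re-runs --
// on an unchanged burnchain tip, re-mining waits out
// wait_time_for_microblocks so the tenure can confirm more
// microblocks; on a changed tip, the directive re-targets the newer
// snapshot instead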
"Relayer: Run tenure"; "height" => last_burn_block.block_height, "burn_header_hash" => %burn_chain_tip, "last_burn_header_hash" => %burn_header_hash ); let mut last_mined_blocks_vec = last_mined_blocks .remove(&burn_header_hash) .unwrap_or_default(); let last_mined_block_opt = InitializedNeonNode::relayer_run_tenure( &config, registered_key, &mut chainstate, &mut sortdb, &burnchain, burn_tenure_snapshot, &mut keychain, &mut mem_pool, burn_fee_cap, &mut bitcoin_controller, &last_mined_blocks_vec.iter().map(|(blk, _)| blk).collect(), &event_dispatcher, ); if let Some((last_mined_block, microblock_privkey)) = last_mined_block_opt { if last_mined_blocks_vec.len() == 0 { // (for testing) only bump once per epoch bump_processed_counter(&blocks_processed); } last_mined_blocks_vec.push((last_mined_block, microblock_privkey)); } last_mined_blocks.insert(burn_header_hash, last_mined_blocks_vec); last_tenure_issue_time = get_epoch_time_ms(); } RelayerDirective::RegisterKey(ref last_burn_block) => { rotate_vrf_and_register( is_mainnet, &mut keychain, last_burn_block, &mut bitcoin_controller, ); bump_processed_counter(&blocks_processed); } RelayerDirective::RunMicroblockTenure(tenure_issue_ms) => { if last_microblock_tenure_time > tenure_issue_ms { // stale request continue; } debug!("Relayer: run microblock tenure"); // unconfirmed state must be consistent with the chain tip, as must the // microblock mining state. if let Some((ch, bh, mblock_pkey)) = miner_tip.clone() { if let Some(miner_state) = microblock_miner_state.take() { if miner_state.parent_consensus_hash == ch || miner_state.parent_block_hash == bh { // preserve -- chaintip is unchanged microblock_miner_state = Some(miner_state); } else { debug!("Relayer: reset microblock miner state"); microblock_miner_state = None; } } run_microblock_tenure( &config, &mut microblock_miner_state, &mut chainstate, &mut sortdb, &mut mem_pool, &mut relayer, (ch, bh, mblock_pkey), microblocks_processed.clone(), &event_dispatcher, ); // synchronize unconfirmed tx index to p2p thread send_unconfirmed_txs(&chainstate, unconfirmed_txs.clone()); last_microblock_tenure_time = get_epoch_time_ms(); } else { debug!("Relayer: reset unconfirmed state to 0 microblocks"); set_processed_counter(&microblocks_processed, 0); microblock_miner_state = None; } } RelayerDirective::Exit => break } } debug!("Relayer exit!"); }).unwrap(); Ok(relayer_handle) } enum LeaderKeyRegistrationState { Inactive, Pending, Active(RegisteredKey), } /// This node is used for both neon testnet and for mainnet impl InitializedNeonNode { fn new( config: Config, mut keychain: Keychain, event_dispatcher: EventDispatcher, last_burn_block: Option<BurnchainTip>, miner: bool, blocks_processed: BlocksProcessedCounter, microblocks_processed: BlocksProcessedCounter, coord_comms: CoordinatorChannels, sync_comms: PoxSyncWatchdogComms, burnchain: Burnchain, attachments_rx: Receiver<HashSet<AttachmentInstance>>, atlas_config: AtlasConfig, should_keep_running: Arc<AtomicBool>, ) -> InitializedNeonNode { // we can call _open_ here rather than _connect_, since connect is first called in // make_genesis_block let sortdb = SortitionDB::open(&config.get_burn_db_file_path(), false) .expect("Error while instantiating sortition db"); let epochs = SortitionDB::get_stacks_epochs(sortdb.conn()) .expect("Error while loading stacks epochs"); let view = { let sortition_tip = SortitionDB::get_canonical_burn_chain_tip(&sortdb.conn()) .expect("Failed to get sortition tip"); SortitionDB::get_burnchain_view(&sortdb.conn(), 
&burnchain, &sortition_tip).unwrap() }; // create a new peerdb let data_url = UrlString::try_from(format!("{}", &config.node.data_url)).unwrap(); let initial_neighbors = config.node.bootstrap_node.clone(); if initial_neighbors.len() > 0 { info!( "Will bootstrap from peers {}", VecDisplay(&initial_neighbors) ); } else { warn!("Without a peer to bootstrap from, the node will start mining a new chain"); } let p2p_sock: SocketAddr = config.node.p2p_bind.parse().expect(&format!( "Failed to parse socket: {}", &config.node.p2p_bind )); let rpc_sock = config.node.rpc_bind.parse().expect(&format!( "Failed to parse socket: {}", &config.node.rpc_bind )); let p2p_addr: SocketAddr = config.node.p2p_address.parse().expect(&format!( "Failed to parse socket: {}", &config.node.p2p_address )); let node_privkey = { let mut re_hashed_seed = config.node.local_peer_seed.clone(); let my_private_key = loop { match Secp256k1PrivateKey::from_slice(&re_hashed_seed[..]) { Ok(sk) => break sk, Err(_) => { re_hashed_seed = Sha256Sum::from_data(&re_hashed_seed[..]) .as_bytes() .to_vec() } } }; my_private_key }; let mut peerdb = PeerDB::connect( &config.get_peer_db_file_path(), true, config.burnchain.chain_id, burnchain.network_id, Some(node_privkey), config.connection_options.private_key_lifetime.clone(), PeerAddress::from_socketaddr(&p2p_addr), p2p_sock.port(), data_url, &vec![], Some(&initial_neighbors), ) .map_err(|e| { eprintln!( "Failed to open {}: {:?}", &config.get_peer_db_file_path(), &e ); panic!(); }) .unwrap(); { // bootstrap nodes *always* allowed let mut tx = peerdb.tx_begin().unwrap(); for initial_neighbor in initial_neighbors.iter() { PeerDB::set_allow_peer( &mut tx, initial_neighbor.addr.network_id, &initial_neighbor.addr.addrbytes, initial_neighbor.addr.port, -1, ) .unwrap(); } tx.commit().unwrap(); } if !config.node.deny_nodes.is_empty() { warn!("Will ignore nodes {:?}", &config.node.deny_nodes); } { let mut tx = peerdb.tx_begin().unwrap(); for denied in config.node.deny_nodes.iter() { PeerDB::set_deny_peer( &mut tx, denied.addr.network_id, &denied.addr.addrbytes, denied.addr.port, get_epoch_time_secs() + 24 * 365 * 3600, ) .unwrap(); } tx.commit().unwrap(); } let atlasdb = AtlasDB::connect(atlas_config, &config.get_atlas_db_file_path(), true).unwrap(); let local_peer = match PeerDB::get_local_peer(peerdb.conn()) { Ok(local_peer) => local_peer, _ => panic!("Unable to retrieve local peer"), }; // force early mempool instantiation let cost_estimator = config .make_cost_estimator() .unwrap_or_else(|| Box::new(UnitEstimator)); let metric = config .make_cost_metric() .unwrap_or_else(|| Box::new(UnitMetric)); let _ = MemPoolDB::open( config.is_mainnet(), config.burnchain.chain_id, &config.get_chainstate_path_str(), cost_estimator, metric, ) .expect("BUG: failed to instantiate mempool"); // now we're ready to instantiate a p2p network object, the relayer, and the event dispatcher let mut p2p_net = PeerNetwork::new( peerdb, atlasdb, local_peer.clone(), config.burnchain.peer_version, burnchain.clone(), view, config.connection_options.clone(), epochs, ); // setup the relayer channel let (relay_send, relay_recv) = sync_channel(RELAYER_MAX_BUFFER); let burnchain_signer = keychain.get_burnchain_signer(); match monitoring::set_burnchain_signer(burnchain_signer.clone()) { Err(e) => { warn!("Failed to set global burnchain signer: {:?}", &e); } _ => {} } let relayer = Relayer::from_p2p(&mut p2p_net); let shared_unconfirmed_txs = Arc::new(Mutex::new(UnconfirmedTxMap::new())); let leader_key_registration_state = if 
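// editorial note: leader-key registration is a three-state machine
// (Inactive -> Pending -> Active); mock mining short-circuits it by
// fabricating an Active key instead of registering a VRF key on the
// burnchain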
config.node.mock_mining { // mock mining, pretend to have a registered key let vrf_public_key = keychain.rotate_vrf_keypair(1); LeaderKeyRegistrationState::Active(RegisteredKey { block_height: 1, op_vtxindex: 1, vrf_public_key, }) } else { LeaderKeyRegistrationState::Inactive }; let relayer_thread_handle = spawn_miner_relayer( config.is_mainnet(), config.burnchain.chain_id, relayer, local_peer, config.clone(), keychain, config.get_burn_db_file_path(), config.get_chainstate_path_str(), relay_recv, event_dispatcher.clone(), blocks_processed.clone(), microblocks_processed.clone(), burnchain, coord_comms, shared_unconfirmed_txs.clone(), ) .expect("Failed to initialize mine/relay thread"); let p2p_thread_handle = spawn_peer( config.is_mainnet(), p2p_net, &p2p_sock, &rpc_sock, config.clone(), 5000, relay_send.clone(), sync_comms, attachments_rx, shared_unconfirmed_txs, event_dispatcher, should_keep_running, ) .expect("Failed to initialize p2p thread"); info!("Start HTTP server on: {}", &config.node.rpc_bind); info!("Start P2P server on: {}", &config.node.p2p_bind); let last_burn_block = last_burn_block.map(|x| x.block_snapshot); let is_miner = miner; let atlas_config = AtlasConfig::default(config.is_mainnet()); InitializedNeonNode { config, relay_channel: relay_send, last_burn_block, burnchain_signer, is_miner, atlas_config, leader_key_registration_state, p2p_thread_handle, relayer_thread_handle, } } /// Tell the relayer to fire off a tenure and a block commit op, /// if it is time to do so. pub fn relayer_issue_tenure(&mut self) -> bool { if !self.is_miner { // node is a follower, don't try to issue a tenure return true; } if let Some(burnchain_tip) = self.last_burn_block.clone() { match self.leader_key_registration_state { LeaderKeyRegistrationState::Active(ref key) => { debug!( "Tenure: Using key {:?} off of {}", &key.vrf_public_key, &burnchain_tip.burn_header_hash ); self.relay_channel .send(RelayerDirective::RunTenure( key.clone(), burnchain_tip, get_epoch_time_ms(), )) .is_ok() } LeaderKeyRegistrationState::Inactive => { warn!( "Tenure: skipped tenure because no active VRF key. Trying to register one." ); self.leader_key_registration_state = LeaderKeyRegistrationState::Pending; self.relay_channel .send(RelayerDirective::RegisterKey(burnchain_tip)) .is_ok() } LeaderKeyRegistrationState::Pending => true, } } else { warn!("Tenure: Do not know the last burn block. As a miner, this is bad."); true } } /// Notify the relayer of a sortition, telling it to process the block /// and advertize it if it was mined by the node. /// returns _false_ if the relayer hung up the channel. pub fn relayer_sortition_notify(&self) -> bool { if !self.is_miner { // node is a follower, don't try to process my own tenure. return true; } if let Some(ref snapshot) = &self.last_burn_block { debug!( "Tenure: Notify sortition! Last snapshot is {}/{} ({})", &snapshot.consensus_hash, &snapshot.burn_header_hash, &snapshot.winning_stacks_block_hash ); if snapshot.sortition { return self .relay_channel .send(RelayerDirective::ProcessTenure( snapshot.consensus_hash.clone(), snapshot.parent_burn_header_hash.clone(), snapshot.winning_stacks_block_hash.clone(), )) .is_ok(); } } else { debug!("Tenure: Notify sortition! 
No last burn block"); } true } fn get_mining_tenure_information( chain_state: &mut StacksChainState, burn_db: &mut SortitionDB, check_burn_block: &BlockSnapshot, miner_address: StacksAddress, mine_tip_ch: &ConsensusHash, mine_tip_bh: &BlockHeaderHash, ) -> Result<MiningTenureInformation, Error> { let stacks_tip_header = StacksChainState::get_anchored_block_header_info( chain_state.db(), &mine_tip_ch, &mine_tip_bh, ) .unwrap() .ok_or_else(|| { error!( "Could not mine new tenure, since could not find header for known chain tip."; "tip_consensus_hash" => %mine_tip_ch, "tip_stacks_block_hash" => %mine_tip_bh ); Error::HeaderNotFoundForChainTip })?; // the stacks block I'm mining off of's burn header hash and vtxindex: let parent_snapshot = SortitionDB::get_block_snapshot_consensus(burn_db.conn(), mine_tip_ch) .expect("Failed to look up block's parent snapshot") .expect("Failed to look up block's parent snapshot"); let parent_sortition_id = &parent_snapshot.sortition_id; let parent_winning_vtxindex = SortitionDB::get_block_winning_vtxindex(burn_db.conn(), parent_sortition_id) .expect("SortitionDB failure.") .ok_or_else(|| { error!( "Failed to find winning vtx index for the parent sortition"; "parent_sortition_id" => %parent_sortition_id ); Error::WinningVtxNotFoundForChainTip })?; let parent_block = SortitionDB::get_block_snapshot(burn_db.conn(), parent_sortition_id) .expect("SortitionDB failure.") .ok_or_else(|| { error!( "Failed to find block snapshot for the parent sortition"; "parent_sortition_id" => %parent_sortition_id ); Error::SnapshotNotFoundForChainTip })?; // don't mine off of an old burnchain block let burn_chain_tip = SortitionDB::get_canonical_burn_chain_tip(burn_db.conn()) .expect("FATAL: failed to query sortition DB for canonical burn chain tip"); if burn_chain_tip.consensus_hash != check_burn_block.consensus_hash { info!( "New canonical burn chain tip detected. Will not try to mine."; "new_consensus_hash" => %burn_chain_tip.consensus_hash, "old_consensus_hash" => %check_burn_block.consensus_hash, "new_burn_height" => burn_chain_tip.block_height, "old_burn_height" => check_burn_block.block_height ); return Err(Error::BurnchainTipChanged); } debug!("Mining tenure's last consensus hash: {} (height {} hash {}), stacks tip consensus hash: {} (height {} hash {})", &check_burn_block.consensus_hash, check_burn_block.block_height, &check_burn_block.burn_header_hash, mine_tip_ch, parent_snapshot.block_height, &parent_snapshot.burn_header_hash); let coinbase_nonce = { let principal = miner_address.into(); let account = chain_state .with_read_only_clarity_tx( &burn_db.index_conn(), &StacksBlockHeader::make_index_block_hash(mine_tip_ch, mine_tip_bh), |conn| StacksChainState::get_account(conn, &principal), ) .expect(&format!( "BUG: stacks tip block {}/{} no longer exists after we queried it", mine_tip_ch, mine_tip_bh )); account.nonce }; Ok(MiningTenureInformation { stacks_parent_header: stacks_tip_header, parent_consensus_hash: mine_tip_ch.clone(), parent_block_burn_height: parent_block.block_height, parent_block_total_burn: parent_block.total_burn, parent_winning_vtxindex, coinbase_nonce, }) } /// Return the assembled anchor block info and microblock private key on success. 
/// Return None if we couldn't build a block for whatever reason fn relayer_run_tenure( config: &Config, registered_key: RegisteredKey, chain_state: &mut StacksChainState, burn_db: &mut SortitionDB, burnchain: &Burnchain, burn_block: BlockSnapshot, keychain: &mut Keychain, mem_pool: &mut MemPoolDB, burn_fee_cap: u64, bitcoin_controller: &mut BitcoinRegtestController, last_mined_blocks: &Vec<&AssembledAnchorBlock>, event_observer: &EventDispatcher, ) -> Option<(AssembledAnchorBlock, Secp256k1PrivateKey)> { let MiningTenureInformation { mut stacks_parent_header, parent_consensus_hash, parent_block_burn_height, parent_block_total_burn, parent_winning_vtxindex, coinbase_nonce, } = if let Some(stacks_tip) = chain_state.get_stacks_chain_tip(burn_db).unwrap() { let miner_address = keychain.origin_address(config.is_mainnet()).unwrap(); Self::get_mining_tenure_information( chain_state, burn_db, &burn_block, miner_address, &stacks_tip.consensus_hash, &stacks_tip.anchored_block_hash, ) .ok()? } else { debug!("No Stacks chain tip known, will return a genesis block"); let (network, _) = config.burnchain.get_bitcoin_network(); let burnchain_params = BurnchainParameters::from_params(&config.burnchain.chain, &network) .expect("Bitcoin network unsupported"); let chain_tip = ChainTip::genesis( &burnchain_params.first_block_hash, burnchain_params.first_block_height.into(), burnchain_params.first_block_timestamp.into(), ); MiningTenureInformation { stacks_parent_header: chain_tip.metadata, parent_consensus_hash: FIRST_BURNCHAIN_CONSENSUS_HASH.clone(), parent_block_burn_height: 0, parent_block_total_burn: 0, parent_winning_vtxindex: 0, coinbase_nonce: 0, } }; // has the tip changed from our previously-mined block for this epoch? let attempt = { let mut best_attempt = 0; debug!( "Consider {} in-flight Stacks tip(s)", &last_mined_blocks.len() ); for prev_block in last_mined_blocks.iter() { debug!( "Consider in-flight block {} on Stacks tip {}/{} in {} with {} txs", &prev_block.anchored_block.block_hash(), &prev_block.parent_consensus_hash, &prev_block.anchored_block.header.parent_block, &prev_block.my_burn_hash, &prev_block.anchored_block.txs.len() ); if prev_block.anchored_block.txs.len() == 1 { if last_mined_blocks.len() == 1 { // this is an empty block, and we've only tried once before. We should always // try again, with the `subsequent_miner_time_ms` allotment, in order to see if // we can make a bigger block debug!("Have only mined one empty block off of {}/{} height {}; unconditionally trying again", &prev_block.parent_consensus_hash, &prev_block.anchored_block.block_hash(), prev_block.anchored_block.header.total_work.work); best_attempt = 1; break; } else if prev_block.attempt == 1 { // Don't let the fact that we've built an empty block during this sortition // prevent us from trying again. best_attempt = 1; continue; } } if prev_block.parent_consensus_hash == parent_consensus_hash && prev_block.my_burn_hash == burn_block.burn_header_hash && prev_block.anchored_block.header.parent_block == stacks_parent_header.anchored_header.block_hash() { // the anchored chain tip hasn't changed since we attempted to build a block. // But, have discovered any new microblocks worthy of being mined? 
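// (editorial aside: the decision below reduces to a pure rule; a
//  hypothetical helper expressing it, with illustrative names rather
//  than the node's real API:)
fn _new_microblocks_arrived(confirmed_tail: Option<u16>, stream_len: usize) -> bool {
    match confirmed_tail {
        // the previous attempt confirmed no microblocks, so any staged
        // microblock is new material worth a re-mine
        None => stream_len > 0,
        // sequences are 0-based: seq `s` means s + 1 microblocks were
        // confirmed, so only a longer stream adds anything new
        Some(s) => stream_len > (s as usize) + 1,
    }
}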
if let Ok(Some(stream)) = StacksChainState::load_descendant_staging_microblock_stream( chain_state.db(), &StacksBlockHeader::make_index_block_hash( &prev_block.parent_consensus_hash, &stacks_parent_header.anchored_header.block_hash(), ), 0, u16::MAX, ) { if (prev_block.anchored_block.header.parent_microblock == BlockHeaderHash([0u8; 32]) && stream.len() == 0) || (prev_block.anchored_block.header.parent_microblock != BlockHeaderHash([0u8; 32]) && stream.len() <= (prev_block.anchored_block.header.parent_microblock_sequence as usize) + 1) { // the chain tip hasn't changed since we attempted to build a block. Use what we // already have. debug!("Stacks tip is unchanged since we last tried to mine a block off of {}/{} at height {} with {} txs, in {} at burn height {}, and no new microblocks ({} <= {})", &prev_block.parent_consensus_hash, &prev_block.anchored_block.header.parent_block, prev_block.anchored_block.header.total_work.work, prev_block.anchored_block.txs.len(), prev_block.my_burn_hash, parent_block_burn_height, stream.len(), prev_block.anchored_block.header.parent_microblock_sequence); return None; } else { // there are new microblocks! // TODO: only consider rebuilding our anchored block if we (a) have // time, and (b) the new microblocks are worth more than the new BTC // fee minus the old BTC fee debug!("Stacks tip is unchanged since we last tried to mine a block off of {}/{} at height {} with {} txs, in {} at burn height {}, but there are new microblocks ({} > {})", &prev_block.parent_consensus_hash, &prev_block.anchored_block.header.parent_block, prev_block.anchored_block.header.total_work.work, prev_block.anchored_block.txs.len(), prev_block.my_burn_hash, parent_block_burn_height, stream.len(), prev_block.anchored_block.header.parent_microblock_sequence); best_attempt = cmp::max(best_attempt, prev_block.attempt); } } else { // no microblock stream to confirm, and the stacks tip hasn't changed debug!("Stacks tip is unchanged since we last tried to mine a block off of {}/{} at height {} with {} txs, in {} at burn height {}, and no microblocks present", &prev_block.parent_consensus_hash, &prev_block.anchored_block.header.parent_block, prev_block.anchored_block.header.total_work.work, prev_block.anchored_block.txs.len(), prev_block.my_burn_hash, parent_block_burn_height); return None; } } else { if burn_block.burn_header_hash == prev_block.my_burn_hash { // only try and re-mine if there was no sortition since the last chain tip debug!("Stacks tip has changed to {}/{} since we last tried to mine a block in {} at burn height {}; attempt was {} (for Stacks tip {}/{})", parent_consensus_hash, stacks_parent_header.anchored_header.block_hash(), prev_block.my_burn_hash, parent_block_burn_height, prev_block.attempt, &prev_block.parent_consensus_hash, &prev_block.anchored_block.header.parent_block); best_attempt = cmp::max(best_attempt, prev_block.attempt); } else { debug!("Burn tip has changed to {} ({}) since we last tried to mine a block in {}", &burn_block.burn_header_hash, burn_block.block_height, &prev_block.my_burn_hash); } } } best_attempt + 1 }; // Generates a proof out of the sortition hash provided in the params. let vrf_proof = match keychain.generate_proof( &registered_key.vrf_public_key, burn_block.sortition_hash.as_bytes(), ) { Some(vrfp) => vrfp, None => { // Try to recover a key registered in a former session. 
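// (editorial note: typically hit after a node restart -- the
//  registration is still live on the burnchain, but the in-memory
//  VRF keypair was lost with the old process)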
// registered_key.block_height gives us a pointer to the height of the block // holding the key register op, but the VRF was derived using the height of one // of the parents blocks. let _ = keychain.rotate_vrf_keypair(registered_key.block_height - 1); match keychain.generate_proof( &registered_key.vrf_public_key, burn_block.sortition_hash.as_bytes(), ) { Some(vrfp) => vrfp, None => { error!( "Failed to generate proof with {:?}", &registered_key.vrf_public_key ); return None; } } } }; debug!( "Generated VRF Proof: {} over {} with key {}", vrf_proof.to_hex(), &burn_block.sortition_hash, &registered_key.vrf_public_key.to_hex() ); // Generates a new secret key for signing the trail of microblocks // of the upcoming tenure. let microblock_secret_key = if attempt > 1 { match keychain.get_microblock_key() { Some(k) => k, None => { error!( "Failed to obtain microblock key for mining attempt"; "attempt" => %attempt ); return None; } } } else { keychain.rotate_microblock_keypair(burn_block.block_height) }; let mblock_pubkey_hash = Hash160::from_node_public_key(&StacksPublicKey::from_private(&microblock_secret_key)); let coinbase_tx = inner_generate_coinbase_tx( keychain, coinbase_nonce, config.is_mainnet(), config.burnchain.chain_id, ); // find the longest microblock tail we can build off of let microblock_info_opt = match StacksChainState::load_descendant_staging_microblock_stream_with_poison( chain_state.db(), &StacksBlockHeader::make_index_block_hash( &parent_consensus_hash, &stacks_parent_header.anchored_header.block_hash(), ), 0, u16::MAX, ) { Ok(x) => { let num_mblocks = x.as_ref().map(|(mblocks, ..)| mblocks.len()).unwrap_or(0); debug!( "Loaded {} microblocks descending from {}/{}", num_mblocks, &parent_consensus_hash, &stacks_parent_header.anchored_header.block_hash() ); x } Err(e) => { warn!( "Failed to load descendant microblock stream from {}/{}: {:?}", &parent_consensus_hash, &stacks_parent_header.anchored_header.block_hash(), &e ); None } }; if let Some((ref microblocks, ref poison_opt)) = &microblock_info_opt { if let Some(ref tail) = microblocks.last() { debug!( "Confirm microblock stream tailed at {} (seq {})", &tail.block_hash(), tail.header.sequence ); } // try and confirm as many microblocks as we can (but note that the stream itself may // be too long; we'll try again if that happens). stacks_parent_header.microblock_tail = microblocks.last().clone().map(|blk| blk.header.clone()); if let Some(poison_payload) = poison_opt { let poison_microblock_tx = inner_generate_poison_microblock_tx( keychain, coinbase_nonce + 1, poison_payload.clone(), config.is_mainnet(), config.burnchain.chain_id, ); let stacks_epoch = burn_db .index_conn() .get_stacks_epoch(burn_block.block_height as u32) .expect("Could not find a stacks epoch."); // submit the poison payload, privately, so we'll mine it when building the // anchored block. 
if let Err(e) = mem_pool.submit( chain_state, &parent_consensus_hash, &stacks_parent_header.anchored_header.block_hash(), &poison_microblock_tx, Some(event_observer), &stacks_epoch.block_limit, &stacks_epoch.epoch_id, ) { warn!( "Detected but failed to mine poison-microblock transaction: {:?}", &e ); } } } let (anchored_block, _, _) = match StacksBlockBuilder::build_anchored_block( chain_state, &burn_db.index_conn(), mem_pool, &stacks_parent_header, parent_block_total_burn, vrf_proof.clone(), mblock_pubkey_hash, &coinbase_tx, config.make_block_builder_settings((last_mined_blocks.len() + 1) as u64), Some(event_observer), ) { Ok(block) => block, Err(ChainstateError::InvalidStacksMicroblock(msg, mblock_header_hash)) => { // part of the parent microblock stream is invalid, so try again info!("Parent microblock stream is invalid; trying again without the offender {} (msg: {})", &mblock_header_hash, &msg); // truncate the stream stacks_parent_header.microblock_tail = match microblock_info_opt { Some((microblocks, _)) => { let mut tail = None; for mblock in microblocks.into_iter() { if mblock.block_hash() == mblock_header_hash { break; } tail = Some(mblock); } if let Some(ref t) = &tail { debug!( "New parent microblock stream tail is {} (seq {})", t.block_hash(), t.header.sequence ); } tail.map(|t| t.header) } None => None, }; // try again match StacksBlockBuilder::build_anchored_block( chain_state, &burn_db.index_conn(), mem_pool, &stacks_parent_header, parent_block_total_burn, vrf_proof.clone(), mblock_pubkey_hash, &coinbase_tx, config.make_block_builder_settings((last_mined_blocks.len() + 1) as u64), Some(event_observer), ) { Ok(block) => block, Err(e) => { error!("Failure mining anchor block even after removing offending microblock {}: {}", &mblock_header_hash, &e); return None; } } } Err(e) => { error!("Failure mining anchored block: {}", e); return None; } }; let block_height = anchored_block.header.total_work.work; info!( "Succeeded assembling {} block #{}: {}, with {} txs, attempt {}", if parent_block_total_burn == 0 { "Genesis" } else { "Stacks" }, block_height, anchored_block.block_hash(), anchored_block.txs.len(), attempt ); // let's figure out the recipient set! 
let recipients = match get_next_recipients( &burn_block, chain_state, burn_db, burnchain, &OnChainRewardSetProvider(), ) { Ok(x) => x, Err(e) => { error!("Failure fetching recipient set: {:?}", e); return None; } }; let sunset_burn = burnchain.expected_sunset_burn(burn_block.block_height + 1, burn_fee_cap); let rest_commit = burn_fee_cap - sunset_burn; let commit_outs = if burn_block.block_height + 1 < burnchain.pox_constants.sunset_end && !burnchain.is_in_prepare_phase(burn_block.block_height + 1) { RewardSetInfo::into_commit_outs(recipients, config.is_mainnet()) } else { vec![StacksAddress::burn_address(config.is_mainnet())] }; // let's commit let op = inner_generate_block_commit_op( keychain.get_burnchain_signer(), anchored_block.block_hash(), rest_commit, &registered_key, parent_block_burn_height .try_into() .expect("Could not convert parent block height into u32"), parent_winning_vtxindex, VRFSeed::from_proof(&vrf_proof), commit_outs, sunset_burn, burn_block.block_height, ); let mut op_signer = keychain.generate_op_signer(); debug!( "Submit block-commit for block {} tx-count {} height {} off of {}/{} with microblock parent {} (seq {}) in burn block {} ({}); attempt {}", &anchored_block.block_hash(), anchored_block.txs.len(), anchored_block.header.total_work.work, &parent_consensus_hash, &anchored_block.header.parent_block, &anchored_block.header.parent_microblock, &anchored_block.header.parent_microblock_sequence, &burn_block.burn_header_hash, burn_block.block_height, attempt ); let res = bitcoin_controller.submit_operation(op, &mut op_signer, attempt); if !res { if !config.node.mock_mining { warn!("Failed to submit Bitcoin transaction"); return None; } else { debug!("Mock-mining enabled; not sending Bitcoin transaction"); } } Some(( AssembledAnchorBlock { parent_consensus_hash: parent_consensus_hash, my_burn_hash: burn_block.burn_header_hash, anchored_block, attempt, }, microblock_secret_key, )) } /// Process a state coming from the burnchain, by extracting the validated KeyRegisterOp /// and inspecting if a sortition was won. 
/// `ibd`: boolean indicating whether or not we are in the initial block download pub fn process_burnchain_state( &mut self, sortdb: &SortitionDB, sort_id: &SortitionId, ibd: bool, ) -> Option<BlockSnapshot> { let mut last_sortitioned_block = None; let ic = sortdb.index_conn(); let block_snapshot = SortitionDB::get_block_snapshot(&ic, sort_id) .expect("Failed to obtain block snapshot for processed burn block.") .expect("Failed to obtain block snapshot for processed burn block."); let block_height = block_snapshot.block_height; let block_commits = SortitionDB::get_block_commits_by_block(&ic, &block_snapshot.sortition_id) .expect("Unexpected SortitionDB error fetching block commits"); update_active_miners_count_gauge(block_commits.len() as i64); let (_, network) = self.config.burnchain.get_bitcoin_network(); for op in block_commits.into_iter() { if op.txid == block_snapshot.winning_block_txid { info!( "Received burnchain block #{} including block_commit_op (winning) - {} ({})", block_height, op.apparent_sender.to_bitcoin_address(network), &op.block_header_hash ); last_sortitioned_block = Some((block_snapshot.clone(), op.vtxindex)); } else { if self.is_miner { info!( "Received burnchain block #{} including block_commit_op - {} ({})", block_height, op.apparent_sender.to_bitcoin_address(network), &op.block_header_hash ); } } } let key_registers = SortitionDB::get_leader_keys_by_block(&ic, &block_snapshot.sortition_id) .expect("Unexpected SortitionDB error fetching key registers"); let node_address = Keychain::address_from_burnchain_signer( &self.burnchain_signer, self.config.is_mainnet(), ); for op in key_registers.into_iter() { if op.address == node_address { if self.is_miner { info!( "Received burnchain block #{} including key_register_op - {}", block_height, op.address ); } if !ibd { // not in initial block download, so we're not just replaying an old key. // Registered key has been mined if let LeaderKeyRegistrationState::Pending = self.leader_key_registration_state { self.leader_key_registration_state = LeaderKeyRegistrationState::Active(RegisteredKey { vrf_public_key: op.public_key, block_height: op.block_height as u64, op_vtxindex: op.vtxindex as u32, }); } } } } // no-op on UserBurnSupport ops are not supported / produced at this point. self.last_burn_block = Some(block_snapshot); last_sortitioned_block.map(|x| x.0) } } impl NeonGenesisNode { /// Instantiate and initialize a new node, given a config pub fn new( config: Config, mut event_dispatcher: EventDispatcher, burnchain: Burnchain, boot_block_exec: Box<dyn FnOnce(&mut ClarityTx) -> ()>, ) -> Self { let keychain = Keychain::default(config.node.seed.clone()); let initial_balances = config .initial_balances .iter() .map(|e| (e.address.clone(), e.amount)) .collect(); let mut boot_data = ChainStateBootData::new(&burnchain, initial_balances, Some(boot_block_exec)); // do the initial open! 
let (_chain_state, receipts) = match StacksChainState::open_and_exec( config.is_mainnet(), config.burnchain.chain_id, &config.get_chainstate_path_str(), Some(&mut boot_data), ) { Ok(res) => res, Err(err) => panic!( "Error while opening chain state at path {}: {:?}", config.get_chainstate_path_str(), err ), }; event_dispatcher.process_boot_receipts(receipts); Self { keychain, config, event_dispatcher, burnchain, } } pub fn into_initialized_leader_node( self, burnchain_tip: BurnchainTip, blocks_processed: BlocksProcessedCounter, microblocks_processed: BlocksProcessedCounter, coord_comms: CoordinatorChannels, sync_comms: PoxSyncWatchdogComms, attachments_rx: Receiver<HashSet<AttachmentInstance>>, atlas_config: AtlasConfig, should_keep_running: Arc<AtomicBool>, ) -> InitializedNeonNode { let config = self.config; let keychain = self.keychain; let event_dispatcher = self.event_dispatcher; InitializedNeonNode::new( config, keychain, event_dispatcher, Some(burnchain_tip), true, blocks_processed, microblocks_processed, coord_comms, sync_comms, self.burnchain, attachments_rx, atlas_config, should_keep_running, ) } pub fn into_initialized_node( self, burnchain_tip: BurnchainTip, blocks_processed: BlocksProcessedCounter, microblocks_processed: BlocksProcessedCounter, coord_comms: CoordinatorChannels, sync_comms: PoxSyncWatchdogComms, attachments_rx: Receiver<HashSet<AttachmentInstance>>, atlas_config: AtlasConfig, should_keep_running: Arc<AtomicBool>, ) -> InitializedNeonNode { let config = self.config; let keychain = self.keychain; let event_dispatcher = self.event_dispatcher; InitializedNeonNode::new( config, keychain, event_dispatcher, Some(burnchain_tip), false, blocks_processed, microblocks_processed, coord_comms, sync_comms, self.burnchain, attachments_rx, atlas_config, should_keep_running, ) } }<|fim▁end|>
let mut expected_attachments = match attachments_rx.try_recv() {
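The completion above opens a non-blocking drain of the attachments channel. For reference, the bare try_recv drain pattern it belongs to looks like this — a minimal standalone Rust sketch with a hypothetical u32 payload, not the node's actual attachment types:

use std::sync::mpsc::{channel, TryRecvError};

fn main() {
    let (tx, rx) = channel::<u32>();
    tx.send(1).unwrap();
    tx.send(2).unwrap();

    // Drain whatever is queued right now without blocking the main loop.
    let mut drained = Vec::new();
    loop {
        match rx.try_recv() {
            Ok(v) => drained.push(v),
            // Empty: nothing pending yet; Disconnected: sender dropped. Either way, stop.
            Err(TryRecvError::Empty) | Err(TryRecvError::Disconnected) => break,
        }
    }
    assert_eq!(drained, vec![1, 2]);
}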
<|file_name|>relational_layers_test.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2018 The DisentanglementLib Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for relational_layers.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from disentanglement_lib.evaluation.abstract_reasoning import relational_layers import numpy as np import tensorflow.compat.v1 as tf def _create_positional_encoding_matrices(): """Shared input/output pair for the positional encoding tests.""" input_array = np.arange(24, dtype=np.float64).reshape((1, 4, 3, 2)) output_array = np.eye(4) output_array = np.repeat(np.expand_dims(output_array, -1), 2, axis=-1) output_array = np.expand_dims(output_array, 0) return input_array, output_array class RelationalLayersTest(tf.test.TestCase): def test_repeat_for_tensor(self): a = np.arange(24).reshape((1, 4, 3, 2)) shouldbe = np.concatenate([a] * 3, axis=-2) result = self.evaluate(relational_layers.repeat(tf.constant(a), 3, axis=-2)) self.assertAllClose(shouldbe, result) def test_pairwise_edge_embeddings_for_tensor(self): a = np.array([[[1], [2]]]) shouldbe = np.array([[[[1, 1], [1, 2]], [[2, 1], [2, 2]]]]) layer = relational_layers.PairwiseEdgeEmbeddings() result = self.evaluate(layer(tf.constant(a))) self.assertAllClose(shouldbe, result) def test_relational_layer_for_tensor(self): a = np.array([[[1], [2]]]) shouldbe = np.array([[[2, 3], [4, 3]]]) layer = relational_layers.RelationalLayer( tf.keras.layers.Lambda(lambda x: x), tf.keras.layers.Lambda(lambda x: tf.reduce_sum(x, axis=-2))) result = self.evaluate(layer(tf.constant(a))) self.assertAllClose(shouldbe, result) def test_positional_encoding_like_for_static_shape_tensor(self): value, shouldbe = _create_positional_encoding_matrices() a = tf.constant(value) output_tensor = relational_layers.positional_encoding_like(a, -3, -2) result = self.evaluate(output_tensor) self.assertEqual((1, 4, 4, 2), result.shape) self.assertAllClose(shouldbe, result) def test_positional_encoding_like_for_dynamic_shape_tensor(self): value, shouldbe = _create_positional_encoding_matrices() a = tf.placeholder(tf.float32, shape=(None, 4, 3, 2)) output_tensor = relational_layers.positional_encoding_like(a, -3, -2) # Check the static shape. self.assertEqual([None, 4, 4, 2], output_tensor.get_shape().as_list()) # Check the solution. with self.session() as sess: result = sess.run(output_tensor, feed_dict={a: value}) self.assertAllClose(shouldbe, result) def test_add_positional_encoding_layer_for_tensor(self): value, shouldbe_positional = _create_positional_encoding_matrices() shouldbe = np.concatenate([value, shouldbe_positional], axis=-2) a = tf.constant(value) output_tensor = relational_layers.AddPositionalEncoding(-3, -2)(a) result = self.evaluate(output_tensor) self.assertAllClose(shouldbe, result) def test_stack_answers_for_tensors(self): # Tensors used for testing. 
context = np.arange(24).reshape((2, 3, 4)) answers = np.arange(24, 48).reshape((2, 3, 4)) # Compute the correct solutions. results = [] for i in range(answers.shape[-1]): results.append( np.concatenate([context, answers[:, :, i:(i + 1)]], axis=-1)) shouldbe = np.stack(results, axis=-2) # Compute the solution based on the layer. layer = relational_layers.StackAnswers(answer_axis=-1, stack_axis=-2) result = self.evaluate(layer([tf.constant(context), tf.constant(answers)])) # Check that they are the same. self.assertAllClose(shouldbe, result) def test_multi_dim_batch_apply_for_tensors(self): # Tensors used for testing. input_tensor = np.arange(24).reshape((2, 3, 4)) kernel = np.arange(24, 36).reshape((4, 3)) # Compute the correct solutions. shouldbe = np.matmul(input_tensor, kernel) # Compute the solution based on the layer. layer = relational_layers.MultiDimBatchApply( tf.keras.layers.Lambda(lambda x: tf.matmul(x, tf.constant(kernel))), num_dims_to_keep=1) result = self.evaluate(layer(tf.constant(input_tensor))) # Check that they are the same. self.assertAllClose(shouldbe, result) <|fim▁hole|> if __name__ == '__main__': tf.test.main()<|fim▁end|>
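The _create_positional_encoding_matrices helper above pins down the expected one-hot positional encoding. The same expectation can be rebuilt with plain NumPy, independent of the layer under test (a sketch that mirrors the helper, not new library API):

import numpy as np

# For an input of shape (1, 4, 3, 2), the encoding over axis -3 is a
# one-hot position matrix repeated across the trailing channel axis.
expected = np.eye(4)                                            # (4, 4) one-hot positions
expected = np.repeat(np.expand_dims(expected, -1), 2, axis=-1)  # (4, 4, 2)
expected = np.expand_dims(expected, 0)                          # (1, 4, 4, 2) with batch dim
assert expected.shape == (1, 4, 4, 2)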
<|file_name|>task_view.py<|end_file_name|><|fim▁begin|>""" Task performed by user in django view """ import functools from django.core.urlresolvers import reverse from django.conf.urls import url from django.shortcuts import get_object_or_404 from ..activation import Activation, ViewActivation, STATUS from ..exceptions import FlowRuntimeError from . import base def flow_view(**lock_args): """ Decorator that locks and runs the flow view in transaction. Expects view with the signature `(request, activation, **kwargs)` or CBV view that implements TaskActivation, in this case, dispatch with would be called with `(request, **kwargs)` Returns `(request, flow_task, process_pk, task_pk, **kwargs)` """ class flow_view_decorator(object): def __init__(self, func, activation=None): self.func = func self.activation = activation functools.update_wrapper(self, func) def __call__(self, request, flow_cls, flow_task, process_pk, task_pk, **kwargs): lock = flow_task.flow_cls.lock_impl(flow_task.flow_cls.instance, **lock_args) with lock(flow_task.flow_cls, process_pk): task = get_object_or_404(flow_task.flow_cls.task_cls._default_manager, pk=task_pk) if self.activation: """ Class-based view that implements TaskActivation interface """ self.activation.initialize(flow_task, task) return self.func(request, **kwargs) else: """ Function based view or CBV without TaskActvation interface implementation """ activation = flow_task.activation_cls() activation.initialize(flow_task, task) return self.func(request, activation, **kwargs) def __get__(self, instance, instancetype): """ If we decorate method on CBV that implements StartActivation interface, no custom activation is required. """ if instance is None: return self func = self.func.__get__(instance, type) activation = instance if isinstance(instance, ViewActivation) else None<|fim▁hole|> class ManagedViewActivation(ViewActivation): """ Tracks task statistics in activation form """ management_form_cls = None def __init__(self, **kwargs): super(ManagedViewActivation, self).__init__(**kwargs) self.management_form = None self.management_form_cls = kwargs.pop('management_form_cls', None) def get_management_form_cls(self): if self.management_form_cls: return self.management_form_cls else: return self.flow_cls.management_form_cls @Activation.status.super() def prepare(self, data=None, user=None): super(ManagedViewActivation, self).prepare.original() if user: self.task.owner = user management_form_cls = self.get_management_form_cls() self.management_form = management_form_cls(data=data, instance=self.task) if data: if not self.management_form.is_valid(): raise FlowRuntimeError('Activation metadata is broken {}'.format(self.management_form.errors)) self.task = self.management_form.save(commit=False) def has_perm(self, user): return self.flow_task.can_execute(user, self.task) @classmethod def create_task(cls, flow_task, prev_activation, token): task = ViewActivation.create_task(flow_task, prev_activation, token) # Try to assign permission owner_permission = flow_task.calc_owner_permission(task) if owner_permission: task.owner_permission = owner_permission # Try to assign owner owner = flow_task.calc_owner(task) if owner: task.owner = owner task.status = STATUS.ASSIGNED return task class BaseView(base.TaskDescriptionMixin, base.NextNodeMixin, base.Task, base.ViewArgsMixin): """ Base class for ViewTasks """ task_type = 'HUMAN' activation_cls = ManagedViewActivation def __init__(self, view_or_cls, **kwargs): """ Accepts view callable or CBV View class with view kwargs, if CBV 
view implements ViewActivation, it used as activation_cls """ self._view, self._view_cls, self._view_args = None, None, None if isinstance(view_or_cls, type): self._view_cls = view_or_cls if issubclass(view_or_cls, ViewActivation): kwargs.setdefault('activation_cls', view_or_cls) else: self._view = view_or_cls super(BaseView, self).__init__(view_or_cls=view_or_cls, **kwargs) @property def view(self): if not self._view: self._view = self._view_cls.as_view(**self._view_args) return self._view def urls(self): return [url(r'^(?P<process_pk>\d+)/{}/(?P<task_pk>\d+)/$'.format(self.name), self.view, {'flow_task': self}, name=self.name)] class View(base.PermissionMixin, base.UndoViewMixin, base.CancelViewMixin, base.DetailsViewMixin, base.ActivateNextMixin, BaseView): """ View task Example:: task = flow.View(some_view) \\ .Permission('my_app.can_do_task') \\ .Next(this.next_task) In case of function based view:: task = flow.Task(task) @flow_start_view() def task(request, activation): if not activation.flow_task.has_perm(request.user): raise PermissionDenied activation.prepare(request.POST or None) form = SomeForm(request.POST or None) if form.is_valid(): form.save() activation.done() return redirect('/') return render(request, {'activation': activation, 'form': form}) Ensure to include `{{ activation.management_form }}` inside template, to proper track when task was started and other task performance statistics:: <form method="POST"> {{ form }} {{ activation.management_form }} <button type="submit"/> </form> """ def __init__(self, *args, **kwargs): self._assign_view = kwargs.pop('assign_view', None) super(View, self).__init__(*args, **kwargs) def Assign(self, owner=None, **owner_kwargs): """ Assign task to the User immediately on activation, accepts user lookup kwargs or callable :: Process -> User:: .Assign(username='employee') .Assign(lambda process: process.created_by) """ if owner: self._owner = owner else: self._owner = owner_kwargs return self @property def assign_view(self): from viewflow.views import AssignView return self._assign_view if self._assign_view else AssignView.as_view() def urls(self): urls = super(View, self).urls() urls.append(url(r'^(?P<process_pk>\d+)/{}/(?P<task_pk>\d+)/assign/$'.format(self.name), self.assign_view, {'flow_task': self}, name="{}__assign".format(self.name))) return urls def get_task_url(self, task, url_type, **kwargs): user = kwargs.get('user', None) # assign if url_type in ['assign', 'guess']: if task.status == STATUS.NEW and self.can_assign(user, task): url_name = '{}:{}__assign'.format(self.flow_cls.instance.namespace, self.name) return reverse(url_name, kwargs={'process_pk': task.process_id, 'task_pk': task.pk}) # execute if url_type in ['execute', 'guess']: if task.status == STATUS.ASSIGNED and self.can_execute(user, task): url_name = '{}:{}'.format(self.flow_cls.instance.namespace, self.name) return reverse(url_name, kwargs={'process_pk': task.process_id, 'task_pk': task.pk}) return super(View, self).get_task_url(task, url_type, **kwargs) def calc_owner(self, task): from django.contrib.auth import get_user_model owner = self._owner if callable(owner): owner = owner(task.process) elif isinstance(owner, dict): owner = get_user_model() ._default_manager.get(**owner) return owner def calc_owner_permission(self, task): owner_permission = self._owner_permission if callable(owner_permission): owner_permission = owner_permission(task.process) return owner_permission def can_assign(self, user, task): if task.owner_id: return False if user.is_anonymous(): 
return False if not task.owner_permission: """ Available for everyone """ return True obj = None if self._owner_permission_obj: if callable(self._owner_permission_obj): obj = self._owner_permission_obj(task.process) else: obj = self._owner_permission_obj return user.has_perm(task.owner_permission, obj=obj) def can_execute(self, user, task): if task.owner_permission is None and task.owner is None: return True return task.owner == user<|fim▁end|>
return self.__class__(func, activation=activation) return flow_view_decorator
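The completion closes the decorator's __get__ hook, which re-wraps the bound method so the decorator can inspect the instance (here, to detect a ViewActivation) before dispatching. The underlying descriptor pattern, reduced to a self-contained Python sketch with hypothetical names rather than viewflow's API:

import functools

class method_aware_decorator:
    """Callable-class decorator that also works on methods via __get__."""

    def __init__(self, func, owner=None):
        self.func = func
        self.owner = owner  # instance the method is bound to, if any
        functools.update_wrapper(self, func)

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def __get__(self, instance, instancetype):
        if instance is None:
            return self
        # Bind the wrapped function to the instance and re-wrap it, so the
        # decorator sees the instance at call time.
        return self.__class__(self.func.__get__(instance, instancetype), owner=instance)

class Thing:
    @method_aware_decorator
    def hello(self):
        return "hi"

assert Thing().hello() == "hi"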
<|file_name|>tag_block.go<|end_file_name|><|fim▁begin|>package tsi1 import ( "bytes" "encoding/binary" "errors" "fmt" "io" "github.com/influxdata/influxdb/pkg/rhh" ) // TagBlockVersion is the version of the tag block. const TagBlockVersion = 1 // Tag key flag constants. const ( TagKeyTombstoneFlag = 0x01 ) // Tag value flag constants. const ( TagValueTombstoneFlag = 0x01 ) // TagBlock variable size constants. const ( // TagBlock key block fields. TagKeyNSize = 8 TagKeyOffsetSize = 8 // TagBlock value block fields. TagValueNSize = 8 TagValueOffsetSize = 8 ) // TagBlock errors. var ( ErrUnsupportedTagBlockVersion = errors.New("unsupported tag block version") ErrTagBlockSizeMismatch = errors.New("tag block size mismatch") ) // TagBlock represents tag key/value block for a single measurement. type TagBlock struct { data []byte valueData []byte keyData []byte hashData []byte version int // tag block version } // Version returns the encoding version parsed from the data. // Only valid after UnmarshalBinary() has been successfully invoked. func (blk *TagBlock) Version() int { return blk.version } // UnmarshalBinary unpacks data into the tag block. Tag block is not copied so data // should be retained and unchanged after being passed into this function. func (blk *TagBlock) UnmarshalBinary(data []byte) error { // Read trailer. t, err := ReadTagBlockTrailer(data) if err != nil { return err } // Verify data size is correct. if int64(len(data)) != t.Size { return ErrTagBlockSizeMismatch } // Save data section. blk.valueData = data[t.ValueData.Offset:] blk.valueData = blk.valueData[:t.ValueData.Size] // Save key data section. blk.keyData = data[t.KeyData.Offset:] blk.keyData = blk.keyData[:t.KeyData.Size] // Save hash index block. blk.hashData = data[t.HashIndex.Offset:] blk.hashData = blk.hashData[:t.HashIndex.Size] // Save entire block. blk.data = data return nil } // TagKeyElem returns an element for a tag key. // Returns an element with a nil key if not found. func (blk *TagBlock) TagKeyElem(key []byte) TagKeyElem { var elem TagBlockKeyElem if !blk.DecodeTagKeyElem(key, &elem) { return nil } return &elem } func (blk *TagBlock) DecodeTagKeyElem(key []byte, elem *TagBlockKeyElem) bool { keyN := int64(binary.BigEndian.Uint64(blk.hashData[:TagKeyNSize])) hash := rhh.HashKey(key) pos := hash % keyN // Track current distance var d int64 for { // Find offset of tag key. offset := binary.BigEndian.Uint64(blk.hashData[TagKeyNSize+(pos*TagKeyOffsetSize):]) if offset == 0 { return false } // Parse into element. elem.unmarshal(blk.data[offset:], blk.data) // Return if keys match. if bytes.Equal(elem.key, key) { return true } // Check if we've exceeded the probe distance. if d > rhh.Dist(rhh.HashKey(elem.key), pos, keyN) { return false } // Move position forward. pos = (pos + 1) % keyN d++ if d > keyN { return false } } } // TagValueElem returns an element for a tag value. func (blk *TagBlock) TagValueElem(key, value []byte) TagValueElem { var valueElem TagBlockValueElem if !blk.DecodeTagValueElem(key, value, &valueElem) { return nil } return &valueElem } // TagValueElem returns an element for a tag value. func (blk *TagBlock) TagValueSeriesData(key, value []byte) (uint64, []byte) { var valueElem TagBlockValueElem if !blk.DecodeTagValueElem(key, value, &valueElem) { return 0, nil } return valueElem.series.n, valueElem.series.data } // DecodeTagValueElem returns an element for a tag value. 
func (blk *TagBlock) DecodeTagValueElem(key, value []byte, valueElem *TagBlockValueElem) bool { // Find key element, exit if not found. var keyElem TagBlockKeyElem if !blk.DecodeTagKeyElem(key, &keyElem) { return false } // Slice hash index data. hashData := keyElem.hashIndex.buf valueN := int64(binary.BigEndian.Uint64(hashData[:TagValueNSize])) hash := rhh.HashKey(value) pos := hash % valueN // Track current distance var d int64 for { // Find offset of tag value. offset := binary.BigEndian.Uint64(hashData[TagValueNSize+(pos*TagValueOffsetSize):]) if offset == 0 { return false } // Parse into element. valueElem.unmarshal(blk.data[offset:]) // Return if values match. if bytes.Equal(valueElem.value, value) { return true } // Check if we've exceeded the probe distance. max := rhh.Dist(rhh.HashKey(valueElem.value), pos, valueN) if d > max { return false } // Move position forward. pos = (pos + 1) % valueN d++ if d > valueN { return false } } } // TagKeyIterator returns an iterator over all the keys in the block. func (blk *TagBlock) TagKeyIterator() TagKeyIterator { return &tagBlockKeyIterator{ blk: blk, keyData: blk.keyData, } } // tagBlockKeyIterator represents an iterator over all keys in a TagBlock. type tagBlockKeyIterator struct { blk *TagBlock keyData []byte e TagBlockKeyElem } // Next returns the next element in the iterator. func (itr *tagBlockKeyIterator) Next() TagKeyElem { // Exit when there is no data left. if len(itr.keyData) == 0 { return nil } // Unmarshal next element & move data forward. itr.e.unmarshal(itr.keyData, itr.blk.data) itr.keyData = itr.keyData[itr.e.size:] assert(len(itr.e.Key()) > 0, "invalid zero-length tag key") return &itr.e } // tagBlockValueIterator represents an iterator over all values for a tag key. type tagBlockValueIterator struct { data []byte e TagBlockValueElem } // Next returns the next element in the iterator. func (itr *tagBlockValueIterator) Next() TagValueElem { // Exit when there is no data left. if len(itr.data) == 0 { return nil } // Unmarshal next element & move data forward. itr.e.unmarshal(itr.data) itr.data = itr.data[itr.e.size:] assert(len(itr.e.Value()) > 0, "invalid zero-length tag value") return &itr.e } // TagBlockKeyElem represents a tag key element in a TagBlock. type TagBlockKeyElem struct { flag byte key []byte // Value data data struct { offset uint64 size uint64 buf []byte } // Value hash index data hashIndex struct { offset uint64 size uint64 buf []byte } size int } // Deleted returns true if the key has been tombstoned. func (e *TagBlockKeyElem) Deleted() bool { return (e.flag & TagKeyTombstoneFlag) != 0 } // Key returns the key name of the element. func (e *TagBlockKeyElem) Key() []byte { return e.key } // TagValueIterator returns an iterator over the key's values. func (e *TagBlockKeyElem) TagValueIterator() TagValueIterator { return &tagBlockValueIterator{data: e.data.buf} } // unmarshal unmarshals buf into e. // The data argument represents the entire block data. func (e *TagBlockKeyElem) unmarshal(buf, data []byte) { start := len(buf) // Parse flag data. e.flag, buf = buf[0], buf[1:] // Parse data offset/size. e.data.offset, buf = binary.BigEndian.Uint64(buf), buf[8:] e.data.size, buf = binary.BigEndian.Uint64(buf), buf[8:] // Slice data. e.data.buf = data[e.data.offset:] e.data.buf = e.data.buf[:e.data.size] // Parse hash index offset/size. e.hashIndex.offset, buf = binary.BigEndian.Uint64(buf), buf[8:] e.hashIndex.size, buf = binary.BigEndian.Uint64(buf), buf[8:] // Slice hash index data. 
e.hashIndex.buf = data[e.hashIndex.offset:] e.hashIndex.buf = e.hashIndex.buf[:e.hashIndex.size] // Parse key. n, sz := binary.Uvarint(buf) e.key, buf = buf[sz:sz+int(n)], buf[int(n)+sz:] // Save length of elem. e.size = start - len(buf) } // TagBlockValueElem represents a tag value element. type TagBlockValueElem struct { flag byte value []byte series struct { n uint64 // Series count data []byte // Raw series data } size int } // Deleted returns true if the element has been tombstoned. func (e *TagBlockValueElem) Deleted() bool { return (e.flag & TagValueTombstoneFlag) != 0 } // Value returns the value for the element. func (e *TagBlockValueElem) Value() []byte { return e.value } // SeriesN returns the series count. func (e *TagBlockValueElem) SeriesN() uint64 { return e.series.n } // SeriesData returns the raw series data. func (e *TagBlockValueElem) SeriesData() []byte { return e.series.data } // SeriesID returns series ID at an index. func (e *TagBlockValueElem) SeriesID(i int) uint64 { return binary.BigEndian.Uint64(e.series.data[i*SeriesIDSize:]) } // SeriesIDs returns a list decoded series ids. func (e *TagBlockValueElem) SeriesIDs() ([]uint64, error) { a := make([]uint64, 0, e.series.n) var prev uint64 for data := e.series.data; len(data) > 0; { delta, n, err := uvarint(data) if err != nil { return nil, err } data = data[n:] seriesID := prev + uint64(delta) a = append(a, seriesID) prev = seriesID } return a, nil } // Size returns the size of the element. func (e *TagBlockValueElem) Size() int { return e.size } // unmarshal unmarshals buf into e. func (e *TagBlockValueElem) unmarshal(buf []byte) { start := len(buf) // Parse flag data. e.flag, buf = buf[0], buf[1:] // Parse value. sz, n := binary.Uvarint(buf) e.value, buf = buf[n:n+int(sz)], buf[n+int(sz):] // Parse series count. v, n := binary.Uvarint(buf) e.series.n = uint64(v) buf = buf[n:] // Parse data block size. sz, n = binary.Uvarint(buf) buf = buf[n:] // Save reference to series data. e.series.data = buf[:sz] buf = buf[sz:] // Save length of elem. e.size = start - len(buf) } // TagBlockTrailerSize is the total size of the on-disk trailer. const TagBlockTrailerSize = 0 + 8 + 8 + // value data offset/size 8 + 8 + // key data offset/size 8 + 8 + // hash index offset/size 8 + // size 2 // version // TagBlockTrailer represents meta data at the end of a TagBlock. type TagBlockTrailer struct { Version int // Encoding version Size int64 // Total size w/ trailer // Offset & size of value data section. ValueData struct { Offset int64 Size int64 } // Offset & size of key data section. KeyData struct { Offset int64 Size int64 } // Offset & size of hash map section. HashIndex struct { Offset int64 Size int64 } } // WriteTo writes the trailer to w. func (t *TagBlockTrailer) WriteTo(w io.Writer) (n int64, err error) { // Write data info. if err := writeUint64To(w, uint64(t.ValueData.Offset), &n); err != nil { return n, err } else if err := writeUint64To(w, uint64(t.ValueData.Size), &n); err != nil { return n, err } // Write key data info. if err := writeUint64To(w, uint64(t.KeyData.Offset), &n); err != nil { return n, err } else if err := writeUint64To(w, uint64(t.KeyData.Size), &n); err != nil { return n, err } // Write hash index info. if err := writeUint64To(w, uint64(t.HashIndex.Offset), &n); err != nil { return n, err } else if err := writeUint64To(w, uint64(t.HashIndex.Size), &n); err != nil { return n, err } // Write total size & encoding version. 
	if err := writeUint64To(w, uint64(t.Size), &n); err != nil {
		return n, err
	} else if err := writeUint16To(w, IndexFileVersion, &n); err != nil {
		return n, err
	}

	return n, nil
}

// ReadTagBlockTrailer returns the tag block trailer from data.
func ReadTagBlockTrailer(data []byte) (TagBlockTrailer, error) {
	var t TagBlockTrailer

	// Read version.
	t.Version = int(binary.BigEndian.Uint16(data[len(data)-2:]))
	if t.Version != TagBlockVersion {
		return t, ErrUnsupportedTagBlockVersion
	}

	// Slice trailer data.
	buf := data[len(data)-TagBlockTrailerSize:]

	// Read data section info.
	t.ValueData.Offset, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]
	t.ValueData.Size, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]

	// Read key section info.
	t.KeyData.Offset, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]
	t.KeyData.Size, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]

	// Read hash section info.
	t.HashIndex.Offset, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]
	t.HashIndex.Size, buf = int64(binary.BigEndian.Uint64(buf[0:8])), buf[8:]

	// Read total size.
	t.Size = int64(binary.BigEndian.Uint64(buf[0:8]))

	return t, nil
}

// TagBlockEncoder encodes tags to a TagBlock section.
type TagBlockEncoder struct {
	w   io.Writer
	buf bytes.Buffer

	// Track value offsets.
	offsets *rhh.HashMap

	// Track bytes written, sections.
	n       int64
	trailer TagBlockTrailer

	// Track tag keys.
	keys      []tagKeyEncodeEntry
	prevValue []byte
}

// NewTagBlockEncoder returns a new TagBlockEncoder.
func NewTagBlockEncoder(w io.Writer) *TagBlockEncoder {
	return &TagBlockEncoder{
		w:       w,
		offsets: rhh.NewHashMap(rhh.Options{LoadFactor: LoadFactor}),
		trailer: TagBlockTrailer{
			Version: TagBlockVersion,
		},
	}
}

// N returns the number of bytes written.
func (enc *TagBlockEncoder) N() int64 { return enc.n }

// EncodeKey writes a tag key to the underlying writer.
func (enc *TagBlockEncoder) EncodeKey(key []byte, deleted bool) error {
	// An initial empty byte must be written.
	if err := enc.ensureHeaderWritten(); err != nil {
		return err
	}

	// Verify key is lexicographically after previous key.
	if len(enc.keys) > 0 {
		prev := enc.keys[len(enc.keys)-1].key
		if cmp := bytes.Compare(prev, key); cmp == 1 {
			return fmt.Errorf("tag key out of order: prev=%s, new=%s", prev, key)
		} else if cmp == 0 {
			return fmt.Errorf("tag key already encoded: %s", key)
		}
	}

	// Flush values section for key.
	if err := enc.flushValueHashIndex(); err != nil {
		return err
	}

	// Append key on to the end of the key list.
	entry := tagKeyEncodeEntry{
		key:     key,
		deleted: deleted,
	}
	entry.data.offset = enc.n

	enc.keys = append(enc.keys, entry)

	// Clear previous value.
	enc.prevValue = nil

	return nil
}

// EncodeValue writes a tag value to the underlying writer.
// The tag value must sort lexicographically after the previously encoded tag value.
func (enc *TagBlockEncoder) EncodeValue(value []byte, deleted bool, seriesIDs []uint64) error {
	if len(enc.keys) == 0 {
		return fmt.Errorf("tag key must be encoded before encoding values")
	} else if len(value) == 0 {
		return fmt.Errorf("zero length tag value not allowed")
	}

	// Validate that values are in-order.
	if cmp := bytes.Compare(enc.prevValue, value); cmp == 1 {
		return fmt.Errorf("tag value out of order: prev=%s, new=%s", enc.prevValue, value)
	} else if cmp == 0 {
		return fmt.Errorf("tag value already encoded: %s", value)
	}

	// Save offset to hash map.
	enc.offsets.Put(value, enc.n)

	// Write flag.
	if err := writeUint8To(enc.w, encodeTagValueFlag(deleted), &enc.n); err != nil {
		return err
	}

	// Write value. 
if err := writeUvarintTo(enc.w, uint64(len(value)), &enc.n); err != nil { return err } else if err := writeTo(enc.w, value, &enc.n); err != nil { return err } // Build series data in buffer. enc.buf.Reset() var prev uint64 for _, seriesID := range seriesIDs { delta := seriesID - prev var buf [binary.MaxVarintLen32]byte i := binary.PutUvarint(buf[:], uint64(delta)) if _, err := enc.buf.Write(buf[:i]); err != nil { return err } prev = seriesID } // Write series count. if err := writeUvarintTo(enc.w, uint64(len(seriesIDs)), &enc.n); err != nil { return err } // Write data size & buffer. if err := writeUvarintTo(enc.w, uint64(enc.buf.Len()), &enc.n); err != nil { return err } nn, err := enc.buf.WriteTo(enc.w) if enc.n += nn; err != nil { return err } // Save previous value. enc.prevValue = value return nil } // Close flushes the trailer of the encoder to the writer. func (enc *TagBlockEncoder) Close() error {<|fim▁hole|> // Flush last value set. if err := enc.ensureHeaderWritten(); err != nil { return err } else if err := enc.flushValueHashIndex(); err != nil { return err } // Save ending position of entire data block. enc.trailer.ValueData.Size = enc.n - enc.trailer.ValueData.Offset // Write key block to point to value blocks. if err := enc.encodeTagKeyBlock(); err != nil { return err } // Compute total size w/ trailer. enc.trailer.Size = enc.n + TagBlockTrailerSize // Write trailer. nn, err := enc.trailer.WriteTo(enc.w) enc.n += nn return err } // ensureHeaderWritten writes a single byte to offset the rest of the block. func (enc *TagBlockEncoder) ensureHeaderWritten() error { if enc.n > 0 { return nil } else if _, err := enc.w.Write([]byte{0}); err != nil { return err } enc.n++ enc.trailer.ValueData.Offset = enc.n return nil } // flushValueHashIndex builds writes the hash map at the end of a value set. func (enc *TagBlockEncoder) flushValueHashIndex() error { // Ignore if no keys have been written. if len(enc.keys) == 0 { return nil } key := &enc.keys[len(enc.keys)-1] // Save size of data section. key.data.size = enc.n - key.data.offset // Encode hash map length. key.hashIndex.offset = enc.n if err := writeUint64To(enc.w, uint64(enc.offsets.Cap()), &enc.n); err != nil { return err } // Encode hash map offset entries. for i := int64(0); i < enc.offsets.Cap(); i++ { _, v := enc.offsets.Elem(i) offset, _ := v.(int64) if err := writeUint64To(enc.w, uint64(offset), &enc.n); err != nil { return err } } key.hashIndex.size = enc.n - key.hashIndex.offset // Clear offsets. enc.offsets = rhh.NewHashMap(rhh.Options{LoadFactor: LoadFactor}) return nil } // encodeTagKeyBlock encodes the keys section to the writer. func (enc *TagBlockEncoder) encodeTagKeyBlock() error { offsets := rhh.NewHashMap(rhh.Options{Capacity: int64(len(enc.keys)), LoadFactor: LoadFactor}) // Encode key list in sorted order. enc.trailer.KeyData.Offset = enc.n for i := range enc.keys { entry := &enc.keys[i] // Save current offset so we can use it in the hash index. offsets.Put(entry.key, enc.n) if err := writeUint8To(enc.w, encodeTagKeyFlag(entry.deleted), &enc.n); err != nil { return err } // Write value data offset & size. if err := writeUint64To(enc.w, uint64(entry.data.offset), &enc.n); err != nil { return err } else if err := writeUint64To(enc.w, uint64(entry.data.size), &enc.n); err != nil { return err } // Write value hash index offset & size. 
if err := writeUint64To(enc.w, uint64(entry.hashIndex.offset), &enc.n); err != nil { return err } else if err := writeUint64To(enc.w, uint64(entry.hashIndex.size), &enc.n); err != nil { return err } // Write key length and data. if err := writeUvarintTo(enc.w, uint64(len(entry.key)), &enc.n); err != nil { return err } else if err := writeTo(enc.w, entry.key, &enc.n); err != nil { return err } } enc.trailer.KeyData.Size = enc.n - enc.trailer.KeyData.Offset // Encode hash map length. enc.trailer.HashIndex.Offset = enc.n if err := writeUint64To(enc.w, uint64(offsets.Cap()), &enc.n); err != nil { return err } // Encode hash map offset entries. for i := int64(0); i < offsets.Cap(); i++ { _, v := offsets.Elem(i) offset, _ := v.(int64) if err := writeUint64To(enc.w, uint64(offset), &enc.n); err != nil { return err } } enc.trailer.HashIndex.Size = enc.n - enc.trailer.HashIndex.Offset return nil } type tagKeyEncodeEntry struct { key []byte deleted bool data struct { offset int64 size int64 } hashIndex struct { offset int64 size int64 } } func encodeTagKeyFlag(deleted bool) byte { var flag byte if deleted { flag |= TagKeyTombstoneFlag } return flag } func encodeTagValueFlag(deleted bool) byte { var flag byte if deleted { flag |= TagValueTombstoneFlag } return flag }<|fim▁end|>
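Both lookup paths in the row above cut probing short once the probe distance exceeds the resident entry's own distance from its home slot — the Robin Hood hashing invariant. A toy, self-contained Go version of that check (illustrative only; the real implementation lives in the rhh package):

package main

import "fmt"

// dist is how far pos sits from the key's home slot hash%n.
func dist(hash, pos, n int) int { return (pos + n - hash%n) % n }

// lookup probes an open-addressed table; 0 marks an empty slot.
func lookup(table []int, hash func(int) int, key int) bool {
	n := len(table)
	pos := hash(key) % n
	for d := 0; d < n; d++ {
		v := table[pos]
		if v == 0 {
			return false // empty slot: the key was never inserted
		}
		if v == key {
			return true
		}
		// Robin Hood invariant: had the key been inserted, it would have
		// displaced any resident entry that is closer to home than we have
		// already probed, so we can stop early.
		if d > dist(hash(v), pos, n) {
			return false
		}
		pos = (pos + 1) % n
	}
	return false
}

func main() {
	table := make([]int, 8)
	table[3] = 3 // identity hash puts key 3 at slot 3
	fmt.Println(lookup(table, func(k int) int { return k }, 3)) // true
	fmt.Println(lookup(table, func(k int) int { return k }, 4)) // false
}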
<|file_name|>blocks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import logging import pickle import re from cacheops import invalidate_model from django import http from django.contrib import messages from django.core.urlresolvers import reverse from django.http import HttpResponse from django.shortcuts import redirect, render from ..attendance import generate_roster_pdf from ...forms.admin.blocks import BlockForm, QuickBlockForm from ...models import EighthBlock, EighthScheduledActivity from ....auth.decorators import eighth_admin_required logger = logging.getLogger(__name__) @eighth_admin_required def add_block_view(request): if request.method == "POST" and "custom_block" in request.POST: form = QuickBlockForm(request.POST) if form.is_valid(): form.save() messages.success(request, "Successfully added block.") return redirect("eighth_admin_dashboard") else: messages.error(request, "Error adding block.") request.session["add_block_form"] = pickle.dumps(form) date = None show_letters = None <|fim▁hole|> if "date" in request.POST: date = request.POST.get("date") title_suffix = "" if date: date_format = re.compile(r'([0-9]{2})\/([0-9]{2})\/([0-9]{4})') fmtdate = date_format.sub(r'\3-\1-\2', date) logger.debug(fmtdate) title_suffix = " - {}".format(fmtdate) show_letters = True if "modify_blocks" in request.POST: letters = request.POST.getlist("blocks") current_letters = [] blocks_day = EighthBlock.objects.filter(date=fmtdate) for day in blocks_day: current_letters.append(day.block_letter) logger.debug(letters) logger.debug(current_letters) for l in letters: if len(l) == 0: continue if l not in current_letters: EighthBlock.objects.create(date=fmtdate, block_letter=l) messages.success(request, "Successfully added {} Block on {}".format(l, fmtdate)) for l in current_letters: if len(l) == 0: continue if l not in letters: EighthBlock.objects.get(date=fmtdate, block_letter=l).delete() messages.success(request, "Successfully removed {} Block on {}".format(l, fmtdate)) invalidate_model(EighthBlock) letters = [] visible_blocks = ["A", "B", "C", "D", "E", "F", "G", "H"] if show_letters: onday = EighthBlock.objects.filter(date=fmtdate) for l in visible_blocks: exists = onday.filter(block_letter=l) letters.append({"name": l, "exists": exists}) for blk in onday: if blk.block_letter not in visible_blocks: visible_blocks.append(blk.block_letter) letters.append({"name": blk.block_letter, "exists": True}) context = {"admin_page_title": "Add or Remove Blocks{}".format(title_suffix), "date": date, "letters": letters, "show_letters": show_letters, "add_block_form": QuickBlockForm} return render(request, "eighth/admin/add_block.html", context) @eighth_admin_required def edit_block_view(request, block_id): try: block = EighthBlock.objects.get(id=block_id) except EighthBlock.DoesNotExist: raise http.Http404 if request.method == "POST": form = BlockForm(request.POST, instance=block) if form.is_valid(): form.save() invalidate_model(EighthBlock) messages.success(request, "Successfully edited block.") return redirect("eighth_admin_dashboard") else: messages.error(request, "Error adding block.") else: form = BlockForm(instance=block) context = {"form": form, "delete_url": reverse("eighth_admin_delete_block", args=[block_id]), "admin_page_title": "Edit Block"} return render(request, "eighth/admin/edit_form.html", context) @eighth_admin_required def delete_block_view(request, block_id): try: block = EighthBlock.objects.get(id=block_id) except EighthBlock.DoesNotExist: raise http.Http404 if request.method == 
"POST": block.delete() invalidate_model(EighthBlock) messages.success(request, "Successfully deleted block.") return redirect("eighth_admin_dashboard") else: context = {"admin_page_title": "Delete Block", "item_name": str(block), "help_text": "Deleting this block will remove all records " "of it related to eighth period."} return render(request, "eighth/admin/delete_form.html", context) @eighth_admin_required def print_block_rosters_view(request, block_id): if "schact_id" in request.POST: response = HttpResponse(content_type="application/pdf") response["Content-Disposition"] = "inline; filename=\"block_{}_rosters.pdf\"".format(block_id) sched_act_ids = request.POST.getlist("schact_id") pdf_buffer = generate_roster_pdf(sched_act_ids, True) response.write(pdf_buffer.getvalue()) pdf_buffer.close() return response else: try: block = EighthBlock.objects.get(id=block_id) schacts = EighthScheduledActivity.objects.filter(block=block).order_by("sponsors") schacts = sorted(schacts, key=lambda x: "{}".format(x.get_true_sponsors())) except (EighthBlock.DoesNotExist, EighthScheduledActivity.DoesNotExist): raise http.Http404 context = {"eighthblock": block, "admin_page_title": "Choose activities to print", "schacts": schacts} return render(request, "eighth/admin/choose_roster_activities.html", context)<|fim▁end|>
if "date" in request.GET: date = request.GET.get("date")
<|file_name|>statement.py<|end_file_name|><|fim▁begin|>from . import claim, util from .attr_dict import AttrDict class Statement(AttrDict): @classmethod def from_json(cls, statement_doc): return normalize(statement_doc) <|fim▁hole|> references = {} for item in statement_doc.get('references', []): for pid, ref_docs in item['snaks'].items(): references[pid] = [claim.normalize(ref_doc) for ref_doc in ref_docs] return Statement({ 'id': statement_doc.get('id'), 'hash': statement_doc.get('hash'), 'claim': claim.normalize(statement_doc['mainsnak']), 'rank': statement_doc.get('rank', None), 'references': references, 'qualifiers': { prop: [claim.normalize(qualifier_doc) for qualifier_doc in statement_doc['qualifiers'][prop]] for prop in statement_doc.get('qualifiers-order', [])} })<|fim▁end|>
def normalize(statement_doc): statement_doc = util.ensure_decoded_json(statement_doc)
<|file_name|>streakgaming.py<|end_file_name|><|fim▁begin|># !/usr/bin/python # -*- coding: cp1252 -*- # ################################################################################## # # Copyright 2016 Félix Brezo and Yaiza Rubio (i3visio, [email protected]) # # This program is part of OSRFramework. You can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################## __author__ = "Yaiza Rubio and Félix Brezo <[email protected]>" __version__ = "1.1"<|fim▁hole|>import argparse import json import re import sys import urllib2 import osrframework.utils.browser as browser from osrframework.utils.platforms import Platform class Streakgaming(Platform): """ A <Platform> object for Streakgaming. """ def __init__(self): """ Constructor... """ self.platformName = "Streakgaming" self.tags = ["social", "news", "gaming"] ######################## # Defining valid modes # ######################## self.isValidMode = {} self.isValidMode["phonefy"] = False self.isValidMode["usufy"] = True self.isValidMode["searchfy"] = False ###################################### # Search URL for the different modes # ###################################### # Strings with the URL for each and every mode self.url = {} #self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>" self.url["usufy"] = "http://www.streakgaming.com/forum/members/" + "<usufy>" + ".html" #self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>" ###################################### # Whether the user needs credentials # ###################################### self.needsCredentials = {} #self.needsCredentials["phonefy"] = False self.needsCredentials["usufy"] = False #self.needsCredentials["searchfy"] = False ################# # Valid queries # ################# # Strings that will imply that the query number is not appearing self.validQuery = {} # The regular expression '.+' will match any query. 
#self.validQuery["phonefy"] = ".*" self.validQuery["usufy"] = ".+" #self.validQuery["searchfy"] = ".*" ################### # Not_found clues # ################### # Strings that will imply that the query number is not appearing self.notFoundText = {} #self.notFoundText["phonefy"] = [] self.notFoundText["usufy"] = ["<title>Streak Gaming Online Gambling Forum</title>"] #self.notFoundText["searchfy"] = [] ######################### # Fields to be searched # ######################### self.fieldsRegExp = {} # Definition of regular expressions to be searched in phonefy mode #self.fieldsRegExp["phonefy"] = {} # Example of fields: #self.fieldsRegExp["phonefy"]["i3visio.location"] = "" # Definition of regular expressions to be searched in usufy mode self.fieldsRegExp["usufy"] = {} # Example of fields: #self.fieldsRegExp["usufy"]["i3visio.location"] = "" # Definition of regular expressions to be searched in searchfy mode #self.fieldsRegExp["searchfy"] = {} # Example of fields: #self.fieldsRegExp["searchfy"]["i3visio.location"] = "" ################ # Fields found # ################ # This attribute will be feeded when running the program. self.foundFields = {}<|fim▁end|>
<|file_name|>Query.java<|end_file_name|><|fim▁begin|>/** * <copyright> * </copyright> * * $Id$ */ package org.eclipse.bpel4chor.model.pbd; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Query</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link org.eclipse.bpel4chor.model.pbd.Query#getQueryLanguage <em>Query Language</em>}</li> * <li>{@link org.eclipse.bpel4chor.model.pbd.Query#getOpaque <em>Opaque</em>}</li> * <li>{@link org.eclipse.bpel4chor.model.pbd.Query#getValue <em>Value</em>}</li> * </ul> * </p> * * @see org.eclipse.bpel4chor.model.pbd.PbdPackage#getQuery() * @model * @generated */ public interface Query extends ExtensibleElements { /** * Returns the value of the '<em><b>Query Language</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Query Language</em>' attribute isn't clear, * there really should be more of a description here...<|fim▁hole|> * </p> * <!-- end-user-doc --> * @return the value of the '<em>Query Language</em>' attribute. * @see #setQueryLanguage(String) * @see org.eclipse.bpel4chor.model.pbd.PbdPackage#getQuery_QueryLanguage() * @model * @generated */ String getQueryLanguage(); /** * Sets the value of the '{@link org.eclipse.bpel4chor.model.pbd.Query#getQueryLanguage <em>Query Language</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Query Language</em>' attribute. * @see #getQueryLanguage() * @generated */ void setQueryLanguage(String value); /** * Returns the value of the '<em><b>Opaque</b></em>' attribute. * The literals are from the enumeration {@link org.eclipse.bpel4chor.model.pbd.OpaqueBoolean}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Opaque</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Opaque</em>' attribute. * @see org.eclipse.bpel4chor.model.pbd.OpaqueBoolean * @see #setOpaque(OpaqueBoolean) * @see org.eclipse.bpel4chor.model.pbd.PbdPackage#getQuery_Opaque() * @model * @generated */ OpaqueBoolean getOpaque(); /** * Sets the value of the '{@link org.eclipse.bpel4chor.model.pbd.Query#getOpaque <em>Opaque</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Opaque</em>' attribute. * @see org.eclipse.bpel4chor.model.pbd.OpaqueBoolean * @see #getOpaque() * @generated */ void setOpaque(OpaqueBoolean value); /** * Returns the value of the '<em><b>Value</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value</em>' attribute. * @see #setValue(String) * @see org.eclipse.bpel4chor.model.pbd.PbdPackage#getQuery_Value() * @model * @generated */ String getValue(); /** * Sets the value of the '{@link org.eclipse.bpel4chor.model.pbd.Query#getValue <em>Value</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value</em>' attribute. * @see #getValue() * @generated */ void setValue(String value); } // Query<|fim▁end|>
<|file_name|>manage_test.py<|end_file_name|><|fim▁begin|><|fim▁hole|>if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tdjango.tests.testapp.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)<|fim▁end|>
#!/usr/bin/env python import os import sys
<|file_name|>HttpServletRequestAdapterTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one<|fim▁hole|> * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package jp.eisbahn.oauth2.server.spi.servlet; import static org.easymock.EasyMock.*; import static org.junit.Assert.*; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.junit.Test; public class HttpServletRequestAdapterTest { @Test public void test() { HttpServletRequest request = createMock(HttpServletRequest.class); expect(request.getParameter("name1")).andReturn("value1"); expect(request.getHeader("name2")).andReturn("value2"); @SuppressWarnings("serial") Map<String, String[]> map = new HashMap<String, String[]>() { { put("k1", new String[]{"v1"}); put("k2", new String[]{"v2"}); } }; expect(request.getParameterMap()).andReturn(map); replay(request); HttpServletRequestAdapter target = new HttpServletRequestAdapter(request); assertEquals("value1", target.getParameter("name1")); assertEquals("value2", target.getHeader("name2")); Map<String, String[]> parameterMap = target.getParameterMap(); assertEquals(2, parameterMap.size()); assertEquals("v1", parameterMap.get("k1")[0]); assertEquals("v2", parameterMap.get("k2")[0]); verify(request); } }<|fim▁end|>
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
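The test in the row above mocks an HttpServletRequest with EasyMock and verifies simple delegation. The class under test is not part of the row; this is a hedged sketch of its shape, inferred only from the assertions (the real HttpServletRequestAdapter ships elsewhere in the project and may differ):

import java.util.Map;
import javax.servlet.http.HttpServletRequest;

public class HttpServletRequestAdapter {

    private final HttpServletRequest request;

    public HttpServletRequestAdapter(HttpServletRequest request) {
        this.request = request;
    }

    public String getParameter(String name) {
        // Delegates straight to the wrapped request, as the test expects.
        return request.getParameter(name);
    }

    public String getHeader(String name) {
        return request.getHeader(name);
    }

    public Map<String, String[]> getParameterMap() {
        return request.getParameterMap();
    }
}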
<|file_name|>connector.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 SAP SE Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package driver import ( "context" "crypto/tls" "crypto/x509" "database/sql/driver" "fmt" "io/ioutil" "net/url" "strconv" "sync" ) /* A Connector represents a hdb driver in a fixed configuration. A Connector can be passed to sql.OpenDB (starting from go 1.10) allowing users to bypass a string based data source name. */ type Connector struct { mu sync.RWMutex host, username, password string locale string bufferSize, fetchSize, timeout int tlsConfig *tls.Config } func newConnector() *Connector { return &Connector{ fetchSize: DefaultFetchSize, timeout: DefaultTimeout, } } // NewBasicAuthConnector creates a connector for basic authentication. func NewBasicAuthConnector(host, username, password string) *Connector { c := newConnector() c.host = host c.username = username c.password = password return c } // NewDSNConnector creates a connector from a data source name. func NewDSNConnector(dsn string) (*Connector, error) { c := newConnector() url, err := url.Parse(dsn) if err != nil { return nil, err } c.host = url.Host if url.User != nil { c.username = url.User.Username() c.password, _ = url.User.Password() } var certPool *x509.CertPool for k, v := range url.Query() { switch k { default: return nil, fmt.Errorf("URL parameter %s is not supported", k) case DSNFetchSize: if len(v) == 0 { continue } fetchSize, err := strconv.Atoi(v[0]) if err != nil { return nil, fmt.Errorf("failed to parse fetchSize: %s", v[0]) } if fetchSize < minFetchSize { c.fetchSize = minFetchSize } else { c.fetchSize = fetchSize } case DSNTimeout: if len(v) == 0 { continue } timeout, err := strconv.Atoi(v[0]) if err != nil { return nil, fmt.Errorf("failed to parse timeout: %s", v[0]) } if timeout < minTimeout { c.timeout = minTimeout } else { c.timeout = timeout } case DSNLocale: if len(v) == 0 { continue }<|fim▁hole|> continue } if c.tlsConfig == nil { c.tlsConfig = &tls.Config{} } c.tlsConfig.ServerName = v[0] case DSNTLSInsecureSkipVerify: if len(v) == 0 { continue } var err error b := true if v[0] != "" { b, err = strconv.ParseBool(v[0]) if err != nil { return nil, fmt.Errorf("failed to parse InsecureSkipVerify (bool): %s", v[0]) } } if c.tlsConfig == nil { c.tlsConfig = &tls.Config{} } c.tlsConfig.InsecureSkipVerify = b case DSNTLSRootCAFile: for _, fn := range v { rootPEM, err := ioutil.ReadFile(fn) if err != nil { return nil, err } if certPool == nil { certPool = x509.NewCertPool() } if ok := certPool.AppendCertsFromPEM(rootPEM); !ok { return nil, fmt.Errorf("failed to parse root certificate - filename: %s", fn) } } if certPool != nil { if c.tlsConfig == nil { c.tlsConfig = &tls.Config{} } c.tlsConfig.RootCAs = certPool } } } return c, nil } // Host returns the host of the connector. func (c *Connector) Host() string { return c.host } // Username returns the username of the connector. func (c *Connector) Username() string { return c.username } // Password returns the password of the connector. 
func (c *Connector) Password() string { return c.password } // Locale returns the locale of the connector. func (c *Connector) Locale() string { c.mu.RLock() defer c.mu.RUnlock() return c.locale } /* SetLocale sets the locale of the connector. For more information please see DSNLocale. */ func (c *Connector) SetLocale(locale string) { c.mu.Lock() c.locale = locale c.mu.Unlock() } // FetchSize returns the fetchSize of the connector. func (c *Connector) FetchSize() int { c.mu.RLock() defer c.mu.RUnlock() return c.fetchSize } /* SetFetchSize sets the fetchSize of the connector. For more information please see DSNFetchSize. */ func (c *Connector) SetFetchSize(fetchSize int) error { c.mu.Lock() defer c.mu.Unlock() if fetchSize < minFetchSize { fetchSize = minFetchSize } c.fetchSize = fetchSize return nil } // Timeout returns the timeout of the connector. func (c *Connector) Timeout() int { c.mu.RLock() defer c.mu.RUnlock() return c.timeout } /* SetTimeout sets the timeout of the connector. For more information please see DSNTimeout. */ func (c *Connector) SetTimeout(timeout int) error { c.mu.Lock() defer c.mu.Unlock() if timeout < minTimeout { timeout = minTimeout } c.timeout = timeout return nil } // TLSConfig returns the TLS configuration of the connector. func (c *Connector) TLSConfig() *tls.Config { c.mu.RLock() defer c.mu.RUnlock() return c.tlsConfig } // SetTLSConfig sets the TLS configuration of the connector. func (c *Connector) SetTLSConfig(tlsConfig *tls.Config) error { c.mu.Lock() defer c.mu.Unlock() c.tlsConfig = tlsConfig return nil } // BasicAuthDSN return the connector DSN for basic authentication. func (c *Connector) BasicAuthDSN() string { values := url.Values{} if c.locale != "" { values.Set(DSNLocale, c.locale) } if c.fetchSize != 0 { values.Set(DSNFetchSize, fmt.Sprintf("%d", c.fetchSize)) } if c.timeout != 0 { values.Set(DSNTimeout, fmt.Sprintf("%d", c.timeout)) } return (&url.URL{ Scheme: DriverName, User: url.UserPassword(c.username, c.password), Host: c.host, RawQuery: values.Encode(), }).String() } // Connect implements the database/sql/driver/Connector interface. func (c *Connector) Connect(ctx context.Context) (driver.Conn, error) { return newConn(ctx, c) } // Driver implements the database/sql/driver/Connector interface. func (c *Connector) Driver() driver.Driver { return drv }<|fim▁end|>
c.locale = v[0]
		case DSNTLSServerName:
			if len(v) == 0 {
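With the completion above slotted into the hole, the DSN parameter switch in the row reads:

		case DSNLocale:
			if len(v) == 0 {
				continue
			}
			c.locale = v[0]
		case DSNTLSServerName:
			if len(v) == 0 {
				continue
			}
			if c.tlsConfig == nil {
				c.tlsConfig = &tls.Config{}
			}
			c.tlsConfig.ServerName = v[0]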
<|file_name|>spaceicon.py<|end_file_name|><|fim▁begin|>############################################################################### #cyn.in is an open source Collaborative Knowledge Management Appliance that #enables teams to seamlessly work together on files, documents and content in #a secure central environment. # #cyn.in v2 an open source appliance is distributed under the GPL v3 license #along with commercial support options. # #cyn.in is a Cynapse Invention. # #Copyright (C) 2008 Cynapse India Pvt. Ltd. # #This program is free software: you can redistribute it and/or modify it under #the terms of the GNU General Public License as published by the Free Software #Foundation, either version 3 of the License, or any later version and observe #the Additional Terms applicable to this program and must display appropriate #legal notices. In accordance with Section 7(b) of the GNU General Public #License version 3, these Appropriate Legal Notices must retain the display of #the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have #received a copy of the detailed Additional Terms License with this program. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General #Public License for more details. # #You should have received a copy of the GNU General Public License along with #this program. If not, see <http://www.gnu.org/licenses/>. # #You can contact Cynapse at [email protected] with any problems with cyn.in. #For any queries regarding the licensing, please send your mails to # [email protected] # #You can also contact Cynapse at: #802, Building No. 1, #Dheeraj Sagar, Malad(W) #Mumbai-400064, India ############################################################################### from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from plone.app.layout.viewlets.common import ViewletBase from zope.component import getMultiAdapter<|fim▁hole|>class SpaceIconViewlet(ViewletBase): render = ViewPageTemplateFile('space_icon.pt') def update(self): portal_state = getMultiAdapter((self.context, self.request), name=u'plone_portal_state') cportal_url = portal_state.portal_url() current_object = self.context.aq_inner self.has_space_icon = False self.space_icon = "" self.space_url = "" parentslist = current_object.aq_chain new_object = None found = 0 try: for type in parentslist: if type.portal_type == 'Space' and type.meta_type == 'Space': new_object = type found = 1 if found == 1: break except AttributeError: a = self.space_icon if new_object <> None: #implement code here for binding space icon if new_object.space_icon <> "": self.space_icon = cportal_url + "/" + new_object.space_icon else: self.space_icon = default_space_icon self.space_url = new_object.absolute_url() self.has_space_icon = True else: self.site_icon = portal_state.portal_url() + "/logo.jpg" self.site_url = portal_state.portal_url() self.render = ViewPageTemplateFile('site_logo.pt')<|fim▁end|>
<|file_name|>active.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, Ben Morgan. All rights reserved. // Use of this source code is governed by an MIT license // that can be found in the LICENSE file. package lackey import ( "os" "os/exec" "strconv" "strings" "github.com/cassava/lackey/audio/mp3" "github.com/goulash/audio" "github.com/goulash/color" "github.com/goulash/osutil" ) type ExecError struct { Err error Output string } func (err *ExecError) Error() string { return err.Err.Error() } type Encoder interface { Ext() string CanCopy(src, dst Audio) bool Encode(src, dst string, md Audio) error } type Runner struct { Color *color.Colorizer Encoder Encoder ForceTranscode bool CopyExtensions []string DryRun bool Verbose bool Strip bool SrcPrefix string DstPrefix string } func (o *Runner) WhichExt(src Audio) string { // At the moment, we always have the same kind of output name := src.FileInfo().Name() for _, ext := range o.CopyExtensions { if strings.HasSuffix(name, ext) { return ext } } return o.Encoder.Ext() } // canEncode returns true if this runner can encode the codec. func (o *Runner) canEncode(c audio.Codec) bool { return c == audio.FLAC || c == audio.MP3 || c == audio.M4A || c == audio.OGG } func (o *Runner) transcodeOrCopy(src, dst Audio) AudioOperation { if o.Encoder.CanCopy(src, dst) { return CopyAudio } return TranscodeAudio } func (o *Runner) Which(src, dst Audio) AudioOperation { // Files we should copy directly, we do so here name := src.FileInfo().Name() for _, ext := range o.CopyExtensions { if strings.HasSuffix(name, ext) { if dst.IsExists() { sfi, dfi := src.FileInfo(), dst.FileInfo() if dfi.Size() == 0 { return CopyAudio } if sfi.ModTime().After(dfi.ModTime()) { return CopyAudio } return SkipAudio } return CopyAudio } } if !o.canEncode(src.Encoding()) { return IgnoreAudio } if o.ForceTranscode { return TranscodeAudio } if !dst.IsExists() { return o.transcodeOrCopy(src, dst) } sfi, dfi := src.FileInfo(), dst.FileInfo() if dfi.Size() == 0 { return o.transcodeOrCopy(src, dst) } if sfi.ModTime().After(dfi.ModTime()) { return UpdateAudio } return SkipAudio } func (o *Runner) Ok(dst string) error { if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) }<|fim▁hole|> return nil } func (o *Runner) Ignore(dst string) error { if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@{.y}ignoring:@|@. 
%s\n", dst) return nil } func (o *Runner) Error(err error) error { o.Color.Fprintf(os.Stderr, "@rerror:@| %s\n", err) if e, ok := err.(*ExecError); ok { o.Color.Fprintf(os.Stderr, "@routput:@|\n%s\n", e.Output) } return err } func (o *Runner) Warn(err error) error { o.Color.Fprintf(os.Stderr, "@rwarning:@| %s\n", err) if e, ok := err.(*ExecError); ok { o.Color.Fprintf(os.Stderr, "@routput:@|\n%s\n", e.Output) } return nil } func (o *Runner) RemoveDir(dst string) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@grm -r:@| %s\n", dst) if o.DryRun { return nil } return os.RemoveAll(path) } func (o *Runner) CreateDir(dst string) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@gmkdir:@| %s\n", dst) if o.DryRun { return nil } return os.MkdirAll(path, 0777) } func (o *Runner) RemoveFile(dst string) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@grm:@| %s\n", dst) if o.DryRun { return nil } return os.Remove(path) } func (o *Runner) CopyFile(src, dst string) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@gcp:@| %s\n", dst) if o.DryRun { return nil } return osutil.CopyFile(src, path) } func (o *Runner) Transcode(src string, dst string, md Audio) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@gencode:@| %s\n", dst) if o.DryRun { return nil } if ex, _ := osutil.FileExists(path); ex { err := os.Remove(path) if err != nil { return err } } return o.Encoder.Encode(src, path, md) } func (o *Runner) Update(src string, dst string, md Audio) error { path := dst if o.Strip { dst = strings.TrimPrefix(dst, o.DstPrefix) } o.Color.Printf("@gupdate:@| %s\n", dst) if o.DryRun { return nil } err := os.Remove(path) if err != nil { return err } o.Color.Printf(" -> ") return o.Transcode(src, path, md) } type MP3Encoder struct { TargetQuality int BitrateThreshold int } func (e *MP3Encoder) Ext() string { return ".mp3" } func (e *MP3Encoder) CanCopy(src, dst Audio) bool { if src.Encoding() != audio.MP3 { return false } sm := src.Metadata() if sm.EncodingBitrate() > e.BitrateThreshold { return false } return true } func (e *MP3Encoder) Encode(src, dst string, md Audio) error { q := strconv.FormatInt(int64(e.TargetQuality), 10) var bs []byte var err error if md.Encoding() == audio.MP3 { // We are much more reliable using lame directly than over ffmpeg when downsampling // MP3 files directly. bs, err = exec.Command("lame", "--mp3input", "-h", "-V"+q, src, dst).CombinedOutput() } else if md.Encoding() == audio.FLAC { // Because ffmpeg is having some bugs, we avoid using it when possible. 
enc := mp3.NewEncoder() enc.Quality = e.TargetQuality dec := exec.Command("flac", "-c", "-d", src) bs, err = enc.EncodeFromStdin(dec, dst, md.Metadata()) } else { bs, err = exec.Command("ffmpeg", "-i", src, "-vn", "-qscale:a", q, dst).CombinedOutput() } if err != nil { return &ExecError{ Err: err, Output: string(bs), } } return nil } type OPUSEncoder struct { Extension string TargetBitrate string } func (e *OPUSEncoder) Ext() string { if e.Extension == "" { return ".opus" } return e.Extension } func (e *OPUSEncoder) CanCopy(src, dst Audio) bool { return false } func (e *OPUSEncoder) Encode(src, dst string, md Audio) error { bs, err := exec.Command("ffmpeg", "-i", src, "-vn", "-acodec", "libopus", "-vbr", "on", "-compression_level", "10", "-b:a", e.TargetBitrate, dst).CombinedOutput() if err != nil { return &ExecError{ Err: err, Output: string(bs), } } return nil }<|fim▁end|>
if o.Verbose {
	o.Color.Printf("@{g.}ok:@|@. %s\n", dst)
}
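Reassembled from the prompt's prefix, the completion above, and the suffix, the Ok helper in this row reads:

func (o *Runner) Ok(dst string) error {
	if o.Strip {
		dst = strings.TrimPrefix(dst, o.DstPrefix)
	}
	if o.Verbose {
		o.Color.Printf("@{g.}ok:@|@. %s\n", dst)
	}
	return nil
}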
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>import os
import signal
import subprocess


class Camera(object):

    # In order to run mjpg-streamer through Python, make sure
    # mjpg-streamer-experimental is installed so that the .so objects
    # and mjpg-streamer are all on the default PATH and we don't have
    # to specify paths (otherwise the files fail to be found at runtime).
    # Resolution must also be specified as "integerxinteger" and not by name.

    # default layout for camera
    def __init__(self, resolution='1280x720', framerate=30, device='/dev/video0',
                 port=8080, brightness=16, contrast=32):
        self.process = None
        self.resolution = resolution
        self.framerate = framerate
        self.device = device
        self.port = port
        self.brightness = brightness
        self.contrast = contrast

        self.input = 'input_uvc.so -d {device}'.format(
            device=self.device,
        )
        self.output = 'output_http.so -p {port} {web}'.format(
            port=self.port,
            web='-w /usr/local/www'
        )

        self.status = "killed"

    # framerate shouldn't be changed: keep it at 30, which allows for a good image
    # while reserving valuable processing power for other devices. Device is formatted
    # as a string, /dev/videoNUM, where NUM reflects the order in which the camera was
    # plugged in, starting at 0. Port is the web port the image is served on:
    # change as needed.

    # open video feed for an instance of Camera
    def start(self):
        self.process = subprocess.Popen(['mjpg_streamer', '-i', self.input, '-o', self.output])

        if self.is_alive():
            self.status = 'active'<|fim▁hole|>
# closes video feed for an instance of Camera: each instance of Camera must be killed
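A minimal usage sketch for the Camera wrapper above (the device and port values are illustrative, and mjpg_streamer must be on PATH as the class comments require):

cam = Camera(device='/dev/video0', port=8080)
cam.start()        # launches mjpg_streamer; status becomes 'active'
cam.suspend()      # SIGSTOP pauses the stream without tearing it down
cam.unsuspend()    # SIGCONT resumes it
cam.kill()         # terminates the mjpg_streamer process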
<|file_name|>model.py<|end_file_name|><|fim▁begin|>import tensorflow as tf from tensorflow.python.ops import rnn_cell from tensorflow.python.ops import seq2seq import numpy as np class Model(): def __init__(self, args, infer=False): self.args = args if infer: args.batch_size = 1 args.seq_length = 1 if args.model == 'rnn': cell_fn = rnn_cell.BasicRNNCell elif args.model == 'gru': cell_fn = rnn_cell.GRUCell elif args.model == 'lstm': cell_fn = rnn_cell.BasicLSTMCell else: raise Exception("model type not supported: {}".format(args.model)) cell = cell_fn(args.rnn_size) self.cell = cell = rnn_cell.MultiRNNCell([cell] * args.num_layers) self.input_data = tf.placeholder(tf.float32, [args.batch_size, args.seq_length], name="input") self.targets = tf.placeholder(tf.int32, [args.batch_size, args.seq_length], name="targets") self.initial_state = cell.zero_state(args.batch_size, tf.float32) inputs_data = tf.split(1, args.seq_length, self.input_data) args.vocab_size = 1 with tf.variable_scope('rnnlm'): softmax_w = tf.get_variable("softmax_w", [args.rnn_size, args.vocab_size])<|fim▁hole|> # inputs = tf.split(1, args.seq_length, tf.nn.embedding_lookup(embedding, self.input_data)) #inputs = tf.split(1, args.seq_length, self.input_data) # inputs = [tf.squeeze(input_, [1]) for input_ in inputs] #def loop(prev, _): # prev = tf.matmul(prev, softmax_w) + softmax_b # prev_symbol = tf.stop_gradient(tf.argmax(prev, 1)) # return tf.nn.embedding_lookup(embedding, prev_symbol) #outputs, last_state = seq2seq.rnn_decoder(inputs, self.initial_state, cell, loop_function=loop if infer else None, scope='rnnlm') outputs, last_state = seq2seq.rnn_decoder(inputs_data, self.initial_state, cell) output = tf.reshape(tf.concat(1, outputs), [-1, args.rnn_size]) self.logits = tf.matmul(output, softmax_w) + softmax_b self.probs = tf.nn.softmax(self.logits) #loss = seq2seq.sequence_loss_by_example([self.logits], # [tf.reshape(self.targets, [-1])], # [tf.ones([args.batch_size * args.seq_length])], # args.vocab_size) self.reg_cost = tf.reduce_sum(1e-1 * (tf.nn.l2_loss(softmax_w))) target = tf.cast(self.targets, tf.float32) self.target_vector = tf.reshape(target, [-1]) loss = tf.pow(self.logits / self.target_vector, 2) self.cost = tf.reduce_sum(loss) / args.batch_size / args.seq_length + self.reg_cost self.final_state = last_state self.lr = tf.Variable(0.0, trainable=False) tvars = tf.trainable_variables() grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars), args.grad_clip) optimizer = tf.train.AdamOptimizer(self.lr) self.train_op = optimizer.apply_gradients(zip(grads, tvars)) def sample(self, sess, chars, vocab, num=200, prime='The ', sampling_type=1): state = self.cell.zero_state(1, tf.float32).eval() for char in prime[:-1]: x = np.zeros((1, 1)) x[0, 0] = vocab[char] feed = {self.input_data: x, self.initial_state:state} [state] = sess.run([self.final_state], feed) def weighted_pick(weights): t = np.cumsum(weights) s = np.sum(weights) return(int(np.searchsorted(t, np.random.rand(1)*s))) ret = prime char = prime[-1] for n in range(num): x = np.zeros((1, 1)) x[0, 0] = vocab[char] feed = {self.input_data: x, self.initial_state:state} [probs, state] = sess.run([self.probs, self.final_state], feed) p = probs[0] if sampling_type == 0: sample = np.argmax(p) elif sampling_type == 2: if char == ' ': sample = weighted_pick(p) else: sample = np.argmax(p) else: # sampling_type == 1 default: sample = weighted_pick(p) pred = chars[sample] ret += pred char = pred return ret<|fim▁end|>
softmax_b = tf.get_variable("softmax_b", [args.vocab_size]) # with tf.device("/cpu:0"): # embedding = tf.get_variable("embedding", [args.vocab_size, args.rnn_size])
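The row above targets the legacy TensorFlow 0.x rnn_cell/seq2seq API. Below is a hedged instantiation sketch showing only the attributes the constructor actually reads; the field values are illustrative, not taken from the row:

class Args(object):
    model = 'lstm'      # one of 'rnn', 'gru', 'lstm'
    rnn_size = 128      # hidden units per layer
    num_layers = 2
    batch_size = 50
    seq_length = 50
    grad_clip = 5.0

model = Model(Args())   # infer=True would force batch_size = seq_length = 1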
<|file_name|>NametagGroup.py<|end_file_name|><|fim▁begin|>from panda3d.core import * from panda3d.direct import * from NametagConstants import * from Nametag3d import * from Nametag2d import * class NametagGroup: CCNormal = CCNormal CCNoChat = CCNoChat CCNonPlayer = CCNonPlayer CCSuit = CCSuit CCToonBuilding = CCToonBuilding CCSuitBuilding = CCSuitBuilding CCHouseBuilding = CCHouseBuilding CCSpeedChat = CCSpeedChat CCFreeChat = CCFreeChat CHAT_TIMEOUT_MAX = 12.0 CHAT_TIMEOUT_MIN = 4.0 CHAT_TIMEOUT_PROP = 0.5 def __init__(self): self.nametag2d = Nametag2d() self.nametag3d = Nametag3d() self.icon = PandaNode('icon') self.chatTimeoutTask = None self.font = None self.speechFont = None self.name = '' self.displayName = '' self.wordWrap = None self.qtColor = VBase4(1,1,1,1) self.colorCode = CCNormal self.avatar = None self.active = True self.chatPages = [] self.chatPage = 0 self.chatFlags = 0 self.objectCode = None self.manager = None self.nametags = [] self.addNametag(self.nametag2d) self.addNametag(self.nametag3d) self.visible3d = True # Is a 3D nametag visible, or do we need a 2D popup? self.tickTask = taskMgr.add(self.__tickTask, self.getUniqueId(), sort=45) self.stompTask = None self.stompText = None self.stompFlags = 0 def destroy(self): taskMgr.remove(self.tickTask) if self.manager is not None: self.unmanage(self.manager) for nametag in list(self.nametags): self.removeNametag(nametag) if self.stompTask: self.stompTask.remove() def getNametag2d(self): return self.nametag2d def getNametag3d(self): return self.nametag3d def getNameIcon(self): return self.icon def getNumChatPages(self): if not self.chatFlags & (CFSpeech|CFThought): return 0 return len(self.chatPages)<|fim▁hole|> def getChatStomp(self): return bool(self.stompTask) def getChat(self): if self.chatPage >= len(self.chatPages): return '' else: return self.chatPages[self.chatPage] def getStompText(self): return self.stompText def getStompDelay(self): return 0.2 def getUniqueId(self): return 'Nametag-%d' % id(self) def hasButton(self): return bool(self.getButtons()) def getButtons(self): if self.getNumChatPages() < 2: # Either only one page or no pages displayed. This means no button, # unless the game code specifically requests one. if self.chatFlags & CFQuitButton: return NametagGlobals.quitButtons elif self.chatFlags & CFPageButton: return NametagGlobals.pageButtons else: return None elif self.chatPage == self.getNumChatPages()-1: # Last page of a multiple-page chat. This calls for a quit button, # unless the game says otherwise. if not self.chatFlags & CFNoQuitButton: return NametagGlobals.quitButtons else: return None else: # Non-last page of a multiple-page chat. This calls for a page # button, but only if the game requests it: if self.chatFlags & CFPageButton: return NametagGlobals.pageButtons else: return None def setActive(self, active): self.active = active def isActive(self): return self.active def setAvatar(self, avatar): self.avatar = avatar def setFont(self, font): self.font = font self.updateTags() def setSpeechFont(self, font): self.speechFont = font self.updateTags() def setWordwrap(self, wrap): self.wordWrap = wrap self.updateTags() def setColorCode(self, cc): self.colorCode = cc self.updateTags() def setName(self, name): self.name = name self.updateTags() def setDisplayName(self, name): self.displayName = name self.updateTags() def setQtColor(self, color): self.qtColor = color self.updateTags() def setChat(self, chatString, chatFlags): if not self.chatFlags&CFSpeech: # We aren't already displaying some chat. 
Therefore, we don't have # to stomp. self._setChat(chatString, chatFlags) else: # Stomp! self.clearChat() self.stompText = chatString self.stompFlags = chatFlags self.stompTask = taskMgr.doMethodLater(self.getStompDelay(), self.__updateStomp, 'ChatStomp-' + self.getUniqueId()) def _setChat(self, chatString, chatFlags): if chatString: self.chatPages = chatString.split('\x07') self.chatFlags = chatFlags else: self.chatPages = [] self.chatFlags = 0 self.setPageNumber(0) # Calls updateTags() for us. self._stopChatTimeout() if chatFlags&CFTimeout: self._startChatTimeout() def __updateStomp(self, task): self._setChat(self.stompText, self.stompFlags) self.stompTask = None def setContents(self, contents): # This function is a little unique, it's meant to override contents on # EXISTING nametags only: for tag in self.nametags: tag.setContents(contents) def setObjectCode(self, objectCode): self.objectCode = objectCode def getObjectCode(self): return self.objectCode def _startChatTimeout(self): length = len(self.getChat()) timeout = min(max(length*self.CHAT_TIMEOUT_PROP, self.CHAT_TIMEOUT_MIN), self.CHAT_TIMEOUT_MAX) self.chatTimeoutTask = taskMgr.doMethodLater(timeout, self.__doChatTimeout, 'ChatTimeout-' + self.getUniqueId()) def __doChatTimeout(self, task): self._setChat('', 0) return task.done def _stopChatTimeout(self): if self.chatTimeoutTask: taskMgr.remove(self.chatTimeoutTask) def clearShadow(self): pass def clearChat(self): self._setChat('', 0) if self.stompTask: self.stompTask.remove() def updateNametag(self, tag): tag.font = self.font tag.speechFont = self.speechFont tag.name = self.name tag.wordWrap = self.wordWrap or DEFAULT_WORDWRAPS[self.colorCode] tag.displayName = self.displayName or self.name tag.qtColor = self.qtColor tag.colorCode = self.colorCode tag.chatString = self.getChat() tag.buttons = self.getButtons() tag.chatFlags = self.chatFlags tag.avatar = self.avatar tag.icon = self.icon tag.update() def __testVisible3D(self): # We must determine if a 3D nametag is visible or not, since this # affects the visibility state of 2D nametags. # Next, we iterate over all of our nametags until we find a visible # one: for nametag in self.nametags: if not isinstance(nametag, Nametag3d): continue # It's not in the 3D system, disqualified. if nametag.isOnScreen(): return True # If we got here, none of the tags were a match... return False def __tickTask(self, task): for nametag in self.nametags: nametag.tick() if (NametagGlobals.masterNametagsActive and self.active) or self.hasButton(): nametag.setClickRegionEvent(self.getUniqueId()) else: nametag.setClickRegionEvent(None) if NametagGlobals.onscreenChatForced and self.chatFlags & CFSpeech: # Because we're *forcing* chat onscreen, we skip the visible3d test # and go ahead and display it anyway. visible3d = False elif not NametagGlobals.masterArrowsOn and not self.chatFlags: # We're forcing margins offscreen; therefore, we should pretend # that the 3D nametag is always visible. 
visible3d = True else: visible3d = self.__testVisible3D() if visible3d ^ self.visible3d: self.visible3d = visible3d for nametag in self.nametags: if isinstance(nametag, MarginPopup): nametag.setVisible(not visible3d) return task.cont def updateTags(self): for nametag in self.nametags: self.updateNametag(nametag) def addNametag(self, nametag): self.nametags.append(nametag) self.updateNametag(nametag) if self.manager is not None and isinstance(nametag, MarginPopup): nametag.manage(manager) def removeNametag(self, nametag): self.nametags.remove(nametag) if self.manager is not None and isinstance(nametag, MarginPopup): nametag.unmanage(manager) nametag.destroy() def manage(self, manager): self.manager = manager for tag in self.nametags: if isinstance(tag, MarginPopup): tag.manage(manager) def unmanage(self, manager): self.manager = None for tag in self.nametags: if isinstance(tag, MarginPopup): tag.unmanage(manager) tag.destroy()<|fim▁end|>
    def setPageNumber(self, page):
        self.chatPage = page
        self.updateTags()
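The completion above slots back between getNumChatPages and getChatStomp in the prompt:

    def getNumChatPages(self):
        if not self.chatFlags & (CFSpeech|CFThought):
            return 0
        return len(self.chatPages)

    def setPageNumber(self, page):
        self.chatPage = page
        self.updateTags()

    def getChatStomp(self):
        return bool(self.stompTask)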
<|file_name|>leafletScriptStrings.py<|end_file_name|><|fim▁begin|>from utils import scaleToZoom def jsonScript(layer): json = """ <script src="data/json_{layer}.js\"></script>""".format(layer=layer) return json def scaleDependentLayerScript(layer, layerName): min = layer.minimumScale() max = layer.maximumScale() scaleDependentLayer = """ if (map.getZoom() <= {min} && map.getZoom() >= {max}) {{ feature_group.addLayer(json_{layerName}JSON); console.log("show"); //restackLayers(); }} else if (map.getZoom() > {min} || map.getZoom() < {max}) {{ feature_group.removeLayer(json_{layerName}JSON); console.log("hide"); //restackLayers(); }}""".format(min=scaleToZoom(min), max=scaleToZoom(max), layerName=layerName) return scaleDependentLayer def scaleDependentScript(layers): scaleDependent = """ map.on("zoomend", function(e) {""" scaleDependent += layers scaleDependent += """ });""" scaleDependent += layers return scaleDependent def openScript(): openScript = """ <script>""" return openScript def crsScript(crsAuthId, crsProj4): crs = """ var crs = new L.Proj.CRS('{crsAuthId}', '{crsProj4}', {{ resolutions: [2800, 1400, 700, 350, 175, 84, 42, 21, 11.2, 5.6, 2.8, 1.4, 0.7, 0.35, 0.14, 0.07], }});""".format(crsAuthId=crsAuthId, crsProj4=crsProj4) return crs def mapScript(extent, matchCRS, crsAuthId, measure, maxZoom, minZoom, bounds): map = """ var map = L.map('map', {""" if extent == "Canvas extent" and matchCRS and crsAuthId != 'EPSG:4326': map += """ crs: crs, continuousWorld: false, worldCopyJump: false, """ if measure: map += """ measureControl:true,""" map += """ zoomControl:true, maxZoom:""" + unicode(maxZoom) + """, minZoom:""" + unicode(minZoom) + """ })""" if extent == "Canvas extent": map += """.fitBounds(""" + bounds + """);""" map += """ var hash = new L.Hash(map); var additional_attrib = '<a href="https://github.com/tomchadwin/qgis2web" target ="_blank">qgis2web</a>';""" return map def featureGroupsScript(): featureGroups = """ var feature_group = new L.featureGroup([]); var raster_group = new L.LayerGroup([]);""" return featureGroups def basemapsScript(basemap, attribution): basemaps = """ var basemap = L.tileLayer('{basemap}', {{ attribution: additional_attrib + ' {attribution}' }}); basemap.addTo(map);""".format(basemap=basemap, attribution=attribution) return basemaps def layerOrderScript(): layerOrder = """ var layerOrder=new Array(); function restackLayers() { for (index = 0; index < layerOrder.length; index++) { feature_group.removeLayer(layerOrder[index]); feature_group.addLayer(layerOrder[index]); } } layerControl = L.control.layers({},{},{collapsed:false});""" return layerOrder def popFuncsScript(table): popFuncs = """ var popupContent = {table}; layer.bindPopup(popupContent);""".format(table=table) return popFuncs def popupScript(safeLayerName, popFuncs): popup = """ function pop_{safeLayerName}(feature, layer) {{{popFuncs} }}""".format(safeLayerName=safeLayerName, popFuncs=popFuncs) return popup def pointToLayerScript(radius, borderWidth, borderStyle, colorName, borderColor, borderOpacity, opacity, labeltext): pointToLayer = """ pointToLayer: function (feature, latlng) {{ return L.circleMarker(latlng, {{ radius: {radius}, fillColor: '{colorName}', color: '{borderColor}', weight: {borderWidth}, opacity: {borderOpacity}, dashArray: '{dashArray}', fillOpacity: {opacity} }}){labeltext}""".format(radius=radius, colorName=colorName, borderColor=borderColor, borderWidth=borderWidth * 4, borderOpacity=borderOpacity if borderStyle != 0 else 0, dashArray=getLineStyle(borderStyle, 
borderWidth), opacity=opacity, labeltext=labeltext) return pointToLayer def pointStyleScript(pointToLayer, popFuncs): pointStyle = """{pointToLayer} }}, onEachFeature: function (feature, layer) {{{popFuncs} }}""".format(pointToLayer=pointToLayer, popFuncs=popFuncs) return pointStyle def wfsScript(scriptTag): wfs = """ <script src='{scriptTag}'></script>""".format(scriptTag=scriptTag) return wfs def jsonPointScript(safeLayerName, pointToLayer, usedFields): if usedFields != 0: jsonPoint = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ onEachFeature: pop_{safeLayerName}, {pointToLayer} }} }}); layerOrder[layerOrder.length] = json_{safeLayerName}JSON;""".format(safeLayerName=safeLayerName, pointToLayer=pointToLayer) else: jsonPoint = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ {pointToLayer} }} }}); layerOrder[layerOrder.length] = json_{safeLayerName}JSON;""".format(safeLayerName=safeLayerName, pointToLayer=pointToLayer) return jsonPoint def clusterScript(safeLayerName): cluster = """ var cluster_group{safeLayerName}JSON = new L.MarkerClusterGroup({{showCoverageOnHover: false}}); cluster_group{safeLayerName}JSON.addLayer(json_{safeLayerName}JSON);""".format(safeLayerName=safeLayerName) return cluster def categorizedPointStylesScript(symbol, opacity, borderOpacity): styleValues = """ radius: '{radius}', fillColor: '{fillColor}', color: '{color}', weight: {borderWidth}, opacity: {borderOpacity}, dashArray: '{dashArray}', fillOpacity: '{opacity}', }}; break;""".format(radius=symbol.size() * 2, fillColor=symbol.color().name(), color=symbol.symbolLayer(0).borderColor().name(), borderWidth=symbol.symbolLayer(0).outlineWidth() * 4, borderOpacity=borderOpacity if symbol.symbolLayer(0).outlineStyle() != 0 else 0, dashArray=getLineStyle(symbol.symbolLayer(0).outlineStyle(), symbol.symbolLayer(0).outlineWidth()), opacity=opacity) return styleValues def simpleLineStyleScript(radius, colorName, penStyle, opacity): lineStyle = """ return {{ weight: {radius}, color: '{colorName}', dashArray: '{penStyle}', opacity: {opacity} }};""".format(radius=radius * 4, colorName=colorName, penStyle=penStyle, opacity=opacity) return lineStyle def singlePolyStyleScript(radius, colorName, borderOpacity, fillColor, penStyle, opacity): polyStyle = """ return {{ weight: {radius}, color: '{colorName}', fillColor: '{fillColor}', dashArray: '{penStyle}', opacity: {borderOpacity}, fillOpacity: {opacity} }};""".format(radius=radius, colorName=colorName, fillColor=fillColor, penStyle=penStyle, borderOpacity=borderOpacity, opacity=opacity) return polyStyle def nonPointStylePopupsScript(lineStyle, popFuncs): nonPointStylePopups = """ style: function (feature) {{{lineStyle} }}, onEachFeature: function (feature, layer) {{{popFuncs} }}""".format(lineStyle=lineStyle, popFuncs=popFuncs) return nonPointStylePopups def nonPointStyleFunctionScript(safeLayerName, lineStyle): nonPointStyleFunction = """ function doStyle{safeLayerName}(feature) {{{lineStyle} }}""".format(safeLayerName=safeLayerName, lineStyle=lineStyle) return nonPointStyleFunction def categoryScript(layerName, valueAttr): category = """ function doStyle{layerName}(feature) {{ switch (feature.properties.{valueAttr}) {{""".format(layerName=layerName, valueAttr=valueAttr) return category def defaultCategoryScript(): defaultCategory = """ default: return {""" return defaultCategory def eachCategoryScript(catValue): if isinstance(catValue, basestring): valQuote = "'" else: valQuote = "" eachCategory = """ case """ + valQuote + 
unicode(catValue) + valQuote + """: return {""" return eachCategory def endCategoryScript(): endCategory = """ } }""" return endCategory def categorizedPointWFSscript(layerName, labeltext, popFuncs): categorizedPointWFS = """ pointToLayer: function (feature, latlng) {{ return L.circleMarker(latlng, doStyle{layerName}(feature)){labeltext} }}, onEachFeature: function (feature, layer) {{{popFuncs} }}""".format(layerName=layerName, labeltext=labeltext, popFuncs=popFuncs) return categorizedPointWFS def categorizedPointJSONscript(safeLayerName, labeltext, usedFields): if usedFields != 0: categorizedPointJSON = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ onEachFeature: pop_{safeLayerName}, pointToLayer: function (feature, latlng) {{ return L.circleMarker(latlng, doStyle{safeLayerName}(feature)){labeltext} }} }}); layerOrder[layerOrder.length] = json_{safeLayerName}JSON;""".format(safeLayerName=safeLayerName, labeltext=labeltext) else: categorizedPointJSON = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ pointToLayer: function (feature, latlng) {{ return L.circleMarker(latlng, doStyle{safeLayerName}(feature)){labeltext} }} }}); layerOrder[layerOrder.length] = json_{safeLayerName}JSON;""".format(safeLayerName=safeLayerName, labeltext=labeltext) return categorizedPointJSON def categorizedLineStylesScript(symbol, opacity): categorizedLineStyles = """ color: '{color}', weight: '{weight}', dashArray: '{dashArray}', opacity: '{opacity}', }}; break;""".format(color=symbol.color().name(), weight=symbol.width() * 4, dashArray=getLineStyle(symbol.symbolLayer(0).penStyle(), symbol.width()), opacity=opacity) return categorizedLineStyles def categorizedNonPointStyleFunctionScript(layerName, popFuncs): categorizedNonPointStyleFunction = """ style: doStyle{layerName}, onEachFeature: function (feature, layer) {{{popFuncs} }}""".format(layerName=layerName, popFuncs=popFuncs) return categorizedNonPointStyleFunction def categorizedPolygonStylesScript(symbol, opacity, borderOpacity): categorizedPolygonStyles = """ weight: '{weight}', fillColor: '{fillColor}', color: '{color}', dashArray: '{dashArray}', opacity: '{borderOpacity}', fillOpacity: '{opacity}', }}; break;""".format(weight=symbol.symbolLayer(0).borderWidth() * 4, fillColor=symbol.color().name() if symbol.symbolLayer(0).brushStyle() != 0 else "none", color=symbol.symbolLayer(0).borderColor().name() if symbol.symbolLayer(0).borderStyle() != 0 else "none", dashArray=getLineStyle(symbol.symbolLayer(0).borderStyle(), symbol.symbolLayer(0).borderWidth()), borderOpacity=borderOpacity, opacity=opacity) return categorizedPolygonStyles def graduatedStyleScript(layerName): graduatedStyle = """ function doStyle{layerName}(feature) {{""".format(layerName=layerName) return graduatedStyle def rangeStartScript(valueAttr, r): rangeStart = """ if (feature.properties.{valueAttr} >= {lowerValue} && feature.properties.{valueAttr} <= {upperValue}) {{""".format(valueAttr=valueAttr, lowerValue=r.lowerValue(), upperValue=r.upperValue()) return rangeStart def graduatedPointStylesScript(valueAttr, r, symbol, opacity, borderOpacity): graduatedPointStyles = rangeStartScript(valueAttr, r) graduatedPointStyles += """ return {{ radius: '{radius}', fillColor: '{fillColor}', color: '{color}', weight: {lineWeight}, fillOpacity: '{opacity}', opacity: '{borderOpacity}', dashArray: '{dashArray}' }} }}""".format(radius=symbol.size() * 2, fillColor=symbol.color().name(), color=symbol.symbolLayer(0).borderColor().name(), 
lineWeight=symbol.symbolLayer(0).outlineWidth() * 4, opacity=opacity, borderOpacity=borderOpacity, dashArray=getLineStyle(symbol.symbolLayer(0).outlineStyle(), symbol.symbolLayer(0).outlineWidth())) return graduatedPointStyles def graduatedLineStylesScript(valueAttr, r, categoryStr, symbol, opacity): graduatedLineStyles = rangeStartScript(valueAttr, r) graduatedLineStyles += """ return {{ color: '{color}', weight: '{weight}', dashArray: '{dashArray}', opacity: '{opacity}', }} }}""".format(color=symbol.symbolLayer(0).color().name(), weight=symbol.width() * 4, dashArray=getLineStyle(symbol.symbolLayer(0).penStyle(), symbol.width()), opacity=opacity) return graduatedLineStyles def graduatedPolygonStylesScript(valueAttr, r, symbol, opacity, borderOpacity): graduatedPolygonStyles = rangeStartScript(valueAttr, r) graduatedPolygonStyles += """ return {{ color: '{color}', weight: '{weight}', dashArray: '{dashArray}', fillColor: '{fillColor}', opacity: '{borderOpacity}', fillOpacity: '{opacity}', }} }}""".format(color=symbol.symbolLayer(0).borderColor().name(), weight=symbol.symbolLayer(0).borderWidth() * 4 if symbol.symbolLayer(0).borderStyle() != 0 else "0", dashArray=getLineStyle(symbol.symbolLayer(0).borderStyle(), symbol.symbolLayer(0).borderWidth() if symbol.symbolLayer(0).borderStyle() != 0 else "0"), fillColor=symbol.color().name() if symbol.symbolLayer(0).brushStyle() != 0 else "none", borderOpacity=borderOpacity, opacity=opacity) return graduatedPolygonStyles def endGraduatedStyleScript(): endGraduatedStyle = """ }""" return endGraduatedStyle <|fim▁hole|>def customMarkerScript(safeLayerName, labeltext, usedFields): if usedFields != 0: customMarker = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ onEachFeature: pop_{safeLayerName}, pointToLayer: function (feature, latlng) {{ return L.marker(latlng, {{ icon: L.icon({{ iconUrl: feature.properties.icon_exp, iconSize: [24, 24], // size of the icon change this to scale your icon (first coordinate is x, second y from the upper left corner of the icon) iconAnchor: [12, 12], // point of the icon which will correspond to marker's location (first coordinate is x, second y from the upper left corner of the icon) popupAnchor: [0, -14] // point from which the popup should open relative to the iconAnchor (first coordinate is x, second y from the upper left corner of the icon) }}) }}){labeltext} }}}} );""".format(safeLayerName=safeLayerName, labeltext=labeltext) else: customMarker = """ var json_{safeLayerName}JSON = new L.geoJson(json_{safeLayerName}, {{ pointToLayer: function (feature, latlng) {{ return L.marker(latlng, {{ icon: L.icon({{ iconUrl: feature.properties.icon_exp, iconSize: [24, 24], // size of the icon change this to scale your icon (first coordinate is x, second y from the upper left corner of the icon) iconAnchor: [12, 12], // point of the icon which will correspond to marker's location (first coordinate is x, second y from the upper left corner of the icon) popupAnchor: [0, -14] // point from which the popup should open relative to the iconAnchor (first coordinate is x, second y from the upper left corner of the icon) }}) }}){labeltext} }}}} );""".format(safeLayerName=safeLayerName, labeltext=labeltext) return customMarker def wmsScript(safeLayerName, wms_url, wms_layer, wms_format): wms = """ var overlay_{safeLayerName} = L.tileLayer.wms('{wms_url}', {{ layers: '{wms_layer}', format: '{wms_format}', transparent: true, continuousWorld : true, }});""".format(safeLayerName=safeLayerName, wms_url=wms_url, 
wms_layer=wms_layer, wms_format=wms_format) return wms def rasterScript(safeLayerName, out_raster_name, bounds): raster = """ var img_{safeLayerName} = '{out_raster_name}'; var img_bounds_{safeLayerName} = {bounds}; var overlay_{safeLayerName} = new L.imageOverlay(img_{safeLayerName}, img_bounds_{safeLayerName});""".format(safeLayerName=safeLayerName, out_raster_name=out_raster_name, bounds=bounds) return raster def titleSubScript(webmap_head, webmap_subhead): titleSub = """ var title = new L.Control(); title.onAdd = function (map) { this._div = L.DomUtil.create('div', 'info'); // create a div with a class "info" this.update(); return this._div; }; title.update = function () { this._div.innerHTML = '<h2>""" + webmap_head.encode('utf-8') + """</h2>""" + webmap_subhead.encode('utf-8') + """' }; title.addTo(map);""" return titleSub def addressSearchScript(): addressSearch = """ var osmGeocoder = new L.Control.OSMGeocoder({ collapsed: false, position: 'topright', text: 'Search', }); osmGeocoder.addTo(map);""" return addressSearch def locateScript(): locate = """ map.locate({setView: true, maxZoom: 16}); function onLocationFound(e) { var radius = e.accuracy / 2; L.marker(e.latlng).addTo(map) .bindPopup("You are within " + radius + " meters from this point").openPopup(); L.circle(e.latlng, radius).addTo(map); } map.on('locationfound', onLocationFound); """ return locate def endHTMLscript(wfsLayers): endHTML = """ </script>{wfsLayers} </body> </html>""".format(wfsLayers=wfsLayers) return endHTML def getLineStyle(penType, lineWidth): dash = lineWidth * 10 dot = lineWidth * 1 gap = lineWidth * 5 if penType > 1: if penType == 2: penStyle = [dash, gap] if penType == 3: penStyle = [dot, gap] if penType == 4: penStyle = [dash, gap, dot, gap] if penType == 5: penStyle = [dash, gap, dot, gap, dot, gap] penStyle = ','.join(map(str, penStyle)) else: penStyle = "" return penStyle<|fim▁end|>
<|file_name|>kvazaarfilter.cpp<|end_file_name|><|fim▁begin|>#include "kvazaarfilter.h" #include "statisticsinterface.h" #include "common.h" #include "settingskeys.h" #include "logger.h" #include <kvazaar.h> #include <QtDebug> #include <QTime> #include <QSize> enum RETURN_STATUS {C_SUCCESS = 0, C_FAILURE = -1}; KvazaarFilter::KvazaarFilter(QString id, StatisticsInterface *stats, std::shared_ptr<HWResourceManager> hwResources): Filter(id, "Kvazaar", stats, hwResources, DT_YUV420VIDEO, DT_HEVCVIDEO), api_(nullptr), config_(nullptr), enc_(nullptr), pts_(0), input_pic_(nullptr), framerate_num_(30), framerate_denom_(1), encodingFrames_() { maxBufferSize_ = 3; } void KvazaarFilter::updateSettings() { Logger::getLogger()->printNormal(this, "Updating kvazaar settings"); stop(); while(isRunning()) { sleep(1); } close(); encodingFrames_.clear(); if(init()) { Logger::getLogger()->printNormal(this, "Resolution change successful"); } else { Logger::getLogger()->printNormal(this, "Failed to change resolution"); } start(); Filter::updateSettings(); } bool KvazaarFilter::init() { Logger::getLogger()->printNormal(this, "Iniating Kvazaar"); // input picture should not exist at this point if(!input_pic_ && !api_) { api_ = kvz_api_get(8); if(!api_) { Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Failed to retrieve Kvazaar API."); return false; } config_ = api_->config_alloc(); enc_ = nullptr; if(!config_) { Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Failed to allocate Kvazaar config."); return false; } QSettings settings(settingsFile, settingsFileFormat); api_->config_init(config_); api_->config_parse(config_, "preset", settings.value(SettingsKey::videoPreset).toString().toUtf8()); // input #ifdef __linux__ if (settingEnabled(SettingsKey::screenShareStatus)) { config_->width = settings.value(SettingsKey::videoResultionWidth).toInt(); config_->height = settings.value(SettingsKey::videoResultionHeight).toInt(); framerate_num_ = settings.value(SettingsKey::videoFramerate).toFloat(); config_->framerate_num = framerate_num_; } else { // On Linux the Camerafilter seems to have a Qt bug that causes not being able to set resolution config_->width = 640; config_->height = 480; config_->framerate_num = 30; } #else config_->width = settings.value(SettingsKey::videoResultionWidth).toInt(); config_->height = settings.value(SettingsKey::videoResultionHeight).toInt(); convertFramerate(settings.value(SettingsKey::videoFramerate).toReal()); config_->framerate_num = framerate_num_; #endif config_->framerate_denom = framerate_denom_; // parallelization if (settings.value(SettingsKey::videoKvzThreads) == "auto") { config_->threads = QThread::idealThreadCount(); } else if (settings.value(SettingsKey::videoKvzThreads) == "Main") { config_->threads = 0; } else { config_->threads = settings.value(SettingsKey::videoKvzThreads).toInt(); } config_->owf = settings.value(SettingsKey::videoOWF).toInt(); config_->wpp = settings.value(SettingsKey::videoWPP).toInt(); bool tiles = false; if (tiles) { std::string dimensions = settings.value(SettingsKey::videoTileDimensions).toString().toStdString(); api_->config_parse(config_, "tiles", dimensions.c_str()); } // this does not work with uvgRTP at the moment. Avoid using slices. 
if(settings.value(SettingsKey::videoSlices).toInt() == 1) { if(config_->wpp) { config_->slices = KVZ_SLICES_WPP; } else if (tiles) { config_->slices = KVZ_SLICES_TILES; } } // Structure config_->qp = settings.value(SettingsKey::videoQP).toInt(); config_->intra_period = settings.value(SettingsKey::videoIntra).toInt(); config_->vps_period = settings.value(SettingsKey::videoVPS).toInt(); config_->target_bitrate = settings.value(SettingsKey::videoBitrate).toInt(); if (config_->target_bitrate != 0) { QString rcAlgo = settings.value(SettingsKey::videoRCAlgorithm).toString(); if (rcAlgo == "lambda") { config_->rc_algorithm = KVZ_LAMBDA; } else if (rcAlgo == "oba") { config_->rc_algorithm = KVZ_OBA; config_->clip_neighbour = settings.value(SettingsKey::videoOBAClipNeighbours).toInt(); } else { Logger::getLogger()->printWarning(this, "Some carbage in rc algorithm setting"); config_->rc_algorithm = KVZ_NO_RC; } } else { config_->rc_algorithm = KVZ_NO_RC; } config_->gop_lowdelay = 1; if (settings.value(SettingsKey::videoScalingList).toInt() == 0) { config_->scaling_list = KVZ_SCALING_LIST_OFF; } else { config_->scaling_list = KVZ_SCALING_LIST_DEFAULT; } config_->lossless = settings.value(SettingsKey::videoLossless).toInt(); QString constraint = settings.value(SettingsKey::videoMVConstraint).toString(); if (constraint == "frame") { config_->mv_constraint = KVZ_MV_CONSTRAIN_FRAME; } else if (constraint == "tile") { config_->mv_constraint = KVZ_MV_CONSTRAIN_TILE; } else if (constraint == "frametile") { config_->mv_constraint = KVZ_MV_CONSTRAIN_FRAME_AND_TILE; } else if (constraint == "frametilemargin") { config_->mv_constraint = KVZ_MV_CONSTRAIN_FRAME_AND_TILE_MARGIN; } else { config_->mv_constraint = KVZ_MV_CONSTRAIN_NONE; } config_->set_qp_in_cu = settings.value(SettingsKey::videoQPInCU).toInt(); config_->vaq = settings.value(SettingsKey::videoVAQ).toInt(); // compression-tab customParameters(settings); config_->hash = KVZ_HASH_NONE; enc_ = api_->encoder_open(config_); if(!enc_) { Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Failed to open Kvazaar encoder."); return false; } input_pic_ = api_->picture_alloc(config_->width, config_->height); if(!input_pic_) { Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Could not allocate input picture."); return false; } Logger::getLogger()->printNormal(this, "Kvazaar iniation succeeded"); } return true; } void KvazaarFilter::close() { if(api_) { api_->encoder_close(enc_); api_->config_destroy(config_); enc_ = nullptr; config_ = nullptr; api_->picture_free(input_pic_); input_pic_ = nullptr; api_ = nullptr; } pts_ = 0; Logger::getLogger()->printNormal(this, "Closed Kvazaar"); } void KvazaarFilter::process() { Q_ASSERT(enc_); Q_ASSERT(config_); <|fim▁hole|> while(input) { if(!input_pic_) { Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Input picture was not allocated correctly."); break; } feedInput(std::move(input)); input = getInput(); } } void KvazaarFilter::customParameters(QSettings& settings) { int size = settings.beginReadArray(SettingsKey::videoCustomParameters); Logger::getLogger()->printNormal(this, "Getting custom Kvazaar parameters", "Amount", QString::number(size)); for(int i = 0; i < size; ++i) { settings.setArrayIndex(i); QString name = settings.value("Name").toString(); QString value = settings.value("Value").toString(); if (api_->config_parse(config_, name.toStdString().c_str(), value.toStdString().c_str()) != 1) { Logger::getLogger()->printWarning(this, "Invalid custom parameter for kvazaar", "Amount", 
QString::number(size)); } } settings.endArray(); } void KvazaarFilter::feedInput(std::unique_ptr<Data> input) { kvz_picture *recon_pic = nullptr; kvz_frame_info frame_info; kvz_data_chunk *data_out = nullptr; uint32_t len_out = 0; if (config_->width != input->vInfo->width || config_->height != input->vInfo->height || (double)(config_->framerate_num/config_->framerate_denom) != input->vInfo->framerate) { // This should not happen. Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this, "Input resolution or framerate differs from settings", {"Settings", "Input"}, {QString::number(config_->width) + "x" + QString::number(config_->height) + "p" + QString::number(config_->framerate_num), QString::number(input->vInfo->width) + "x" + QString::number(input->vInfo->height) + "p" + QString::number(input->vInfo->framerate)}); return; } // copy input to kvazaar picture memcpy(input_pic_->y, input->data.get(), input->vInfo->width*input->vInfo->height); memcpy(input_pic_->u, &(input->data.get()[input->vInfo->width*input->vInfo->height]), input->vInfo->width*input->vInfo->height/4); memcpy(input_pic_->v, &(input->data.get()[input->vInfo->width*input->vInfo->height + input->vInfo->width*input->vInfo->height/4]), input->vInfo->width*input->vInfo->height/4); input_pic_->pts = pts_; ++pts_; encodingFrames_.push_front(std::move(input)); api_->encoder_encode(enc_, input_pic_, &data_out, &len_out, &recon_pic, nullptr, &frame_info ); while(data_out != nullptr) { parseEncodedFrame(data_out, len_out, recon_pic); // see if there is more output ready api_->encoder_encode(enc_, nullptr, &data_out, &len_out, &recon_pic, nullptr, &frame_info ); } } void KvazaarFilter::parseEncodedFrame(kvz_data_chunk *data_out, uint32_t len_out, kvz_picture *recon_pic) { std::unique_ptr<Data> encodedFrame = std::move(encodingFrames_.back()); encodingFrames_.pop_back(); std::unique_ptr<uchar[]> hevc_frame(new uchar[len_out]); uint8_t* writer = hevc_frame.get(); uint32_t dataWritten = 0; for (kvz_data_chunk *chunk = data_out; chunk != nullptr; chunk = chunk->next) { if(chunk->len > 3 && chunk->data[0] == 0 && chunk->data[1] == 0 && ( chunk->data[2] == 1 || (chunk->data[2] == 0 && chunk->data[3] == 1 )) && dataWritten != 0 && config_->slices != KVZ_SLICES_NONE) { // send previous packet if this is not the first std::unique_ptr<Data> slice(shallowDataCopy(encodedFrame.get())); sendEncodedFrame(std::move(slice), std::move(hevc_frame), dataWritten); hevc_frame = std::unique_ptr<uchar[]>(new uchar[len_out - dataWritten]); writer = hevc_frame.get(); dataWritten = 0; } memcpy(writer, chunk->data, chunk->len); writer += chunk->len; dataWritten += chunk->len; } api_->chunk_free(data_out); api_->picture_free(recon_pic); uint32_t delay = QDateTime::currentMSecsSinceEpoch() - encodedFrame->presentationTime; getStats()->sendDelay("video", delay); getStats()->addEncodedPacket("video", len_out); // send last packet reusing input structure sendEncodedFrame(std::move(encodedFrame), std::move(hevc_frame), dataWritten); } void KvazaarFilter::sendEncodedFrame(std::unique_ptr<Data> input, std::unique_ptr<uchar[]> hevc_frame, uint32_t dataWritten) { input->type = DT_HEVCVIDEO; input->data_size = dataWritten; input->data = std::move(hevc_frame); sendOutput(std::move(input)); } void KvazaarFilter::convertFramerate(double framerate) { uint32_t wholeNumber = (uint32_t)framerate; double remainder = framerate - wholeNumber; if (remainder > 0.0) { uint32_t multiplier = 1.0 /remainder; framerate_num_ = framerate*multiplier; framerate_denom_ = multiplier; } else { 
framerate_num_ = wholeNumber; framerate_denom_ = 1; } Logger::getLogger()->printNormal(this, "Got framerate num and denum", "Framerate", {QString::number(framerate_num_) + "/" + QString::number(framerate_denom_) }); }<|fim▁end|>
std::unique_ptr<Data> input = getInput();
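With the completion above in place, the process loop of the row reconstructs to:

void KvazaarFilter::process()
{
  Q_ASSERT(enc_);
  Q_ASSERT(config_);

  std::unique_ptr<Data> input = getInput();

  while(input)
  {
    if(!input_pic_)
    {
      Logger::getLogger()->printDebug(DEBUG_PROGRAM_ERROR, this,
                                      "Input picture was not allocated correctly.");
      break;
    }
    feedInput(std::move(input));
    input = getInput();
  }
}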
<|file_name|>timesheet.js<|end_file_name|><|fim▁begin|>function changeNameLength(name, limit) { return (name.length > limit) ? name.substring(0, limit - 3) + "..." : name; } function getTimesheet(data) { data = $.parseJSON(data); $(".alert").addClass("display-hide"); $(".schedule-info").removeClass("display-hide"); $("#turn").hide(); $(".table-container").hide(); if (data.valid == null) { $(".schedule-info").addClass("display-hide"); } else if (!data.valid) { if (data.error == "curricularMatrix") { $(".alert").removeClass("display-hide"); } } else { $(".table-container").show(); $("#turn").show(); generateTimesheet(data.schedules); } }<|fim▁hole|>$(document).on("click", ".btn-generate-timesheet", function () { var classroom = $("select.classroom-id").val(); $.ajax({ 'url': generateTimesheetURL, 'type': 'POST', 'data': { 'classroom': classroom, }, }).success(function (result) { getTimesheet(result); }); }); function generateTimesheet(data) { var i = 0; var turn = 0; $(".table-timesheet tr td").children().remove(); $.each(data, function (weekDay, schedules) { $.each(schedules, function (schedule, info) { if (i == 0) { if (info.turn == 0) turn = "Manhã"; if (info.turn == 1) turn = "Tarde"; if (info.turn == 2) turn = "Noite"; i++; } var discipline = changeNameLength(info.disciplineName, 20); var instructor = changeNameLength(info.instructorInfo.name, 30); var icons = ""; if (info.instructorInfo.unavailable) icons += "<i title='Horário indisponível para o instrutor.' class='unavailability-icon fa fa-times-circle darkred'></i>"; if (info.instructorInfo.countConflicts > 1) icons += "<i title='Instrutor possui " + info.instructorInfo.countConflicts + " conflitos neste horário.' class='fa fa-exclamation-triangle conflict-icon darkgoldenrod'></i>"; $(".table-timesheet tr#h" + schedule + " td[week_day=" + weekDay + "]").html( "<div schedule='"+info.id+"' class='schedule-block'>"+ "<p class='discipline-name' discipline_id='" + info.disciplineId + "' title='" + info.disciplineName + "'>" + discipline + "</p>" + "<p class='instructor-name' instructor_id='"+ info.instructorInfo.id +"' title='" + info.instructorInfo.name + "'>" + instructor + "<i class='fa fa-pencil edit-instructor'></i></p>" + icons+ "</div>"); }); }); $("#turn").text(turn); } $(document).on("click", ".schedule-selected .instructor-name",function(){ var instructorId = $(this).attr("instructor_id"); var disciplineId = $(this).parent().find(".discipline-name").attr("discipline_id"); var scheduleId = $(this).parent().attr("schedule"); $.ajax({ 'url': getInstructorsUrl, 'type': 'POST', 'data': { 'discipline':disciplineId }, }).success(function (result) { $("#change-instructor-schedule").val(scheduleId); $("#change-instructor-id").html(result); $("#change-instructor-id").val(instructorId).select2(); $("#change-instructor-modal").modal(); }); }); $(document).on("click", "#change-instructor-button", function(){ $.ajax({ 'url': changeInstructorUrl, 'type': 'POST', 'data': { 'schedule':$("#change-instructor-schedule").val(), 'instructor':$("#change-instructor-id").val() }, }).success(function (result) { getTimesheet(result); $("#change-instructor-modal").modal('hide'); }); }); $(document).on("click", ".table-timesheet td",function(){ if($(this).hasClass("schedule-selected")){ $(this).removeClass("schedule-selected"); }else{ //Já selecionou alguem if($(".table-timesheet").find(".schedule-selected").length > 0){ var firstSelected = $(".table-timesheet").find(".schedule-selected"); var secondSelected = $(this); var firstSchedule = null; var 
secondSchedule = null; if(firstSelected.find(".schedule-block").length > 0){ firstSchedule = {"id":firstSelected.find(".schedule-block").attr("schedule")}; }else{ firstSchedule = {"id":null, "week_day": firstSelected.attr("week_day"), "schedule":firstSelected.parent().attr("id").replace("h","")}; } if(secondSelected.find(".schedule-block").length > 0){ secondSchedule = {"id":secondSelected.find(".schedule-block").attr("schedule")}; }else{ secondSchedule = {"id":null, "week_day": secondSelected.attr("week_day"), "schedule":secondSelected.parent().attr("id").replace("h","")}; } changeSchedule(firstSchedule, secondSchedule); }else { //Primeira seleção $(this).addClass("schedule-selected"); } } }); function changeSchedule(firstSchedule, secondSchedule){ $.ajax({ 'url': changeSchedulesURL, 'type': 'POST', 'data': { 'firstSchedule': firstSchedule, 'secondSchedule': secondSchedule, }, }).success(function (result) { getTimesheet(result); $('.schedule-selected').removeClass("schedule-selected"); }); }<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)] #![plugin(clippy)] extern crate calamine; extern crate hyper; use std::fs; use std::fs::File; use std::io::{Read, Write}; use std::path::Path; use std::str; use hyper::client::Client; mod excel; fn main() { fs::create_dir_all("data").unwrap(); let csv = Path::new("data/foo.csv"); let url = "https://www.spdrs.com/site-content/xls/SPY_All_Holdings.\ xls?fund=SPY&docname=All+Holdings&onyx_code1=1286&onyx_code2=1700"; let worksheet = "SPY_All_Holdings"; let xls = Path::new("data/foo.xls"); download(url, xls); excel::excel_to_csv(csv, xls, worksheet);<|fim▁hole|> let mut response = Client::new().get(url).send().unwrap(); let mut buf = Vec::new(); response.read_to_end(&mut buf).unwrap(); let mut file = File::create(&xls).unwrap(); file.write_all(&buf).unwrap(); }<|fim▁end|>
} fn download(url: &str, xls: &Path) {
<|file_name|>java.awt.image.RGBImageFilter.d.ts<|end_file_name|><|fim▁begin|>declare namespace java { namespace awt {<|fim▁hole|> abstract class RGBImageFilter extends java.awt.image.ImageFilter { protected origmodel: java.awt.image.ColorModel protected newmodel: java.awt.image.ColorModel protected canFilterIndexColorModel: boolean public constructor() public setColorModel(arg0: java.awt.image.ColorModel): void public substituteColorModel(arg0: java.awt.image.ColorModel, arg1: java.awt.image.ColorModel): void public filterIndexColorModel(arg0: java.awt.image.IndexColorModel): java.awt.image.IndexColorModel public filterRGBPixels(arg0: number | java.lang.Integer, arg1: number | java.lang.Integer, arg2: number | java.lang.Integer, arg3: number | java.lang.Integer, arg4: number[] | java.lang.Integer[], arg5: number | java.lang.Integer, arg6: number | java.lang.Integer): void public setPixels(arg0: number | java.lang.Integer, arg1: number | java.lang.Integer, arg2: number | java.lang.Integer, arg3: number | java.lang.Integer, arg4: java.awt.image.ColorModel, arg5: number[] | java.lang.Byte[], arg6: number | java.lang.Integer, arg7: number | java.lang.Integer): void public setPixels(arg0: number | java.lang.Integer, arg1: number | java.lang.Integer, arg2: number | java.lang.Integer, arg3: number | java.lang.Integer, arg4: java.awt.image.ColorModel, arg5: number[] | java.lang.Integer[], arg6: number | java.lang.Integer, arg7: number | java.lang.Integer): void public abstract filterRGB(arg0: number | java.lang.Integer, arg1: number | java.lang.Integer, arg2: number | java.lang.Integer): number } } } }<|fim▁end|>
namespace image {
<|file_name|>espacios.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.template import Library from ..models import Espacio register = Library()<|fim▁hole|>@register.inclusion_tag('espacios/_otros_espacios.html', takes_context=True) def otros_espacios(context): qs = Espacio.objects.all() if 'espacio' in context: obj = context['espacio'] if obj: qs = qs.exclude(pk=obj.pk) return {'otros_espacios': qs}<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! This module contains all of the built-in mutators. <|fim▁hole|><|fim▁end|>
pub mod vym;
<|file_name|>assignment2.py<|end_file_name|><|fim▁begin|>''' author Lama Hamadeh ''' import pandas as pd import matplotlib.pyplot as plt import matplotlib import assignment2_helper as helper # Look pretty... matplotlib.style.use('ggplot') # Do * NOT * alter this line, until instructed! scaleFeatures = True #Features scaling (if it's false no scaling appears and that affects the 2D plot and the variance values) # TODO: Load up the dataset and remove any and all # Rows that have a nan. You should be a pro at this # by now ;-) # # .. your code here .. df=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module4/Datasets/kidney_disease.csv',index_col = 0) df = df.reset_index(drop=True) #remove the index column df=df.dropna(axis=0) #remove any and all Rows that have a nan #print(df) <|fim▁hole|># Create some color coded labels; the actual label feature # will be removed prior to executing PCA, since it's unsupervised. # You're only labeling by color so you can see the effects of PCA labels = ['red' if i=='ckd' else 'green' for i in df.classification] # TODO: Use an indexer to select only the following columns: # ['bgr','wc','rc'] # # .. your code here .. df=df[['bgr', 'rc','wc']] #select only the following columns: bgr, rc, and wc # TODO: Print out and check your dataframe's dtypes. You'll probably # want to call 'exit()' after you print it out so you can stop the # program's execution. # # You can either take a look at the dataset webpage in the attribute info # section: https://archive.ics.uci.edu/ml/datasets/Chronic_Kidney_Disease # or you can actually peek through the dataframe by printing a few rows. # What kind of data type should these three columns be? If Pandas didn't # properly detect and convert them to that data type for you, then use # an appropriate command to coerce these features into the right type. # # .. your code here .. print(df.dtypes) # df.rc = pd.to_numeric(df.rc, errors='coerce') # df.wc = pd.to_numeric(df.wc, errors='coerce') # # TODO: PCA Operates based on variance. The variable with the greatest # variance will dominate. Go ahead and peek into your data using a # command that will check the variance of every feature in your dataset. # Print out the results. Also print out the results of running .describe # on your dataset. # # Hint: If you don't see all three variables: 'bgr','wc' and 'rc', then # you probably didn't complete the previous step properly. # # .. your code here .. print(df.var()) # print(df.describe()) # # TODO: This method assumes your dataframe is called df. If it isn't, # make the appropriate changes. Don't alter the code in scaleFeatures() # just yet though! # # .. your code adjustment here .. if scaleFeatures: df = helper.scaleFeatures(df) # TODO: Run PCA on your dataset and reduce it to 2 components # Ensure your PCA instance is saved in a variable called 'pca', # and that the results of your transformation are saved in 'T'. # # .. your code here .. from sklearn import decomposition pca = decomposition.PCA(n_components=2) pca.fit(df) decomposition.PCA(copy=True, n_components=2, whiten=False) T= pca.transform(df) # Plot the transformed data as a scatter plot. Recall that transforming # the data will result in a NumPy NDArray. You can either use MatPlotLib # to graph it directly, or you can convert it to DataFrame and have pandas # do it for you. # # Since we've already demonstrated how to plot directly with MatPlotLib in # Module4/assignment1.py, this time we'll convert to a Pandas Dataframe. 
# # Since we transformed via PCA, we no longer have column names. We know we # are in P.C. space, so we'll just define the coordinates accordingly: ax = helper.drawVectors(T, pca.components_, df.columns.values, plt, scaleFeatures) T = pd.DataFrame(T) T.columns = ['component1', 'component2'] T.plot.scatter(x='component1', y='component2', marker='o', c=labels, alpha=0.75, ax=ax) plt.show()<|fim▁end|>
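A minimal standalone illustration of the scaling point the assignment2.py row above makes in its comments: PCA is variance-driven, so an unscaled feature with large units owns the first component, and standardizing first rebalances it. This sketch assumes only numpy and scikit-learn and uses synthetic data, not the assignment's kidney dataset.

import numpy as np
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)
# Two independent features, same shape, wildly different units.
X = np.column_stack([
    rng.normal(0.0, 1.0, 500),     # unit variance
    rng.normal(0.0, 1000.0, 500),  # about 10**6 times the variance
])

raw = PCA(n_components=2).fit(X)
scaled = PCA(n_components=2).fit(StandardScaler().fit_transform(X))

print(raw.explained_variance_ratio_)     # ~[1.0, 0.0]: the large feature dominates
print(scaled.explained_variance_ratio_)  # ~[0.5, 0.5]: balanced after scaling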
<|file_name|>test_scrooge_gen.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from textwrap import dedent from mock import MagicMock from pants.backend.codegen.targets.java_thrift_library import JavaThriftLibrary from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.targets.scala_library import ScalaLibrary from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.build_graph.address import Address from pants.build_graph.build_file_aliases import BuildFileAliases from pants.goal.context import Context from pants.util.dirutil import safe_rmtree from pants_test.tasks.task_test_base import TaskTestBase from twitter.common.collections import OrderedSet from pants.contrib.scrooge.tasks.scrooge_gen import ScroogeGen # TODO (tdesai) Issue-240: Use JvmToolTaskTestBase for ScroogeGenTest class ScroogeGenTest(TaskTestBase): @classmethod def task_type(cls): return ScroogeGen @property def alias_groups(self): return BuildFileAliases(targets={'java_thrift_library': JavaThriftLibrary}) def setUp(self): super(ScroogeGenTest, self).setUp() self.task_outdir = os.path.join(self.build_root, 'scrooge', 'gen-java') def tearDown(self): super(ScroogeGenTest, self).tearDown() safe_rmtree(self.task_outdir) def test_validate_compiler_configs(self): # Set synthetic defaults for the global scope. self.set_options_for_scope('thrift-defaults', compiler='unchecked', language='uniform', rpc_style='async')<|fim▁hole|> self.add_to_build_file('test_validate', dedent(''' java_thrift_library(name='one', sources=[], dependencies=[], ) ''')) self.add_to_build_file('test_validate', dedent(''' java_thrift_library(name='two', sources=[], dependencies=[':one'], ) ''')) self.add_to_build_file('test_validate', dedent(''' java_thrift_library(name='three', sources=[], dependencies=[':one'], rpc_style='finagle', ) ''')) target = self.target('test_validate:one') context = self.context(target_roots=[target]) task = self.create_task(context) task._validate_compiler_configs([self.target('test_validate:one')]) task._validate_compiler_configs([self.target('test_validate:two')]) with self.assertRaises(TaskError): task._validate_compiler_configs([self.target('test_validate:three')]) def test_scala(self): build_string = ''' java_thrift_library(name='a', sources=['a.thrift'], dependencies=[], compiler='scrooge', language='scala', rpc_style='finagle' ) ''' sources = [os.path.join(self.task_outdir, 'org/pantsbuild/example/Example.scala')] self._test_help(build_string, ScalaLibrary, sources) def test_android(self): build_string = ''' java_thrift_library(name='a', sources=['a.thrift'], dependencies=[], compiler='scrooge', language='android', rpc_style='finagle' ) ''' sources = [os.path.join(self.task_outdir, 'org/pantsbuild/android_example/Example.java')] self._test_help(build_string, JavaLibrary, sources) def _test_help(self, build_string, library_type, sources): contents = dedent('''#@namespace android org.pantsbuild.android_example namespace java org.pantsbuild.example struct Example { 1: optional i64 number } ''') self.create_file(relpath='test_smoke/a.thrift', contents=contents) self.add_to_build_file('test_smoke', dedent(build_string)) target = self.target('test_smoke:a') context = 
self.context(target_roots=[target]) task = self.create_task(context) task._declares_service = lambda source: False task._outdir = MagicMock() task._outdir.return_value = self.task_outdir task.gen = MagicMock() task.gen.return_value = {'test_smoke/a.thrift': sources} saved_add_new_target = Context.add_new_target try: mock = MagicMock() Context.add_new_target = mock task.execute() self.assertEquals(1, mock.call_count) _, call_kwargs = mock.call_args self.assertEquals(call_kwargs['target_type'], library_type) self.assertEquals(call_kwargs['dependencies'], OrderedSet()) self.assertEquals(call_kwargs['provides'], None) self.assertEquals(call_kwargs['sources'], []) self.assertEquals(call_kwargs['derived_from'], target) finally: Context.add_new_target = saved_add_new_target<|fim▁end|>
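The test row above swaps Context.add_new_target for a MagicMock by hand and restores it in a finally block. A self-contained sketch of the same save-patch-restore contract using mock.patch.object, which undoes the patch automatically when the block exits; the Context class below is a stand-in for illustration, not the pants Context, and unittest.mock is used in place of the standalone mock package the row imports.

from unittest import mock

class Context:
    @staticmethod
    def add_new_target(**kwargs):
        raise RuntimeError("real implementation; must not run inside the test")

with mock.patch.object(Context, "add_new_target") as add_new_target:
    # Inside the block every call is recorded on the mock instead.
    Context.add_new_target(target_type=dict, dependencies=set())
    add_new_target.assert_called_once_with(target_type=dict, dependencies=set())
# After the with-block the original add_new_target is back in place.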
<|file_name|>build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> build::link("mdmregistration", true) }<|fim▁end|>
// Copyright © 2015, Peter Atashian // Licensed under the MIT License <LICENSE.md> extern crate build; fn main() {
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3 import vk import sys import json # get access token #app_id = 4360605 #url = "http://api.vkontakte.ru/oauth/authorize?client_id=" + str(app_id) + "&scope=4&redirect_uri=http://api.vk.com/blank.html&display=page&response_type=token" #webbrowser.open_new_tab(url) #exit() word = sys.argv[1] txt_file = open(word + '.txt', "w") html_file = open(word + '.html', "w") vkapi = vk.API(access_token='copy_token_here') result = vkapi.groups.search(q = word, offset = 0, count = 100) <|fim▁hole|>#print(result['count']) #exit() json_tree = result['items'] for item in json_tree: link = 'http://vk.com/club' + str(item['id']) name = item['name'] tag_link = '<a href="' + link + '">' + link + '</a>' + '\t' + name + '<br>' txt_file.write(link + '\n') html_file.write(tag_link + '\n') txt_file.close() html_file.close()<|fim▁end|>
<|file_name|>walrus_concurrency.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import time from concurrent.futures import ThreadPoolExecutor from eucaops import Eucaops from eucaops import S3ops from eutester.eutestcase import EutesterTestCase class WalrusConcurrent(EutesterTestCase): def __init__(self): self.setuptestcase() self.setup_parser() self.parser.add_argument("-n", "--number", type=int, default=100) self.parser.add_argument("-c", "--concurrent", type=int, default=10) self.parser.add_argument("-s", "--size", type=int, default=1024) self.get_args() # Setup basic eutester object if self.args.region: self.tester = S3ops( credpath=self.args.credpath, region=self.args.region) else: self.tester = Eucaops( credpath=self.args.credpath, config_file=self.args.config,password=self.args.password) self.start = time.time() self.bucket_name = "concurrency-" + str(int(self.start)) self.tester.create_bucket(self.bucket_name) def clean_method(self): self.tester.clear_bucket(self.bucket_name) def Concurrent(self): key_payload = self.tester.id_generator(self.args.size) thread_count = self.args.number thread_pool = [] with ThreadPoolExecutor(max_workers=thread_count) as executor: for i in xrange(thread_count): thread_pool.append(executor.submit(self.tester.upload_object, bucket_name=self.bucket_name, key_name="test" + str(i), contents=key_payload)) end = time.time() total = end - self.start self.tester.debug("\nExecution time: {0}\n# of Objects: {1}\nObject Size: {2}B\nConcurrency Level of {3}".format( total, self.args.number, self.args.size, self.args.concurrent)) with ThreadPoolExecutor(max_workers=thread_count) as executor: for object in thread_pool: thread_pool.append(executor.submit(self.tester.delete_object, object)) if __name__ == "__main__":<|fim▁hole|> testcase = WalrusConcurrent() ### Use the list of tests passed from config/command line to determine what subset of tests to run ### or use a predefined list list = testcase.args.tests or ["Concurrent"] ### Convert test suite methods to EutesterUnitTest objects unit_list = [ ] for test in list: unit_list.append( testcase.create_testunit_by_name(test) ) ### Run the EutesterUnitTest objects result = testcase.run_test_case_list(unit_list) exit(result)<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from .forms import SetupForm from django.contrib.auth.decorators import login_required<|fim▁hole|> @login_required def home(request): # Redirect to the default view, which happens to be a non-framework view return redirect('/en-us/app/twitter2/twitter_general') @render_to('twitter2:setup.html') @login_required def setup(request): result = create_setup_view_context( request, SetupForm, reverse('twitter2:home')) # HACK: Workaround DVPL-4647 (Splunk 6.1 and below): # Refresh current app's state so that non-framework views # observe when the app becomes configured. service = request.service app_name = service.namespace['app'] service.apps[app_name].post('_reload') return result<|fim▁end|>
from django.core.urlresolvers import reverse from django.shortcuts import redirect from splunkdj.decorators.render import render_to from splunkdj.setup import create_setup_view_context
<|file_name|>test_serializers.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals from collections import namedtuple from django.core.exceptions import ValidationError, ObjectDoesNotExist from django.db.models.fields import FieldDoesNotExist from django.test.client import RequestFactory from drf_toolbox.compat import django_pgfields_installed, models from drf_toolbox.serializers import (fields, BaseModelSerializer, ModelSerializer, RelatedField) from drf_toolbox.serializers.fields import api from drf_toolbox import viewsets from rest_framework import serializers from rest_framework.relations import HyperlinkedIdentityField from tests import models as test_models, serializers as test_serializers from tests.compat import mock import unittest import six import uuid NO_DJANGOPG = 'django-pgfields is not installed.' class SerializerSuite(unittest.TestCase): """Suite of test cases around custom serializers, ensuring that they provide expected output. """ def test_api_endpoints_field_autocreated(self): """Establish that the `api_endpoints` key is auto-created on a serializer that doesn't explicitly define the field. """ # Create a bogus viewset class, so the serializer can be # given context that is aware of it. class ViewSet(viewsets.ModelViewSet): model = test_models.NormalModel serializer_class = test_serializers.NormalSerializer # Create the serializer s = test_serializers.NormalSerializer() s.context = { 'request': RequestFactory().get('/foo/bar/'), 'view': ViewSet(), } # Ensure that the expected api.APIEndpointsField is present. df = s.get_default_fields() self.assertIn('api_endpoints', df) self.assertIsInstance(df['api_endpoints'], api.APIEndpointsField) def test_api_endpoints_field_default_serializer(self): """Establish that the the `api_endpoints` key is created for a default serializer. """ # Create a bogus viewset class, so the serializer can be # given context that is aware of it. class ViewSet(viewsets.ModelViewSet): model = test_models.NormalModel # Create the serializer. s = ViewSet().get_serializer_class()() s.context = { 'request': RequestFactory().get('/foo/bar/'), 'view': ViewSet(), } # Ensure that the expected api.APIEndpointField is present. df = s.get_default_fields() self.assertIn('api_endpoints', df) self.assertIsInstance(df['api_endpoints'], api.APIEndpointsField) def test_api_endpoint_field_default_serializer(self): """Establish that the the `api_endpoint` key is created in a case where we cannot match to the viewset, and we're still using a specific serializer. """ # Create a bogus viewset class, so the serializer can be # given context that is aware of it. class Viewset(viewsets.ModelViewSet): model = test_models.NormalModel # Create the serializer. s = test_serializers.NormalSerializer() s.context = { 'request': RequestFactory().get('/foo/bar/'), 'view': Viewset(), } # Ensure that the expected api.APIEndpointField is present. df = s.get_default_fields() self.assertIn('api_endpoint', df) self.assertIsInstance(df['api_endpoint'], api.APIEndpointField) def test_api_endpoint_key_existing(self): """Test that if a set of fields is provided with an `api_endpoints` field, that we don't barrel over it. """ # Ensure I get what I expect from `get_default_fields`. 
s = test_serializers.ExplicitAPIEndpointsSerializer() fields = s.get_default_fields() self.assertEqual(len(fields), 3) self.assertIsInstance(fields['api_endpoints'], serializers.IntegerField) def test_api_endpoints_autocovert_plural_to_singular(self): """Establish that explicitly specifying `api_endpoint` or `api_endpoints` will graciously switch between them when necessary. """ # Create a serializer to use for this test. class Serializer(test_serializers.NormalSerializer): class Meta: model = test_serializers.NormalSerializer.Meta.model fields = ('id', 'api_endpoints') # Establish that a serializer instance with no context will # have an api_endpoint field. s = Serializer() self.assertIn('api_endpoint', s.opts.fields) self.assertNotIn('api_endpoints', s.opts.fields) def test_api_endpoints_autocovert_singular_to_plural(self): """Establish that explicitly specifying `api_endpoint` or `api_endpoints` will graciously switch between them when necessary. """ # Create a serializer to use for this test. class Serializer(test_serializers.NormalSerializer): class Meta: model = test_serializers.NormalSerializer.Meta.model fields = ('id', 'api_endpoint') # Establish that a serializer instance with no context will # have an api_endpoint field. with mock.patch.object(ModelSerializer, '_viewset_uses_me') as vum: vum.return_value = True s = Serializer(context={'view': object(),}) self.assertIn('api_endpoints', s.opts.fields) self.assertNotIn('api_endpoint', s.opts.fields) def test_direct_relationship(self): """Test that a direct relationship retrieval works as expected. """ # Get the related field from a direct relationship. s = test_serializers.ChildSerializer() rel_field = s.get_related_field( model_field=test_models.ChildModel._meta.\ get_field_by_name('normal')[0], related_model=test_models.NormalModel, to_many=False, ) self.assertIsInstance(rel_field, RelatedField) # Verify the label. self.assertEqual( rel_field.label_from_instance(test_models.NormalModel()), 'NormalModel object', ) # Verify the value. self.assertFalse(rel_field.prepare_value(test_models.NormalModel())) def test_direct_relationship_with_explicit_fields(self): """Test that a direct relationship retreival works as expected, and that our explicit field list chains down to the related field. """ # Create our serializer. s = test_serializers.ChildSerializerII() rel_field = s.get_related_field( model_field=test_models.ChildModel._meta.\ get_field_by_name('normal')[0], related_model=test_models.NormalModel, to_many=False, ) self.assertIsInstance(rel_field, RelatedField) rel_field.context = {'request': RequestFactory().get('/foo/bar/')} # Get the serializer class. s = rel_field._get_serializer(test_models.NormalModel(bacon=42)) self.assertEqual([i for i in s.get_fields().keys()], ['id', 'bacon']) def test_reverse_relationship(self): """Test that a reverse relationship retrieval works as expected. """ # Instantiate my normal serializer and run a reverse # relationship against the fake child model. s = test_serializers.NormalSerializer() rel_field = s.get_related_field(None, test_models.ChildModel, False) self.assertIsInstance(rel_field, RelatedField) def test_related_field_with_no_pk(self): """Test that a related field receiving a model object with no primary key returns None. """ rel_field = RelatedField(()) answer = rel_field.to_native(test_models.ChildModel()) self.assertEqual(answer, None) def test_related_field_with_pk(self): """Test that a related field receiving a model object with a primary key returns None. 
""" # Create a fake request. factory = RequestFactory() request = factory.get('/foo/') # Get the appropriate related field. fake_pk = uuid.uuid4() nm = test_models.NormalModel(id=42) cm = test_models.ChildModel(normal=nm) cs = test_serializers.ChildSerializer(context={'request': request}) rel_field = cs.get_related_field( model_field=test_models.ChildModel._meta.\ get_field_by_name('normal')[0], related_model=test_models.NormalModel, to_many=False, ) rel_field.context = { 'request': request } # Get the final answer. answer = rel_field.to_native(nm) self.assertEqual({ 'api_endpoint': 'http://testserver/normal/%d/' % nm.id, 'id': 42, 'bacon': None, 'bar': None, 'baz': None, 'foo': None, }, answer) def test_reverse_related_field_serializer(self): """Establish that a related field can be specified on a serializer without incident. """ # Create a bogus request object. factory = RequestFactory() request = factory.get('/foo/') # Create a serializer that would otherwise show itself # at a related level. rs = test_serializers.ReverseSerializer() # Create an instance. nm = test_models.NormalModel(bar=1, baz=2, bacon=3) rm = test_models.RelatedModel(id=42, baz=1, normal=nm) # Get the fields from the serializer and determine that we get # what we expect. fields_dict = rs.get_default_fields() self.assertEqual( [i for i in fields_dict.keys()], [ 'id', 'api_endpoint', 'bacon', 'bar', 'baz', 'foo', 'related_model', ], ) # Pull out the related field. rel_field = fields_dict['related_model'] rel_field.context = {'request': request} # Convert our related field to native, and establish that it does not # have a normal model. native = rel_field.to_native(rm) self.assertEqual({'id': 42, 'baz': 1}, native) def test_create_rel_serializer_class(self): """Establish that the `RelatedField._create_serializer_class` method works as expected. """ RelatedModel = test_models.RelatedModel # Create a bogus request object. factory = RequestFactory() request = factory.get('/foo/') # Create a serializer that would otherwise show itself # at a related level. rs = test_serializers.ReverseSerializer() # Create an instance. nm = test_models.NormalModel(bar=1, baz=2, bacon=3) rm = RelatedModel(id=42, baz=1, normal=nm) # Get the fields from the serializer and determine that we get # what we expect. fields_dict = rs.fields self.assertEqual( set([i for i in fields_dict.keys()]), {'bacon', 'bar', 'baz', 'related_model'}, ) # Pull out the related field. rel_field = fields_dict['related_model'] rel_field.context = {'request': request} # Establish that there is no serializer class on the related # field yet. self.assertFalse(hasattr(rel_field, '_serializer_class')) # Create a serializer class. ret_val = rel_field._create_serializer_class(RelatedModel) self.assertTrue(ret_val) self.assertTrue(hasattr(rel_field, '_serializer_class')) sc = rel_field._serializer_class # Establish that a followup call is a no-op. ret_val = rel_field._create_serializer_class(RelatedModel) self.assertFalse(ret_val) self.assertIs(rel_field._serializer_class, sc) def test_created_field(self): """Establish that explicitly asking for a `created` field does cause it to be included. """ fc = test_serializers.CreatedSerializer() self.assertIn('created', fc.get_default_fields()) def test_initial_data(self): """Establish that initial data is carried over to the `save_object` serializer method. """ NormalModel = test_models.NormalModel # Create our child serializer. 
nm = NormalModel(id=42) ns = test_serializers.ChildSerializer(initial={ 'normal': nm.id, }) # Establish that if we call `save_object` on a child that does not # yet have a normal, that the latter's presence in `initial` causes # it to be set on our object. cm = test_models.ChildModel() with self.assertRaises(ObjectDoesNotExist): cm.normal with mock.patch.object(BaseModelSerializer, 'save_object') as save: with mock.patch.object(NormalModel.objects, 'get') as get: get.return_value = nm # Actually perform the `save_object` call being tested. ns.save_object(cm) # Assert that the superclass `save_object` was called as # expected. save.assert_called_once_with(cm) # Assert that the `get` method was called as expected. get.assert_called_once_with(pk=42) self.assertEqual(cm.normal, nm) class RelatedFieldTests(unittest.TestCase): def setUp(self): # Save my fake models to my test class. NormalModel = test_models.NormalModel self.nm = test_models.NormalModel self.cm = test_models.ChildModel # Set up related fields and things. self.rel_field = RelatedField(()) self.rel_field.context = {} if hasattr(test_models.NormalModel.objects, 'get_queryset'): self.rel_field.queryset = NormalModel.objects.get_queryset() else: self.rel_field.queryset = NormalModel.objects.get_query_set() def test_related_field_from_id_dict(self): """Test that a related field's `from_native` method, when sent a dictionary with an `id` key, returns that ID. """ # Test the case where we get a valid value back.<|fim▁hole|> qs.assert_called_with(id=42) self.assertEqual(answer, qs.return_value) def test_related_field_from_with_no_unique(self): """Test that a related field's `from_native` method, when no unique values are sent, raises ValidationError. """ # Test the case where we get a valid value back. with self.assertRaises(ValidationError): answer = self.rel_field.from_native({'foo': 3 }) def test_related_field_from_pk_noexist(self): """Test that a related field's `from_native` method processes a plain ID correctly, and processes DoesNotExist correctly. """ # Test processing when DoesNotExist is raised. with mock.patch.object(self.rel_field.queryset, 'get') as m: m.side_effect = test_models.NormalModel.DoesNotExist with self.assertRaises(ValidationError): answer = self.rel_field.from_native(42) def test_related_field_from_pk_valueerror(self): """Test that a related field's `from_native` method processes a plain ID correctly, and processes ValueError correctly. """ # Test processing when DoesNotExist is raised. with mock.patch.object(self.rel_field.queryset, 'get') as m: m.side_effect = ValueError with self.assertRaises(ValidationError): answer = self.rel_field.from_native(42) def test_related_field_from_unique_key(self): """Establish that we can retrieve a relation by a unique key within that model. """ with mock.patch.object(self.rel_field.queryset, 'get') as m: answer = self.rel_field.from_native({'bacon': 42}) m.assert_called_once_with(bacon=42) def test_related_field_from_composite_unique_keys(self): """Establish that we can retrieve a relation by a composite-unique set of keys within that model. """ with mock.patch.object(self.rel_field.queryset, 'get') as m: answer = self.rel_field.from_native({'bar': 1, 'baz': 2}) m.assert_called_once_with(bar=1, baz=2) def test_related_field_from_no_unique_keys(self): """Establish that if we attempt a lookup with no unique keys, that the system doesn't even try and raises an error. 
""" with self.assertRaises(ValidationError): answer = self.rel_field.from_native({'foo': []}) def test_related_field_from_bogus_field(self): """Establish that if I attempt to retrieve a related instance based on a field that does not exist on the related model, that ValidationError is raised. """ with self.assertRaises(ValidationError): answer = self.rel_field.from_native({'bogus': None}) def test_related_field_ignores_api_endpoint(self): """Establish that a `from_native` call will ignore serializer fields that do not correspond to model fields, such as `api_endpoint`. """ with mock.patch.object(self.rel_field.queryset, 'get') as get: answer = self.rel_field.from_native({'api_endpoint': 1, 'baz': 2}) get.assert_called_once_with(baz=2) def test_related_field_multiple_objects(self): """Establish that if I send criteria that don't narrow down to a single model instance, that ValidationError is raised. """ with mock.patch.object(self.rel_field.queryset, 'get') as m: m.side_effect = test_models.NormalModel.MultipleObjectsReturned with self.assertRaises(ValidationError): answer = self.rel_field.from_native({'bar': 3}) @unittest.skipUnless(django_pgfields_installed, NO_DJANGOPG) class PostgresFieldTests(unittest.TestCase): """Test suite to establish that the custom serializer fields that correlate to django_pg model fields work in the way we expect. """ def test_uuid_field_no_auto_add(self): """Test that a UUID field without `auto_add` returns the correct serializer field. """ # Instantiate my fake model serializer and establish that # we get back a UUIDField that is not read-only. s = test_serializers.PGFieldsSerializer() fields_dict = s.get_default_fields() self.assertIsInstance(fields_dict['uuid'], fields.UUIDField) self.assertEqual(fields_dict['uuid'].required, True) self.assertEqual(fields_dict['uuid'].read_only, False) def test_composite_field_without_drf_method(self): """Establish that we get a plain CompositeField if the model field does not instruct us otherwise. """ s = test_serializers.PGFieldsSerializer() fields_dict = s.get_default_fields() self.assertEqual(fields_dict['coords'].__class__, fields.CompositeField) def test_json_field_from_native(self): """Determine that a JSON serializer sends the value through on the `from_native` method. """ jf = fields.JSONField() answer = jf.from_native([1, 3, 5]) self.assertEqual(answer, [1, 3, 5]) def test_json_field_to_native(self): """Determine that a JSON serializer sends the value through on the `to_native` method. """ jf = fields.JSONField() answer = jf.to_native([1, 3, 5]) self.assertEqual(answer, [1, 3, 5]) def test_uuid_field_from_native(self): """Determine that the UUID serializer converts the value back to a Python UUID object. """ uf = fields.UUIDField() answer = uf.from_native('01234567-0123-0123-0123-0123456789ab') self.assertIsInstance(answer, uuid.UUID) self.assertEqual( answer, uuid.UUID('01234567-0123-0123-0123-0123456789ab'), ) def test_uuid_field_to_native(self): """Determine that the UUID serializer converts the value to a string representation of the uuid. """ uf = fields.UUIDField() answer = uf.to_native( uuid.UUID('01234567-0123-0123-0123-0123456789ab'), ) self.assertIsInstance(answer, six.text_type) self.assertEqual(answer, '01234567-0123-0123-0123-0123456789ab') def test_array_field_from_native(self): """Establish that the Array serializer converts the value back into a Python list as expected. 
""" af = fields.ArrayField(of=serializers.IntegerField()) answer = af.from_native([1, 1, '2', 3, '5', 8]) self.assertIsInstance(answer, list) self.assertEqual(answer, [1, 1, 2, 3, 5, 8]) def test_array_field_to_native(self): """Establish that the Array serializer converts the value to a Python list as expected. """ af = fields.ArrayField(of=serializers.IntegerField()) answer = af.to_native([1, 1, 2, 3, 5, 8]) self.assertIsInstance(answer, list) self.assertEqual(answer, [1, 1, 2, 3, 5, 8]) def test_composite_field_from_native(self): """Establish that the composite serializer converts the value back into the appropriate Python instance type. """ # Create an instance class and composite field. Point = namedtuple('Point', ['x', 'y']) cf = fields.CompositeField( fields={ 'x': serializers.IntegerField(), 'y': serializers.IntegerField(), }, instance_class=Point, ) # Test the conversion from a native dictionary. answer = cf.from_native({ 'x': 3, 'y': 1 }) self.assertIsInstance(answer, Point) self.assertEqual(answer.x, 3) self.assertEqual(answer.y, 1) def test_composite_field_to_native(self): """Establish that the composite serializer converts the value back into the appropriate Python instance type. """ # Create an instance class and composite field. Point = namedtuple('Point', ['x', 'y']) cf = fields.CompositeField( fields={ 'x': serializers.IntegerField(), 'y': serializers.IntegerField(), }, instance_class=Point, ) # Test the conversion from a native dictionary. answer = cf.to_native(Point(x=3, y=1)) self.assertIsInstance(answer, dict) self.assertEqual(answer, { 'x': 3, 'y': 1 })<|fim▁end|>
with mock.patch.object(self.rel_field.queryset, 'get') as qs: qs.return_value = test_models.NormalModel(id=42) answer = self.rel_field.from_native({'id': 42 })
<|file_name|>data_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2022 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Data processing utility functions.""" from typing import Tuple, List, Sequence import attr import six from etcmodel.models import tokenization _WHITESPACE_DELIMITER = u" \t\r\n\u202f" # \u202f corresponds to "" @attr.s(auto_attribs=True) class TokenizedText: """Tokenized text with indices mappings.""" # The original text. text: str = "" # The Wordpiece tokenized text. tokens: List[str] = attr.Factory(list) # The Wordpiece token ids. token_ids: List[int] = attr.Factory(list) # The whitespace tokenized text. unigrams: List[str] = attr.Factory(list) # The indices mapping from chars to unigrams. The char at index `i` belongs to # the unigram at index `chars_to_unigrams[i]`. A whitespace belongs to the # previous unigram. Only used with WordPiece tokenizer. chars_to_unigrams: List[int] = attr.Factory(list) # The indices mapping from unigrams to tokens. The unigram at index `i` starts # at the Wordpiece token at index `unigrams_to_tokens[i]`. Only used with # WordPiece tokenizer. unigrams_to_tokens: List[int] = attr.Factory(list) # The indices mapping from tokens to unigrams. The token at index `i` belongs # to the unigram at index `tokens_to_unigrams[i]`. Only used with WordPiece # tokenizer. tokens_to_unigrams: List[int] = attr.Factory(list) # The indices mapping from chars to tokens. The char at index `i` belongs to<|fim▁hole|> # Only used with SentencePiece tokenizer. 
chars_to_tokens: List[int] = attr.Factory(list) def whitespace_split_with_indices( text: str) -> Tuple[List[str], List[int], List[int]]: """Whitespace splits a text into unigrams and returns indices mapping.""" if not isinstance(text, str): raise ValueError("The input text is not of unicode format.") unigrams = [] unigram_to_char_map = [] char_to_unigram_map = [] prev_is_separator = True for i, c in enumerate(text): if c in _WHITESPACE_DELIMITER: prev_is_separator = True else: if prev_is_separator: unigrams.append(c) unigram_to_char_map.append(i) else: unigrams[-1] += c prev_is_separator = False char_to_unigram_map.append(len(unigrams) - 1) return unigrams, unigram_to_char_map, char_to_unigram_map def wordpiece_tokenize_with_indices( doc_unigrams: Sequence[str], tokenizer: tokenization.FullTokenizer ) -> Tuple[List[str], List[int], List[int]]: """Wordpiece tokenizes unigrams to tokens and returns indices mapping.""" token_to_unigram_map = [] unigram_to_token_map = [] doc_tokens = [] for (i, token) in enumerate(doc_unigrams): unigram_to_token_map.append(len(doc_tokens)) sub_tokens = tokenizer.tokenize(token) token_to_unigram_map.extend([i] * len(sub_tokens)) doc_tokens.extend(sub_tokens) return doc_tokens, unigram_to_token_map, token_to_unigram_map def get_wordpiece_tokenized_text( text: str, tokenizer: tokenization.FullTokenizer) -> TokenizedText: """Gets WordPiece TokenizedText for a text with indices mapping.""" unigrams, _, chars_to_unigrams = whitespace_split_with_indices(text) tokens, unigrams_to_tokens, tokens_to_unigrams = ( wordpiece_tokenize_with_indices(unigrams, tokenizer)) token_ids = tokenizer.convert_tokens_to_ids(tokens) tokenized_text = TokenizedText() tokenized_text.text = text tokenized_text.tokens = tokens tokenized_text.token_ids = token_ids tokenized_text.unigrams = unigrams tokenized_text.chars_to_unigrams = chars_to_unigrams tokenized_text.unigrams_to_tokens = unigrams_to_tokens tokenized_text.tokens_to_unigrams = tokens_to_unigrams return tokenized_text def sentencepiece_detokenize(tokens: Sequence[str]) -> str: """Recovers SenencePiece token to original text, with whitespace removal.""" spiece_token = tokenization.SPIECE_UNDERLINE.decode("utf-8") tokens = list(tokens) if tokens and tokens[0].startswith(spiece_token): tokens[0] = tokens[0][1:] return "".join(tokens).replace(spiece_token, " ") def get_sentencepiece_tokenized_text( text: str, tokenizer: tokenization.FullTokenizer) -> TokenizedText: """Gets SentencePiece TokenizedText for a text with indices mapping.""" tokens = [six.ensure_text(tk, "utf-8") for tk in tokenizer.tokenize(text)] token_ids = tokenizer.convert_tokens_to_ids(tokens) chars_to_tokens = [] for i, token in enumerate(tokens): num_chars = len(token) if i == 0: num_chars -= 1 chars_to_tokens.extend([i] * num_chars) token_ids = tokenizer.convert_tokens_to_ids(tokens) tokenized_text = TokenizedText() tokenized_text.text = sentencepiece_detokenize(tokens) tokenized_text.tokens = tokens tokenized_text.token_ids = token_ids tokenized_text.chars_to_tokens = chars_to_tokens return tokenized_text def find_char_spans(text: str, substring: str) -> List[Tuple[int, int]]: """Finds all substring occurrence char level spans (inclusive).""" if not substring: return [] char_spans = [] char_begin = text.find(substring) while char_begin != -1: char_end = char_begin + len(substring) - 1 char_spans.append((char_begin, char_end)) char_begin = text.find(substring, char_end + 1) return char_spans def _improve_answer_span( doc_tokens: Sequence[str], unimproved_span: 
Tuple[int, int], orig_answer_text: str, tokenizer: tokenization.FullTokenizer, ): """Returns answer token spans that better match the annotated answer. This function is branched from the original BERT `run_squad.py` code Usually question answer span annotations are character based. We first project them to whitespace-tokenized words (unigrams). But then after WordPiece tokenization, we can often find a "better match". For example: Question: What year was John Smith born? Context: The leader was John Smith (1895-1943). Answer: 1895 The original whitespace-tokenized answer will be "(1895-1943).". However after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match the exact answer, 1895. The purpose of this function is to find such "better match". However, this is not always possible. Consider the following: Question: What country is the top exporter of electornics? Context: The Japanese electronics industry is the lagest in the world. Answer: Japan In this case, the annotator chose "Japan" as a character sub-span of the word "Japanese". Since our WordPiece tokenizer does not split "Japanese", we just use "Japanese" as the annotation. This is expected to be fairly rare. Args: doc_tokens: Sequence of Text, the wordpiece tokenized tokens of the doc. unimproved_span: Tuple of two ints, the unimproved answer token span. In the first example, it is the token span for "(" and ")". orig_answer_text: Text, the original answer text. In the first example, it is "1895". tokenizer: FullTokenizer, wordpiece tokenizer to tokenize the original answer text. Returns: Tuple of two ints, the improved answer token span. In the first example, it corresponds to the answer token span for "1895". """ tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text)) for new_begin in range(unimproved_span[0], unimproved_span[1] + 1): for new_end in range(unimproved_span[1], new_begin - 1, -1): text_span = " ".join(doc_tokens[new_begin:(new_end + 1)]) if text_span == tok_answer_text: return new_begin, new_end return unimproved_span def _convert_answer_spans(answer_unigram_spans: Sequence[Tuple[int, int]], unigram_to_token_map: Sequence[int], num_tokens: int) -> List[Tuple[int, int]]: """Converts answer unigram spans to token spans.""" answer_token_spans = [] for unigram_begin, unigram_end in answer_unigram_spans: token_begin = unigram_to_token_map[unigram_begin] if unigram_end + 1 < len(unigram_to_token_map): token_end = unigram_to_token_map[unigram_end + 1] - 1 else: token_end = num_tokens - 1 answer_token_spans.append((token_begin, token_end)) return answer_token_spans def find_answer_spans_wordpiece( tokenized_context: TokenizedText, answer: str, tokenizer: tokenization.FullTokenizer) -> List[Tuple[int, int]]: """Finds all answer occurrence WordPiece token spans (inclusive). Args: tokenized_context: WordPiece tokenized context with indices mapping. answer: Answer string. tokenizer: A WordPiece tokenizer. Returns: A list of (begin, end) WordPiece token level indices (inclusive) of all the answer occurrences in the context. If the answer is empty or there is no answer occurrence in the context, return empty list. """ # The answer occurrence always corresponds to char level occurrence. # This is to avoid the following case, # context: "..Italian composer who wrote 39 operas.." # answer: "opera" # Since both "operas" and "opera" are in the vocab, simply searching token # level spans will miss such kind of occurrence. 
token_spans = [] for char_begin, char_end in find_char_spans(tokenized_context.text, answer): unigram_span = (tokenized_context.chars_to_unigrams[char_begin], tokenized_context.chars_to_unigrams[char_end]) unimproved_token_span = _convert_answer_spans( [unigram_span], tokenized_context.unigrams_to_tokens, len(tokenized_context.tokens))[0] token_spans.append( _improve_answer_span(tokenized_context.tokens, unimproved_token_span, answer, tokenizer)) return token_spans def find_answer_spans_sentencepiece(tokenized_context: TokenizedText, answer: str) -> List[Tuple[int, int]]: """Finds all answer occurrence SentencePiece token spans (inclusive). Args: tokenized_context: SentencePiece tokenized context with indices mapping. answer: Answer string. Returns: A list of (begin, end) WordPiece token level indices (inclusive) of all the answer occurrences in the context. If the answer is empty or there is no answer occurrence in the context, return empty list. """ # The answer occurrence always corresponds to char level occurrence. # This is to avoid the following case, # context: "..Italian composer who wrote 39 operas.." # answer: "opera" # Since both "operas" and "opera" are in the vocab, simply searching token # level spans will miss such kind of occurrence. token_spans = [] for char_begin, char_end in find_char_spans(tokenized_context.text, answer): token_spans.append((tokenized_context.chars_to_tokens[char_begin], tokenized_context.chars_to_tokens[char_end])) return token_spans def wordpiece_tokens_to_normalized_text(wordpiece_tokens: Sequence[str]) -> str: """Concatenates wordpiece tokens to a normalized text and cleans up. The wordpiece tokens are results from BERT tokenization. They may contain symbols of '##' or ' ##' and some extra whitespaces. The function first concatenate the tokens and then removes those extrac symbols and whitespaces. Args: wordpiece_tokens: A sequence of wordpiece tokens from BERT tokenization. Returns: The text by concatenating the wordpiece tokens and cleaning up. """ text = " ".join(wordpiece_tokens) # De-tokenize WordPieces that have been split off. text = text.replace(" ##", "") text = text.replace("##", "") # Clean whitespace text = text.strip() text = " ".join(text.split()) return text<|fim▁end|>
# the token at index `char_to_token_index[i]`. A whitespace belongs to the # later token. Note that the `text` stored in this class is obtained from # first SentencePiece tokenize the input text, then detokenize the tokens.
<|file_name|>cauchy.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # coding: utf-8 # pylint: disable=wildcard-import """Cauchy distribution""" __all__ = ['Cauchy'] from numbers import Number from numpy import nan, pi from .constraint import Real from .distribution import Distribution from .utils import sample_n_shape_converter from .... import np class Cauchy(Distribution): r"""Create a relaxed Cauchy distribution object. Parameters ---------- loc : Tensor or scalar, default 0 mode or median of the distribution scale : Tensor or scalar, default 1 half width at half maximum """ # pylint: disable=abstract-method has_grad = True support = Real() arg_constraints = {'loc': Real(), 'scale': Real()} def __init__(self, loc=0.0, scale=1.0, validate_args=None): self.loc = loc self.scale = scale super(Cauchy, self).__init__( event_dim=0, validate_args=validate_args) @property def mean(self): return nan @property def variance(self): return nan def sample(self, size=None): # TODO: Implement sampling op in the backend.<|fim▁hole|> if (isinstance(self.loc, Number), isinstance(self.scale, Number)) == (True, True): u = np.random.uniform(size=size) else: u = np.random.uniform(np.zeros_like( # pylint: disable=too-many-function-args self.loc + self.scale), size=size) return self.icdf(u) def sample_n(self, size=None): return self.sample(sample_n_shape_converter(size)) def log_prob(self, value): if self._validate_args: self._validate_samples(value) return (-np.log(pi) - np.log(self.scale) - np.log(1 + ((value - self.loc) / self.scale) ** 2)) def cdf(self, value): if self._validate_args: self._validate_samples(value) return np.arctan((value - self.loc) / self.scale) / pi + 0.5 def icdf(self, value): return np.tan(pi * (value - 0.5)) * self.scale + self.loc def entropy(self): return np.log(4 * pi) + np.log(self.scale)<|fim▁end|>
# `np.zeros_like` does not support scalar at this moment.
<|file_name|>bitcoin_ar.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ar" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Vector</source> <translation>عن البلاك كوين</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Vector&lt;/b&gt; version</source> <translation>جزء البلاك كوين</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The Vector developers</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or &lt;a href=&quot;http://www.opensource.org/licenses/mit-license.php&quot;&gt;http://www.opensource.org/licenses/mit-license.php&lt;/a&gt;. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (&lt;a href=&quot;https://www.openssl.org/&quot;&gt;https://www.openssl.org/&lt;/a&gt;) and cryptographic software written by Eric Young (&lt;a href=&quot;mailto:[email protected]&quot;&gt;[email protected]&lt;/a&gt;) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>كتاب العنوان</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>أنقر على الماوس مرتين لتعديل العنوان</translation> </message> <message> <location line="+24"/> <source>Create a new address</source> <translation>انشأ عنوان جديد</translation> </message> <message> <location line="+10"/> <source>Copy the currently selected address to the system clipboard</source> <translation>قم بنسخ العنوان المختار لحافظة النظام</translation> </message> <message> <location line="-7"/> <source>&amp;New Address</source> <translation>&amp;عنوان جديد</translation> </message> <message> <location line="-43"/> <source>These are your Vector addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>هذه هي عناوين البلاك كوين لاستقبال الدفعات. 
يمكن أن تعطي عنوان مختلف لكل مرسل من اجل أن تتابع من يرسل لك.</translation> </message> <message> <location line="+53"/> <source>&amp;Copy Address</source> <translation>انسخ العنوان</translation> </message> <message> <location line="+7"/> <source>Show &amp;QR Code</source> <translation>اظهار &amp;رمز الاستجابة السريعة</translation> </message> <message> <location line="+7"/> <source>Sign a message to prove you own a Vector address</source> <translation>التوقيع علي رسالة لاثبات بانك تملك عنوان البلاك كوين</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>وقع &amp;الرسالة</translation> </message> <message> <location line="+17"/> <source>Delete the currently selected address from the list</source> <translation>خذف العنوان الحالي التي تم اختياره من القائمة</translation> </message> <message> <location line="-10"/> <source>Verify a message to ensure it was signed with a specified Vector address</source> <translation>تحقق من الرسالة لتثبت بانه تم توقيعه بعنوان بلاك كوين محدد</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;تحقق الرسالة</translation> </message> <message> <location line="+10"/> <source>&amp;Delete</source> <translation>&amp;أمسح</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+66"/> <source>Copy &amp;Label</source> <translation>نسخ &amp;التسمية</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>تعديل</translation> </message> <message> <location line="+248"/> <source>Export Address Book Data</source> <translation>تصدير بيانات كتاب العناوين</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>خطا في التصدير</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>لا يمكن الكتابة الي الملف %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+145"/> <source>Label</source> <translation>وصف</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>عنوان</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(لا وصف)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>حوار كلمة المرور</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>ادخل كلمة المرور</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>عبارة مرور جديدة</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>ادخل الجملة السرية مرة أخرى</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>For staking only</source> <translation type="unfinished"/> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+38"/> <source>Encrypt wallet</source> <translation>تشفير المحفظة</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>هذه العملية تحتاج عبارة المرور محفظتك لفتحها</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>إفتح المحفظة</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>هذه العملية تحتاج عبارة المرور محفظتك فك تشفيرها</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>فك تشفير المحفظة</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>تغيير عبارة المرور</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source><|fim▁hole|> </message> <message> <location line="+45"/> <source>Confirm wallet encryption</source> <translation>تأكيد التشفير المحفظة</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>تخذير : اذا تم تشفير المحفظة وضيعت كلمة المرور, لن تستطيع الحصول علي البلاك كوين</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>هل انت متأكد من رغبتك في تشفير المحفظة؟</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>محفظة مشفرة</translation> </message> <message> <location line="-140"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Vector will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>بلاك كوين</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>فشل تشفير المحفظة</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>شل تشفير المحفظة بسبب خطأ داخلي. 
لم يتم تشفير محفظتك.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>عبارتا المرور ليستا متطابقتين </translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>فشل فتح المحفظة</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>عبارة المرور التي تم إدخالها لفك شفرة المحفظة غير صحيحة. </translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>فشل فك تشفير المحفظة</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>تم تغيير كلمة مرور المحفظة بنجاح</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+297"/> <source>Sign &amp;message...</source> <translation>توقيع &amp;الرسالة...</translation> </message> <message> <location line="-64"/> <source>Show general overview of wallet</source> <translation>إظهار نظرة عامة على المحفظة</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>المعاملات</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>تصفح تاريخ المعاملات</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>&amp;كتاب العنوان</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>تعديل قائمة العناوين والتسميات المحفوظة</translation> </message> <message> <location line="-18"/> <source>Show the list of addresses for receiving payments</source> <translation>اظهار قائمة العناوين التي تستقبل التعاملات</translation> </message> <message> <location line="+34"/> <source>E&amp;xit</source> <translation>خروج</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>الخروج من التطبيق</translation> </message> <message> <location line="+4"/> <source>Show information about Vector</source> <translation>اظهار المعلومات عن البلاك كوين</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>عن &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>اظهار المعلومات عن Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>خيارات ...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>تشفير المحفظة</translation> </message> <message> <location line="+2"/> <source>&amp;Backup Wallet...</source> <translation>نسخ احتياطي للمحفظة...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>تغيير كلمة المرور</translation> </message> <message> <location line="+9"/> <source>&amp;Export...</source> <translation>&amp;تصدير...</translation> </message> <message> <location line="-55"/> <source>Send coins to a Vector address</source> <translation>ارسال البلاك كوين الي عنوان اخر</translation> </message> <message> <location line="+39"/> <source>Modify configuration options for Vector</source> <translation>تعديل خيارات التكوين للبلاك كوين</translation> </message> <message> <location line="+17"/> 
<source>Export the data in the current tab to a file</source> <translation>ارسال البيانات الحالية الي ملف</translation> </message> <message> <location line="-13"/> <source>Encrypt or decrypt wallet</source> <translation>تشفير او فك التشفير للمحفظة</translation> </message> <message> <location line="+2"/> <source>Backup wallet to another location</source> <translation>احفظ نسخة احتياطية للمحفظة في مكان آخر</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>تغيير عبارة المرور المستخدمة لتشفير المحفظة</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>تأكيد الرسالة</translation> </message> <message> <location line="-214"/> <location line="+555"/> <source>Vector</source> <translation>البلاك كوين</translation> </message> <message> <location line="-555"/> <source>Wallet</source> <translation>محفظة</translation> </message> <message> <location line="+193"/> <source>&amp;About Vector</source> <translation>عن البلاك كوين</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>اظهار/ اخفاء</translation> </message> <message> <location line="+8"/> <source>Unlock wallet</source> <translation>فتح المحفظة</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation>قفل المحفظة</translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation>قفل المحفظة</translation> </message> <message> <location line="+32"/> <source>&amp;File</source> <translation>ملف</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>الاعدادات</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>مساعدة</translation> </message> <message> <location line="+17"/> <source>Tabs toolbar</source> <translation>شريط أدوات علامات التبويب</translation> </message> <message> <location line="+46"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+58"/> <source>Vector client</source> <translation>برنامج البلاك كوين</translation> </message> <message numerus="yes"> <location line="+70"/> <source>%n active connection(s) to Vector network</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+488"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation 
type="unfinished"/> </message> <message> <location line="-812"/> <source>&amp;Dashboard</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>&amp;Unlock Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+277"/> <source>Up to date</source> <translation>محين</translation> </message> <message> <location line="+43"/> <source>Catching up...</source> <translation>اللحاق بالركب ...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>تأكيد رسوم المعاملة</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>المعاملات المرسلة</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>المعاملات واردة</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid Vector address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Wallet is &lt;b&gt;not encrypted&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>المحفظة مشفرة و مفتوحة حاليا</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>المحفظة مشفرة و مقفلة حاليا</translation> </message> <message> <location line="+24"/> <source>Backup Wallet</source> <translation>النسخ الاحتياطي للمحفظة</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>بيانات المحفظة (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>فشل الدعم</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>خطا في محاولة حفظ بيانات الحفظة في مكان جديد</translation> </message> <message numerus="yes"> <location line="+91"/> <source>%n second(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="-429"/> <location line="+433"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> 
</message> <message> <location line="-456"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+27"/> <location line="+433"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="-429"/> <location line="+6"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+0"/> <source>%1 and %2</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+0"/> <source>%n year(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+69"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="+324"/> <source>Not staking</source> <translation type="unfinished"/> </message> <message> <location filename="../bitcoin.cpp" line="+104"/> <source>A fatal error occurred. Vector can no longer continue safely and will quit.</source> <translation>خطأ فادح! 
لا يمكن للبلاك كوين أن يستمر بأمان، وسيتم إغلاقه الآن.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+110"/> <source>Network Alert</source> <translation>تحذير الشبكة</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>التحكم بالعملات</translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>الكمية:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>المبلغ:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>الأولوية:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>رسوم:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location filename="../coincontroldialog.cpp" line="+537"/> <source>no</source> <translation>لا</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>بعد الرسوم:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>الباقي:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>List mode</source> <translation type="unfinished"/> </message> <message> <location line="+45"/> <source>Amount</source> <translation>المبلغ</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Address</source> <translation>عنوان</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>التاريخ</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>مؤكد</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation type="unfinished"/> </message> <message> <location filename="../coincontroldialog.cpp" line="-500"/> <source>Copy address</source> <translation> انسخ عنوان</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation> انسخ التسمية</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>نسخ الكمية</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> 
<location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <location line="+317"/> <source>highest</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>high</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>medium-high</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>low-medium</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>low</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>lowest</source> <translation type="unfinished"/> </message> <message> <location line="+140"/> <source>DUST</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>yes</source> <translation>نعم</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. This means a fee of at least %2 is required.</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <location line="+66"/> <source>(no label)</source> <translation>(لا وصف)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>(change)</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>عدل العنوان</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>العنوان</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. 
This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>عنوان تلقي جديد</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>عنوان إرسال جديد</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>تعديل عنوان التلقي </translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>تعديل عنوان الارسال</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>هذا العنوان &quot;%1&quot; موجود مسبقا في دفتر العناوين</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Vector address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>لا يمكن فتح المحفظة.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>فشل توليد مفتاح جديد.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+426"/> <location line="+12"/> <source>Vector-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>خيارات ...</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>الرئيسي</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. 
Fee 0.01 recommended.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>ادفع &amp;رسوم المعاملة</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Reserve</source> <translation>حجز</translation> </message> <message> <location line="+31"/> <source>Automatically start Vector after logging in to the system.</source> <translation>بدء البلاك كوين تلقائيا عند الدخول الي الجهاز</translation> </message> <message> <location line="+3"/> <source>&amp;Start Vector on system login</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;الشبكة</translation> </message> <message> <location line="+6"/> <source>Automatically open the Vector client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Connect to the Vector network through a SOCKS5 proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS5 proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+90"/> <source>&amp;Window</source> <translation>نافذه</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting Vector.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Whether to show coin control features or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Whether to select the coin outputs randomly or with minimal coin age.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Minimize weight consumption (experimental)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use black visual theme (requires restart)</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>تم</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>الغاء</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+47"/> <source>default</source> <translation>الافتراضي</translation> </message> <message> <location line="+148"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Vector.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>عنوان الوكيل الذي تم توفيره غير صالح.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>نموذج</translation> </message> <message> <location line="+46"/> <location line="+247"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Vector network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-173"/> <source>Stake:</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Unconfirmed:</source> <translation type="unfinished"/> </message> <message> <location line="-113"/> <source>Wallet</source> <translation>محفظة</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation type="unfinished"/> </message> <message> <location line="+80"/> <source>Immature:</source> <translation>غير ناضجة</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Total:</source> <translation>الكامل:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>اخر المعاملات </translation> </message> <message> <location line="-118"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location line="-32"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>خارج المزامنه</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start vector: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Label:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Message:</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location 
filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>اسم العميل</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <source>N/A</source> <translation>غير معروف</translation> </message> <message> <location line="-194"/> <source>Client version</source> <translation>نسخه العميل</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>المعلومات</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation>الشبكه</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>عدد الاتصالات</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location line="+197"/> <source>&amp;Network Traffic</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Clear</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Totals</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>In:</source> <translation type="unfinished"/> </message> <message> <location line="+80"/> <source>Out:</source> <translation type="unfinished"/> </message> <message> <location line="-383"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>الفتح</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the Vector-Qt help message to get a list with possible Vector command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-237"/> <source>Build date</source> <translation>وقت البناء</translation> </message> <message> <location line="-104"/> <source>Vector - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Vector Core</source> <translation type="unfinished"/> </message> <message> <location line="+256"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the Vector debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="+325"/> <source>Welcome to the Vector RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> <message> <location line="+127"/> <source>%1 B</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 KB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 MB</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 GB</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>%1 m</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>%1 h</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 h %2 m</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>إرسال Coins</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation type="unfinished"/> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <location line="+51"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Priority:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>no</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Change</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>custom change address</source> <translation type="unfinished"/> </message> <message> <location line="+106"/> <source>Send 
to multiple recipients at once</source> <translation>إرسال إلى عدة مستلمين في وقت واحد</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>مسح الكل</translation> </message> <message> <location line="+24"/> <source>Balance:</source> <translation>الرصيد:</translation> </message> <message> <location line="+47"/> <source>Confirm the send action</source> <translation>تأكيد الإرسال</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-174"/> <source>Enter a Vector address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>نسخ الكمية</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <location line="+87"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>تأكيد الإرسال Coins</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source> and </source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>المبلغ المدفوع يجب ان يكون اكبر من 0</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+247"/> <source>WARNING: Invalid Vector address</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(لا وصف)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>ادفع الى </translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>إدخال تسمية لهذا العنوان لإضافته إلى دفتر العناوين الخاص بك</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>اختيار عنوان من كتاب العناوين</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>الصق العنوان من الحافظة</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>حذف هذا المستلم</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Vector address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>ادخال عنوان البلاك كوين (مثلا B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>التواقيع - التوقيع /تأكيد الرسالة</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;وقع الرسالة</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 
B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>اختيار عنوان من كتاب العناوين</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>انسخ العنوان من لوحة المفاتيح</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Vector address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>مسح الكل</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Vector address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Vector address (e.g. 
B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter Vector signature</source> <translation type="unfinished"/> </message> <message> <location line="+85"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>العنوان المدخل غير صالح</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>الرجاء التأكد من العنوان والمحاولة مرة اخرى</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>العنوان المدخل لا يشير الى مفتاح</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>المفتاح الخاص للعنوان المدخل غير موجود.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>فشل توقيع الرسالة.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>الرسالة موقعة.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>فشلت عملية التأكد من الرسالة.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>تم تأكيد الرسالة.</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <location filename="../trafficgraphwidget.cpp" line="+75"/> <source>KB/s</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+25"/> <source>Open until %1</source> <translation>مفتوح حتى %1</translation> </message> <message> <location line="+6"/> <source>conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/غير متواجد</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/غير مؤكدة</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 تأكيدات</translation> </message> <message> <location line="+17"/> <source>Status</source> <translation>الحالة</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>التاريخ</translation> </message> <message> 
<location line="+7"/> <source>Source</source> <translation>المصدر</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>تم اصداره.</translation> </message> <message> <location line="+5"/> <location line="+13"/> <source>From</source> <translation>من</translation> </message> <message> <location line="+1"/> <location line="+19"/> <location line="+58"/> <source>To</source> <translation>الى</translation> </message> <message> <location line="-74"/> <location line="+2"/> <source>own address</source> <translation>عنوانه</translation> </message> <message> <location line="-2"/> <source>label</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>غير مقبولة</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>دين</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>رسوم التحويل</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> <translation>تعليق</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>رقم المعاملة</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>معاملة</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Amount</source> <translation>المبلغ</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>صحيح</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>خاطئ</translation> </message> <message> <location line="-202"/> <source>, has not been successfully broadcast yet</source> <translation>لم يتم حتى الآن البث بنجاح</translation> </message> <message numerus="yes"> <location line="-36"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+67"/> <source>unknown</source> <translation>غير معروف</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>تفاصيل المعاملة</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>يبين هذا الجزء وصفا مفصلا لهذه المعاملة</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+231"/> <source>Date</source> <translation>التاريخ</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>النوع</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>عنوان</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>المبلغ</translation> </message> <message> <location line="+52"/> <source>Open until %1</source> <translation>مفتوح حتى %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>مؤكدة (%1 تأكيدات)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation>غير متصل</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>لم 
يتم تلقي هذه الكتلة (Block) من قبل أي من العقد الأخرى وربما لن تكون مقبولة!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>ولدت ولكن لم تقبل</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>استقبل مع</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>استقبل من</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>أرسل إلى</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>دفع لنفسك</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Mined</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>غير متوفر</translation> </message> <message> <location line="+194"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>حالة المعاملة. مرر المؤشر فوق هذا الحقل لعرض عدد التأكيدات.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>التاريخ والوقت الذي تم فيه تلقي المعاملة.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>نوع المعاملة</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>عنوان وجهة المعاملة</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>المبلغ الذي أزيل أو أضيف الى الرصيد</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+54"/> <location line="+17"/> <source>All</source> <translation>الكل</translation> </message> <message> <location line="-16"/> <source>Today</source> <translation>اليوم</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>هذا الاسبوع</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>هذا الشهر</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>الشهر الماضي</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>هذا العام</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>نطاق</translation> </message> <message> <location line="+12"/> <source>Received with</source> <translation>استقبل مع</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>أرسل إلى</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>إليك</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Mined</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>اخرى</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>ادخل عنوان أو وصف للبحث</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>الكمية الدنيا</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation> انسخ عنوان</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation> انسخ التسمية</translation> </message> <message> 
<location line="+1"/> <source>Copy amount</source> <translation>نسخ الكمية</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>نسخ رقم المعاملة</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>عدل الوصف</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>اظهار تفاصيل المعاملة</translation> </message> <message> <location line="+138"/> <source>Export Transaction Data</source> <translation>تصدير بيانات المعاملة</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>تأكيد</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>التاريخ</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>النوع</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>وصف</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>عنوان</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>المبلغ</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>العنوان</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>خطا في التصدير</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>لا يمكن الكتابة الي الملف %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>نطاق:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>الى</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+212"/> <source>Sending...</source> <translation>ارسال....</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+171"/> <source>Vector version</source> <translation>جزع البلاك كوين</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>المستخدم</translation> </message> <message> <location line="+1"/> <source>Send command to -server or vectord</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>List commands</source> <translation>اعرض الأوامر</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>مساعدة في كتابة الاوامر</translation> </message> <message> <location line="-145"/> <source>Options:</source> <translation>خيارات: </translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: vector.conf)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Specify pid file (default: vectord.pid)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>حدد موقع مجلد المعلومات او data directory</translation> </message> <message> <location line="-25"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the 
following random password: rpcuser=vectorrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Vector Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>ضع حجم كاش قاعدة البيانات بالميجابايت (الافتراضي: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Listen for connections on &lt;port&gt; (default: 1715 or testnet: 11715)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>حدد عنوانك العام هنا</translation> </message> <message> <location line="+4"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Always query for peer addresses via DNS lookup (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation type="unfinished"/> </message> <message> <location line="-35"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+62"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 1716 or testnet: 11716)</source> <translation type="unfinished"/> </message> <message> <location line="-16"/> <source>Accept command line and JSON-RPC commands</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Run in the background as a daemon and accept commands</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>استخدم التحقق من الشبكه</translation> </message> <message> <location line="-23"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>قبول الاتصالات من خارج</translation> </message> <message> <location line="-28"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+93"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location 
line="+15"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="-103"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Vector will not work properly.</source> <translation>تحذير : تأكد من ساعة وتاريخ الكمبيوتر! اذا ساعة غير صحيحة بلاك كوين لن يعمل بشكل صحيح</translation> </message> <message> <location line="+130"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>تحذير : خطا في قراءة wallet.dat! كل المفاتيح تم قرائتة بشكل صحيح لكن بيانات الصفقة او إدخالات كتاب العنوان غير صحيحة او غير موجودة</translation> </message> <message> <location line="-16"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>تخذير :wallet.dat غير صالح تم حفظ البيانات. المحفظة الاصلية تم حفظه ك wallet.{timestamp}.bak %s في ; اذا حسابك او صفقاتك غير صحيح يجب عليك استعادة النسخ الاحتياطي</translation> </message> <message> <location line="-34"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>محاولة استرجاع المفاتيح الخاصة من wallet.dat الغير صالح</translation> </message> <message> <location line="+5"/> <source>Block creation options:</source> <translation>خيارات صناعة الكتل</translation> </message> <message> <location line="-67"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+101"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>فشل في الاستماع على أي منفذ. 
استخدام الاستماع = 0 إذا كنت تريد هذا.</translation> </message> <message> <location line="-2"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation type="unfinished"/> </message> <message> <location line="-89"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+30"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="-38"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="-34"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="-41"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+45"/> <source>Username for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="+54"/> <source>Verifying database integrity...</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>تحذير: هذه النسخة قديمة، الترقية مطلوبة!</translation> </message> <message> <location line="-52"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat غير صالح لا يمكن الاسترجاع</translation> </message> <message> <location line="-59"/> <source>Password for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-47"/> <source>Connect through SOCKS5 proxy</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>عند إنشاء المعاملات، تجاهل المدخلات ذات القيمة الأقل من هذا (الافتراضي: 0.01)</translation> </message> <message> <location line="+6"/> <source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&lt;category&gt; can be:</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.
This is intended for regression testing tools and app development.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Wait for RPC server to start</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>ارقاء المحفظة الي اخر نسخة</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>اعادة بحث سلسلة الكتلة لايجاد معالمات المحفظة</translation> </message> <message> <location line="+3"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>كمية تأكيد الكتل (0-6 التلقائي 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external VEC000?.dat file</source> <translation>نقل كتل من ملف VEC000.dat خارجي</translation> </message> <message> <location line="+1"/> <source>Keep at most &lt;n&gt; MiB of unconnectable blocks in memory (default: %u)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Unsupported argument -socks found. Setting SOCKS version isn&apos;t possible anymore, only SOCKS5 proxies are supported.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Initialization sanity check failed. 
Vector is shutting down.</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="-168"/> <source>This help message</source> <translation>رسالة المساعدة هذه</translation> </message> <message> <location line="+104"/> <source>Wallet %s resides outside data directory %s.</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location line="-129"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location line="+125"/> <source>Loading addresses...</source> <translation>تحميل العناوين...</translation> </message> <message> <location line="-10"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>خطأ عند تنزيل wallet.dat: المحفظة تالفة</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of Vector</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart Vector to complete</source> <translation>المحفظة يجب أن يعاد كتابتها: أعد تشغيل فيكتور لتكتمل</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>خطأ عند تنزيل wallet.dat</translation> </message> <message> <location line="-15"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="-22"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+58"/> <source>Sending...</source> <translation>إرسال...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>مبلغ خاطئ</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>رصيدك لا يكفي</translation> </message> <message> <location line="-40"/> <source>Loading block index...</source> <translation type="unfinished"/> </message> <message> <location line="-109"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation type="unfinished"/> </message> <message> <location line="+124"/> <source>Unable to bind to %s on this computer.
Vector is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="-101"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+33"/> <source>Minimize weight consumption (experimental) (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>How many blocks to check at startup (default: 500, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Vector is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Loading wallet...</source> <translation>تحميل المحفظه</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation> لا يمكن خفض المحفظة</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation> لا يمكن كتابة العنوان الافتراضي</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>إعادة مسح</translation> </message> <message> <location line="+2"/> <source>Done loading</source> <translation>انتهاء التحميل</translation> </message> <message> <location line="-159"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="+186"/> <source>Error</source> <translation>خطأ</translation> </message> <message> <location line="-18"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS><|fim▁end|>
<translation>أدخل عبارة المرور القديمة والجديدة إلى المحفظة.</translation>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from pyramid.httpexceptions import ( HTTPException, HTTPFound, HTTPNotFound, HTTPBadRequest, HTTPConflict, ) from pyramid.security import Authenticated from pyramid.view import view_config from perpetualfailure.db import session from perpetualfailure.knowledgebase.models import ( KB_Article, KB_ArticleRevision, ) import copy import difflib import logging log = logging.getLogger(__name__) def traverse(path_, page=None, parents=None): path = path_ # We're rolling out blank, let's start from the KB root (index) if not page: path = copy.copy(path_) node = path.pop(0) page = session.query(KB_Article).filter(KB_Article.parent == None, KB_Article.name == node).first() if not parents: parents = [] # Remove empty elements from the path # Lets us do stuff like /kb////channels//// == /kb/channels while (path and not path[0]): path.pop(0) # The path list is empty; we've reached the article we wanted (bottom level) if not path: return (page, parents) # Search for the current path node in the names of this page's children node = path.pop(0) results = [article for article in page.children if article.name == node] if not results: # No results found return (None, parents) # Node found; update page variable and check for more. parents.append(page) page = results[0] return traverse(path, page, parents) @view_config(<|fim▁hole|>) def viewArticle(request): path = request.matchdict['path'] # Check whether we're trying to load the index or not if not path or path == "/": path = [""] else: path = path.split("/") # The index should always be at the first index in the path path[0] = "index" # Find the article by traversing the article tree down to the article we # want. (article, parents) = traverse(path) if not article: # Article not found return HTTPNotFound() revision_count = session.execute("select count(id) from knowledgebase_article_revision where article_id = %i;" % article.id).fetchall()[0][0] # Feed the almighty Mako return {"article": article, "parents": parents, "revisions": revision_count} @view_config( route_name='knowledgebase.article.create', renderer='knowledgebase/article/edit.mako', # TODO: Add a factory and use the "create" permission. permission=Authenticated, ) def createArticle(request): article = KB_Article() # Construct a list from the path given in the route URL path = request.matchdict['path'].split("/") path = [node for node in path if node] path.insert(0, "index") if len(path) > 1: parent = traverse(path[:-1])[0] if not parent: return HTTPNotFound() if traverse(path)[0]: return HTTPConflict() # Validate data and if appropriate update and redirect. r = articleUpdate(request, article, path) if isinstance(r, HTTPException): return r return {"article": article} @view_config( route_name='knowledgebase.article.edit', renderer='knowledgebase/article/edit.mako', # TODO: Add a factory and use the "edit" permission. permission=Authenticated, ) def editArticle(request): # Construct a list from the path given in the route URL path = request.matchdict['path'].split("/") path = [node for node in path if node] path.insert(0, "index") article = traverse(path)[0] if not article: return HTTPNotFound() # Validate data and if appropriate update and redirect.
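# A quick note on articleUpdate()'s contract, inferred from the function
# body below: it returns None for non-POST requests (so the edit form is
# rendered), HTTPBadRequest when 'title' or 'content' is missing from the
# POST data, and HTTPFound (a redirect back to the article view) once the
# new revision has been recorded; only HTTPException results are returned
# to the client directly.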
r = articleUpdate(request, article, path) if isinstance(r, HTTPException): return r return {"article": article} def articleUpdate(request, article, path, is_new=False): if not request.method == "POST": return None for key in ['title', 'content']: if key not in request.POST: return HTTPBadRequest() article.title = request.POST['title'] article.name = path[-1] article.content = request.POST['content'] # Update the parent of this object if len(path) > 1: article.parent = traverse(path[:-1])[0] elif article.parent: # This is a root article but it's got a parent, remove the parent # from this article object. article.parent = None curr_rev = KB_ArticleRevision(article) prev_rev = article.revision if prev_rev: prev_rev.children.append(curr_rev) session.add(prev_rev) session.add(curr_rev) article.revision = curr_rev session.add(article) return HTTPFound(location=request.route_path('knowledgebase.article.view', path=request.matchdict['path'])) @view_config( route_name='knowledgebase.revision.compare', renderer='knowledgebase/revision/compare.mako', ) def compareRevisions(request): base = getRevisionFromMatchdict(request, "base") head = getRevisionFromMatchdict(request, "head") baseText = "" if base: baseText = base.content.split("\n") headText = "" if head: headText = head.content.split("\n") baseFile = "article/%s/revision/%s" % (base.article.id, base.id) headFile = "article/%s/revision/%s" % (head.article.id, head.id) diff = "\n".join(list(difflib.unified_diff(baseText, headText, baseFile, headFile))) return {"raw_diff": diff, "base": base, "head": head, "baseFile": baseFile, "headFile": headFile} def getRevisionFromMatchdict(request, key): id = request.matchdict[key] revision = session.query(KB_ArticleRevision) \ .filter(KB_ArticleRevision.id == id).first() return revision<|fim▁end|>
route_name='knowledgebase.article.view', renderer='knowledgebase/article/view.mako',
<|file_name|>mwcc.py<|end_file_name|><|fim▁begin|>"""SCons.Tool.mwcc Tool-specific initialization for the Metrowerks CodeWarrior compiler. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/mwcc.py 3897 2009/01/13 06:45:54 scons" import os import os.path import string import SCons.Util def set_vars(env): """Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars MWCW_VERSIONS is set to a list of objects representing installed versions MWCW_VERSION is set to the version object that will be used for building. MWCW_VERSION can be set to a string during Environment construction to influence which version is chosen, otherwise the latest one from MWCW_VERSIONS is used. Returns true if at least one version is found, false otherwise """ desired = env.get('MWCW_VERSION', '') # return right away if the variables are already set if isinstance(desired, MWVersion): return 1 elif desired is None: return 0 versions = find_versions() version = None if desired: for v in versions: if str(v) == desired: version = v elif versions: version = versions[-1] env['MWCW_VERSIONS'] = versions env['MWCW_VERSION'] = version if version is None: return 0 env.PrependENVPath('PATH', version.clpath) env.PrependENVPath('PATH', version.dllpath) ENV = env['ENV'] ENV['CWFolder'] = version.path ENV['LM_LICENSE_FILE'] = version.license plus = lambda x: '+%s' % x ENV['MWCIncludes'] = string.join(map(plus, version.includes), os.pathsep) ENV['MWLibraries'] = string.join(map(plus, version.libs), os.pathsep) return 1 def find_versions(): """Return a list of MWVersion objects representing installed versions""" versions = [] ### This function finds CodeWarrior by reading from the registry on ### Windows. 
Some other method needs to be implemented for other ### platforms, maybe something that calls env.WhereIs('mwcc') if SCons.Util.can_read_reg:<|fim▁hole|> i = 0 while 1: name = product + '\\' + SCons.Util.RegEnumKey(product_key, i) name_key = SCons.Util.RegOpenKeyEx(HLM, name) try: version = SCons.Util.RegQueryValueEx(name_key, 'VERSION') path = SCons.Util.RegQueryValueEx(name_key, 'PATH') mwv = MWVersion(version[0], path[0], 'Win32-X86') versions.append(mwv) except SCons.Util.RegError: pass i = i + 1 except SCons.Util.RegError: pass return versions class MWVersion: def __init__(self, version, path, platform): self.version = version self.path = path self.platform = platform self.clpath = os.path.join(path, 'Other Metrowerks Tools', 'Command Line Tools') self.dllpath = os.path.join(path, 'Bin') # The Metrowerks tools don't store any configuration data so they # are totally dumb when it comes to locating standard headers, # libraries, and other files, expecting all the information # to be handed to them in environment variables. The members set # below control what information scons injects into the environment ### The paths below give a normal build environment in CodeWarrior for ### Windows, other versions of CodeWarrior might need different paths. msl = os.path.join(path, 'MSL') support = os.path.join(path, '%s Support' % platform) self.license = os.path.join(path, 'license.dat') self.includes = [msl, support] self.libs = [msl, support] def __str__(self): return self.version CSuffixes = ['.c', '.C'] CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++'] def generate(env): """Add Builders and construction variables for the mwcc to an Environment.""" import SCons.Defaults import SCons.Tool set_vars(env) static_obj, shared_obj = SCons.Tool.createObjBuilders(env) for suffix in CSuffixes: static_obj.add_action(suffix, SCons.Defaults.CAction) shared_obj.add_action(suffix, SCons.Defaults.ShCAction) for suffix in CXXSuffixes: static_obj.add_action(suffix, SCons.Defaults.CXXAction) shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction) env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES' env['CC'] = 'mwcc' env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS' env['CXX'] = 'mwcc' env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS' env['SHCC'] = '$CC' env['SHCCFLAGS'] = '$CCFLAGS' env['SHCFLAGS'] = '$CFLAGS' env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS' env['SHCXX'] = '$CXX' env['SHCXXFLAGS'] = '$CXXFLAGS' env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS' env['CFILESUFFIX'] = '.c' env['CXXFILESUFFIX'] = '.cpp' env['CPPDEFPREFIX'] = '-D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '-I' env['INCSUFFIX'] = '' #env['PCH'] = ? #env['PCHSTOP'] = ? def exists(env): return set_vars(env)<|fim▁end|>
try: HLM = SCons.Util.HKEY_LOCAL_MACHINE product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions' product_key = SCons.Util.RegOpenKeyEx(HLM, product)
<|file_name|>world.ts<|end_file_name|><|fim▁begin|>///<reference path='refs.ts'/> module TDev { export module World { export function log(s: string) { Util.log("World: " + s); } // Filled in from [editor/default.ts]; expects [s] to be a // *touchdevelop* script text, not an external editor. export var getScriptMeta : (s:string) => any; export var sanitizeScriptTextForCloud : (s:string) => string; export var waitForUpdate = (id:string) => false; // for now disable merge on sync in the lite cloud export var disableMerge = true; // this is so that the Editor can react to changes made by sync // state is: // "uploaded" - when a new version was sent to the cloud, and we got a new snapshotId // "published" - after a script is published // "downloaded" - after a merge, or just plain download export var newHeaderCallbackAsync = (h:Cloud.Header, state:string) => Promise.as(); // this is called before we attempt a merge; the editor should save the state if the guid matches and display // a progress overlay until newHeaderCallbackAsync({ guid: guid }, "downloaded") is called export var incomingHeaderAsync = (guid:string) => Promise.as(); var currentUserInfo:any = null; var currentUserPromise = new PromiseInv(); var localStorage = window.localStorage; var getIndexTablePromise = () => Storage.getTableAsync("Index"); var getScriptsTablePromise = () => Storage.getTableAsync("Scripts"); var getTracesTablePromise = () => Storage.getTableAsync("Traces"); interface SyncData { indexTable: Storage.Table; scriptsTable: Storage.Table; installedHeaders: Cloud.InstalledHeaders; recentUses: any; downloaded: any[]; removed: any[]; uptodates: any[]; uploaded: any[]; keys: string[]; scriptVersionsInCloudItems: any; items: any; progress: any; } export interface ScriptStub { // Either "touchdevelop", or another one (external). This is unlike // [Cloud.Header] where [editor] is either undefined, or a string // (meaning external editor). editorName: string; // When the editor is "touchdevelop", this is the same value that // can be obtained by running [getScriptMeta] on [scriptText]. scriptName: string; // When the editor is "touchdevelop", initially contains a template, then // gets mutated by [newScriptAsync] with extra meta information before // being saved to storage. When the editor is external, remains blank. scriptText: string; } function getHeader(body: Cloud.Body) : Cloud.Header { var x = JSON.parse(JSON.stringify(body)); delete x.script; return x; } function removeInstalledAsync(indexTable: Storage.Table, scriptsTable: Storage.Table, guid: string) : Promise { var headerItem = {} headerItem[guid] = undefined; var bodyItem = {} bodyItem[guid + "-script"] = undefined; bodyItem[guid + "-scriptState"] = undefined; bodyItem[guid + "-scriptVersionInCloud"] = undefined; return Promise.join([indexTable.setItemsAsync(headerItem), scriptsTable.setItemsAsync(bodyItem)]); } function setInstalledAsync( indexTable: Storage.Table, scriptsTable: Storage.Table, header: Cloud.Header, script: string, editorState: string, scriptState: string, cloudScriptVersion: string ) : Promise { var headerItem = {} // In the case of a regular script, we can recover the metadata from // the script body. In the case of an external editor, we demand // that the caller properly set the metadata. 
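// A sketch of the local persistence layout, inferred from removeInstalledAsync
// above and the writes below; both tables are simple key-value stores keyed
// by the script's guid:
//   Index table:   <guid>                           -> JSON-serialized Cloud.Header
//   Scripts table: <guid> + "-script"               -> script text
//                  <guid> + "-editorState"          -> per-editor state blob
//                  <guid> + "-scriptState"          -> script runtime state blob
//                  <guid> + "-scriptVersionInCloud" -> JSON-serialized Cloud.Version
// Writing undefined for a key appears to delete the entry, which is how
// removeInstalledAsync clears a script's data.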
if (script && !header.editor && (!header.meta || header.meta.comment === undefined)) header.meta = getScriptMeta(script); if (header.editor && (!header.meta || !header.meta.name)) { Util.log("ERROR pre-condition not met for [setInstalledAsync]; bailing"); debugger; return Promise.as(); } headerItem[header.guid] = JSON.stringify(header); var bodyItem = {} // protz: I believe we can get rid of this assert now that we have // external scripts that may start out null...? // Util.assert(script !== "") if (script != null) bodyItem[header.guid + "-script"] = typeof script === "string" ? script : undefined; if (editorState != null) bodyItem[header.guid + "-editorState"] = typeof editorState === "string" ? editorState : undefined; if (scriptState !== null) bodyItem[header.guid + "-scriptState"] = typeof scriptState === "string" ? scriptState : undefined; if (cloudScriptVersion != null) bodyItem[header.guid + "-scriptVersionInCloud"] = typeof cloudScriptVersion === "string" ? cloudScriptVersion : undefined; log(header.guid + "/" + header.scriptId + ": " + header.name + " save with base " + header.scriptVersion.baseSnapshot); return Promise.join([indexTable.setItemsAsync(headerItem), scriptsTable.setItemsAsync(bodyItem)]); } function setCloudScriptVersionAsync(scriptsTable: Storage.Table, guid: string, cloudScriptVersion: string) : Promise { var bodyItem = {} bodyItem[guid + "-scriptVersionInCloud"] = typeof cloudScriptVersion === "string" ? cloudScriptVersion : undefined; return scriptsTable.setItemsAsync(bodyItem); } export function mergeJSON(base:any, local:any, server:any) { Object.keys(server).forEach(k => { if (server[k] && typeof server[k] === "object" && local[k] && typeof local[k] == "object") local[k] = mergeJSON(base[k] || {}, local[k], server[k]) else if (!local.hasOwnProperty(k) || base[k] === local[k]) local[k] = server[k] }) return local } function mergeEditorStates(base:string, local:string, server:string) { return JSON.stringify(mergeJSON(JSON.parse(base || "{}"), JSON.parse(local || "{}"), JSON.parse(server || "{}"))) } export var mergeScripts = (base:string, local:string, server:string) => local; export function getScriptBlobAsync(snapshotId:string) { return Util.httpGetJsonAsync(Cloud.config.workspaceUrl + snapshotId) } export interface ScriptBlob { script: string; editorState: string; extra?: any; } // [header] is coming from the cloud; we need to update our local // storage to merge data from the cloud function downloadInstalledAsync(indexTable: Storage.Table, scriptsTable: Storage.Table, header: Cloud.Header) : Promise { log(header.guid + "/" + header.scriptId + ": " + header.name + " is newer"); if (Cloud.lite) { var theirs:ScriptBlob; var baseVer:ScriptBlob; var currVer:ScriptBlob; var hd:Cloud.Header; // local header var skipMsg = false return getScriptBlobAsync(header.scriptVersion.baseSnapshot) .then(v => theirs = v) .then(() => incomingHeaderAsync(header.guid)) .then(() => indexTable.getValueAsync(header.guid)) .then(str => hd = str ? JSON.parse(str) : null) .then(() => { // [hd] is the local header; if the [instanceId] is "cloud", then no local modifications were performed, i.e. 
the // local header is *exactly* baseSnapshot var touch = () => { header.scriptVersion.instanceId = Cloud.getWorldId() header.scriptVersion.time = getCurrentTime(); header.scriptVersion.version++; header.status = "unpublished"; } if (hd && hd.scriptVersion.instanceId != "cloud" && hd.scriptVersion.baseSnapshot && hd.scriptVersion.baseSnapshot != header.scriptVersion.baseSnapshot) { if (disableMerge) { touch() skipMsg = true // setInstalledAsync() will not update to null return <ScriptBlob>{ script: null, editorState: null } } // We need to merge, because there's been a fork. The base header is [hd.scriptVersion.baseSnapshot], "theirs" is // [header], and "mine" is [hd]. log(header.guid + "/" + header.scriptId + ": " + header.name + " merging based on " + hd.scriptVersion.baseSnapshot); return getScriptBlobAsync(hd.scriptVersion.baseSnapshot) .then(r => { baseVer = r }) // Note: the [guid] is the same for both [header] and [hd]. The line below is getting the local script. .then(() => incomingHeaderAsync(header.guid)) .then(() => scriptsTable.getItemsAsync([header.guid + "-script", header.guid + "-editorState"])) .then(r => { currVer = { script: r[header.guid + "-script"], editorState: r[header.guid + "-editorState"] } }) .then(() => { if (header.editor) { var ret:ScriptBlob = { script: currVer.script, editorState: currVer.editorState, // This must be exactly an <External.PendingMerge> extra: { theirs: { scriptText: theirs.script, editorState: theirs.editorState, baseSnapshot: header.scriptVersion.baseSnapshot, metadata: header.meta, }, base: { scriptText: baseVer.script, editorState: baseVer.editorState, baseSnapshot: hd.scriptVersion.baseSnapshot, metadata: hd.meta, }, } }; // Don't update the header: merely record the fact that we've seen a new version go by from the cloud, // and record in the extra field the contents of that version (so that we don't have to hit the cloud // again to get it later on). var newVersion = header.scriptVersion.baseSnapshot; header = hd; // FIXME properly pass a value instead of updating in-place, so that we don't have to bind ret before header.pendingMerge = newVersion; return ret; } else { // Our new header is the one that we took in from the cloud, except that some modifications were // performed. Hence, we modify the [instanceId] so that it no longer says "cloud". Since the // [baseSnapshot] is still the one from the cloud header, this means that we've been creating a new // version *on top of* the cloud header. This new version has not been synced to the cloud, and // therefore does not have a [baseSnapshot] yet. touch() return <ScriptBlob>{ script: mergeScripts(baseVer.script, currVer.script, theirs.script), editorState: mergeEditorStates(baseVer.editorState, currVer.editorState, theirs.editorState), } } }) } else { return theirs } }) .then(resp => setInstalledAsync(indexTable, scriptsTable, header, resp.script, resp.editorState, null, JSON.stringify(resp.extra || {}))) .then(() => skipMsg ? Promise.as() : newHeaderCallbackAsync(header, "downloaded")) .then(() => header.scriptVersion.instanceId == "cloud" ? 
Promise.as() : uploadInstalledAsync(indexTable, scriptsTable, header)) } return Cloud.getUserInstalledBodyAsync(header.guid).then(function (installedBodies: Cloud.InstalledBodies) { var body = <Cloud.Body>undefined; installedBodies.bodies.forEach(function (b) { if (b.guid == header.guid) body = b; }); if (body) { var cloudScriptVersion = JSON.stringify(header.scriptVersion); if (body.status == "published") return ScriptCache.getScriptAsync(body.scriptId) .then((script) => script == null // transient download error? ? Promise.as() // ignore : script == "" // published script deleted in cloud? (rare, but possible) ? setInstalledAsync(indexTable, scriptsTable, uninstall(getHeader(body)), undefined, undefined, null, null) : setInstalledAsync(indexTable, scriptsTable, getHeader(body), script, body.editorState, null, cloudScriptVersion)); else if (body.script == "") // unpublished script deleted in cloud? (not sure how possible, but observed in practice) return setInstalledAsync(indexTable, scriptsTable, uninstall(getHeader(body)), undefined, undefined, null, null); else return setInstalledAsync(indexTable, scriptsTable, getHeader(body), body.script, body.editorState, null, cloudScriptVersion); } else return removeInstalledAsync(indexTable, scriptsTable, header.guid); }); } function publishInstalledAsync(indexTable: Storage.Table, scriptsTable: Storage.Table, header: Cloud.Header) : Promise { log(header.guid + "/" + header.scriptId + ": " + header.name + " is to be published"); return indexTable.getValueAsync(header.guid) .then(resp => { header = JSON.parse(resp) }) .then(() => Cloud.postUserInstalledPublishAsync(header.guid, header.publishAsHidden, JSON.stringify(header.scriptVersion), header.meta)) .then(function (installedBodies: Cloud.InstalledBodies) { var body = <Cloud.Body>undefined; installedBodies.bodies.forEach(function (b) { if (b.guid == header.guid) body = b; }); if (!body) return undefined; var cloudScriptVersion = JSON.stringify(header.scriptVersion); // do not delete state on publication; make sure we don't override body with "" var hd = getHeader(body) return setInstalledAsync(indexTable, scriptsTable, hd, body.script || null, body.editorState || null, null, cloudScriptVersion) .then(() => newHeaderCallbackAsync(hd, "published")) .then(() => []) // non-null result }) .then((r) => r, (e) => { if (e.status == 400) { ModalDialog.info("cannot publish", "Your script '" + header.name + "' cannot be published. Error message: " + (e.errorMessage || "not available")) getInstalledHeaderAsync(header.guid).then((header:Cloud.Header) => { if (header.status == "tobepublished") { header.status = "unpublished"; return setInstalledAsync(indexTable, scriptsTable, header, null, null, null, null) } else return Promise.as(); }).done(); } throw e; }); } function uploadInstalledAsync(indexTable: Storage.Table, scriptsTable: Storage.Table, header: Cloud.Header): Promise { // of PostUserInstalledResponse // A conservative estimate of the version we are saving. We compare all three fields at // the same time. (It may be the case that in-between the various asynchronous steps // below, a newer version gets written and it's innocuous, but we err on the safe side.) 
var conservativeVersion = JSON.stringify(header.scriptVersion); log(header.guid + "/" + header.scriptId + ": " + header.name + " is dirty, attempting to save version " + conservativeVersion); if (header.pendingMerge) { log(header.guid + "/" + header.scriptId + ": " + header.name + " is pending merge resolution, skipping"); return Promise.as(); } return Promise.join({ script: scriptsTable.getValueAsync(header.guid + "-script"), editorState: scriptsTable.getValueAsync(header.guid + "-editorState") }).then(function (data) { var body = <Cloud.Body>JSON.parse(JSON.stringify(header)); if (!Cloud.lite && body.status == "published") body.script = ""; else if ((Cloud.lite && body.status == "published") || body.status == "unpublished" || body.status == "tobepublished") { body.script = sanitizeScriptTextForCloud(data.script); if (body.status == "tobepublished") body.status = "unpublished"; } else body.script = undefined; body.editorState = data.editorState; if (Cloud.lite && disableMerge) { body.scriptVersion.baseSnapshot = "*" } return Cloud.postUserInstalledAsync(<Cloud.InstalledBodies>{ bodies: [body] }) .then(resp => { if (Cloud.lite && !resp.numErrors) { var header = resp.headers[0] if (!header.editor && body.script) header.meta = getScriptMeta(body.script) // [setInstalledAsync] is not interrupted until it performs the // actual call to [setItemsAsync], so that's the right time to check // whether the version has changed in the meanwhile. This check // assumes that all clients of the [World] module are well-behaved // and always call [updateInstalledAsync], which takes care of // bumping the version number in a monotonic fashion. return getInstalledHeaderAsync(header.guid).then((h: Cloud.Header) => { var currentVersion = JSON.stringify(h.scriptVersion); // This should be equal or greater than currentVersion. Anything // else means I've missed something! log("actually saving? version is now "+currentVersion); if (currentVersion != conservativeVersion) { // Someone wrote a new version in local storage; so all we // remember is that this local version now needs to be saved on // top of the newer version that's in the cloud. Client code // must retry to save. h.scriptVersion.baseSnapshot = resp.headers[0].scriptVersion.baseSnapshot; resp.retry = true; return setInstalledAsync(indexTable, scriptsTable, h, null, null, null, null) .then(() => resp) } else { // That header in the cloud is fine, that's our new header. 
return setInstalledAsync(indexTable, scriptsTable, header, null, null, null, null) .then(() => resp) } }) .then(resp => newHeaderCallbackAsync(resp.headers[0], "uploaded").then(() => resp)); } return Promise.as(resp) }) }); } function recentUsesInstalledAsync(headers: Cloud.Header[]): Promise { // of PostUserInstalledResponse return Cloud.postUserInstalledAsync(<Cloud.InstalledBodies>{ recentUses: headers.map(h => <Cloud.RecentUse>{ guid: h.guid, recentUse: h.recentUse }) }); } var syncVersion = undefined; var continuouslySyncVersion = undefined; var syncCount = 0; export function cancelSync() { syncVersion = undefined; } export function syncIsActive() { return !!syncVersion; } export function cancelContinuouslySync() { if (continuouslySyncVersion) { continuouslySyncVersion = undefined; cancelSync(); } } var updateCache:any = null var addUpdates:any = {} export var onNewNotificationChanged: (newNotifications: number) => void = undefined; export var _askEmail: boolean = false; export var _askToEnableEmailNewsletter: boolean = false; export var _askToEnableEmailNotifications: boolean = false; export var _askToEnableNotifications: boolean = false; export var _profileIndex: number = 0; export var _profileCount: number = 0; export function continuouslySyncAsync(m: boolean, onSyncedAsync: () => Promise = undefined) { if (continuouslySyncVersion) return Promise.as(); // continuouslySync still going on return internalContinuouslySyncAsync(m, onSyncedAsync, continuouslySyncVersion = new Object()); } function internalContinuouslySyncAsync(m: boolean, onSyncedAsync: () => Promise , myContinuouslySyncVersion: any) { var v = lastV; if (myContinuouslySyncVersion == continuouslySyncVersion && v) return syncAsync(false, v, m).then(() => { var p = Promise.as(); if (myContinuouslySyncVersion == continuouslySyncVersion) { if (onSyncedAsync && v != lastV) p = p.then(() => onSyncedAsync()); p = p.then(() => Promise.delay(2000, () => internalContinuouslySyncAsync(m, onSyncedAsync, myContinuouslySyncVersion))); } return p; }); else return Promise.as(); } export function getCurrentUserInfoAsync() { if (currentUserInfo) return Promise.as(currentUserInfo) else return currentUserPromise; } var lastV; export function syncAsync(uploadRecentUses: boolean = true, v: number = undefined, m: boolean = false, onNotLoggedIn: () => void = undefined, onBadTime: (number) => void = undefined, onAskBeta: () => void = undefined, onAskSomething: (AskSomething) => void = undefined, onNoOtherAsk: () => void = undefined): Promise // of string --- undefined: success; some text: we hit an error (no internet, not yet logged in, too much posted...); you can try again later { var totalCounter = TDev.RT.Perf.start("syncasync", true); var last = TDev.RT.Perf.start("startsync"); function time(name: string) { if (!v) TDev.RT.Perf.stop(last); last = TDev.RT.Perf.start(name); } if (syncCount > 0 && window.applicationCache.status == window.applicationCache.IDLE) { try { window.applicationCache.update(); } catch (e) { } } time("appcache"); syncCount++; if (Cloud.isOffline()) { var message = "cannot sync - you appear to be offline"; HTML.showProgressNotification(v ? 
undefined : message); return Promise.as(message); } time("isonline"); var mySyncVersion = new Object(); syncVersion = mySyncVersion; var canceled = Promise.wrapError("canceled"); log("starting sync"); localStorage.removeItem("editorScriptToSaveDirty"); var pendingDownloads = 0; var pendingUploads = 0; var pendingPublishs = 0; var progress = function (deltaDownloads: number, deltaUploads: number, deltaPublishs: number) { if (syncVersion != mySyncVersion) return; pendingDownloads += deltaDownloads; pendingUploads += deltaUploads; pendingPublishs += deltaPublishs; var a = [] if (pendingDownloads) a.push(pendingDownloads + " down"); if (pendingUploads) a.push(pendingUploads + " up"); if (pendingPublishs) a.push(pendingPublishs + " publish"); var s = "syncing..."; if (a.length > 0) { s += " (" + a.join(", ") + ")"; HTML.showProgressNotification(s, false); } }; progress(0, 0, 0); var tobepublished = [] var newerHeaders = [] var deletedHeaders = [] var uptodates = [] var dirtyHeaders = []; var askBeta = false; var askSomething = null; return (v ? Cloud.getUserInstalledLongAsync(v, m) : Cloud.getUserInstalledAsync()).then(installedHeaders => { if (syncVersion != mySyncVersion) return canceled; return Promise.join({ installedHeaders: installedHeaders, indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise() }); }, e => { if (e.status == 204 || // NoContent false) // This triggers randomly on flaky connections and such // We're very rarely really not logged in nowadays // v && Cloud.isOnline() && /localhost/.test(document.URL) && e.status == 0 // because of CORS on localhost when not logged in yet { mySyncVersion = new Object(); return canceled; } throw e; }).then(function (data: SyncData) { time("opendb+user/installed"); if (syncVersion != mySyncVersion) return canceled; var user = data.installedHeaders.user; if (user) { currentUserInfo = user; if (currentUserPromise) {<|fim▁hole|> currentUserPromise.success(user); currentUserPromise = null; } } var min = data.installedHeaders.minimum; lastV = data.installedHeaders.v; if (min && Cloud.currentReleaseId && min < Cloud.currentReleaseId) { if (waitForUpdate(min)) { syncVersion = new Object() return; } } if (onNewNotificationChanged) onNewNotificationChanged(data.installedHeaders.newNotifications); if (Runtime.offerNotifications()) { var notifications = data.installedHeaders.notifications; if (Runtime.refreshNotifications) Runtime.refreshNotifications(notifications); _askToEnableNotifications = !notifications; } _askEmail = !data.installedHeaders.email; _askToEnableEmailNewsletter = !data.installedHeaders.emailNewsletter; _askToEnableEmailNotifications = !data.installedHeaders.emailNotifications; _profileIndex = data.installedHeaders.profileIndex || 0; _profileCount = data.installedHeaders.profileCount || 0; if (data.installedHeaders.blobcontainer) Cloud.config.workspaceUrl = data.installedHeaders.blobcontainer Random.addCloudEntropy(data.installedHeaders.random) if (!Cloud.lite && data.installedHeaders.time) { var now = new Date().getTime(); var seconds = (now - data.installedHeaders.time * 1000) / 1000; if (Math.abs(seconds) > 120) { HTML.showProgressNotification(v ? 
undefined : "syncing canceled."); syncVersion = undefined; if (onBadTime) onBadTime(seconds); return; } } if (data.installedHeaders.askBeta && World.switchToChannel) askBeta = true; askSomething = data.installedHeaders.askSomething; updateCache = {} data.installedHeaders.headers.forEach((hd) => { if (hd.updateId && hd.scriptId != hd.updateId && hd.updateTime > hd.scriptTime) updateCache[hd.scriptId] = hd.updateId; }); localStorage["updateCacheForInstalled"] = JSON.stringify(updateCache); data.keys = <any>data.indexTable.getKeysAsync(); return Promise.join(data); }).then(function (data: SyncData) { time("readdb1"); if (syncVersion != mySyncVersion) return canceled; data.items = data.indexTable.getItemsAsync(data.keys) if (!Cloud.lite) data.scriptVersionsInCloudItems = data.scriptsTable.getItemsAsync(data.keys.map((guid) => guid + "-scriptVersionInCloud")); return Promise.join(data); }).then(function (data/*: SyncData*/) { time("readdb2"); if (syncVersion != mySyncVersion) return canceled; var recentUses = [] var newerOrDeletedGuids: any = {}; (<SyncData>data).installedHeaders.headers.forEach(function (header) { var existingItem = data.items[header.guid]; var isNewer = true; if (existingItem) { var existingHeader = <Cloud.Header>JSON.parse(existingItem); if (Cloud.lite) { isNewer = header.scriptVersion.baseSnapshot != existingHeader.scriptVersion.baseSnapshot && header.scriptVersion.baseSnapshot != existingHeader.pendingMerge; if (existingHeader.status == "deleted") isNewer = false if (!isNewer && existingHeader.scriptVersion.instanceId == "cloud" && header.status == "published" && existingHeader.status == "unpublished") isNewer = true; } else isNewer = Cloud.isVersionNewer(header.scriptVersion, existingHeader.scriptVersion); if (header.recentUse < existingHeader.recentUse) recentUses.push(existingHeader); } if (isNewer) { newerOrDeletedGuids[header.guid] = true; if (header.status === "deleted") { if (existingItem) deletedHeaders.push(header); } else { newerHeaders.push(header); } } else { var cloudScriptVersion = JSON.stringify(header.scriptVersion); var uptodate = Promise.as(); if (!Cloud.lite && data.scriptVersionsInCloudItems[header.guid + "-scriptVersionInCloud"] !== cloudScriptVersion) uptodate = uptodate.then(() => setCloudScriptVersionAsync(data.scriptsTable, header.guid, cloudScriptVersion)); if (header.recentUse > existingHeader.recentUse) uptodate = uptodate.then(() => recentUseAsync(data.indexTable, header.guid, header.recentUse)); uptodates.push(uptodate) } }); (<SyncData>data).keys.forEach(function (key) { var header = <Cloud.Header>JSON.parse(data.items[key]); if (newerOrDeletedGuids[header.guid]) return; if (header.status === "tobepublished") tobepublished.push(header); var isDirty = true; if (Cloud.lite) { isDirty = header.scriptVersion.instanceId != "cloud" } else { var s = data.scriptVersionsInCloudItems[key + "-scriptVersionInCloud"]; if (s) { var cloudScriptVersion = <Cloud.Version>JSON.parse(s); if (!Cloud.isVersionNewer(header.scriptVersion, cloudScriptVersion)) isDirty = false; } } if (isDirty) dirtyHeaders.push(header); }); log(recentUses.length + " items with newer recentUses"); log(newerHeaders.length + " newer items to download, " + deletedHeaders.length + " deleted items"); log(dirtyHeaders.length + " items to upload"); progress(newerHeaders.length, dirtyHeaders.length, tobepublished.length); if (recentUses.length > 0 && uploadRecentUses) data.recentUses = recentUsesInstalledAsync(recentUses); return Promise.join(data); }).then(function (data/*: SyncData*/) { // 
It's unclear how [data] is used from then on, because it is // just discarded two steps below. Perhaps we assign the // properties to prevent some promises from being // garbage-collected until we move on to the next step? time("diff"); data.downloaded = Promise.thenEach(newerHeaders, (h: Cloud.Header) => { if (syncVersion != mySyncVersion) return canceled; return downloadInstalledAsync(data.indexTable, data.scriptsTable, h).then(() => { progress(-1, 0, 0) }); }); data.removed = Promise.thenEach(deletedHeaders, (h: Cloud.Header) => { if (syncVersion != mySyncVersion) return canceled; return removeInstalledAsync(data.indexTable, data.scriptsTable, h.guid); }); data.uploaded = Promise.thenEach(dirtyHeaders, (h: Cloud.Header) => { if (syncVersion != mySyncVersion) return canceled; return uploadInstalledAsync(data.indexTable, data.scriptsTable, h).then(() => { progress(0, -1, 0) }); }); data.uptodates = Promise.join(uptodates); return Promise.join(data); }).then(function (data/*: SyncData*/) { time("download+upload"); if (syncVersion != mySyncVersion) return canceled; data.tobepublished = Promise.thenEach(tobepublished, (header) => publishInstalledAsync(data.indexTable, data.scriptsTable, header).then(result => { progress(0, 0, -1); if (!result) ModalDialog.info("publishing failed", "There was a versioning mismatch between your local state and the cloud. Please check the content of the script you want to publish and then try again."); })); data.progress = Cloud.postPendingProgressAsync(); return Promise.join(data); }).then(() => { time("publish"); if (!v) TDev.RT.Perf.stop(totalCounter); if (syncVersion != mySyncVersion) return; syncVersion = undefined; HTML.showProgressNotification(v ? undefined : "syncing done", true, 0, 1000); if (askBeta && onAskBeta && !/localhost/.test(window.location.href)) onAskBeta(); else if (askSomething && onAskSomething) onAskSomething(askSomething); else if (onNoOtherAsk) onNoOtherAsk(); return undefined; }, function (e) { if (syncVersion != mySyncVersion) return; syncVersion = undefined; var status = e.status var errorMessage = e.errorMessage if (!status) Object.keys(e).forEach(k => { var f = e[k]; if (!f) return; if (f.status) status = f.status; if (f.errorMessage) errorMessage = f.errorMessage; if (typeof f == "object") Object.keys(f).forEach(l => { var g = f[l]; if (!g) return; if (g.status) status = g.status; if (g.errorMessage) errorMessage = g.errorMessage; }); }); var info = ""; if (status || errorMessage) info = " (code " + status + (errorMessage ? (": " + errorMessage) : "") + ")"; Util.log('nosync: ' + info); if (Util.navigatingAway) { HTML.showProgressNotification(undefined); return undefined; } else if (status == 400) { var message = lf("Cloud precondition violated") + info; HTML.showProgressNotification(message) return message; } else if (status == 503) { var message = lf("Did you post a lot recently? You must wait for one hour before you can post more.") + info; HTML.showProgressNotification(message); return message; } else if (status == 403) //(Cloud.isOnline() && /localhost/.test(document.URL)) // because of CORS on localhost when not logged in yet { var message = status == 403 ? Cloud.hasAccessToken() ? onNotLoggedIn ? 
lf("cannot sync - your access token has expired and will renew automatically") + info : lf("cannot sync - your access token has expired") + info : lf("cannot sync - you are not signed in") + info : lf("cannot sync") + info; HTML.showProgressNotification(message) if (status == 403) Cloud.setAccessToken(undefined); if (onNotLoggedIn) onNotLoggedIn(); return message; } else if (!Cloud.isTouchDevelopOnline()) { var message = lf("cannot sync - you are in offline mode"); HTML.showProgressNotification(v ? undefined : message); return message; } else { var message = lf("cannot sync - are you offline?") + info; HTML.showProgressNotification(v ? undefined : message); return message; } // TDev.World.log("ERROR" + (!!e ? JSON.stringify(e) : "undefined")); }); } export function saveAsync(guid: string, onNotLoggedIn: () => void = undefined, onBadTime: (number) => void = undefined): Promise // of PostUserInstalledResponse { if (!Cloud.getUserId() || Cloud.isOffline()) { Util.log('save skipped: not auth or offline'); return Promise.as(); } var mySyncVersion = new Object(); syncVersion = mySyncVersion; var canceled = Promise.wrapError("canceled"); log("starting save"); return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), header: getInstalledHeaderAsync(guid) }).then(function (data) { if (syncVersion != mySyncVersion) return; return uploadInstalledAsync(data.indexTable, data.scriptsTable, data.header); }).then(function (result) { if (syncVersion != mySyncVersion) return; syncVersion = undefined; HTML.showSaveNotification(result.numErrors ? lf("problems saving!") : lf("saved")); return result; }, function (e) { if (syncVersion != mySyncVersion) return; syncVersion = undefined; var status = e.status var errorMessage = e.errorMessage var info = ""; if (status || errorMessage) info = " (code " + status + (errorMessage ? (": " + errorMessage) : "") + ")"; if (status == 400) throw new Error("Cloud precondition violated" + info); else if (status == 403 || (Cloud.isOnline() && /localhost/.test(document.URL))) // because of CORS on localhost when not logged in yet { HTML.showSaveNotification("could not save - you are not signed in (" + status + ")", 500); if (status == 403) Cloud.setAccessToken(undefined); if (onNotLoggedIn) onNotLoggedIn(); } else HTML.showSaveNotification("cannot back up to cloud - you appear to be offline"); }); } function uninstall(header: Cloud.Header) { header.scriptVersion = <Cloud.Version>{ instanceId: Cloud.getWorldId(), version: 2147483647, time: 253402300799, baseSnapshot: header.scriptVersion.baseSnapshot }; header.status = "deleted"; return header; } export function uninstallAsync(guid: string) : Promise // of void { if (!Util.check(!!guid)) return Promise.as(undefined); log("starting uninstall of " + guid); return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), }).then(function (data/*: SyncData*/) { data.items = data.indexTable.getItemsAsync([guid]); return Promise.join(data); }).then(function (data/*: SyncData*/) { var h = data.items[guid]; if (!h) return undefined; // already uninstalled? 
var header = uninstall(<Cloud.Header>JSON.parse(h)); return setInstalledAsync(data.indexTable, data.scriptsTable, header, undefined, undefined, null, null); }); } export function publishAsync(guid: string, hidden:boolean) : Promise // of void { if (!Util.check(!!guid)) return Promise.as(undefined); log("starting publishing of " + guid); return Promise.join({ indexTable: getIndexTablePromise() }).then(function (data/*: SyncData*/) { data.items = data.indexTable.getItemsAsync([guid]); return Promise.join(data); }).then(function (data/*: SyncData*/) { var h = data.items[guid]; if (!h) return undefined; // uninstalled? var header = <Cloud.Header>JSON.parse(h); if (header.status !== "unpublished") return undefined; header.status = "tobepublished"; header.publishAsHidden = hidden; var headerItem = {}; headerItem[header.guid] = JSON.stringify(header); return data.indexTable.setItemsAsync(headerItem); }); } export function recentUseAsync(indexTable: any, guid: string, recentUse: number) : Promise // of void { if (!Util.check(!!guid)) return Promise.as(undefined); return Promise.join({ items: indexTable.getItemsAsync([guid]) }).then(function (data/*: SyncData*/) { var h = data.items[guid]; if (!h) return undefined; // uninstalled? var header = <Cloud.Header>JSON.parse(h); if (header.recentUse < recentUse) header.recentUse = recentUse; var headerItem = {}; headerItem[header.guid] = JSON.stringify(header); return indexTable.setItemsAsync(headerItem); }); } export function getCurrentTime() { return Math.floor(new Date().getTime()/1000); } function installAsync(status: string, scriptId: string, userId: string, stub: ScriptStub) : Promise // of Cloud.Header { var meta; if (stub.editorName == "touchdevelop") { meta = getScriptMeta(stub.scriptText); // This is mandatory: since [setInstalledAsync] uses the // [scriptText] to save data, there's no way we can switch to a // different name at this stage. Util.assert(meta.name.trim() == stub.scriptName.trim()); // For compatibility with old cloud entries, we now switch to // the semantics "falsy [editor] field for [Cloud.Header] means // TouchDevelop editor". 
stub.editorName = ""; } else { meta = { localGuid: Util.guidGen(), name: stub.scriptName, }; } var h = <Cloud.Header>(<any>{ status: status, scriptId: scriptId, userId: userId, meta: meta, name: meta.name, scriptVersion: <Cloud.Version>{instanceId: Cloud.getWorldId(), version: 0, time: getCurrentTime(), baseSnapshot: "" }, guid: meta.localGuid, editor: stub.editorName, }); Util.assert(!!h.guid); return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), }).then(function (data/*: SyncData*/) { return setInstalledAsync(data.indexTable, data.scriptsTable, h, stub.scriptText, null, null, null).then(() => h); }); } export function installPublishedAsync(scriptId: string, userId: string) : Promise // of Cloud.Header { if (!Util.check(!!scriptId)) return Promise.as(undefined); return getInstalledAsync().then(function (items) { var guids = Object.keys(items); var matchingGuids = guids.filter(function (guid) { var item = items[guid]; return item.status == "published" && item.scriptId == scriptId; }); if (matchingGuids.length > 0) return items[matchingGuids[0]]; return Promise.join({ text: ScriptCache.getScriptAsync(scriptId), json: (<any>Browser).TheApiCacheMgr.getAsync(scriptId, true), }).then(data => { var text: string = data.text; var json = data.json; if (!text) { HTML.showErrorNotification("cannot get script /" + scriptId); return new PromiseInv(); // stall } else { return installAsync("published", scriptId, userId, { scriptText: text, // This is a script stub that uses the different // convention editorName: json.editor || "touchdevelop", scriptName: json.name, }); } }); }); } export function installUnpublishedAsync(baseScriptId: string, baseUserId: string, stub: ScriptStub) : Promise // of Cloud.Header { return installAsync("unpublished", baseScriptId, baseUserId, stub); } export function getInstalledAsync() : Promise // yields object whose keys are guids, and the values are Headers { return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), }).then(function (data/*: SyncData*/) { data.keys = data.indexTable.getKeysAsync(); return Promise.join(data); }).then(function (data: SyncData) { return data.indexTable.getItemsAsync(data.keys).then((items) => Promise.thenEach(items, (v) => JSON.parse(v))); }); } export function getInstalledHeaderAsync(guid: string) : Promise // of Cloud.Header { if (!Util.check(!!guid)) return Promise.as(undefined); return getIndexTablePromise().then((indexTable) => indexTable.getValueAsync(guid)).then((s) => s ? 
JSON.parse(s) : undefined); } export function getInstalledScriptAsync(guid: string) : Promise // of string (script text) { if (!Util.check(!!guid)) return Promise.as(undefined); return getScriptsTablePromise().then((scriptsTable) => scriptsTable.getValueAsync(guid + "-script")); } export function getInstalledEditorStateAsync(guid: string) : Promise // of string (script text) { if (!Util.check(!!guid)) return Promise.as(undefined); return getScriptsTablePromise().then((scriptsTable) => scriptsTable.getValueAsync(guid + "-editorState")); } export function getInstalledScriptVersionInCloud(guid: string) : Promise // of string { if (!Util.check(!!guid)) return Promise.as(undefined); return getScriptsTablePromise().then((scriptsTable) => scriptsTable.getValueAsync(guid + "-scriptVersionInCloud")); } export function getAnyScriptAsync(guid: string) : Promise // of string (script text) { if (/-/.test(guid)) return getInstalledScriptAsync(guid); else return ScriptCache.getScriptAsync(guid); } export function getInstalledScriptsAsync(guids: string[]) : Promise // of guid => string (script text) { return getScriptsTablePromise().then((scriptsTable) => scriptsTable.getItemsAsync(guids.map((g) => g + "-script")).then((map) => { var r = {} guids.forEach((g) => { r[g] = map[g + "-script"] }) return r; }) ); } export function getScriptRestoreAsync(guid:string) { var d = null return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), }).then(function (data/*: SyncData*/) { d = data return Promise.join([ data.indexTable.getValueAsync(guid), data.scriptsTable.getItemsAsync([ guid + "-scriptVersionInCloud", guid + "-editorState", guid + "-script" ])]) }).then(resp => { var hd = {} hd[guid] = resp[0] var entries = resp[1] return () => Promise.join([d.indexTable.setItemsAsync(hd), d.scriptsTable.setItemsAsync(entries)]) }) } export function setInstalledScriptAsync( header: Cloud.Header, script: string, editorState: string, scriptState: string = null, scriptVersionInCloud = null ) : Promise // of void { if (!Util.check(!!header)) return Promise.as(undefined); log("setting " + header.guid); return Promise.join({ indexTable: getIndexTablePromise(), scriptsTable: getScriptsTablePromise(), }).then(function (data/*: SyncData*/) { return setInstalledAsync(data.indexTable, data.scriptsTable, header, script, editorState, scriptState, scriptVersionInCloud); }); } /* export function triggerCrash(id:string) { getScriptsTablePromise().then(tbl => { var s = {} s[id + "-script"] = "" return tbl.setItemsAsync(s); }).done(); } */ function initUpdateCache() { if (!updateCache) { var s = localStorage["updateCacheForInstalled"] updateCache = s ? 
JSON.parse(s) : {} } } export function rememberUpdate(id:string, update:string) { addUpdates[id] = update; } export function updateFor(h:Cloud.Header) { initUpdateCache(); if (h && h.status == "published") return updateCache[h.scriptId] || addUpdates[h.scriptId] || null; return null; } export function updateAsync(guid:string) { var id = ""; return getInstalledHeaderAsync(guid).then((h:Cloud.Header) => { id = updateFor(h); if (!id) return Promise.as(); return ScriptCache.getScriptAsync(id); }).then((text) => { if (!text) return Promise.as(); return getInstalledHeaderAsync(guid).then((h:Cloud.Header) => { if (h.status != "published") return Promise.as(); h.status = "published"; h.scriptId = id; h.meta = null // recompute return updateInstalledScriptAsync(h, text, null, true) }) }) } export function updateInstalledScriptAsync(hd:Cloud.Header, script:string, state:string, background = false, scriptVersionInCloud = "") { if (!background) { hd.status = "unpublished"; hd.recentUse = getCurrentTime(); } hd.scriptVersion.instanceId = Cloud.getWorldId() hd.scriptVersion.time = getCurrentTime(); hd.scriptVersion.version++; if (!hd.editor) hd.meta = null // recompute return World.setInstalledScriptAsync(hd, script, state, "", scriptVersionInCloud) } export var switchToChannel = (ch:string) => { if (ch == "beta") Util.navigateInWindow(Cloud.getServiceUrl() + "/app/beta?nocache=" + Util.guidGen()); else Util.navigateInWindow(Cloud.getServiceUrl() + "/app/"); }; } }<|fim▁end|>
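The sync module above guards every .then stage with a check of syncVersion against mySyncVersion: each run captures a fresh object identity, so starting a newer run silently invalidates any older one still in flight. A minimal sketch of that cancellation-token pattern, with hypothetical stepA/stepB stages standing in for the real table reads and uploads:

// Sketch only: stepA/stepB are invented placeholders, not World's real stages.
let currentRun: object | undefined;

async function runCancellable(stepA: () => Promise<void>,
                              stepB: () => Promise<void>): Promise<string> {
  const myRun = {};       // fresh identity; starting a new run supersedes older ones
  currentRun = myRun;
  await stepA();
  if (currentRun !== myRun) return "canceled";  // a newer run took over mid-flight
  await stepB();
  if (currentRun !== myRun) return "canceled";
  currentRun = undefined;                       // finished without being superseded
  return "done";
}

The comparison is by object identity rather than by counter, so there is no wrap-around or ordering logic to get wrong; whichever run assigned the token last wins.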
<|file_name|>loop.js<|end_file_name|><|fim▁begin|>// Benchpress: A collection of micro-benchmarks. var allResults = [ ]; // ----------------------------------------------------------------------------- // F r a m e w o r k // ----------------------------------------------------------------------------- function Benchmark(string, run) { this.string = string; this.run = run; } // Run each benchmark for two seconds and count number of iterations. function time(benchmark) { var elapsed = 0; var start = new Date(); for (var n = 0; elapsed < 2000; n++) { benchmark.run(); elapsed = new Date() - start; } var usec = (elapsed * 1000) / n; allResults.push(usec); print('Time (' + benchmark.string + '): ' + Math.floor(usec) + ' us.'); } function error(string) { print(string); } // ----------------------------------------------------------------------------- // L o o p // ----------------------------------------------------------------------------- function loop() { var sum = 0; for (var i = 0; i < 200; i++) { for (var j = 0; j < 100; j++) { sum++; } } if (sum != 20000) error("Wrong result: " + sum + " should be: 20000"); } var Loop = new Benchmark("Loop", loop); // ----------------------------------------------------------------------------- // M a i n // -----------------------------------------------------------------------------<|fim▁hole|> var logMean = 0; for (var i = 0; i < allResults.length; i++) logMean += Math.log(allResults[i]); logMean /= allResults.length; print("Geometric mean: " + Math.round(Math.pow(Math.E, logMean)) + " us.");<|fim▁end|>
time(Loop);
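The harness above reports a geometric mean by averaging Math.log over the per-benchmark microsecond scores and exponentiating the result, which keeps one slow benchmark from dominating the way an arithmetic mean would. The same aggregation as a standalone function (the sample scores are made up):

// exp(mean(log x)) equals the nth root of the product, without overflow risk.
function geometricMean(scoresUs: number[]): number {
  const logSum = scoresUs.reduce((acc, x) => acc + Math.log(x), 0);
  return Math.pow(Math.E, logSum / scoresUs.length);
}

console.log(Math.round(geometricMean([120, 80, 200])));  // ~124 us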
<|file_name|>config.js<|end_file_name|><|fim▁begin|>System.config({ baseURL: ".", defaultJSExtensions: true, transpiler: "babel", babelOptions: { "optional": [ "runtime", "optimisation.modules.system" ] }, paths: { "github:*": "jspm_packages/github/*", "npm:*": "jspm_packages/npm/*" }, bundles: { "build.js": [ "lib/main.js", "lib/filelayer/ajaxUtil.js", "lib/filelayer/corslite.js", "github:shramov/[email protected]", "npm:[email protected]", "npm:[email protected]", "lib/filelayer/leaflet.filelayer.js", "github:mholt/[email protected]", "npm:[email protected]/css/font-awesome.min.css!github:systemjs/[email protected]", "github:shramov/[email protected]/control/Distance", "npm:[email protected]/dist/leaflet-src", "npm:[email protected]/togeojson", "github:mholt/[email protected]/papaparse", "github:jspm/[email protected]", "github:jspm/[email protected]/index", "npm:[email protected]", "npm:[email protected]/browser" ] }, map: { "adm-zip": "npm:[email protected]", "babel": "npm:[email protected]", "babel-runtime": "npm:[email protected]", "clean-css": "npm:[email protected]", "core-js": "npm:[email protected]", "css": "github:systemjs/[email protected]", "font-awesome": "npm:[email protected]", "gildas-lormeau/zip.js": "github:gildas-lormeau/zip.js@master", "leaflet": "npm:[email protected]", "mapbox/togeojson": "github:mapbox/[email protected]", "mholt/PapaParse": "github:mholt/[email protected]", "shramov/leaflet-plugins": "github:shramov/[email protected]", "togeojson": "npm:[email protected]", "github:jspm/[email protected]": { "assert": "npm:[email protected]" }, "github:jspm/[email protected]": { "buffer": "npm:[email protected]" }, "github:jspm/[email protected]": { "constants-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "crypto-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "events": "npm:[email protected]" }, "github:jspm/[email protected]": { "Base64": "npm:[email protected]", "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]", "url": "github:jspm/[email protected]", "util": "github:jspm/[email protected]" }, "github:jspm/[email protected]": { "https-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "os-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "path-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "process": "npm:[email protected]" }, "github:jspm/[email protected]": { "stream-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "string_decoder": "npm:[email protected]" }, "github:jspm/[email protected]": { "url": "npm:[email protected]" }, "github:jspm/[email protected]": { "util": "npm:[email protected]" }, "github:jspm/[email protected]": { "vm-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "browserify-zlib": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "zlib": "github:jspm/[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]", "module": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "inherits": 
"npm:[email protected]", "minimalistic-assert": "npm:[email protected]", "vm": "github:jspm/[email protected]" }, "npm:[email protected]": { "util": "npm:[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "buffer-xor": "npm:[email protected]", "cipher-base": "npm:[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "evp_bytestokey": "npm:[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "browserify-aes": "npm:[email protected]", "browserify-des": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "evp_bytestokey": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "cipher-base": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "des.js": "npm:[email protected]", "inherits": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "constants": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "browserify-rsa": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "create-hmac": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "elliptic": "npm:[email protected]", "inherits": "npm:[email protected]", "parse-asn1": "npm:[email protected]", "stream": "github:jspm/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "buffer": "github:jspm/[email protected]", "pako": "npm:[email protected]", "process": "github:jspm/[email protected]", "readable-stream": "npm:[email protected]", "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "base64-js": "npm:[email protected]", "child_process": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "ieee754": "npm:[email protected]", "isarray": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]", "string_decoder": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "commander": "npm:[email protected]", "fs": "github:jspm/[email protected]", "http": "github:jspm/[email protected]", "https": "github:jspm/[email protected]", "os": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "source-map": "npm:[email protected]", "url": "github:jspm/[email protected]", "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "child_process": "github:jspm/[email protected]", "events": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "graceful-readlink": "npm:[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", 
"readable-stream": "npm:[email protected]", "typedarray": "npm:[email protected]" }, "npm:[email protected]": { "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "elliptic": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "cipher-base": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "ripemd160": "npm:[email protected]", "sha.js": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]" }, "npm:[email protected]": { "browserify-cipher": "npm:[email protected]", "browserify-sign": "npm:[email protected]", "create-ecdh": "npm:[email protected]", "create-hash": "npm:[email protected]", "create-hmac": "npm:[email protected]",<|fim▁hole|> "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "minimalistic-assert": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "miller-rabin": "npm:[email protected]", "randombytes": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "brorand": "npm:[email protected]", "hash.js": "npm:[email protected]", "inherits": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]" }, "npm:[email protected]": { "css": "github:systemjs/[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]" }, "npm:[email protected]": { "inherits": "npm:[email protected]" }, "npm:[email protected]": { "http": "github:jspm/[email protected]" }, "npm:[email protected]": { "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "util": "github:jspm/[email protected]", "zlib": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "brorand": "npm:[email protected]" }, "npm:[email protected]": { "os": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "asn1.js": "npm:[email protected]", "browserify-aes": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "evp_bytestokey": "npm:[email protected]", "pbkdf2": "npm:[email protected]", "systemjs-json": "github:systemjs/[email 
protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "child_process": "github:jspm/[email protected]", "create-hmac": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "browserify-rsa": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "parse-asn1": "npm:[email protected]", "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "core-util-is": "npm:[email protected]", "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "isarray": "npm:[email protected]", "process": "github:jspm/[email protected]", "stream-browserify": "npm:[email protected]", "string_decoder": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "core-util-is": "npm:[email protected]", "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "isarray": "npm:[email protected]", "process": "github:jspm/[email protected]", "process-nextick-args": "npm:[email protected]", "string_decoder": "npm:[email protected]", "util-deprecate": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "amdefine": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "readable-stream": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]" }, "npm:[email protected]": { "concat-stream": "npm:[email protected]", "minimist": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "punycode": "npm:[email protected]", "querystring": "npm:[email protected]", "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "inherits": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "indexof": "npm:[email protected]" } } });<|fim▁end|>
"diffie-hellman": "npm:[email protected]", "inherits": "npm:[email protected]", "pbkdf2": "npm:[email protected]", "public-encrypt": "npm:[email protected]",
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate serde_json; #[macro_use] extern crate serde_derive; #[macro_use] extern crate exonum; extern crate router; extern crate bodyparser; extern crate iron; use std::path::Path; use std::fs::File; use std::io::prelude::*; use std::collections::HashMap; use exonum::blockchain::{self, Blockchain, Service, GenesisConfig, ConsensusConfig, ValidatorKeys, Transaction, ApiContext}; use exonum::node::{Node, NodeConfig, NodeApiConfig, TransactionSend, ApiSender, NodeChannel}; use exonum::messages::{RawTransaction, FromRaw, Message}; use exonum::storage::{Fork, MapIndex, LevelDB, LevelDBOptions}; use exonum::crypto::{PublicKey, SecretKey, Hash, HexValue}; use exonum::encoding::{self, Field}; use exonum::api::{Api, ApiError}; use iron::prelude::*; use iron::Handler; use router::Router; // // // // // // // // // // CONSTANTS // // // // // // // // // // const STORAGE_PATH: &'static str = "~/.db"; // Define service ID for the service trait. const SERVICE_ID: u16 = 1; // Define constants for transaction types within the service. const TX_CREATE_WALLET_ID: u16 = 1; const TX_TRANSFER_ID: u16 = 2; // Define initial balance of a newly created wallet. const INIT_BALANCE: u64 = 100; // // // // // // // // // // PERSISTENT DATA // // // // // // // // // // // Declare the data to be stored in the blockchain. In the present case, // declare a type for storing information about the wallet and its balance. /// Declare a [serializable][1] /// [1]: https://github.com/exonum/exonum-doc/blob/master/src/architecture/serialization.md /// struct and determine bounds of its fields with `encoding_struct!` macro. encoding_struct! { struct Wallet { const SIZE = 48; field pub_key: &PublicKey [00 => 32] field name: &str [32 => 40] field balance: u64 [40 => 48] } } /// Add methods to the `Wallet` type for changing balance. impl Wallet { pub fn increase(self, amount: u64) -> Self { let balance = self.balance() + amount; Self::new(self.pub_key(), self.name(), balance) } pub fn decrease(self, amount: u64) -> Self { let balance = self.balance() - amount; Self::new(self.pub_key(), self.name(), balance) } } // // // // // // // // // // DATA LAYOUT // // // // // // // // // // /// Create schema of the key-value storage implemented by `MemoryDB`. In the /// present case a `Fork` of the database is used. pub struct CurrencySchema<'a> { view: &'a mut Fork, } /// Declare layout of the data. Use an instance of [`MapIndex`][2] /// [2]: https://github.com/exonum/exonum-doc/blob/master/src/architecture/storage.md#mapindex /// to keep wallets in storage. Index values are serialized `Wallet` structs. /// /// Isolate the wallets map into a separate entity by adding a unique prefix, /// i.e. the first argument to the `MapIndex::new` call. impl<'a> CurrencySchema<'a> { pub fn wallets(&mut self) -> MapIndex<&mut Fork, PublicKey, Wallet> { let prefix = blockchain::gen_prefix(SERVICE_ID, 0, &()); MapIndex::new(prefix, self.view) } /// Get a separate wallet from the storage. pub fn wallet(&mut self, pub_key: &PublicKey) -> Option<Wallet> { self.wallets().get(pub_key) } } // // // // // // // // // // TRANSACTIONS // // // // // // // // // // /// Create a new wallet. message! { struct TxCreateWallet { const TYPE = SERVICE_ID; const ID = TX_CREATE_WALLET_ID; const SIZE = 40; field pub_key: &PublicKey [00 => 32] field name: &str [32 => 40] } } /// Transfer coins between the wallets. message! 
{ struct TxTransfer { const TYPE = SERVICE_ID; const ID = TX_TRANSFER_ID; const SIZE = 80; field from: &PublicKey [00 => 32] field to: &PublicKey [32 => 64] field amount: u64 [64 => 72] field seed: u64 [72 => 80] } } // // // // // // // // // // CONTRACTS // // // // // // // // // // /// Execute a transaction. impl Transaction for TxCreateWallet { /// Verify integrity of the transaction by checking the transaction /// signature. fn verify(&self) -> bool { self.verify_signature(self.pub_key()) } /// Apply logic to the storage when executing the transaction. fn execute(&self, view: &mut Fork) { let mut schema = CurrencySchema { view }; if schema.wallet(self.pub_key()).is_none() { let wallet = Wallet::new(self.pub_key(), self.name(), INIT_BALANCE); println!("Create the wallet: {:?}", wallet); schema.wallets().put(self.pub_key(), wallet) } } } impl Transaction for TxTransfer { /// Check if the sender is not the receiver. Check correctness of the /// sender's signature. fn verify(&self) -> bool { (*self.from() != *self.to()) && self.verify_signature(self.from()) } /// Retrieve two wallets to apply the transfer. Check the sender's /// balance and apply changes to the balances of the wallets. fn execute(&self, view: &mut Fork) { let mut schema = CurrencySchema { view }; let sender = schema.wallet(self.from()); let receiver = schema.wallet(self.to()); if let (Some(sender), Some(receiver)) = (sender, receiver) { let amount = self.amount(); if sender.balance() >= amount { let sender = sender.decrease(amount); let receiver = receiver.increase(amount); println!("Transfer between wallets: {:?} => {:?}", sender, receiver); let mut wallets = schema.wallets(); wallets.put(self.from(), sender); wallets.put(self.to(), receiver); } } } } // // // // // // // // // // REST API // // // // // // // // // // /// Implement the node API. #[derive(Clone)] struct CryptocurrencyApi { channel: ApiSender<NodeChannel>, blockchain: Blockchain, } /// Shortcut to get data on wallets. impl CryptocurrencyApi { fn get_wallet(&self, pub_key: &PublicKey) -> Option<Wallet> { let mut view = self.blockchain.fork(); let mut schema = CurrencySchema { view: &mut view }; schema.wallet(pub_key) } fn get_wallets(&self) -> Option<Vec<Wallet>> { let mut view = self.blockchain.fork(); let mut schema = CurrencySchema { view: &mut view }; let idx = schema.wallets(); let wallets: Vec<Wallet> = idx.values().collect(); if wallets.is_empty() { None } else { Some(wallets) } } } /// Add an enum which joins transactions of both types to simplify request /// processing. #[serde(untagged)] #[derive(Clone, Serialize, Deserialize)] enum TransactionRequest { CreateWallet(TxCreateWallet), Transfer(TxTransfer), } /// Implement a trait for the enum for deserialized `TransactionRequest`s /// to fit into the node channel. impl Into<Box<Transaction>> for TransactionRequest { fn into(self) -> Box<Transaction> { match self { TransactionRequest::CreateWallet(trans) => Box::new(trans), TransactionRequest::Transfer(trans) => Box::new(trans), } } } /// The structure returned by the REST API. #[derive(Serialize, Deserialize)] struct TransactionResponse { tx_hash: Hash, } /// Implement the `Api` trait. /// `Api` facilitates conversion between transactions/read requests and REST /// endpoints; for example, it parses `POSTed` JSON into the binary transaction /// representation used in Exonum internally. 
impl Api for CryptocurrencyApi { fn wire(&self, router: &mut Router) { let self_ = self.clone(); let transaction = move |req: &mut Request| -> IronResult<Response> { match req.get::<bodyparser::Struct<TransactionRequest>>() { Ok(Some(transaction)) => { let transaction: Box<Transaction> = transaction.into(); let tx_hash = transaction.hash(); self_.channel.send(transaction).map_err(ApiError::Events)?; let json = TransactionResponse { tx_hash }; self_.ok_response(&serde_json::to_value(&json).unwrap()) } Ok(None) => Err(ApiError::IncorrectRequest("Empty request body".into()))?, Err(e) => Err(ApiError::IncorrectRequest(Box::new(e)))?, } }; // Gets status of all wallets. let self_ = self.clone(); let wallets_info = move |_: &mut Request| -> IronResult<Response> { if let Some(wallets) = self_.get_wallets() { self_.ok_response(&serde_json::to_value(wallets).unwrap()) } else { self_.not_found_response( &serde_json::to_value("Wallets database is empty") .unwrap(), ) } }; // Gets status of the wallet corresponding to the public key. let self_ = self.clone(); let wallet_info = move |req: &mut Request| -> IronResult<Response> { let path = req.url.path(); let wallet_key = path.last().unwrap(); let public_key = PublicKey::from_hex(wallet_key).map_err(ApiError::FromHex)?; if let Some(wallet) = self_.get_wallet(&public_key) { self_.ok_response(&serde_json::to_value(wallet).unwrap()) } else { self_.not_found_response(&serde_json::to_value("Wallet not found").unwrap()) } }; // Bind the transaction handler to a specific route. router.post("/v1/wallets/transaction", transaction, "transaction"); router.get("/v1/wallets", wallets_info, "wallets_info"); router.get("/v1/wallet/:pub_key", wallet_info, "wallet_info"); } } // // // // // // // // // // SERVICE DECLARATION // // // // // // // // // // /// Define the service. struct CurrencyService; /// Implement a `Service` trait for the service. impl Service for CurrencyService { fn service_name(&self) -> &'static str { "cryptocurrency" } fn service_id(&self) -> u16 { SERVICE_ID } /// Implement a method to deserialize transactions coming to the node. fn tx_from_raw(&self, raw: RawTransaction) -> Result<Box<Transaction>, encoding::Error> { let trans: Box<Transaction> = match raw.message_type() { TX_TRANSFER_ID => Box::new(TxTransfer::from_raw(raw)?), TX_CREATE_WALLET_ID => Box::new(TxCreateWallet::from_raw(raw)?), _ => { return Err(encoding::Error::IncorrectMessageType { message_type: raw.message_type(), }); } }; Ok(trans) } /// Create a REST `Handler` to process web requests to the node. 
    fn public_api_handler(&self, ctx: &ApiContext) -> Option<Box<Handler>> {
        let mut router = Router::new();
        let api = CryptocurrencyApi {
            channel: ctx.node_channel().clone(),
            blockchain: ctx.blockchain().clone(),
        };
        api.wire(&mut router);
        Some(Box::new(router))
    }
}

// // // // // // // // // // ENTRY POINT // // // // // // // // // //

fn main() {
    exonum::helpers::init_logger().unwrap();
    println!("Opening LevelDB database...");
    let database_options = LevelDBOptions {
        create_if_missing: true,
        error_if_exists: false,
        ..Default::default()
    };
    let db = LevelDB::open(format!("{}/db", STORAGE_PATH), database_options).unwrap();
    //let db = MemoryDB::new();
    let services: Vec<Box<Service>> = vec![Box::new(CurrencyService)];
    let blockchain = Blockchain::new(Box::new(db), services);
    //---------------------------------------
    fn get_file_contents(path: &Path) -> String {
        let mut f = File::open(path).unwrap();
        let mut c = String::new();
        f.read_to_string(&mut c).unwrap();
        c
    }
    let node_keys_buf = get_file_contents(Path::new("keys.keys"));
    let node_keys_strs: Vec<&str> = node_keys_buf.lines().collect();
    let consensus_public_key = PublicKey::from_hex(&node_keys_strs[0]).unwrap();
    let consensus_secret_key = SecretKey::from_hex(&node_keys_strs[1]).unwrap();
    let service_public_key = PublicKey::from_hex(&node_keys_strs[2]).unwrap();
    let service_secret_key = SecretKey::from_hex(&node_keys_strs[3]).unwrap();
    let validator_keys_buf = get_file_contents(Path::new("validator_pub_keys.keys"));
    let validator_keys_strs: Vec<&str> = validator_keys_buf.lines().collect();
    let mut i = 0;
    let mut validator_keys: Vec<ValidatorKeys> = Vec::new();
    while i <= 0 { // NOTE: bound of 0 means only the first consensus/service key pair is read
        let cons = PublicKey::from_hex(&validator_keys_strs[i]).unwrap();
        let serv = PublicKey::from_hex(&validator_keys_strs[i + 1]).unwrap();
        validator_keys.push(ValidatorKeys {
            consensus_key: cons,
            service_key: serv,
        });
        i += 2;
    }
    // let key: PublicKey = PublicKey::from_hex("16ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72").unwrap();
    //let consensus_public_key = PublicKey::from_hex("16ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72").unwrap();
    //let consensus_secret_key = SecretKey::from_hex("2a751e6595af66f7644bd33cf7710b6226cf8d0de4b3d18bc8fc2d80f19325a716ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72").unwrap();
    //let service_public_key = PublicKey::from_hex("523ead8ea8457de570e165a512dd5d1b6688cb5757c3d744e03d1173f3e3e237").unwrap();
    //let service_secret_key = SecretKey::from_hex("18544ebbf3ceeeebca847fe6b4e6ce88f83fc92b6b0e24d5466f3cd08aea37bb523ead8ea8457de570e165a512dd5d1b6688cb5757c3d744e03d1173f3e3e237").unwrap();
    //let (consensus_public_key, consensus_secret_key) = (PublicKey::from_hex("16ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72").unwrap(), SecretKey::from_hex("2a751e6595af66f7644bd33cf7710b6226cf8d0de4b3d18bc8fc2d80f19325a716ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72"));
    //let (service_public_key, service_secret_key) = PublicKey::from_hex("16ef83ca4b231404daec6d07b24beb84d89c25944285d2e32a2dcf8f0f3eda72").unwrap();
    //println!("{}",key);
    // let validator_keys = ValidatorKeys {
    //     consensus_key: consensus_public_key,
    //     service_key: service_public_key,
    // };
    let consensus_config = ConsensusConfig {
        txs_block_limit: 1,
        ..Default::default()
    };
    let genesis = GenesisConfig::new_with_consensus(consensus_config, validator_keys.into_iter());
    let api_address = "0.0.0.0:8000".parse().unwrap();
    let api_cfg = NodeApiConfig {
        public_api_address: Some(api_address),
enable_blockchain_explorer: true, state_update_timeout: 1_000, ..Default::default() }; let peer_address = "0.0.0.0:8001".parse().unwrap();<|fim▁hole|> peers: vec![ //"185.35.221.11:8000".parse().unwrap(), "185.35.221.11:8001".parse().unwrap(), "54.252.175.39:8001".parse().unwrap() ], service_public_key, service_secret_key, consensus_public_key, consensus_secret_key, genesis, external_address: None, network: Default::default(), whitelist: Default::default(), api: api_cfg, mempool: Default::default(), services_configs: Default::default(), }; println!("Starting a single node..."); let mut node = Node::new(blockchain, node_cfg); println!("Blockchain is ready for transactions!"); node.run().unwrap(); }<|fim▁end|>
let node_cfg = NodeConfig { listen_address: peer_address,
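The main function above expects two plain-text key files: keys.keys holds four hex lines (consensus public/secret, then service public/secret), while validator_pub_keys.keys alternates consensus and service public keys, consumed two lines at a time. A TypeScript sketch of the pairing step (field names mirror the Rust struct; unlike the Rust loop above, this version reads every pair in the file, not just the first):

interface ValidatorKeys { consensusKey: string; serviceKey: string; }

// Pairs up alternating consensus/service public-key lines from the file.
function parseValidatorKeys(buf: string): ValidatorKeys[] {
  const lines = buf.split("\n").map(l => l.trim()).filter(l => l.length > 0);
  const keys: ValidatorKeys[] = [];
  for (let i = 0; i + 1 < lines.length; i += 2) {
    keys.push({ consensusKey: lines[i], serviceKey: lines[i + 1] });
  }
  return keys;
}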
<|file_name|>mixins.js<|end_file_name|><|fim▁begin|>/** * @file A set of global functions available to all components. * @author Rowina Sanela */ (bbn => { "use strict"; if ( !bbn.vue ){ throw new Error("Impossible to find the library bbn-vue") } Vue.mixin({ computed: { /** * Return the object of the currentPopup. * @computed currentPopup * @return {Object} */ currentPopup(){ if ( !this._currentPopup ){ let e = bbn.vue._retrievePopup(this); if ( e ){ this._currentPopup = e; } else{ let vm = this; while (vm = vm.$parent) { if ( vm._currentPopup ){ this._currentPopup = vm._currentPopup; break; } else if ( vm ){ e = bbn.vue._retrievePopup(vm); if ( e ){ this._currentPopup = e; break; } } if (vm === this.$root) { break; } } } } if ( this._currentPopup ){ return this._currentPopup; } return null; } }, methods: { /** * Return the function bbn._ for the strings' translation. * @method _ * @return {Function} */ _: bbn._, /** * Returns the given ref (will return $refs[name] or $refs[name][0])<|fim▁hole|> */ getRef(name){ return bbn.vue.getRef(this, name); }, /** * Checks if the component corresponds to the selector * @method is * @fires bbn.vue.is * @param {String} selector * @return {Function} */ is(selector){ return bbn.vue.is(this, selector); }, /** * Returns the closest component matching the given selector * @method closest * @param {String} selector * @param {Boolean} checkEle * @return {Function} */ closest(selector, checkEle){ return bbn.vue.closest(this, selector, checkEle); }, /** * Returns an array of parent components until $root * @method ancestors * @param {String} selector * @param {Boolean} checkEle * @return {Function} */ ancestors(selector, checkEle){ return bbn.vue.ancestors(this, selector, checkEle); }, /** * Fires the function bbn.vue.getChildByKey. * @method getChildByKey * @param {String} key * @param {String} selector * @todo Remove for Vue3 * @return {Function} */ getChildByKey(key, selector){ return bbn.vue.getChildByKey(this, key, selector); }, /** * Fires the function bbn.vue.findByKey. * @method findByKey * @param {String} key * @param {String} selector * @param {Array} ar * @todo Remove for Vue3 * @return {Function} */ findByKey(key, selector, ar){ return bbn.vue.findByKey(this, key, selector, ar); }, /** * Fires the function bbn.vue.findAllByKey. * @method findAllByKey * @param {String} key * @param {String} selector * @todo Remove for Vue3 * @return {Function} */ findAllByKey(key, selector){ return bbn.vue.findAllByKey(this, key, selector); }, /** * Fires the function bbn.vue.find. * @method find * @param {String} selector * @param {Number} index * @todo Remove for Vue3 * @return {Function} */ find(selector, index){ return bbn.vue.find(this, selector, index); }, /** * Fires the function bbn.vue.findAll. * @method findAll * @param {String} selector * @param {Boolean} only_children * @todo Remove for Vue3 * @return {Function} */ findAll(selector, only_children){ return bbn.vue.findAll(this, selector, only_children); }, /** * Extends an object with Vue.$set * @method extend * @param {Boolean} selector * @param {Object} source The object to be extended * @param {Object} obj1 * @return {Object} */ extend(deep, src, obj1){ let args = [this]; for ( let i = 0; i < arguments.length; i++ ){ args.push(arguments[i]); } return bbn.vue.extend(...args); }, /** * Fires the function bbn.vue.getComponents. 
* @method getComponents * @param {Array} ar * @param {Boolean} only_children * @todo Remove for Vue3 * @return {Function} */ getComponents(ar, only_children){ return bbn.vue.getComponents(this, ar, only_children); }, /** * Opens the closest object popup. * @method getPopup * @return {Object} */ getPopup(){ let popup = bbn.vue.getPopup(this); if (arguments.length && popup) { let cfg = arguments[0]; let args = []; if (bbn.fn.isObject(cfg)) { cfg.opener = this; } args.push(cfg); for (let i = 1; i < arguments.length; i++) { args.push(arguments[i]); } return popup.open.apply(popup, args); } return popup; }, /** * Opens a confirmation from the closest popup * @method confirm */ confirm(){ let popup = this.getPopup(); if (arguments.length && popup) { let cfg = arguments[0]; let args = []; if (bbn.fn.isObject(cfg)) { cfg.opener = this; } args.push(cfg); for (let i = 1; i < arguments.length; i++) { args.push(arguments[i]); } if (!bbn.fn.isObject(cfg)) { args.push(this); } return popup.confirm.apply(popup, args) } }, /** * Opens an alert from the closest popup * @method alert */ alert(){ let popup = this.getPopup(); if (arguments.length && popup) { let cfg = arguments[0]; let args = []; if (bbn.fn.isObject(cfg)) { cfg.opener = this; } args.push(cfg); for (let i = 1; i < arguments.length; i++) { args.push(arguments[i]); } if (!bbn.fn.isObject(cfg)) { args.push(this); } return popup.alert.apply(popup, args) } }, /** * Executes bbn.fn.post * @method post * @see {@link https://bbn.io/bbn-js/doc/ajax/post|bbn.fn.post} documentation * @todo Stupid idea, it should be removed. * @return {Promise} */ post(){ return bbn.vue.post(this, arguments); }, /** * Executes bbn.fn.postOut * @method postOut * @see {@link https://bbn.io/bbn-js/doc/ajax/postOut|bbn.fn.postOut} documentation * @todo Stupid idea, it should be removed. * @return {void} */ postOut(){ return bbn.vue.postOut(this, ...arguments); }, /** * @method getComponentName * @todo Returns a component name based on the name of the given component and a path. * @memberof bbn.vue * @param {Vue} vm The component from which the name is created. * @param {String} path The relative path to the component from the given component. */ getComponentName(){ return bbn.vue.getComponentName(this, ...arguments); }, } }); })(window.bbn);<|fim▁end|>
* @method getRef * @param {String} name * @fires bbn.vue.getRef * @return {Function}
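Any component that picks up this mixin inherits the traversal and popup helpers shown above. A hypothetical component illustrating typical usage; the component name, selector, ref name, and messages are all invented:

// Sketch only: uses just the mixin methods defined above.
Vue.component("demo-remover", {
  methods: {
    refreshTable() {
      const table = this.closest("bbn-table");      // nearest ancestor matching the selector
      if (!table) {
        this.alert(this._("No enclosing table"));   // rendered by the closest popup component
        return;
      }
      const scroller = table.getRef("scroller");    // resolves $refs.scroller (or its first element)
      console.log(this.getComponentName(), scroller);
    }
  }
});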
<|file_name|>web.py<|end_file_name|><|fim▁begin|>import busbus from busbus.entity import BaseEntityJSONEncoder from busbus.provider import ProviderBase from busbus.queryable import Queryable import cherrypy import collections import itertools import types def json_handler(*args, **kwargs): value = cherrypy.serving.request._json_inner_handler(*args, **kwargs) return BaseEntityJSONEncoder().encode(value).encode('utf-8') cherrypy.config['tools.json_out.handler'] = json_handler EXPAND_TYPES = { 'providers': ProviderBase, 'agencies': busbus.Agency, 'stops': busbus.Stop, 'routes': busbus.Route, 'arrivals': busbus.Arrival, } def unexpand_init(result, to_expand): return ({attr: unexpand(value, to_expand) for attr, value in dict(obj).items()} for obj in result) def unexpand(obj, to_expand): for name, cls in EXPAND_TYPES.items(): if isinstance(obj, cls): if name not in to_expand: return {'id': obj.id} else: return {attr: unexpand(value, to_expand) for attr, value in dict(obj).items()} if isinstance(obj, dict): return {attr: unexpand(value, to_expand) for attr, value in obj.items()} if isinstance(obj, (list, tuple, collections.Iterator)): return (unexpand(value, to_expand) for value in obj) return obj class APIError(Exception): def __init__(self, msg, error_code=500): self.msg = msg self.error_code = error_code class EndpointNotFoundError(APIError): def __init__(self, entity, action=None): super(EndpointNotFoundError, self).__init__( 'Endpoint /{0} not found'.format( entity + '/' + action if action else entity), 404) class Engine(busbus.Engine): def __init__(self, *args, **kwargs): # perhaps fix this to use a decorator somehow? self._entity_actions = { ('stops', 'find'): (self.stops_find, 'stops'), ('routes', 'directions'): (self.routes_directions, 'directions'), } super(Engine, self).__init__(*args, **kwargs) @cherrypy.popargs('entity', 'action') @cherrypy.expose @cherrypy.tools.json_out() def default(self, entity=None, action=None, **kwargs): if entity is None: return self.help() response = { 'request': { 'status': 'ok', 'entity': entity, 'params': kwargs, } } try: to_expand = (kwargs.pop('_expand').split(',') if '_expand' in kwargs else []) if to_expand: response['request']['expand'] = to_expand limit = kwargs.pop('_limit', None) if limit: try: limit = int(limit) if limit <= 0: raise ValueError() except ValueError: raise APIError('_limit must be a positive integer', 422) response['request']['limit'] = limit if 'realtime' in kwargs: if kwargs['realtime'] in ('y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE', 'on', 'On', 'ON'): kwargs['realtime'] = True elif kwargs['realtime'] in ('n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE', 'off', 'Off', 'OFF'): kwargs['realtime'] = False else: raise APIError('realtime is not a boolean', 422) if action: response['request']['action'] = action if (entity, action) in self._entity_actions: func, entity = self._entity_actions[(entity, action)] result = func(**kwargs) else: raise EndpointNotFoundError(entity, action) else: if 'provider.id' in kwargs: provider_id = kwargs.pop('provider.id') if provider_id in self._providers: provider = self._providers[provider_id] entity_func = getattr(provider, entity, None) else: entity_func = Queryable(()) else: entity_func = getattr(self, entity, None) if entity_func is not None: result = entity_func.where(**kwargs) else: raise EndpointNotFoundError(entity) if limit: result = itertools.islice(result, limit) response[entity] = unexpand_init(result, to_expand) except APIError as exc: response['request']['status'] = 'error' 
response['error'] = exc.msg cherrypy.response.status = exc.error_code return response<|fim▁hole|> def help(self): return { 'request': { 'status': 'help', }, '_entities': EXPAND_TYPES.keys(), '_actions': self._entity_actions.keys(), } def stops_find(self, **kwargs): expected = ('latitude', 'longitude', 'distance') if all(x in kwargs for x in expected): for x in expected: kwargs[x] = float(kwargs[x]) latlon = (kwargs['latitude'], kwargs['longitude']) return super(Engine, self).stops.where( lambda s: s.distance_to(latlon) <= kwargs['distance']) else: raise APIError('missing attributes: ' + ','.join( x for x in expected if x not in kwargs), 422) def routes_directions(self, **kwargs): expected = ('route.id', 'provider.id') missing = [x for x in expected if x not in kwargs] if missing: raise APIError('missing attributes: ' + ','.join(missing), 422) provider = self._providers[kwargs['provider.id']] route = provider.get(busbus.Route, kwargs['route.id']) return route.directions<|fim▁end|>
<|file_name|>fakes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2015 Spanish National Research Council # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import re import uuid import webob.dec import webob.exc from ooi import utils import ooi.wsgi application_url = "https://foo.example.org:8774/ooiv1" tenants = { "foo": {"id": uuid.uuid4().hex, "name": "foo"}, "bar": {"id": uuid.uuid4().hex, "name": "bar"}, "baz": {"id": uuid.uuid4().hex, "name": "baz"}, } flavors = { 1: { "id": 1, "name": "foo", "vcpus": 2, "ram": 256, "disk": 10, }, 2: { "id": 2, "name": "bar", "vcpus": 4, "ram": 2014, "disk": 20, } } images = { "foo": { "id": "foo",<|fim▁hole|> "id": "bar", "name": "bar", } } volumes = { tenants["foo"]["id"]: [ { "id": uuid.uuid4().hex, "displayName": "foo", "size": 2, "status": "available", "attachments": [], }, { "id": uuid.uuid4().hex, "displayName": "bar", "size": 3, "status": "available", "attachments": [], }, { "id": uuid.uuid4().hex, "displayName": "baz", "size": 5, "status": "available", "attachments": [], }, ], tenants["bar"]["id"]: [], tenants["baz"]["id"]: [ { "id": uuid.uuid4().hex, "displayName": "volume", "size": 5, "status": "in-use", }, ], } pools = { tenants["foo"]["id"]: [ { "id": "foo", "name": "foo", }, { "id": "bar", "name": "bar", } ], tenants["bar"]["id"]: [], tenants["baz"]["id"]: [ { "id": "public", "name": "public", }, ], } linked_vm_id = uuid.uuid4().hex allocated_ip = "192.168.253.23" floating_ips = { tenants["foo"]["id"]: [], tenants["bar"]["id"]: [], tenants["baz"]["id"]: [ { "fixed_ip": "10.0.0.2", "id": uuid.uuid4().hex, "instance_id": linked_vm_id, "ip": "192.168.253.1", "pool": pools[tenants["baz"]["id"]][0]["name"], }, { "fixed_ip": None, "id": uuid.uuid4().hex, "instance_id": None, "ip": "192.168.253.2", "pool": pools[tenants["baz"]["id"]][0]["name"], }, ], } servers = { tenants["foo"]["id"]: [ { "id": uuid.uuid4().hex, "name": "foo", "flavor": {"id": flavors[1]["id"]}, "image": {"id": images["foo"]["id"]}, "status": "ACTIVE", }, { "id": uuid.uuid4().hex, "name": "bar", "flavor": {"id": flavors[2]["id"]}, "image": {"id": images["bar"]["id"]}, "status": "SHUTOFF", }, { "id": uuid.uuid4().hex, "name": "baz", "flavor": {"id": flavors[1]["id"]}, "image": {"id": images["bar"]["id"]}, "status": "ERROR", }, ], tenants["bar"]["id"]: [], tenants["baz"]["id"]: [ { "id": linked_vm_id, "name": "withvolume", "flavor": {"id": flavors[1]["id"]}, "image": {"id": images["bar"]["id"]}, "status": "ACTIVE", "os-extended-volumes:volumes_attached": [ {"id": volumes[tenants["baz"]["id"]][0]["id"]} ], "addresses": { "private": [ {"addr": floating_ips[tenants["baz"]["id"]][0]["fixed_ip"], "OS-EXT-IPS:type": "fixed", "OS-EXT-IPS-MAC:mac_addr": "1234"}, {"addr": floating_ips[tenants["baz"]["id"]][0]["ip"], "OS-EXT-IPS:type": "floating", "OS-EXT-IPS-MAC:mac_addr": "1234"}, ] } } ], } # avoid circular definition of attachments volumes[tenants["baz"]["id"]][0]["attachments"] = [{ # how consistent can OpenStack be! 
# depending on using /servers/os-volume_attachments # or /os-volumes it will return different field names "server_id": servers[tenants["baz"]["id"]][0]["id"], "serverId": servers[tenants["baz"]["id"]][0]["id"], "attachment_id": uuid.uuid4().hex, "volumeId": volumes[tenants["baz"]["id"]][0]["id"], "volume_id": volumes[tenants["baz"]["id"]][0]["id"], "device": "/dev/vdb", "id": volumes[tenants["baz"]["id"]][0]["id"], }] def fake_query_results(): cats = [] # OCCI Core cats.append( 'link; ' 'scheme="http://schemas.ogf.org/occi/core#"; ' 'class="kind"; title="link"') cats.append( 'resource; ' 'scheme="http://schemas.ogf.org/occi/core#"; ' 'class="kind"; title="resource"; ' 'rel="http://schemas.ogf.org/occi/core#entity"') cats.append( 'entity; ' 'scheme="http://schemas.ogf.org/occi/core#"; ' 'class="kind"; title="entity"') # OCCI Infrastructure Compute cats.append( 'compute; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="kind"; title="compute resource"; ' 'rel="http://schemas.ogf.org/occi/core#resource"') cats.append( 'start; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; ' 'class="action"; title="start compute instance"') cats.append( 'stop; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; ' 'class="action"; title="stop compute instance"') cats.append( 'restart; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; ' 'class="action"; title="restart compute instance"') cats.append( 'suspend; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; ' 'class="action"; title="suspend compute instance"') # OCCI Templates cats.append( 'os_tpl; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="mixin"; title="OCCI OS Template"') cats.append( 'resource_tpl; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="mixin"; title="OCCI Resource Template"') # OpenStack Images cats.append( 'bar; ' 'scheme="http://schemas.openstack.org/template/os#"; ' 'class="mixin"; title="bar"; ' 'rel="http://schemas.ogf.org/occi/infrastructure#os_tpl"') cats.append( 'foo; ' 'scheme="http://schemas.openstack.org/template/os#"; ' 'class="mixin"; title="foo"; ' 'rel="http://schemas.ogf.org/occi/infrastructure#os_tpl"') # OpenStack Flavors cats.append( '1; ' 'scheme="http://schemas.openstack.org/template/resource#"; ' 'class="mixin"; title="Flavor: foo"; ' 'rel="http://schemas.ogf.org/occi/infrastructure#resource_tpl"') cats.append( '2; ' 'scheme="http://schemas.openstack.org/template/resource#"; ' 'class="mixin"; title="Flavor: bar"; ' 'rel="http://schemas.ogf.org/occi/infrastructure#resource_tpl"') # OCCI Infrastructure Network cats.append( 'network; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="kind"; title="network resource"; ' 'rel="http://schemas.ogf.org/occi/core#resource"') cats.append( 'ipnetwork; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/network#"; ' 'class="mixin"; title="IP Networking Mixin"') cats.append( 'up; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/network/action#"; ' 'class="action"; title="up network instance"') cats.append( 'down; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/network/action#"; ' 'class="action"; title="down network instance"') cats.append( 'networkinterface; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="kind"; title="network link resource"; ' 'rel="http://schemas.ogf.org/occi/core#link"') cats.append( 'ipnetworkinterface; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/' 
'networkinterface#"; ' 'class="mixin"; title="IP Network interface Mixin"') # OCCI Infrastructure Storage cats.append( 'storage; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="kind"; title="storage resource"; ' 'rel="http://schemas.ogf.org/occi/core#resource"') cats.append( 'storagelink; ' 'scheme="http://schemas.ogf.org/occi/infrastructure#"; ' 'class="kind"; title="storage link resource"; ' 'rel="http://schemas.ogf.org/occi/core#link"') cats.append( 'offline; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/storage/action#"; ' 'class="action"; title="offline storage instance"') cats.append( 'online; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/storage/action#"; ' 'class="action"; title="online storage instance"') cats.append( 'backup; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/storage/action#"; ' 'class="action"; title="backup storage instance"') cats.append( 'resize; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/storage/action#"; ' 'class="action"; title="resize storage instance"') cats.append( 'snapshot; ' 'scheme="http://schemas.ogf.org/occi/infrastructure/storage/action#"; ' 'class="action"; title="snapshot storage instance"') # OpenStack contextualization cats.append( 'user_data; ' 'scheme="http://schemas.openstack.org/compute/instance#"; ' 'class="mixin"; title="Contextualization extension - user_data"') cats.append( 'public_key; ' 'scheme="http://schemas.openstack.org/instance/credentials#"; ' 'class="mixin"; title="Contextualization extension - public_key"') result = [] for c in cats: result.append(("Category", c)) return result class FakeOpenStackFault(ooi.wsgi.Fault): _fault_names = { 400: "badRequest", 401: "unauthorized", 403: "forbidden", 404: "itemNotFound", 405: "badMethod", 406: "notAceptable", 409: "conflictingRequest", 413: "overLimit", 415: "badMediaType", 429: "overLimit", 501: "notImplemented", 503: "serviceUnavailable"} @webob.dec.wsgify() def __call__(self, req): code = self.wrapped_exc.status_int fault_name = self._fault_names.get(code) explanation = self.wrapped_exc.explanation fault_data = { fault_name: { 'code': code, 'message': explanation}} self.wrapped_exc.body = utils.utf8(json.dumps(fault_data)) self.wrapped_exc.content_type = "application/json" return self.wrapped_exc class FakeApp(object): """Poor man's fake application.""" def __init__(self): self.routes = {} for tenant in tenants.values(): path = "/%s" % tenant["id"] self._populate(path, "server", servers[tenant["id"]], actions=True) self._populate(path, "volume", volumes[tenant["id"]], "os-volumes") self._populate(path, "floating_ip_pool", pools[tenant["id"]], "os-floating-ip-pools") self._populate(path, "floating_ip", floating_ips[tenant["id"]], "os-floating-ips") # NOTE(aloga): dict_values un Py3 is not serializable in JSON self._populate(path, "image", list(images.values())) self._populate(path, "flavor", list(flavors.values())) self._populate_attached_volumes(path, servers[tenant["id"]], volumes[tenant["id"]]) def _populate(self, path_base, obj_name, obj_list, objs_path=None, actions=[]): objs_name = "%ss" % obj_name if objs_path: path = "%s/%s" % (path_base, objs_path) else: path = "%s/%s" % (path_base, objs_name) objs_details_path = "%s/detail" % path self.routes[path] = create_fake_json_resp({objs_name: obj_list}) self.routes[objs_details_path] = create_fake_json_resp( {objs_name: obj_list}) for o in obj_list: obj_path = "%s/%s" % (path, o["id"]) self.routes[obj_path] = create_fake_json_resp({obj_name: o}) if actions: action_path = 
"%s/action" % obj_path self.routes[action_path] = webob.Response(status=202) def _populate_attached_volumes(self, path, server_list, vol_list): for s in server_list: attachments = [] if "os-extended-volumes:volumes_attached" in s: for attach in s["os-extended-volumes:volumes_attached"]: for v in vol_list: if attach["id"] == v["id"]: attachments.append(v["attachments"][0]) path_base = "%s/servers/%s/os-volume_attachments" % (path, s["id"]) self.routes[path_base] = create_fake_json_resp( {"volumeAttachments": attachments} ) for attach in attachments: obj_path = "%s/%s" % (path_base, attach["id"]) self.routes[obj_path] = create_fake_json_resp( {"volumeAttachment": attach}) @webob.dec.wsgify() def __call__(self, req): if req.method == "GET": return self._do_get(req) elif req.method == "POST": return self._do_post(req) elif req.method == "DELETE": return self._do_delete(req) def _do_create_server(self, req): # TODO(enolfc): this should check the json is # semantically correct s = {"server": {"id": "foo", "name": "foo", "flavor": {"id": "1"}, "image": {"id": "2"}, "status": "ACTIVE"}} return create_fake_json_resp(s) def _do_create_volume(self, req): # TODO(enolfc): this should check the json is # semantically correct s = {"volume": {"id": "foo", "displayName": "foo", "size": 1, "status": "on-line"}} return create_fake_json_resp(s) def _do_create_attachment(self, req): v = {"volumeAttachment": {"serverId": "foo", "volumeId": "bar", "device": "/dev/vdb"}} return create_fake_json_resp(v, 202) def _do_allocate_ip(self, req): body = req.json_body.copy() pool = body.popitem() tenant = req.path_info.split('/')[1] for p in pools[tenant]: if p["name"] == pool[1]: break else: exc = webob.exc.HTTPNotFound() return FakeOpenStackFault(exc) ip = {"floating_ip": {"ip": allocated_ip, "id": 1}} return create_fake_json_resp(ip, 202) def _do_post(self, req): if req.path_info.endswith("servers"): return self._do_create_server(req) if req.path_info.endswith("os-volumes"): return self._do_create_volume(req) elif req.path_info.endswith("action"): body = req.json_body.copy() action = body.popitem() if action[0] in ["os-start", "os-stop", "reboot", "addFloatingIp", "removeFloatingIp"]: return self._get_from_routes(req) elif req.path_info.endswith("os-volume_attachments"): return self._do_create_attachment(req) elif req.path_info.endswith("os-floating-ips"): return self._do_allocate_ip(req) raise Exception def _do_delete(self, req): self._do_get(req) tested_paths = { r"/[^/]+/servers/[^/]+/os-volume_attachments/[^/]+$": 202, r"/[^/]+/os-floating-ips/[^/]+$": 202, r"/[^/]+/servers/[^/]+$": 204, r"/[^/]+/os-volumes/[^/]+$": 204, } for p, st in tested_paths.items(): if re.match(p, req.path_info): return create_fake_json_resp({}, st) raise Exception def _do_get(self, req): return self._get_from_routes(req) def _get_from_routes(self, req): try: ret = self.routes[req.path_info] except KeyError: exc = webob.exc.HTTPNotFound() ret = FakeOpenStackFault(exc) return ret def create_fake_json_resp(data, status=200): r = webob.Response() r.headers["Content-Type"] = "application/json" r.charset = "utf8" r.body = json.dumps(data).encode("utf8") r.status_code = status return r<|fim▁end|>
"name": "foo", }, "bar": {
<|file_name|>eternus_dx_fc.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 FUJITSU LIMITED # Copyright (c) 2012 EMC Corporation. # Copyright (c) 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """ FibreChannel Cinder Volume driver for Fujitsu ETERNUS DX S3 series. """ from oslo_log import log as logging import six from cinder import interface from cinder.volume import driver from cinder.volume.drivers.fujitsu import eternus_dx_common from cinder.zonemanager import utils as fczm_utils LOG = logging.getLogger(__name__) @interface.volumedriver class FJDXFCDriver(driver.FibreChannelDriver): """FC Cinder Volume Driver for Fujitsu ETERNUS DX S3 series.""" # ThirdPartySystems wiki page CI_WIKI_NAME = "Fujitsu_ETERNUS_CI" VERSION = eternus_dx_common.FJDXCommon.VERSION def __init__(self, *args, **kwargs): super(FJDXFCDriver, self).__init__(*args, **kwargs) self.common = eternus_dx_common.FJDXCommon( 'fc', configuration=self.configuration) self.VERSION = self.common.VERSION def check_for_setup_error(self): pass def create_volume(self, volume): """Create volume.""" LOG.debug('create_volume, ' 'volume id: %s, enter method.', volume['id']) location, metadata = self.common.create_volume(volume) v_metadata = self._get_metadata(volume) metadata.update(v_metadata) LOG.debug('create_volume, info: %s, exit method.', metadata) return {'provider_location': six.text_type(location), 'metadata': metadata} def create_volume_from_snapshot(self, volume, snapshot): """Creates a volume from a snapshot.""" LOG.debug('create_volume_from_snapshot, ' 'volume id: %(vid)s, snap id: %(sid)s, enter method.', {'vid': volume['id'], 'sid': snapshot['id']}) location, metadata = ( self.common.create_volume_from_snapshot(volume, snapshot)) v_metadata = self._get_metadata(volume) metadata.update(v_metadata) LOG.debug('create_volume_from_snapshot, ' 'info: %s, exit method.', metadata) return {'provider_location': six.text_type(location), 'metadata': metadata} def create_cloned_volume(self, volume, src_vref): """Create cloned volume.""" LOG.debug('create_cloned_volume, ' 'target volume id: %(tid)s, ' 'source volume id: %(sid)s, enter method.', {'tid': volume['id'], 'sid': src_vref['id']}) location, metadata = ( self.common.create_cloned_volume(volume, src_vref)) v_metadata = self._get_metadata(volume) metadata.update(v_metadata) LOG.debug('create_cloned_volume, ' 'info: %s, exit method.', metadata) return {'provider_location': six.text_type(location), 'metadata': metadata} def delete_volume(self, volume): """Delete volume on ETERNUS.""" LOG.debug('delete_volume, ' 'volume id: %s, enter method.', volume['id']) vol_exist = self.common.delete_volume(volume) LOG.debug('delete_volume, ' 'delete: %s, exit method.', vol_exist) <|fim▁hole|> 'snap id: %(sid)s, volume id: %(vid)s, enter method.', {'sid': snapshot['id'], 'vid': snapshot['volume_id']}) location, metadata = self.common.create_snapshot(snapshot) LOG.debug('create_snapshot, info: %s, exit method.', metadata) return {'provider_location': 
six.text_type(location)} def delete_snapshot(self, snapshot): """Deletes a snapshot.""" LOG.debug('delete_snapshot, ' 'snap id: %(sid)s, volume id: %(vid)s, enter method.', {'sid': snapshot['id'], 'vid': snapshot['volume_id']}) vol_exist = self.common.delete_snapshot(snapshot) LOG.debug('delete_snapshot, ' 'delete: %s, exit method.', vol_exist) def ensure_export(self, context, volume): """Driver entry point to get the export info for an existing volume.""" return def create_export(self, context, volume, connector): """Driver entry point to get the export info for a new volume.""" return def remove_export(self, context, volume): """Driver entry point to remove an export for a volume.""" return @fczm_utils.AddFCZone def initialize_connection(self, volume, connector): """Allow connection to connector and return connection info.""" LOG.debug('initialize_connection, volume id: %(vid)s, ' 'wwpns: %(wwpns)s, enter method.', {'vid': volume['id'], 'wwpns': connector['wwpns']}) info = self.common.initialize_connection(volume, connector) data = info['data'] init_tgt_map = ( self.common.build_fc_init_tgt_map(connector, data['target_wwn'])) data['initiator_target_map'] = init_tgt_map info['data'] = data LOG.debug('initialize_connection, ' 'info: %s, exit method.', info) return info @fczm_utils.RemoveFCZone def terminate_connection(self, volume, connector, **kwargs): """Disallow connection from connector.""" LOG.debug('terminate_connection, volume id: %(vid)s, ' 'wwpns: %(wwpns)s, enter method.', {'vid': volume['id'], 'wwpns': connector['wwpns']}) map_exist = self.common.terminate_connection(volume, connector) attached = self.common.check_attached_volume_in_zone(connector) info = {'driver_volume_type': 'fibre_channel', 'data': {}} if not attached: # No more volumes attached to the host init_tgt_map = self.common.build_fc_init_tgt_map(connector) info['data'] = {'initiator_target_map': init_tgt_map} LOG.debug('terminate_connection, unmap: %(unmap)s, ' 'connection info: %(info)s, exit method', {'unmap': map_exist, 'info': info}) return info def get_volume_stats(self, refresh=False): """Get volume stats.""" LOG.debug('get_volume_stats, refresh: %s, enter method.', refresh) pool_name = None if refresh is True: data, pool_name = self.common.update_volume_stats() backend_name = self.configuration.safe_get('volume_backend_name') data['volume_backend_name'] = backend_name or 'FJDXFCDriver' data['storage_protocol'] = 'FC' self._stats = data LOG.debug('get_volume_stats, ' 'pool name: %s, exit method.', pool_name) return self._stats def extend_volume(self, volume, new_size): """Extend volume.""" LOG.debug('extend_volume, ' 'volume id: %s, enter method.', volume['id']) used_pool_name = self.common.extend_volume(volume, new_size) LOG.debug('extend_volume, ' 'used pool name: %s, exit method.', used_pool_name) def _get_metadata(self, volume): v_metadata = volume.get('volume_metadata') if v_metadata: ret = {data['key']: data['value'] for data in v_metadata} else: ret = volume.get('metadata', {}) return ret<|fim▁end|>
def create_snapshot(self, snapshot): """Creates a snapshot.""" LOG.debug('create_snapshot, '
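The Fujitsu driver above is a thin protocol layer: every operation logs entry, delegates to a shared "common" backend object, and logs exit. A minimal sketch of that delegation pattern, with CommonBackend standing in for FJDXCommon (an assumption, not the real class):

import logging

LOG = logging.getLogger(__name__)

class CommonBackend:
    # Hypothetical stand-in for FJDXCommon: owns the real storage logic.
    def delete_volume(self, volume):
        return True

class Driver:
    # Thin driver that delegates to the shared backend and logs
    # enter/exit, like the FC driver above.
    def __init__(self):
        self.common = CommonBackend()

    def delete_volume(self, volume):
        LOG.debug('delete_volume, volume id: %s, enter method.', volume['id'])
        existed = self.common.delete_volume(volume)
        LOG.debug('delete_volume, delete: %s, exit method.', existed)
        return existed

Driver().delete_volume({'id': 'vol-1'})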
<|file_name|>usuarios.js<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ angular.module('MetronicApp').controller('UsuariosCtrl', function ($scope, GetSv, $rootScope, PostSv,toaster) { $scope.usuarios = []; $scope.add = false; $scope.edit = false; $scope.a_editar = {}; $scope.usuario = {}; $scope.getUsers = function () { GetSv.getData("usuarios").then(function (data) { if (data.Error) { $scope.error = true; } else { $scope.usuarios = data; $scope.error = false; } }, function (e) { $scope.error = true; }); }; $scope.getUsers(); $scope.closeEdit = function () { $scope.a_editar = {}; $scope.edit = false; }; $scope.openEdit = function (item) { $scope.a_editar = item; $scope.edit = true; }; $scope.closeAdd = function () { $scope.add = false; }; $scope.openAdd = function (item) { $scope.a_editar = {}; $scope.add = true; }; $scope.sendUser = function (servlet, user) { PostSv.postData(servlet, {usuario: JSON.stringify(user)}).then(function (data) { if (data.Error) { toaster.pop('error', "Error", data.Error); } else { toaster.pop('success', "Exito", data.Exito); $scope.a_editar = {}; $scope.usuario = {}; $scope.getUsers(); $scope.add = false; $scope.edit = false; } }, function (e) { toaster.pop('error', "Error", "Error fatal"); }<|fim▁hole|> }; $scope.roles = $rootScope.roles; });<|fim▁end|>
);
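The AngularJS controller above follows a common CRUD loop: submit the change, and on success clear the editing state and re-fetch the list so the view never shows stale data. The same flow expressed as a Python sketch (the Api stub is hypothetical, not the GetSv/PostSv services used above):

class Api:
    # Hypothetical stand-in for the PostSv/GetSv services.
    def __init__(self):
        self.users = [{'id': 1, 'nombre': 'ana'}]

    def post(self, user):
        self.users.append(user)
        return {'Exito': 'saved'}

    def get(self):
        return list(self.users)

def send_user(api, user, state):
    result = api.post(user)
    if 'Error' in result:
        return result
    # On success: reset editing state, then refresh from the server.
    state.update(a_editar={}, edit=False, add=False, usuarios=api.get())
    return result

state = {}
send_user(Api(), {'id': 2, 'nombre': 'luis'}, state)
assert len(state['usuarios']) == 2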
<|file_name|>GraphViewConfigurationUIFactory.java<|end_file_name|><|fim▁begin|>/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and<|fim▁hole|> */
package org.apache.taverna.workbench.views.graph.config;

import javax.swing.JPanel;

import org.apache.taverna.configuration.Configurable;
import org.apache.taverna.configuration.ConfigurationUIFactory;

/**
 * ConfigurationFactory for the GraphViewConfiguration.
 *
 * @author David Withers
 */
public class GraphViewConfigurationUIFactory implements ConfigurationUIFactory {

    private GraphViewConfiguration graphViewConfiguration;

    @Override
    public boolean canHandle(String uuid) {
        return uuid.equals(getConfigurable().getUUID());
    }

    @Override
    public JPanel getConfigurationPanel() {
        return new GraphViewConfigurationPanel(graphViewConfiguration);
    }

    @Override
    public Configurable getConfigurable() {
        return graphViewConfiguration;
    }

    public void setGraphViewConfiguration(
            GraphViewConfiguration graphViewConfiguration) {
        this.graphViewConfiguration = graphViewConfiguration;
    }
}<|fim▁end|>
* limitations under the License.
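The Java row above is a small instance of the UI-factory pattern: a factory is selected by the UUID of its configurable and hands back both the configurable and a configuration panel for it. A rough Python rendering of the same shape, under assumed names (not the Taverna API):

class ConfigurationUIFactory:
    # Sketch of the interface above: selected by UUID, produces a panel
    # for the configurable it wraps.
    def __init__(self, configurable, make_panel):
        self._configurable = configurable
        self._make_panel = make_panel

    def can_handle(self, uuid):
        return uuid == self._configurable['uuid']

    def configuration_panel(self):
        return self._make_panel(self._configurable)

factory = ConfigurationUIFactory({'uuid': 'graph-view'}, lambda c: ('panel', c))
assert factory.can_handle('graph-view')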
<|file_name|>anitoonstv.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import re from channels import renumbertools from channelselector import get_thumb from core import httptools from core import scrapertools from core import servertools from core import tmdb from core.item import Item from platformcode import config, logger from channels import autoplay IDIOMAS = {'latino': 'Latino'} list_language = IDIOMAS.values() list_servers = ['openload', 'okru', 'netutv', 'rapidvideo' ] list_quality = ['default'] host = "http://www.anitoonstv.com" def mainlist(item):<|fim▁hole|> logger.info() thumb_series = get_thumb("channels_tvshow.png") autoplay.init(item.channel, list_servers, list_quality) itemlist = list() itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host, thumbnail=thumb_series)) itemlist = renumbertools.show_option(item.channel, itemlist) autoplay.show_option(item.channel, itemlist) return itemlist def lista(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) if 'Novedades' in item.title: patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>' patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>' else: patron_cat = '<li><a href=.+?>' patron_cat += str(item.title) patron_cat += '<\/a><div>(.+?)<\/div><\/li>' patron = "<a href='(.+?)'>(.+?)<\/a>" data = scrapertools.find_single_match(data, patron_cat) matches = scrapertools.find_multiple_matches(data, patron) for link, name in matches: if "Novedades" in item.title: url = link title = name.capitalize() else: url = host + link title = name if ":" in title: cad = title.split(":") show = cad[0] else: if "(" in title: cad = title.split("(") if "Super" in title: show = cad[1] show = show.replace(")", "") else: show = cad[0] else: show = title if "&" in show: cad = title.split("xy") show = cad[0] context1=[renumbertools.context(item), autoplay.context] itemlist.append( item.clone(title=title, url=url, plot=show, action="episodios", show=show, context=context1)) tmdb.set_infoLabels(itemlist) return itemlist def episodios(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>' data = scrapertools.find_single_match(data, patron) patron_caps = "<a href='(.+?)'>Capitulo: (.+?) 
- (.+?)<\/a>" matches = scrapertools.find_multiple_matches(data, patron_caps) show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>') scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>") scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>') i = 0 temp = 0 for link, cap, name in matches: if int(cap) == 1: temp = temp + 1 if int(cap) < 10: cap = "0" + cap season = temp episode = int(cap) season, episode = renumbertools.numbered_for_tratk( item.channel, item.show, season, episode) date = name title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date) # title = str(temp)+"x"+cap+" "+name url = host + "/" + link if "NO DISPONIBLE" not in name: itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail, plot=scrapedplot, url=url, show=show)) if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url, action="add_serie_to_library", extra="episodios", show=show)) return itemlist def findvideos(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data1 = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>') # name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>') scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>') scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">') itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"') for server, quality, url in itemla: if "Calidad Alta" in quality: quality = quality.replace("Calidad Alta", "HQ") server = server.lower().strip() if "ok" == server: server = 'okru' if "netu" == server: continue itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality, thumbnail=scrapedthumbnail, plot=scrapedplot, title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality))) autoplay.start(itemlist, item) return itemlist def play(item): logger.info() itemlist = [] # Buscamos video por servidor ... devuelve = servertools.findvideosbyserver(item.url, item.server) if not devuelve: # ...sino lo encontramos buscamos en todos los servidores disponibles devuelve = servertools.findvideos(item.url, skip=True) if devuelve: # logger.debug(devuelve) itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2], url=devuelve[0][1], thumbnail=item.thumbnail)) return itemlist<|fim▁end|>
<|file_name|>releases.py<|end_file_name|><|fim▁begin|>''' MLAB calls MATLAB funcitons and looks like a normal python library. authors: Yauhen Yakimovich <[email protected]> <|fim▁hole|>from types import ModuleType from mlabwrap import (MlabWrap, choose_release, find_available_releases, MatlabReleaseNotFound) import traceback # TODO: work with ENV #os.getenv("MLABRAW_CMD_STR", "") def get_available_releases(): return dict(find_available_releases()) def get_latest_release(available_releases=None): if not available_releases: available_releases = dict(find_available_releases()) versions = available_releases.keys() latest_release_version = sorted(versions)[-1] return latest_release_version class MatlabVersions(dict): def __init__(self, globs): self.globs = globs self.__selected_instance = None self._available_releases = dict(find_available_releases()) def __setitem__(self, k, v): self.globs[k] = v def __getitem__(self, k): try: return self.globs[k] except KeyError: pass # the only way we'd get to here is if we've tried to # import * from a repl. so, raise an exception, since # that's really the only sensible thing to do if k == "__all__": raise ImportError('Cannot import * from mlab. Please import mlab ' 'or import versions individually.') if k.startswith("__") and k.endswith("__"): raise AttributeError # how about an return a function variable? # try: return os.environ[k] # except KeyError: pass # is it a "release version"? if k.startswith('R') and k in self._available_releases: self[k] = self.get_mlab_instance(k) return self[k] if k == 'latest_release': matlab_release = self.pick_latest_release() instance = self.get_mlab_instance(matlab_release) self[k] = instance self[matlab_release] = instance return instance if self.__selected_instance is not None: instance = self[self.__selected_instance] try: return getattr(instance, k) except AttributeError: traceback.print_exc(file=sys.stdout) else: raise ImportError('Import failed, no MATLAB instance selected. Try import mlab.latest_release') raise ImportError('Failed to import anything for: %s' % k) def get_mlab_instance(self, matlab_release): choose_release(matlab_release) instance = MlabWrap() # Make it a module sys.modules['mlab.releases.' + matlab_release] = instance sys.modules['matlab'] = instance return instance def pick_latest_release(self): return get_latest_release(self._available_releases) # this is a thin wrapper around THIS module (we patch sys.modules[__name__]). # this is in the case that the user does a "from sh import whatever" # in other words, they only want to import certain programs, not the whole # system PATH worth of commands. in this case, we just proxy the # import lookup to our MatlabVersions class class SelfWrapper(ModuleType): def __init__(self, self_module): # this is super ugly to have to copy attributes like this, # but it seems to be the only way to make reload() behave # nicely. if i make these attributes dynamic lookups in # __getattr__, reload sometimes chokes in weird ways... for attr in ["__builtins__", "__doc__", "__name__", "__package__"]: setattr(self, attr, getattr(self_module, attr, None)) # python 3.2 (2.7 and 3.3 work fine) breaks on osx (not ubuntu) # if we set this to None. 
and 3.3 needs a value for __path__ self.__path__ = [] self.module = self_module self.instances = MatlabVersions(globals()) def __setattr__(self, name, value): #if hasattr(self, "instances"): self.instances[name] = value ModuleType.__setattr__(self, name, value) def __getattr__(self, name): if name == "instances": raise AttributeError if name in dir(self.module): return getattr(self.module, name) return self.instances[name] # accept special keywords argument to define defaults for all operations # that will be processed with given by return SelfWrapper def __call__(self, **kwargs): return SelfWrapper(self.self_module, kwargs) # we're being imported from somewhere if __name__ != '__main__': self = sys.modules[__name__] sys.modules[__name__] = SelfWrapper(self)<|fim▁end|>
Module wrapping borrowed from `sh` project by Andrew Moffat. ''' import os import sys
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>""" This is our testing framework. Goals: * it should be compatible with py.test and operate very similarly (or identically) * doesn't require any external dependencies * preferably all the functionality should be in this file only * no magic, just import the test file and execute the test functions, that's it * portable """ import os import sys import inspect import traceback import pdb from glob import glob from timeit import default_timer as clock def isgeneratorfunction(object): """ Return true if the object is a user-defined generator function. Generator function objects provides same attributes as functions. See isfunction.__doc__ for attributes listing. Adapted from Python 2.6. """ CO_GENERATOR = 0x20 if (inspect.isfunction(object) or inspect.ismethod(object)) and \ object.func_code.co_flags & CO_GENERATOR: return True return False def test(*paths, **kwargs): """ Runs the tests specified by paths, or all tests if paths=[]. Note: paths are specified relative to the sympy root directory in a unix format (on all platforms including windows). Examples: Run all tests: >> import sympy >> sympy.test() Run one file: >> import sympy >> sympy.test("sympy/core/tests/test_basic.py") Run all tests in sympy/functions/ and some particular file: >> import sympy >> sympy.test("sympy/core/tests/test_basic.py", "sympy/functions") """ verbose = kwargs.get("verbose", False) tb = kwargs.get("tb", "short") kw = kwargs.get("kw", "") post_mortem = kwargs.get("pdb", False) colors = kwargs.get("colors", True) r = PyTestReporter(verbose, tb, colors) t = SymPyTests(r, kw, post_mortem) if len(paths) > 0: t.add_paths(paths) else: t.add_paths(["sympy"]) return t.test() def doctest(*paths, **kwargs): """ Runs the doctests specified by paths, or all tests if paths=[]. Note: paths are specified relative to the sympy root directory in a unix format (on all platforms including windows). Examples: Run all tests: >> import sympy >> sympy.doctest() Run one file: >> import sympy >> sympy.doctest("sympy/core/tests/test_basic.py") Run all tests in sympy/functions/ and some particular file: >> import sympy >> sympy.doctest("sympy/core/tests/test_basic.py", "sympy/functions") """ verbose = kwargs.get("verbose", False) blacklist = kwargs.get("blacklist", []) blacklist.extend([ "sympy/thirdparty/pyglet", # segfaults "sympy/mpmath", # needs to be fixed upstream "sympy/plotting", # generates live plots "sympy/utilities/compilef.py", # needs tcc "sympy/galgebra/GA.py", # needs numpy "sympy/galgebra/latex_ex.py", # needs numpy "sympy/conftest.py", # needs py.test "sympy/utilities/benchmarking.py", # needs py.test ]) r = PyTestReporter(verbose) t = SymPyDocTests(r, blacklist=blacklist) if len(paths) > 0: t.add_paths(paths) else: t.add_paths(["sympy"]) return t.test() class SymPyTests(object): def __init__(self, reporter, kw="", post_mortem=False): self._post_mortem = post_mortem self._kw = kw self._count = 0 self._root_dir = self.get_sympy_dir() self._reporter = reporter self._reporter.root_dir(self._root_dir) self._tests = [] def add_paths(self, paths): for path in paths: path2 = os.path.join(self._root_dir, *path.split("/")) if path2.endswith(".py"): self._tests.append(path2) else: self._tests.extend(self.get_tests(path2)) def test(self): """ Runs the tests. Returns True if all tests pass, otherwise False. 
""" self._reporter.start() for f in self._tests: try: self.test_file(f) except KeyboardInterrupt: print " interrupted by user" break return self._reporter.finish() def test_file(self, filename): name = "test%d" % self._count name = os.path.splitext(os.path.basename(filename))[0] self._count += 1 gl = {'__file__':filename} try: execfile(filename, gl) except (ImportError, SyntaxError): self._reporter.import_error(filename, sys.exc_info()) return pytestfile = "" if gl.has_key("XFAIL"): pytestfile = inspect.getsourcefile(gl["XFAIL"]) disabled = gl.get("disabled", False) if disabled:<|fim▁hole|> # is defined the XFAIL decorator funcs = [gl[f] for f in gl.keys() if f.startswith("test_") and (inspect.isfunction(gl[f]) or inspect.ismethod(gl[f])) and (inspect.getsourcefile(gl[f]) == filename or inspect.getsourcefile(gl[f]) == pytestfile)] # Sorting of XFAILed functions isn't fixed yet :-( funcs.sort(key=lambda x: inspect.getsourcelines(x)[1]) i = 0 while i is not len(funcs): if isgeneratorfunction(funcs[i]): # some tests can be generators, that return the actual # test functions. We unpack it below: f = funcs.pop(i) for fg in f(): func = fg[0] args = fg[1:] fgw = lambda: func(*args) funcs.insert(i, fgw) i += 1 else: i += 1 # drop functions that are not selected with the keyword expression: funcs = [x for x in funcs if self.matches(x)] self._reporter.entering_filename(filename, len(funcs)) for f in funcs: self._reporter.entering_test(f) try: f() except KeyboardInterrupt: raise except: t, v, tr = sys.exc_info() if t is AssertionError: self._reporter.test_fail((t, v, tr)) if self._post_mortem: pdb.post_mortem(tr) elif t.__name__ == "Skipped": self._reporter.test_skip() elif t.__name__ == "XFail": self._reporter.test_xfail() elif t.__name__ == "XPass": self._reporter.test_xpass(v) else: self._reporter.test_exception((t, v, tr)) if self._post_mortem: pdb.post_mortem(tr) else: self._reporter.test_pass() self._reporter.leaving_filename() def get_sympy_dir(self): """ Returns the root sympy directory. """ this_file = os.path.abspath(__file__) sympy_dir = os.path.join(os.path.dirname(this_file), "..", "..") sympy_dir = os.path.normpath(sympy_dir) return sympy_dir def matches(self, x): """ Does the keyword expression self._kw match "x"? Returns True/False. Always returns True if self._kw is "". """ if self._kw == "": return True return x.__name__.find(self._kw) != -1 def get_paths(self, dir="", level=15): """ Generates a set of paths for testfiles searching. Example: >> get_paths(2) ['sympy/test_*.py', 'sympy/*/test_*.py', 'sympy/*/*/test_*.py'] >> get_paths(6) ['sympy/test_*.py', 'sympy/*/test_*.py', 'sympy/*/*/test_*.py', 'sympy/*/*/*/test_*.py', 'sympy/*/*/*/*/test_*.py', 'sympy/*/*/*/*/*/test_*.py', 'sympy/*/*/*/*/*/*/test_*.py'] """ wildcards = [dir] for i in range(level): wildcards.append(os.path.join(wildcards[-1], "*")) p = [os.path.join(x, "test_*.py") for x in wildcards] return p def get_tests(self, dir): """ Returns the list of tests. """ g = [] for x in self.get_paths(dir): g.extend(glob(x)) g = list(set(g)) g.sort() return g class SymPyDocTests(object): def __init__(self, reporter, blacklist=[]): self._count = 0 self._root_dir = self.get_sympy_dir() self._reporter = reporter self._reporter.root_dir(self._root_dir) self._tests = [] self._blacklist = blacklist def add_paths(self, paths): for path in paths: path2 = os.path.join(self._root_dir, *path.split("/")) if path2.endswith(".py"): self._tests.append(path2) else: self._tests.extend(self.get_tests(path2)) def test(self): """ Runs the tests. 
Returns True if all tests pass, otherwise False. """ self._reporter.start() for f in self._tests: try: self.test_file(f) except KeyboardInterrupt: print " interrupted by user" break return self._reporter.finish() def test_file(self, filename): def setup_pprint(): from sympy import pprint_use_unicode # force pprint to be in ascii mode in doctests pprint_use_unicode(False) # hook our nice, hash-stable strprinter from sympy.interactive import init_printing from sympy.printing import sstrrepr init_printing(sstrrepr) import doctest import unittest from StringIO import StringIO rel_name = filename[len(self._root_dir)+1:] module = rel_name.replace('/', '.')[:-3] setup_pprint() try: module = doctest._normalize_module(module) tests = doctest.DocTestFinder().find(module) except: self._reporter.import_error(filename, sys.exc_info()) return tests.sort() tests = [test for test in tests if len(test.examples) > 0] self._reporter.entering_filename(filename, len(tests)) for test in tests: assert len(test.examples) != 0 runner = doctest.DocTestRunner() old = sys.stdout new = StringIO() sys.stdout = new try: f, t = runner.run(test, out=new.write, clear_globs=False) finally: sys.stdout = old if f > 0: self._reporter.doctest_fail(test.name, new.getvalue()) else: self._reporter.test_pass() self._reporter.leaving_filename() def get_sympy_dir(self): """ Returns the root sympy directory. """ this_file = os.path.abspath(__file__) sympy_dir = os.path.join(os.path.dirname(this_file), "..", "..") sympy_dir = os.path.normpath(sympy_dir) return sympy_dir def get_paths(self, dir="", level=15): """ Generates a set of paths for testfiles searching. Example: >> get_paths(2) ['sympy/test_*.py', 'sympy/*/test_*.py', 'sympy/*/*/test_*.py'] >> get_paths(6) ['sympy/test_*.py', 'sympy/*/test_*.py', 'sympy/*/*/test_*.py', 'sympy/*/*/*/test_*.py', 'sympy/*/*/*/*/test_*.py', 'sympy/*/*/*/*/*/test_*.py', 'sympy/*/*/*/*/*/*/test_*.py'] """ wildcards = [dir] for i in range(level): wildcards.append(os.path.join(wildcards[-1], "*")) p = [os.path.join(x, "*.py") for x in wildcards] return p def is_on_blacklist(self, x): """ Returns True if "x" is on the blacklist. Otherwise False. """ for p in self._blacklist: if x.find(p) != -1: return True return False def get_tests(self, dir): """ Returns the list of tests. """ def importable(x): """ Checks if given pathname x is an importable module by checking for __init__.py file. Returns True/False. Currently we only test if the __init__.py file exists in the directory with the file "x" (in theory we should also test all the parent dirs) and if "x" is not on self._blacklist. """ if self.is_on_blacklist(x): return False init_py = os.path.dirname(x) + os.path.sep + "__init__.py" return os.path.exists(init_py) g = [] for x in self.get_paths(dir): g.extend(glob(x)) g = list(set(g)) g.sort() # skip files that are not importable (i.e. missing __init__.py) g = [x for x in g if importable(x)] return g class Reporter(object): """ Parent class for all reporters. """ pass class PyTestReporter(Reporter): """ Py.test like reporter. Should produce output identical to py.test. 
""" def __init__(self, verbose=False, tb="short", colors=True): self._verbose = verbose self._tb_style = tb self._colors = colors self._xfailed = 0 self._xpassed = [] self._failed = [] self._failed_doctest = [] self._passed = 0 self._skipped = 0 self._exceptions = [] # this tracks the x-position of the cursor (useful for positioning # things on the screen), without the need for any readline library: self._write_pos = 0 self._line_wrap = False def root_dir(self, dir): self._root_dir = dir def write(self, text, color="", align="left", width=80): """ Prints a text on the screen. It uses sys.stdout.write(), so no readline library is necessary. color ... choose from the colors below, "" means default color align ... left/right, left is a normal print, right is aligned on the right hand side of the screen, filled with " " if necessary width ... the screen width """ color_templates = ( ("Black" , "0;30"), ("Red" , "0;31"), ("Green" , "0;32"), ("Brown" , "0;33"), ("Blue" , "0;34"), ("Purple" , "0;35"), ("Cyan" , "0;36"), ("LightGray" , "0;37"), ("DarkGray" , "1;30"), ("LightRed" , "1;31"), ("LightGreen" , "1;32"), ("Yellow" , "1;33"), ("LightBlue" , "1;34"), ("LightPurple" , "1;35"), ("LightCyan" , "1;36"), ("White" , "1;37"), ) colors = {} for name, value in color_templates: colors[name] = value c_normal = '\033[0m' c_color = '\033[%sm' if align == "right": if self._write_pos+len(text) > width: # we don't fit on the current line, create a new line self.write("\n") self.write(" "*(width-self._write_pos-len(text))) if not sys.stdout.isatty(): # the stdout is not a terminal, this for example happens if the # output is piped to less, e.g. "bin/test | less". In this case, # the terminal control sequences would be printed verbatim, so # don't use any colors. color = "" if self._line_wrap: if text[0] != "\n": sys.stdout.write("\n") if color == "": sys.stdout.write(text) else: sys.stdout.write("%s%s%s" % (c_color % colors[color], text, c_normal)) sys.stdout.flush() l = text.rfind("\n") if l == -1: self._write_pos += len(text) else: self._write_pos = len(text)-l-1 self._line_wrap = self._write_pos >= width self._write_pos %= width def write_center(self, text, delim="="): width = 80 if text != "": text = " %s " % text idx = (width-len(text)) // 2 t = delim*idx + text + delim*(width-idx-len(text)) self.write(t+"\n") def write_exception(self, e, val, tb): t = traceback.extract_tb(tb) # remove the first item, as that is always runtests.py t = t[1:] t = traceback.format_list(t) self.write("".join(t)) t = traceback.format_exception_only(e, val) self.write("".join(t)) def start(self): self.write_center("test process starts") executable = sys.executable v = sys.version_info python_version = "%s.%s.%s-%s-%s" % v self.write("executable: %s (%s)\n\n" % (executable, python_version)) self._t_start = clock() def finish(self): self._t_end = clock() self.write("\n") text = "tests finished: %d passed" % self._passed if len(self._failed) > 0: text += ", %d failed" % len(self._failed) if len(self._failed_doctest) > 0: text += ", %d failed" % len(self._failed_doctest) if self._skipped > 0: text += ", %d skipped" % self._skipped if self._xfailed > 0: text += ", %d xfailed" % self._xfailed if len(self._xpassed) > 0: text += ", %d xpassed" % len(self._xpassed) if len(self._exceptions) > 0: text += ", %d exceptions" % len(self._exceptions) text += " in %.2f seconds" % (self._t_end - self._t_start) if len(self._xpassed) > 0: self.write_center("xpassed tests", "_") for e in self._xpassed: self.write("%s:%s\n" % (e[0], e[1])) 
self.write("\n") if self._tb_style != "no" and len(self._exceptions) > 0: #self.write_center("These tests raised an exception", "_") for e in self._exceptions: filename, f, (t, val, tb) = e self.write_center("", "_") if f is None: s = "%s" % filename else: s = "%s:%s" % (filename, f.__name__) self.write_center(s, "_") self.write_exception(t, val, tb) self.write("\n") if self._tb_style != "no" and len(self._failed) > 0: #self.write_center("Failed", "_") for e in self._failed: filename, f, (t, val, tb) = e self.write_center("", "_") self.write_center("%s:%s" % (filename, f.__name__), "_") self.write_exception(t, val, tb) self.write("\n") if self._tb_style != "no" and len(self._failed_doctest) > 0: #self.write_center("Failed", "_") for e in self._failed_doctest: filename, msg = e self.write_center("", "_") self.write_center("%s" % filename, "_") self.write(msg) self.write("\n") self.write_center(text) ok = len(self._failed) == 0 and len(self._exceptions) == 0 and \ len(self._failed_doctest) == 0 if not ok: self.write("DO *NOT* COMMIT!\n") return ok def entering_filename(self, filename, n): rel_name = filename[len(self._root_dir)+1:] self._active_file = rel_name self._active_file_error = False self.write(rel_name) self.write("[%d] " % n) def leaving_filename(self): if self._colors: self.write(" ") if self._active_file_error: self.write("[FAIL]", "Red", align="right") else: self.write("[OK]", "Green", align="right") self.write("\n") if self._verbose: self.write("\n") def entering_test(self, f): self._active_f = f if self._verbose: self.write("\n"+f.__name__+" ") def test_xfail(self): self._xfailed += 1 self.write("f") def test_xpass(self, fname): self._xpassed.append((self._active_file, fname)) self.write("X") def test_fail(self, exc_info): self._failed.append((self._active_file, self._active_f, exc_info)) self.write("F") self._active_file_error = True def doctest_fail(self, name, error_msg): # the first line contains "******", remove it: error_msg = "\n".join(error_msg.split("\n")[1:]) self._failed_doctest.append((name, error_msg)) self.write("F") self._active_file_error = True def test_pass(self): self._passed += 1 if self._verbose: self.write("ok") else: self.write(".") def test_skip(self): self._skipped += 1 self.write("s") def test_exception(self, exc_info): self._exceptions.append((self._active_file, self._active_f, exc_info)) self.write("E") self._active_file_error = True def import_error(self, filename, exc_info): self._exceptions.append((filename, None, exc_info)) rel_name = filename[len(self._root_dir)+1:] self.write(rel_name) self.write("[?] Failed to import") if self._colors: self.write(" ") self.write("[FAIL]", "Red", align="right") self.write("\n")<|fim▁end|>
funcs = [] else: # we need to filter only those functions that begin with 'test_' # that are defined in the testing file or in the file where
<|file_name|>levelpoint.ts<|end_file_name|><|fim▁begin|>import {Graphics} from 'controller/tiling/graphics' import Point from 'controller/geom/point' export default class LevelPoint { tile :Graphics pos :Point seen :boolean persistent :boolean moving :boolean constructor(pos :Point, tile :Graphics, persistent :boolean, moving :boolean){ this.pos = pos this.tile = tile<|fim▁hole|> this.persistent = persistent this.moving = moving this.seen = false } public getRenderedTile() :Graphics { if (this.seen) { return this.tile } return Graphics.Void } public getTile() :Graphics { return this.tile } public setTile(tile :Graphics){ this.tile = tile } }<|fim▁end|>
<|file_name|>store_change_logger.py<|end_file_name|><|fim▁begin|>class StoreChangeLogger: def __init__(self, store_name, context) -> None: self.topic = f'{context.application_id}-{store_name}-changelog' self.context = context self.partition = context.task_id.partition self.record_collector = context.state_record_collector def log_change(self, key: bytes, value: bytes) -> None:<|fim▁hole|> self.record_collector.send(self.topic, key, value, self.context.timestamp, partition=self.partition)<|fim▁end|>
if self.record_collector:
<|file_name|>bosh.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import jsonschema import json import os import sys import os.path as op import tempfile import pytest from argparse import ArgumentParser, RawTextHelpFormatter from jsonschema import ValidationError from boutiques.validator import DescriptorValidationError from boutiques.publisher import ZenodoError from boutiques.invocationSchemaHandler import InvocationValidationError from boutiques.localExec import ExecutorOutput from boutiques.localExec import ExecutorError from boutiques.exporter import ExportError from boutiques.importer import ImportError from boutiques.localExec import loadJson, addDefaultValues from boutiques.logger import raise_error from tabulate import tabulate def prettyprint(*params): parser = ArgumentParser("Boutiques pretty-print for generating help text") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor.") results = parser.parse_args(params) from boutiques.prettyprint import PrettyPrinter desc = loadJson(results.descriptor) prettyclass = PrettyPrinter(desc) return prettyclass.docstring def create(*params): parser = ArgumentParser("Boutiques descriptor creator") parser.add_argument("descriptor", action="store", help="Output file to store descriptor in.") parser.add_argument("--docker-image", '-d', action="store", help="Name of Docker image on DockerHub.") parser.add_argument("--use-singularity", '-u', action="store_true", help="When --docker-image is used. Specify to " "use singularity to run it.") results = parser.parse_args(params) from boutiques.creator import CreateDescriptor new = CreateDescriptor(parser=None, docker_image=results.docker_image, use_singularity=results.use_singularity) new.save(results.descriptor) return None def validate(*params): parser = ArgumentParser("Boutiques descriptor validator") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, JSON " "string or Zenodo ID (prefixed by 'zenodo.').") parser.add_argument("--bids", "-b", action="store_true", help="Flag indicating if descriptor is a BIDS app") parser.add_argument("--format", "-f", action="store_true", help="If descriptor is valid, rewrite it with sorted" " keys.") results = parser.parse_args(params) from boutiques.validator import validate_descriptor descriptor = validate_descriptor(results.descriptor, format_output=results.format) if results.bids: from boutiques.bids import validate_bids validate_bids(descriptor, valid=True) def execute(*params): parser = ArgumentParser("Boutiques local executor", add_help=False) parser.add_argument("mode", action="store", help="Mode of operation to use. Launch: takes a " "set of inputs compliant with invocation schema " "and launches the tool. Simulate: shows sample " "command-lines based on the provided descriptor" " based on provided or randomly generated inputs. " "Prepare: pulls the Docker or Singularity container " "image for a given descriptor. 
", choices=["launch", "simulate", "prepare"]) parser.add_argument("--help", "-h", action="store_true", help="show this help message and exit") helps = any([True for ht in ["--help", "-h"] if ht in params]) if len(params) <= 1 and helps: parser.print_help() raise SystemExit args, params = parser.parse_known_args(params) mode = args.mode params += ["--help"] if args.help is True else [] if mode == "launch": parser = ArgumentParser("Launches an invocation.") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, " "JSON string or Zenodo ID (prefixed by 'zenodo.').") parser.add_argument("invocation", action="store", help="Input JSON complying to invocation.") parser.add_argument("-v", "--volumes", action="append", type=str, help="Volumes to mount when launching the " "container. Format consistently the following:" " /a:/b will mount local directory /a to " "container directory /b.") parser.add_argument("-x", "--debug", action="store_true", help="Keeps temporary scripts used during " "execution, and prints additional debug " "messages.") parser.add_argument("-u", "--user", action="store_true", help="Runs the container as local user ({0})" " instead of root.".format(os.getenv("USER"))) parser.add_argument("-s", "--stream", action="store_true", help="Streams stdout and stderr in real time " "during execution.") parser.add_argument("--imagepath", action="store", help="Path to Singularity image. " "If not specified, will use current directory.") results = parser.parse_args(params) descriptor = results.descriptor inp = results.invocation # Validate invocation and descriptor valid = invocation(descriptor, '-i', inp) # Generate object that will perform the commands from boutiques.localExec import LocalExecutor executor = LocalExecutor(descriptor, inp, {"forcePathType": True, "debug": results.debug, "changeUser": results.user, "stream": results.stream, "imagePath": results.imagepath}) # Execute it return executor.execute(results.volumes) if mode == "simulate": parser = ArgumentParser("Simulates an invocation.") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, " "JSON string or Zenodo ID (prefixed by 'zenodo.').") parser.add_argument("-i", "--input", action="store", help="Input JSON complying to invocation.") parser.add_argument("-j", "--json", action="store_true", help="Flag to generate invocation in JSON format.") results = parser.parse_args(params) descriptor = results.descriptor # Do some basic input scrubbing inp = results.input valid = invocation(descriptor, '-i', inp) if inp else\ invocation(descriptor) # Generate object that will perform the commands from boutiques.localExec import LocalExecutor executor = LocalExecutor(descriptor, inp, {"forcePathType": True, "destroyTempScripts": True, "changeUser": True}) if not inp: executor.generateRandomParams(1) if results.json: sout = [json.dumps(executor.in_dict, indent=4, sort_keys=True)] print(sout[0]) else: executor.printCmdLine() sout = executor.cmd_line # for consistency with execute # Adding hide to "container location" field since it's an invalid # value, can parse that to hide the summary print return ExecutorOutput(os.linesep.join(sout), "", 0, "", [], [], os.linesep.join(sout), "", "hide") if mode == "prepare": parser = ArgumentParser("Pulls the container image for a given " "descriptor") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, " "JSON string or Zenodo ID (prefixed by 'zenodo.').") 
parser.add_argument("-x", "--debug", action="store_true", help="Keeps temporary scripts used during " "execution, and prints additional debug " "messages.") parser.add_argument("-s", "--stream", action="store_true", help="Streams stdout and stderr in real time " "during execution.") parser.add_argument("--imagepath", action="store", help="Path to Singularity image. " "If not specified, will use current directory.") results = parser.parse_args(params) descriptor = results.descriptor # Validate descriptor valid = invocation(descriptor) # Generate object that will perform the commands from boutiques.localExec import LocalExecutor executor = LocalExecutor(descriptor, None, {"forcePathType": True, "debug": results.debug, "stream": results.stream, "imagePath": results.imagepath}) container_location = executor.prepare()[1] print("Container location: " + container_location) # Adding hide to "container location" field since it's an invalid # value, and we can parse that to hide the summary print return ExecutorOutput(container_location, "", 0, "", [], [], "", "", "hide") def importer(*params): parser = ArgumentParser("Imports old descriptor or BIDS app or CWL " " descriptor to spec.") parser.add_argument("type", help="Type of import we are performing", choices=["bids", "0.4", "cwl"]) parser.add_argument("output_descriptor", help="Where the Boutiques" " descriptor will be written.")<|fim▁hole|> ", is JSON descriptor," " for 'bids' is base directory of BIDS app, " "for 'cwl' is YAML descriptor.") parser.add_argument("-o", "--output-invocation", help="Where to write " "the invocation if any.") parser.add_argument("-i", "--input-invocation", help="Input invocation " " for CWL if any.") results = parser.parse_args(params) from boutiques.importer import Importer importer = Importer(results.input_descriptor, results.output_descriptor, results.input_invocation, results.output_invocation) if results.type == "0.4": importer.upgrade_04() elif results.type == "bids": importer.import_bids() elif results.type == "cwl": importer.import_cwl() def exporter(*params): parser = ArgumentParser("Export Boutiques descriptor to other formats.") parser.add_argument("type", help="Type of export we are performing.", choices=["carmin"]) parser.add_argument("descriptor", help="Boutiques descriptor to export.") parser.add_argument("--identifier", help="Identifier to use in" "CARMIN export.") parser.add_argument("output", help="Output file where to write the" " converted descriptor.") results = parser.parse_args(params) descriptor = results.descriptor output = results.output bosh(["validate", results.descriptor]) from boutiques.exporter import Exporter exporter = Exporter(descriptor, results.identifier) if results.type == "carmin": exporter.carmin(output) def publish(*params): parser = ArgumentParser("Boutiques publisher", description="A publisher of Boutiques tools" " in Zenodo (http://zenodo.org). Requires " "a Zenodo access token, see " "http://developers.zenodo.org/#authentication.") parser.add_argument("boutiques_descriptor", action="store", help="local path of the " " Boutiques descriptor to publish.") parser.add_argument("--sandbox", action="store_true", help="publish to Zenodo's sandbox instead of " "production server. Recommended for tests.") parser.add_argument("--zenodo-token", action="store", help="Zenodo API token to use for authentication. 
" "If not used, token will be read from configuration " "file or requested interactively.") parser.add_argument("--no-int", '-y', action="store_true", help="disable interactive input.") parser.add_argument("-v", "--verbose", action="store_true", help="print information messages.") group = parser.add_mutually_exclusive_group() group.add_argument("-r", "--replace", action="store_true", help="Publish an updated version of an existing " "record. The descriptor must contain a DOI, which " "will be replaced with a new one.") group.add_argument("--id", action="store", help="Zenodo ID of an existing record you wish to " "update with a new version, prefixed by " "'zenodo.' (e.g. zenodo.123456).") results = parser.parse_args(params) from boutiques.publisher import Publisher publisher = Publisher(results.boutiques_descriptor, results.zenodo_token, results.verbose, results.sandbox, results.no_int, results.replace, results.id) publisher.publish() if hasattr(publisher, 'doi'): return publisher.doi def invocation(*params): parser = ArgumentParser("Creates invocation schema and validates" " invocations. Uses descriptor's invocation" " schema if it exists, otherwise creates one.") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, JSON " "string or Zenodo ID (prefixed by 'zenodo.').") parser.add_argument("-i", "--invocation", action="store", help="Input values in a JSON file or as a JSON " "object to be validated against " "the invocation schema.") parser.add_argument("-w", "--write-schema", action="store_true", help="If descriptor doesn't have an invocation " "schema, creates one and writes it to the descriptor" " file ") result = parser.parse_args(params) validate(result.descriptor) descriptor = loadJson(result.descriptor) if descriptor.get("invocation-schema"): invSchema = descriptor.get("invocation-schema") else: from boutiques.invocationSchemaHandler import generateInvocationSchema invSchema = generateInvocationSchema(descriptor) if result.write_schema: descriptor["invocation-schema"] = invSchema with open(result.descriptor, "w") as f: f.write(json.dumps(descriptor, indent=4, sort_keys=True)) if result.invocation: from boutiques.invocationSchemaHandler import validateSchema data = addDefaultValues(descriptor, loadJson(result.invocation)) validateSchema(invSchema, data) def evaluate(*params): parser = ArgumentParser("Evaluates parameter values for a descriptor" " and invocation") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, JSON " "string or Zenodo ID (prefixed by 'zenodo.').") parser.add_argument("invocation", action="store", help="Input JSON complying to invocation.") parser.add_argument("query", action="store", nargs="*", help="The query to be performed. Simply request keys " "from the descriptor (i.e. output-files), and chain " "together queries (i.e. id=myfile or optional=false) " "slashes between them and commas connecting them. " "(i.e. output-files/optional=false,id=myfile). 
" "Perform multiple queries by separating them with a " "space.") result = parser.parse_args(params) # Generate object that will parse the invocation and descriptor from boutiques.localExec import LocalExecutor executor = LocalExecutor(result.descriptor, result.invocation, {"forcePathType": True, "destroyTempScripts": True, "changeUser": True}) from boutiques.evaluate import evaluateEngine query_results = [] for query in result.query: query_results += [evaluateEngine(executor, query)] return query_results[0] if len(query_results) == 1 else query_results def test(*params): parser = ArgumentParser("Perform all the tests defined within the" " given descriptor") parser.add_argument("descriptor", action="store", help="The Boutiques descriptor as a JSON file, JSON " "string or Zenodo ID (prefixed by 'zenodo.').") result = parser.parse_args(params) # Generation of the invocation schema (and descriptor validation). invocation(result.descriptor) # Extraction of all the invocations defined for the test-cases. descriptor = loadJson(result.descriptor) if (not descriptor.get("tests")): # If no tests have been specified, we consider testing successful. return 0 for test in descriptor["tests"]: invocation_JSON = test["invocation"] # Check if the invocation is valid. invocation(result.descriptor, "--invocation", json.dumps(invocation_JSON)) # Invocations have been properly validated. We can launch the actual tests. test_path = op.join(op.dirname(op.realpath(__file__)), "test.py") return pytest.main([test_path, "--descriptor", result.descriptor]) def search(*params): parser = ArgumentParser("Search Zenodo for Boutiques descriptors. " "When no term is supplied, will search for " "all descriptors.") parser.add_argument("query", nargs="?", default="boutiques", action="store", help="Search query") parser.add_argument("-v", "--verbose", action="store_true", help="Print information messages") parser.add_argument("--sandbox", action="store_true", help="search Zenodo's sandbox instead of " "production server. Recommended for tests.") parser.add_argument("-m", "--max", action="store", help="Specify the maximum number of results " "to be returned. Default is 10.") parser.add_argument("-nt", "--no-trunc", action="store_true", help="Do not truncate long tool descriptions.") parser.add_argument("-e", "--exact", action="store_true", help="Only return results containing the exact query.") result = parser.parse_args(params) from boutiques.searcher import Searcher searcher = Searcher(result.query, result.verbose, result.sandbox, result.max, result.no_trunc, result.exact) return searcher.search() def pull(*params): parser = ArgumentParser("Download a descriptor from Zenodo.") parser.add_argument("zid", action="store", help="Zenodo ID " "of the descriptor to pull, prefixed by " "'zenodo.', e.g. zenodo.123456") parser.add_argument("-v", "--verbose", action="store_true", help="Print information messages") parser.add_argument("--sandbox", action="store_true", help="pull from Zenodo's sandbox instead of " "production server. Recommended for tests.") result = parser.parse_args(params) from boutiques.puller import Puller puller = Puller(result.zid, result.verbose, result.sandbox) return puller.pull() def bosh(args=None): parser = ArgumentParser(description="Driver for Bosh functions", add_help=False) parser.add_argument("function", action="store", nargs="?", help="The tool within boutiques/bosh you wish to run. " "Create: creates an Boutiques descriptor from scratch. " "Validate: validates an existing boutiques descriptor. 
" "Exec: launches or simulates an execution given a " "descriptor and a set of inputs. Import: creates a " "descriptor for a BIDS app or updates a descriptor " "from an older version of the schema. Export: exports a" "descriptor to other formats. Publish: creates" "an entry in Zenodo for the descriptor and " "adds the DOI created by Zenodo to the descriptor. " "Invocation: generates the invocation schema for a " "given descriptor. Evaluate: given an invocation and a " "descriptor, queries execution properties. " "Test: run pytest on a descriptor detailing tests. " "Example: Generates example command-line for descriptor" ". Search: search Zenodo for descriptors. " "Pull: download a descriptor from Zenodo. " "Pprint: generate pretty help text from a descriptor." "Version: prints the version of this tool.", choices=["create", "validate", "exec", "import", "export", "publish", "invocation", "evaluate", "test", "example", "search", "pull", "pprint", "version"]) parser.add_argument("--help", "-h", action="store_true", help="show this help message and exit") args, params = parser.parse_known_args(args) func = args.function params += ["--help"] if args.help is True else [] # Returns True if bosh was called from the CLI def runs_as_cli(): return os.path.basename(sys.argv[0]) == "bosh" def bosh_return(val, code=0, hide=False, formatted=None): if runs_as_cli(): if hide: return code if val is not None: if formatted is not None: print(formatted) else: print(val) else: if code == 0: print("OK") else: print("Failed") return code # everything went well return val # calling function wants this value try: if func == "create": out = create(*params) return bosh_return(out, hide=True) elif func == "validate": out = validate(*params) return bosh_return(out) elif func == "exec": out = execute(*params) # If executed through CLI, print 'out' and return exit_code # Otherwise, return out return bosh_return(out, out.exit_code, hide=bool(out.container_location == 'hide')) elif func == "example": out = execute('simulate', '-j', *params) return bosh_return(out, out.exit_code, hide=bool(out.container_location == 'hide')) elif func == "import": out = importer(*params) return bosh_return(out) elif func == "export": out = exporter(*params) return bosh_return(out) elif func == "publish": out = publish(*params) return bosh_return(out) elif func == "invocation": out = invocation(*params) return bosh_return(out) elif func == "evaluate": out = evaluate(*params) return bosh_return(out) elif func == "test": out = test(*params) return bosh_return(out) elif func == "pprint": out = prettyprint(*params) return bosh_return(out) elif func == "search": out = search(*params) return bosh_return(out, formatted=tabulate(out, headers='keys', tablefmt='plain')) elif func == "pull": out = pull(*params) return bosh_return(out, hide=True) elif func == "version": from boutiques.__version__ import VERSION return bosh_return(VERSION) else: parser.print_help() raise SystemExit except (ZenodoError, DescriptorValidationError, InvocationValidationError, ValidationError, ExportError, ImportError, ExecutorError) as e: # We don't want to raise an exception when function is called # from CLI.' if runs_as_cli(): try: print(e.message) # Python 2 only except Exception as ex: print(e) return 99 # Note: this conflicts with tool error codes. raise e<|fim▁end|>
parser.add_argument("input_descriptor", help="Input descriptor to be " "converted. For '0.4'"
<|file_name|>timeout_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 """ Created on 17 Sep 2019 @author: Bruno Beloff ([email protected]) """ import time from scs_core.sys.timeout import Timeout # -------------------------------------------------------------------------------------------------------------------- # run... timeout = Timeout(5) print(timeout) print("-") try: with timeout: time.sleep(10) print("slept")<|fim▁hole|> finally: print("done")<|fim▁end|>
except TimeoutError: print("TimeoutError")
<|file_name|>iterableFilterInterface.ts<|end_file_name|><|fim▁begin|>namespace jsfx { export interface IterableFilterInterface extends jsfx.FilterInterface {<|fim▁hole|> } }<|fim▁end|>
iterateCanvas(helper : jsfx.util.ImageDataHelper) : void;
<|file_name|>create.py<|end_file_name|><|fim▁begin|>"""Creates a user """ # :license: MIT, see LICENSE for more details. import json import string import sys import click import SoftLayer from SoftLayer.CLI import environment from SoftLayer.CLI import exceptions from SoftLayer.CLI import formatting from SoftLayer.CLI import helpers @click.command() @click.argument('username') @click.option('--email', '-e', required=True, help="Email address for this user. Required for creation.") @click.option('--password', '-p', default=None, show_default=True, help="Password to set for this user. If no password is provided, user will be sent an email " "to generate one, which expires in 24 hours. '-p generate' will create a password for you " "(Requires Python 3.6+). Passwords require 8+ characters, upper and lowercase, a number " "and a symbol.") @click.option('--from-user', '-u', default=None, help="Base user to use as a template for creating this user. " "Will default to the user running this command. Information provided in --template " "supersedes this template.") @click.option('--template', '-t', default=None, help="A json string describing https://softlayer.github.io/reference/datatypes/SoftLayer_User_Customer/") @environment.pass_env<|fim▁hole|> """Creates a user Users. Remember to set the permissions and access for this new user. Example:: slcli user create [email protected] -e [email protected] -p generate -a -t '{"firstName": "Test", "lastName": "Testerson"}' """ mgr = SoftLayer.UserManager(env.client) user_mask = ("mask[id, firstName, lastName, email, companyName, address1, city, country, postalCode, " "state, userStatusId, timezoneId]") from_user_id = None if from_user is None: user_template = mgr.get_current_user(objectmask=user_mask) from_user_id = user_template['id'] else: from_user_id = helpers.resolve_id(mgr.resolve_ids, from_user, 'username') user_template = mgr.get_user(from_user_id, objectmask=user_mask) # If we send the ID back to the API, an exception will be thrown del user_template['id'] if template is not None: try: template_object = json.loads(template) for key in template_object: user_template[key] = template_object[key] except ValueError as ex: raise exceptions.ArgumentError("Unable to parse --template. %s" % ex) user_template['username'] = username if password == 'generate': password = generate_password() user_template['email'] = email if not env.skip_confirmations: table = formatting.KeyValueTable(['name', 'value']) for key in user_template: table.add_row([key, user_template[key]]) table.add_row(['password', password]) click.secho("You are about to create the following user...", fg='green') env.fout(table) if not formatting.confirm("Do you wish to continue?"): raise exceptions.CLIAbort("Canceling creation!") result = mgr.create_user(user_template, password) table = formatting.Table(['Username', 'Email', 'Password']) table.add_row([result['username'], result['email'], password]) env.fout(table) def generate_password(): """Returns a 23 character random string, with 3 special characters at the end""" if sys.version_info > (3, 6): import secrets # pylint: disable=import-error,import-outside-toplevel alphabet = string.ascii_letters + string.digits password = ''.join(secrets.choice(alphabet) for i in range(20)) special = ''.join(secrets.choice(string.punctuation) for i in range(3)) return password + special else: raise ImportError("Generating passwords require python 3.6 or higher")<|fim▁end|>
def cli(env, username, email, password, from_user, template):
<|file_name|>p_haul_img.py<|end_file_name|><|fim▁begin|># # images driver for migration (without FS transfer) # import os import tempfile import rpyc import tarfile import time import shutil import time img_path = "/var/local/p.haul-fs/" img_tarfile = "images.tar" xfer_size = 64 * 1024 def copy_file(s, d): while True: chunk = s.read(xfer_size) if not chunk: break d.write(chunk) class phaul_images: def __init__(self): self.current_iter = 0 self.current_dir = None prefix = time.strftime("%y.%m.%d-%H.%M-", time.localtime()) self.wdir = tempfile.mkdtemp("", prefix, img_path) self.img_path = os.path.join(self.wdir, "img") os.mkdir(self.img_path) self.sync_time = 0.0 def close(self, keep_images): if not keep_images: print "Removing images" shutil.rmtree(self.wdir) else: print "Images are kept in %s" % self.wdir pass def img_sync_time(self): return self.sync_time def new_image_dir(self): self.current_iter += 1 img_dir = "%s/%d" % (self.img_path, self.current_iter) print "\tMaking directory %s" % img_dir self.current_dir = img_dir os.mkdir(img_dir) def image_dir_fd(self): return os.open(self.current_dir, os.O_DIRECTORY) def work_dir_fd(self): return os.open(self.wdir, os.O_DIRECTORY)<|fim▁hole|> def image_dir(self): return self.current_dir def work_dir(self): return self.wdir def prev_image_dir(self): if self.current_iter == 1: return None else: return "../%d" % (self.current_iter - 1) # Images transfer # Are there better ways for doing this? def sync_imgs_to_target(self, th, htype): # Pre-dump doesn't generate any images (yet?) # so copy only those from the top dir print "Sending images to target" start = time.time() print "\tPack" tf_name = os.path.join(self.current_dir, img_tarfile) tf = tarfile.open(tf_name, "w") for img in os.listdir(self.current_dir): if img.endswith(".img"): tf.add(os.path.join(self.current_dir, img), img) print "\tAdd htype images" for himg in htype.get_meta_images(self.current_dir): tf.add(himg[0], himg[1]) tf.close() print "\tCopy" lfh = open(tf_name, "rb") os.unlink(tf_name) rfh = th.open_image_tar() copy_file(lfh, rfh) print "\tUnpack" rfh.unpack_and_close() self.sync_time = time.time() - start # This one is created by target class exposed_images_tar(): def __init__(self, dir): self.dir = dir self.fname = os.path.join(dir, img_tarfile) self.fh = open(self.fname, "wb") def exposed_write(self, chunk): return self.fh.write(chunk) def exposed_unpack_and_close(self): self.fh.close() tf = tarfile.open(self.fname, "r") os.unlink(self.fname) tf.extractall(self.dir) tf.close()<|fim▁end|>
<|file_name|>GooglePlayMusicController.py<|end_file_name|><|fim▁begin|>from gmusicapi import Mobileclient import getpass class GpmSession(object): # Private Variables # Public Variables api = None logged_in = False songs = None playlists = None <|fim▁hole|> # Omit credentials if you want to handle login, include for prompts from this module def __init__(self, email=None, pw=None): self.api = Mobileclient() if not email and not pw: email = input("Please enter an email address tied to a GPM account: ") pw = getpass.getpass("Please enter the password associated with %s: " % email) self.logged_in = self.api.login(email, pw, Mobileclient.FROM_MAC_ADDRESS) # As per api protocol if self.logged_in: print("Google Play Music login successful") else: print("Google Play Music login failed") def init(self, songs = True, playlists = True): if songs: self.songs = self.api.get_all_songs() if playlists: self.playlists = self.api.get_all_playlists() def get_song_stream(self, title, artist=None): print(not self.songs) if not self.songs: self.init(True, False) song = next(iter((track for track in self.songs if self._filter_condition(track, title, artist)) or []), None) if song: return self.api.get_stream_url(song["id"]) else: return None def _filter_condition(self, song_obj, search_title, search_artist): result = True if search_title: result = result & (song_obj["title"].lower().strip() == search_title.lower().strip()) if search_artist: result = result & (song_obj["artist"].lower().strip() == search_artist.lower().strip()) return result def main(): session = GpmSession() while not session.logged_in: session = GpmSession() session.init() print(session.get_song_stream("Dirty Laundry", "Bitter Sweet")) print(session.get_song_stream("1940")) if __name__ == "__main__": main()<|fim▁end|>
# Constructor with optionally passed credentials
<|file_name|>barcode_issue_line.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012-2015 Netforce Co. Ltd. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is<|fim▁hole|># furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. from netforce.model import Model, fields, get_model class BarcodeIssueLine(Model): _name = "barcode.issue.line" _transient = True _fields = { "wizard_id": fields.Many2One("barcode.issue", "Wizard", required=True, on_delete="cascade"), "product_id": fields.Many2One("product", "Product", required=True), "qty": fields.Decimal("Qty", required=True), "uom_id": fields.Many2One("uom", "UoM", required=True), "qty2": fields.Decimal("Secondary Qty"), "lot_id": fields.Many2One("stock.lot", "Lot / Serial Number"), "container_from_id": fields.Many2One("stock.container", "From Container"), "container_to_id": fields.Many2One("stock.container", "To Container"), "location_from_id": fields.Many2One("stock.location", "From Location"), "location_to_id": fields.Many2One("stock.location", "To Location"), "related_id": fields.Reference([["sale.order", "Sales Order"], ["purchase.order", "Purchase Order"]], "Related To"), "qty2": fields.Decimal("Qty2"), "notes": fields.Text("Notes"), } BarcodeIssueLine.register()<|fim▁end|>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from rest_framework.routers import (Route, DynamicDetailRoute, SimpleRouter, DynamicListRoute) from app.api.account.views import AccountViewSet from app.api.podcast.views import PodcastViewSet, EpisodeViewSet class CustomRouter(SimpleRouter): """ A router for read-only APIs, which doesn't use trailing slashes. """ routes = [ Route( url=r'^{prefix}{trailing_slash}$', mapping={ 'get': 'list', 'post': 'create' }, name='{basename}-list', initkwargs={'suffix': 'List'} ), # Dynamically generated list routes. # Generated using @list_route decorator # on methods of the viewset. DynamicListRoute( url=r'^{prefix}/{methodnamehyphen}{trailing_slash}$', name='{basename}-{methodnamehyphen}', initkwargs={} ), # Detail route. Route( url=r'^{prefix}/{lookup}{trailing_slash}$', mapping={ 'get': 'retrieve', 'put': 'update', 'patch': 'partial_update', 'delete': 'destroy' }, name='{basename}-detail', initkwargs={'suffix': 'Instance'} ), # Dynamically generated detail routes. # Generated using @detail_route decorator on methods of the viewset. DynamicDetailRoute( url=r'^{prefix}/{lookup}/{methodnamehyphen}{trailing_slash}$', name='{basename}-{methodnamehyphen}', initkwargs={} ),<|fim▁hole|> ] router = CustomRouter() router.register(r'accounts', AccountViewSet) router.register(r'podcasts', PodcastViewSet) router.register(r'episodes', EpisodeViewSet) urlpatterns = router.urls<|fim▁end|>
<|file_name|>test.cpp<|end_file_name|><|fim▁begin|>#include "orca_gazebo/orca_gazebo_util.h" void testGaussianKernel() { // Anything outside of 4 stddev on either side is an outlier, and go in the first and last buckets constexpr int NUM_BUCKETS = 10; constexpr double MEAN = NUM_BUCKETS / 2; constexpr double STDDEV = 1; constexpr double BUCKET_WIDTH = STDDEV * 8 / (NUM_BUCKETS - 2); std::vector<int>h(NUM_BUCKETS); for (int s = 0; s < 10000; ++s) {<|fim▁hole|> h[i]++; } for (int i = 0; i < h.size(); ++i) { std::cout << "Bucket " << i << ", " << h[i] << std::endl; } } int main(int argc, char** argv) { testGaussianKernel(); return 0; }<|fim▁end|>
double m = orca_gazebo::gaussianKernel(MEAN, STDDEV); int i = static_cast<int>(m / BUCKET_WIDTH); if (i < 0) i = 0; if (i >= NUM_BUCKETS) i = NUM_BUCKETS - 1;
<|file_name|>ReactionsById.ts<|end_file_name|><|fim▁begin|>export interface ReactionsById_me_reactions_reactionKind { __typename: "ReactionKind"; /** * Id */ id: number; /** * Name of reaction example is like or dislike */ name: string; } export interface ReactionsById_me_reactions_study { __typename: "Study"; briefTitle: string; } export interface ReactionsById_me_reactions { __typename: "Reaction"; /** * id of reaction kind */ reactionKindId: number; /** * Type of reaction such as downvote */ reactionKind: ReactionsById_me_reactions_reactionKind; study: ReactionsById_me_reactions_study; nctId: string; } export interface ReactionsById_me { __typename: "User"; /** * Id */ id: number; /** * Email */ email: string; /** * First name */<|fim▁hole|> * Last name */ lastName: string | null; reactions: ReactionsById_me_reactions[] | null; } export interface ReactionsById { /** * Current logged in user */ me: ReactionsById_me | null; } export interface ReactionsByIdVariables { reactionKindId: string; }<|fim▁end|>
firstName: string | null; /**
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import re import datetime import time #niru's git commit while True: #open the file for reading file = open("test.txt") content = file.read() #Get timestamp ts = time.time() ist = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') #open file for read and close it neatly(wrap code in try/except) #with open('test.txt', 'r') as r: #content = r.read() #print content #Search the entire content for '@' and replace it with time stamp. new_content = re.sub(r'@.*', ist, content) print new_content #open file for write and close it neatly(wrap code in try/except) with open('test.txt', 'w') as f: f.write(new_content)<|fim▁hole|> print "torpid loop complete" time.sleep(5)<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var fastn = require('fastn')({<|fim▁hole|> templater: require('fastn/templaterComponent'), text: require('fastn/textComponent'), ratingControl: require('./ratingControlComponent') }); module.exports = function(settings){ return fastn('ratingControl', settings).attach().render(); };<|fim▁end|>
_generic: require('fastn/genericComponent'), list: require('fastn/listComponent'),
<|file_name|>descriptor_cpp2_test.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # # Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # https://developers.google.com/protocol-buffers/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Tests for google.protobuf.pyext behavior.""" __author__ = '[email protected] (Anuraag Agrawal)' import os os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp' os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2' # We must set the implementation version above before the google3 imports. # pylint: disable=g-import-not-at-top from google.apputils import basetest from google.protobuf.internal import api_implementation # Run all tests from the original module by putting them in our namespace. # pylint: disable=wildcard-import from google.protobuf.internal.descriptor_test import * <|fim▁hole|>class ConfirmCppApi2Test(basetest.TestCase): def testImplementationSetting(self): self.assertEqual('cpp', api_implementation.Type()) self.assertEqual(2, api_implementation.Version()) if __name__ == '__main__': basetest.main()<|fim▁end|>