prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An actor-based remote devtools server implementation. Only tested with
//! nightly Firefox versions at time of writing. Largely based on
//! reverse-engineering of Firefox chrome devtool logs and reading of
//! [code](http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/).
#![crate_name = "devtools"]
#![crate_type = "rlib"]
#![feature(box_syntax, core, rustc_private)]
#![feature(collections)]
#![allow(non_snake_case)]
#[macro_use]
extern crate log;
extern crate collections;
extern crate core;
extern crate devtools_traits;
extern crate rustc_serialize;
extern crate msg;
extern crate time;
extern crate util;
extern crate hyper;
extern crate url;
use actor::{Actor, ActorRegistry};
use actors::console::ConsoleActor;
use actors::network_event::{NetworkEventActor, EventActor, ResponseStartMsg};
use actors::framerate::FramerateActor;
use actors::inspector::InspectorActor;
use actors::root::RootActor;
use actors::tab::TabActor;
use actors::timeline::TimelineActor;
use actors::worker::WorkerActor;
use protocol::JsonPacketStream;
use devtools_traits::{ConsoleMessage, DevtoolsControlMsg, NetworkEvent};
use devtools_traits::{DevtoolsPageInfo, DevtoolScriptControlMsg};
use msg::constellation_msg::{PipelineId, WorkerId};
use util::task::spawn_named;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::error::Error;
use std::sync::mpsc::{channel, Receiver, Sender, RecvError};
use std::net::{TcpListener, TcpStream, Shutdown};
use std::sync::{Arc, Mutex};
use time::precise_time_ns;
mod actor;
/// Corresponds to http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/
mod actors {
pub mod console;
pub mod framerate;
pub mod memory;
pub mod inspector;
pub mod root;
pub mod tab;
pub mod timeline;
pub mod worker;
pub mod network_event;
}
mod protocol;
#[derive(RustcEncodable)]
struct ConsoleAPICall {
from: String,
__type__: String,
message: ConsoleMsg,
}
#[derive(RustcEncodable)]
struct ConsoleMsg {
level: String,
timeStamp: u64,
arguments: Vec<String>,
filename: String,
lineNumber: u32,
columnNumber: u32,
}
#[derive(RustcEncodable)]
struct NetworkEventMsg {
from: String,
__type__: String,
eventActor: EventActor,
}
#[derive(RustcEncodable)]
struct NetworkEventUpdateMsg {
from: String,
__type__: String,
updateType: String,
response: ResponseStartMsg,
}
/// Spin up a devtools server that listens for connections on the specified port.
///
/// Returns the control channel through which clients, new globals, console
/// messages and network events are fed to the server task.
pub fn start_server(port: u16) -> Sender<DevtoolsControlMsg> {
    let (control_chan, control_port) = channel();
    // The server task keeps its own handle to the control channel so that the
    // acceptor task it spawns can hand freshly accepted clients back to it.
    let server_chan = control_chan.clone();
    spawn_named("Devtools".to_owned(), move || {
        run_server(server_chan, control_port, port)
    });
    control_chan
}
fn run_server(sender: Sender<DevtoolsControlMsg>,
receiver: Receiver<DevtoolsControlMsg>,
port: u16) {
let listener = TcpListener::bind(&("127.0.0.1", port)).unwrap();
let mut registry = ActorRegistry::new();
let root = box RootActor {
tabs: vec!(),
};
registry.register(root);
registry.find::<RootActor>("root");
let actors = registry.create_shareable();
let mut accepted_connections: Vec<TcpStream> = Vec::new();
let mut actor_pipelines: HashMap<PipelineId, String> = HashMap::new();
let mut actor_requests: HashMap<String, String> = HashMap::new();
let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new();
/// Process the input from a single devtools client until EOF.
fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) {
    println!("connection established to {}", stream.peer_addr().unwrap());

    // Greet the new client with the root actor's description, as the
    // remote debugging protocol requires.
    {
        let registry = actors.lock().unwrap();
        let root_msg = registry.find::<RootActor>("root").encodable();
        stream.write_json_packet(&root_msg);
    }

    // Dispatch each incoming JSON packet to the actor registry until the
    // stream ends or an actor fails.
    loop {
        match stream.read_json_packet() {
            Ok(Some(json_packet)) => {
                let outcome = actors.lock().unwrap()
                                    .handle_message(json_packet.as_object().unwrap(),
                                                    &mut stream);
                if outcome.is_err() {
                    println!("error: devtools actor stopped responding");
                    // Best-effort shutdown; the error (if any) is irrelevant here.
                    let _ = stream.shutdown(Shutdown::Both);
                    return;
                }
            }
            Ok(None) => {
                println!("error: EOF");
                return;
            }
            Err(e) => {
                println!("error: {}", e.description());
                return;
            }
        }
    }
}
/// Forward one framerate measurement to the named framerate actor.
fn handle_framerate_tick(actors: Arc<Mutex<ActorRegistry>>, actor_name: String, tick: f64) {
    let registry = actors.lock().unwrap();
    registry.find::<FramerateActor>(&actor_name).add_tick(tick);
}
// We need separate actor representations for each script global that exists;
// clients can theoretically connect to multiple globals simultaneously.
// TODO: move this into the root or tab modules?
/// Create and register the actor graph (tab, console, inspector, timeline,
/// and optionally worker) for a newly created script global, and record the
/// pipeline -> tab-actor and (pipeline, worker) -> worker-actor mappings.
fn handle_new_global(actors: Arc<Mutex<ActorRegistry>>,
                     ids: (PipelineId, Option<WorkerId>),
                     script_sender: Sender<DevtoolScriptControlMsg>,
                     devtools_sender: Sender<DevtoolsControlMsg>,
                     actor_pipelines: &mut HashMap<PipelineId, String>,
                     actor_workers: &mut HashMap<(PipelineId, WorkerId), String>,
                     page_info: DevtoolsPageInfo) {
    let mut actors = actors.lock().unwrap();

    let (pipeline, worker_id) = ids;

    //TODO: move all this actor creation into a constructor method on TabActor
    let (tab, console, inspector, timeline) = {
        // NOTE: new_name() is called in a fixed order (console, inspector,
        // timeline, tab); reordering would change the generated actor names.
        let console = ConsoleActor {
            name: actors.new_name("console"),
            script_chan: script_sender.clone(),
            pipeline: pipeline,
            streams: RefCell::new(Vec::new()),
        };
        let inspector = InspectorActor {
            name: actors.new_name("inspector"),
            walker: RefCell::new(None),
            pageStyle: RefCell::new(None),
            highlighter: RefCell::new(None),
            script_chan: script_sender.clone(),
            pipeline: pipeline,
        };

        let timeline = TimelineActor::new(actors.new_name("timeline"),
                                          pipeline,
                                          script_sender,
                                          devtools_sender);

        let DevtoolsPageInfo { title, url } = page_info;
        // The tab actor references its child actors by name.
        let tab = TabActor {
            name: actors.new_name("tab"),
            title: title,
            url: url.serialize(),
            console: console.name(),
            inspector: inspector.name(),
            timeline: timeline.name(),
        };

        let root = actors.find_mut::<RootActor>("root");
        root.tabs.push(tab.name.clone());

        (tab, console, inspector, timeline)
    };

    // Worker globals additionally get a worker actor keyed by (pipeline, id).
    if let Some(id) = worker_id {
        let worker = WorkerActor {
            name: actors.new_name("worker"),
            id: id,
        };
        actor_workers.insert((pipeline, id), worker.name.clone());
        actors.register(box worker);
    }

    actor_pipelines.insert(pipeline, tab.name.clone());
    actors.register(box tab);
    actors.register(box console);
    actors.register(box inspector);
    actors.register(box timeline);
}
/// Translate a console message from the script task into a "consoleAPICall"
/// packet and broadcast it to every client attached to the pipeline's
/// console actor.
fn handle_console_message(actors: Arc<Mutex<ActorRegistry>>,
                          id: PipelineId,
                          console_message: ConsoleMessage,
                          actor_pipelines: &HashMap<PipelineId, String>) {
    let console_actor_name = find_console_actor(actors.clone(), id, actor_pipelines);
    let actors = actors.lock().unwrap();
    let console_actor = actors.find::<ConsoleActor>(&console_actor_name);
    match console_message {
        ConsoleMessage::LogMessage {
            message,
            filename,
            lineNumber,
            columnNumber,
        } => {
            let msg = ConsoleAPICall {
                from: console_actor.name.clone(),
                __type__: "consoleAPICall".to_string(),
                message: ConsoleMsg {
                    // Only the "log" level is produced here; other console
                    // levels are not yet plumbed through.
                    level: "log".to_string(),
                    timeStamp: precise_time_ns(),
                    arguments: vec!(message),
                    filename: filename,
                    lineNumber: lineNumber,
                    columnNumber: columnNumber,
                },
            };
            // Fan the packet out to every client listening on this console.
            for stream in console_actor.streams.borrow_mut().iter_mut() {
                stream.write_json_packet(&msg);
            }
        }
    }
}
/// Resolve the console actor name for a pipeline by looking up its tab
/// actor and reading the console name it references.
///
/// Panics if the pipeline has no registered tab actor.
fn find_console_actor(actors: Arc<Mutex<ActorRegistry>>,
                      id: PipelineId,
                      actor_pipelines: &HashMap<PipelineId, String>) -> String {
    let registry = actors.lock().unwrap();
    let tab_actor_name = &(*actor_pipelines)[&id];
    registry.find::<TabActor>(tab_actor_name).console.clone()
}
fn handle_network_event(actors: Arc<Mutex<ActorRegistry>>,
mut connections: Vec<TcpStream>,
actor_pipelines: &HashMap<PipelineId, String>,
actor_requests: &mut HashMap<String, String>,
pipeline_id: PipelineId,
request_id: String,
network_event: NetworkEvent) {
let console_actor_name = find_console_actor(actors.clone(), pipeline_id, actor_pipelines);
let netevent_actor_name = find_network_event_actor(actors.clone(), actor_requests, request_id.clone());
let mut actors = actors.lock().unwrap();
let actor = actors.find_mut::<NetworkEventActor>(&netevent_actor_name);
match network_event {
NetworkEvent::HttpRequest(url, method, headers, body) => {
//Store the request information in the actor
actor.add_request(url, method, headers, body);
//Send a networkEvent message to the client
let msg = NetworkEventMsg {
from: console_actor_name,
__type__: "networkEvent".to_string(),
eventActor: actor.get_event_actor(),
};
for stream in connections.iter_mut() {
stream.write_json_packet(&msg);
}
}
NetworkEvent::HttpResponse(headers, status, body) => {
//Store the response information in the actor
actor.add_response(headers, status, body);
//Send a networkEventUpdate (responseStart) to the client
let msg = NetworkEventUpdateMsg {
from: netevent_actor_name,<|fim▁hole|> };
for stream in connections.iter_mut() {
stream.write_json_packet(&msg);
}
}
//TODO: Send the other types of update messages at appropriate times
// requestHeaders, requestCookies, responseHeaders, securityInfo, etc
}
}
// Find the name of NetworkEventActor corresponding to request_id
// Create a new one if it does not exist, add it to the actor_requests hashmap
fn find_network_event_actor(actors: Arc<Mutex<ActorRegistry>>,
actor_requests: &mut HashMap<String, String>,
request_id: String) -> String {
let mut actors = actors.lock().unwrap();
match (*actor_requests).entry(request_id) {
Occupied(name) => {
//TODO: Delete from map like Firefox does?
name.into_mut().clone()
}
Vacant(entry) => {
let actor_name = actors.new_name("netevent");
let actor = NetworkEventActor::new(actor_name.clone());
entry.insert(actor_name.clone());
actors.register(box actor);
actor_name
}
}
}
let sender_clone = sender.clone();
spawn_named("DevtoolsClientAcceptor".to_owned(), move || {
// accept connections and process them, spawning a new task for each one
for stream in listener.incoming() {
// connection succeeded
sender_clone.send(DevtoolsControlMsg::AddClient(stream.unwrap())).unwrap();
}
});
loop {
match receiver.recv() {
Ok(DevtoolsControlMsg::AddClient(stream)) => {
let actors = actors.clone();
accepted_connections.push(stream.try_clone().unwrap());
spawn_named("DevtoolsClientHandler".to_owned(), move || {
handle_client(actors, stream.try_clone().unwrap())
})
}
Ok(DevtoolsControlMsg::FramerateTick(actor_name, tick)) =>
handle_framerate_tick(actors.clone(), actor_name, tick),
Ok(DevtoolsControlMsg::NewGlobal(ids, script_sender, pageinfo)) =>
handle_new_global(actors.clone(), ids, script_sender, sender.clone(), &mut actor_pipelines,
&mut actor_workers, pageinfo),
Ok(DevtoolsControlMsg::SendConsoleMessage(id, console_message)) =>
handle_console_message(actors.clone(), id, console_message,
&actor_pipelines),
Ok(DevtoolsControlMsg::NetworkEventMessage(request_id, network_event)) => {
// copy the accepted_connections vector
let mut connections = Vec::<TcpStream>::new();
for stream in accepted_connections.iter() {
connections.push(stream.try_clone().unwrap());
}
//TODO: Get pipeline_id from NetworkEventMessage after fixing the send in http_loader
// For now, the id of the first pipeline is passed
handle_network_event(actors.clone(), connections, &actor_pipelines, &mut actor_requests,
PipelineId(0), request_id, network_event);
},
Ok(DevtoolsControlMsg::ServerExitMsg) | Err(RecvError) => break
}
}
for connection in accepted_connections.iter_mut() {
let _ = connection.shutdown(Shutdown::Both);
}
}<|fim▁end|> | __type__: "networkEventUpdate".to_string(),
updateType: "responseStart".to_string(),
response: actor.get_response_start() |
<|file_name|>buildhistory.py<|end_file_name|><|fim▁begin|>import os
import unittest<|fim▁hole|>import tempfile
from git import Repo
from oeqa.utils.commands import get_bb_var
from oe.buildhistory_analysis import blob_to_dict, compare_dict_blobs
class TestBlobParsing(unittest.TestCase):
    """Tests for converting buildhistory git blobs to dictionaries and
    comparing them via oe.buildhistory_analysis."""

    def setUp(self):
        # Work inside a throwaway git repository under TOPDIR so the test
        # leaves no residue behind.
        self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
                                          dir=get_bb_var('TOPDIR'))

        self.repo = Repo.init(self.repo_path)
        self.test_file = "test"
        # Mirrors the variables currently written to self.test_file.
        self.var_map = {}

    def tearDown(self):
        import shutil
        shutil.rmtree(self.repo_path)

    def commit_vars(self, to_add=None, to_remove=None, msg="A commit message"):
        """Apply variable changes to the tracked file and commit them.

        to_add: mapping of variable name -> value to set/update.
        to_remove: iterable of variable names to drop (missing names ignored).
        msg: commit message.
        Does nothing when there is nothing to change.
        """
        # Avoid mutable default arguments; None stands in for "no change".
        to_add = to_add if to_add is not None else {}
        to_remove = to_remove if to_remove is not None else []
        if len(to_add) == 0 and len(to_remove) == 0:
            return

        for k in to_remove:
            # BUGFIX: this previously popped the undefined name 'x', raising
            # NameError whenever to_remove was non-empty.
            self.var_map.pop(k, None)
        for k in to_add:
            self.var_map[k] = to_add[k]

        with open(os.path.join(self.repo_path, self.test_file), 'w') as repo_file:
            for k in self.var_map:
                repo_file.write("%s = %s\n" % (k, self.var_map[k]))

        self.repo.git.add("--all")
        self.repo.git.commit(message=msg)

    def test_blob_to_dict(self):
        """
        Test conversion of git blobs to dictionary
        """
        valuesmap = { "foo" : "1", "bar" : "2" }
        self.commit_vars(to_add = valuesmap)

        blob = self.repo.head.commit.tree.blobs[0]
        self.assertEqual(valuesmap, blob_to_dict(blob),
                         "commit was not translated correctly to dictionary")

    def test_compare_dict_blobs(self):
        """
        Test comparison of dictionaries extracted from git blobs
        """
        changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}

        self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
        blob1 = self.repo.heads.master.commit.tree.blobs[0]

        self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
        blob2 = self.repo.heads.master.commit.tree.blobs[0]

        change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
                                            blob1, blob2, False, False)

        var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records}
        self.assertEqual(changesmap, var_changes, "Changes not reported correctly")

    def test_compare_dict_blobs_default(self):
        """
        Test default values for comparison of git blob dictionaries
        """
        # PKG/PKGE/PKGV/PKGR get an implicit "default" old value when absent.
        defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}

        self.commit_vars(to_add = { "foo" : "1" })
        blob1 = self.repo.heads.master.commit.tree.blobs[0]

        self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
        blob2 = self.repo.heads.master.commit.tree.blobs[0]

        change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
                                            blob1, blob2, False, False)

        var_changes = {}
        for x in change_records:
            oldvalue = "default" if ("default" in x.oldvalue) else x.oldvalue
            var_changes[x.fieldname] = (oldvalue, x.newvalue)

        self.assertEqual(defaultmap, var_changes, "Defaults not set properly")
<|file_name|>server.py<|end_file_name|><|fim▁begin|>import datetime
from django.db import models
from django.conf import settings
#server variables that needed to be stored in db
class ServerVariable(models.Model):
name = models.CharField(max_length=64, blank=True, null=True)
value = models.TextField(blank=True, null=True)
class Meta:
app_label = 'website'
@staticmethod
def get(name):
try:
server_variable = ServerVariable.objects.get(name=name)
except:
return None
return server_variable.value
<|fim▁hole|> try:
server_variable = ServerVariable.objects.get(name=name)
except:
server_variable = ServerVariable(name=name)
server_variable.value = value
server_variable.save()
return server_variable
class MigrationHistory(models.Model):
    # Records that a row from source_table/source_id was migrated into
    # target_table/target_id, optionally scoped to a jurisdiction.
    # All key columns are indexed since lookups filter on them.
    jurisdiction_id = models.IntegerField(blank=True, null=True, db_index=True)
    source_table = models.CharField(max_length=64, blank=True, null=True, db_index=True)
    source_id = models.IntegerField(blank=True, null=True, db_index=True)
    target_table = models.CharField(max_length=64, blank=True, null=True, db_index=True)
    target_id = models.IntegerField(blank=True, null=True, db_index=True)
    notes = models.TextField(blank=True, null=True)
    notes2 = models.TextField(blank=True, null=True)
    create_datetime = models.DateTimeField(auto_now_add=True)
    modify_datetime = models.DateTimeField(auto_now=True)

    class Meta:
        app_label = 'website'

    @staticmethod
    def save_history(jurisdiction, source_table, source_id, target_table, target_id, notes='', notes2=''):
        # Idempotent upsert: one history row per (source, target) pair;
        # notes and jurisdiction are overwritten on every call.
        history, created = MigrationHistory.objects.get_or_create(source_table=source_table, source_id=source_id, target_table=target_table, target_id=target_id)
        if jurisdiction != None:
            history.jurisdiction_id = jurisdiction.id
        history.notes = notes
        history.notes2 = notes2
        history.save()
        return history

    @staticmethod
    def get_target_id(source_table, source_id, target_table):
        # Returns the migrated row's id, or None when no mapping exists.
        # NOTE(review): the bare except also swallows MultipleObjectsReturned
        # and database errors, not just DoesNotExist — consider narrowing.
        try:
            history = MigrationHistory.objects.get(source_table=source_table, source_id=source_id, target_table=target_table)
            return history.target_id
        except:
            return None
def set(name, value): |
<|file_name|>ConfigFemtoAnalysis.C<|end_file_name|><|fim▁begin|>/*********************************************************************
* *
* Configfemtoanalysis.C - configuration macro for the femtoscopic *
* analysis, meant as a QA process for two-particle effects *
* *
* Author: Adam Kisiel ([email protected]) *
* *
*********************************************************************/<|fim▁hole|>#include "AliFemtoEventReaderESDChain.h"
#include "AliFemtoEventReaderESDChainKine.h"
#include "AliFemtoEventReaderAODChain.h"
#include "AliFemtoSimpleAnalysis.h"
#include "AliFemtoBasicEventCut.h"
#include "AliFemtoESDTrackCut.h"
#include "AliFemtoCorrFctn.h"
#include "AliFemtoCutMonitorParticleYPt.h"
#include "AliFemtoCutMonitorParticleVertPos.h"
#include "AliFemtoCutMonitorParticleMomRes.h"
#include "AliFemtoCutMonitorParticlePID.h"
#include "AliFemtoCutMonitorEventMult.h"
#include "AliFemtoCutMonitorEventVertex.h"
#include "AliFemtoShareQualityTPCEntranceSepPairCut.h"
#include "AliFemtoPairCutAntiGamma.h"
#include "AliFemtoPairCutRadialDistance.h"
#include "AliFemtoQinvCorrFctn.h"
#include "AliFemtoCorrFctnNonIdDR.h"
#include "AliFemtoShareQualityCorrFctn.h"
#include "AliFemtoTPCInnerCorrFctn.h"
#include "AliFemtoVertexMultAnalysis.h"
#include "AliFemtoCorrFctn3DSpherical.h"
#include "AliFemtoChi2CorrFctn.h"
#include "AliFemtoCorrFctnTPCNcls.h"
#include "AliFemtoBPLCMS3DCorrFctn.h"
#include "AliFemtoCorrFctn3DLCMSSym.h"
#include "AliFemtoModelBPLCMSCorrFctn.h"
#include "AliFemtoModelCorrFctn3DSpherical.h"
#include "AliFemtoModelGausLCMSFreezeOutGenerator.h"
#include "AliFemtoModelGausRinvFreezeOutGenerator.h"
#include "AliFemtoModelManager.h"
#include "AliFemtoModelWeightGeneratorBasic.h"
#include "AliFemtoModelWeightGeneratorLednicky.h"
#include "AliFemtoCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnSource.h"
#include "AliFemtoCutMonitorParticlePtPDG.h"
#include "AliFemtoKTPairCut.h"
#include "AliFemtoAvgSepCorrFctn.h"
#endif
//________________________________________________________________________
AliFemtoManager* ConfigFemtoAnalysis() {
double PionMass = 0.13956995;
double KaonMass = 0.493677;
double ProtonMass = 0.938272013;
// double psi = TMath::Pi()/2.;
// double psid = TMath::Pi()/6.;
// int runepvzero[7] = {1, 1, 1, 1, 1, 1, 1};
// double epvzerobins[7] = {-psi, -psi+psid, -psi+2*psid, -psi+3*psid, -psi+4*psid, -psi+5*psid, -psi+6*psid};
double psi = TMath::Pi()/2.;
double psid = TMath::Pi()/3.;
int runepvzero[4] = {0, 0, 0, 1};
double epvzerobins[4] = {-psi, -psi+psid, -psi+2*psid, -psi+3*psid};
int runmults[10] = {1, 1, 0, 0, 0, 0, 0, 0, 0, 0};
int multbins[11] = {0.001, 50, 100, 200, 300, 400, 500, 600, 700, 800, 900};
int runch[3] = {1, 1, 1};
const char *chrgs[3] = { "PP", "APAP", "PAP" };
int runktdep = 1;
double ktrng[3] = {0.01, 1.0, 5.0};
int numOfMultBins = 10;
int numOfChTypes = 3;
int numOfkTbins = 2;
int numOfEPvzero = 4;
int runqinv = 1;
int runshlcms = 0;// 0:PRF(PAP), 1:LCMS(PP,APAP)
int runtype = 2; // Types 0 - global, 1 - ITS only, 2 - TPC Inner
int isrealdata = 1;
// int gammacut = 1;
double shqmax = 1.0;
int nbinssh = 100;
AliFemtoEventReaderAODChain *Reader = new AliFemtoEventReaderAODChain();
Reader->SetFilterBit(7);
Reader->SetCentralityPreSelection(0.001, 310);
Reader->SetEPVZERO(kTRUE);
AliFemtoManager* Manager = new AliFemtoManager();
Manager->SetEventReader(Reader);
AliFemtoVertexMultAnalysis *anetaphitpc[10*3*2];
AliFemtoBasicEventCut *mecetaphitpc[10*3*2];
AliFemtoCutMonitorEventMult *cutPassEvMetaphitpc[50];
AliFemtoCutMonitorEventMult *cutFailEvMetaphitpc[50];
// AliFemtoCutMonitorEventVertex *cutPassEvVetaphitpc[50];
// AliFemtoCutMonitorEventVertex *cutFailEvVetaphitpc[50];
AliFemtoESDTrackCut *dtc1etaphitpc[50];
AliFemtoESDTrackCut *dtc2etaphitpc[50];
AliFemtoCutMonitorParticleYPt *cutPass1YPtetaphitpc[50];
AliFemtoCutMonitorParticleYPt *cutFail1YPtetaphitpc[50];
AliFemtoCutMonitorParticlePID *cutPass1PIDetaphitpc[50];
AliFemtoCutMonitorParticlePID *cutFail1PIDetaphitpc[50];
AliFemtoCutMonitorParticleYPt *cutPass2YPtetaphitpc[50];
AliFemtoCutMonitorParticleYPt *cutFail2YPtetaphitpc[50];
AliFemtoCutMonitorParticlePID *cutPass2PIDetaphitpc[50];
AliFemtoCutMonitorParticlePID *cutFail2PIDetaphitpc[50];
// AliFemtoPairCutAntiGamma *sqpcetaphitpcdiff[10*3];
// AliFemtoShareQualityTPCEntranceSepPairCut *sqpcetaphitpcsame[10*3];
//AliFemtoPairCutAntiGamma *sqpcetaphitpc[10*3];
AliFemtoPairCutRadialDistance *sqpcetaphitpc[50];
// AliFemtoChi2CorrFctn *cchiqinvetaphitpc[20*2];
AliFemtoKTPairCut *ktpcuts[50*2];
AliFemtoCorrFctnDirectYlm *cylmtpc[50];
AliFemtoCorrFctnDirectYlm *cylmkttpc[50*2];
AliFemtoCorrFctnDirectYlm *cylmetaphitpc[10*3];
AliFemtoQinvCorrFctn *cqinvkttpc[50*2];
AliFemtoQinvCorrFctn *cqinvtpc[50];
AliFemtoCorrFctnNonIdDR *ckstartpc[50];
AliFemtoCorrFctnNonIdDR *ckstarkttpc[50*2];
AliFemtoCorrFctnDEtaDPhi *cdedpetaphi[50*2];
AliFemtoAvgSepCorrFctn *cAvgSeptpc[50];
// AliFemtoCorrFctn3DLCMSSym *cq3dlcmskttpc[20*2];
// AliFemtoCorrFctnTPCNcls *cqinvnclstpc[20];
// AliFemtoShareQualityCorrFctn *cqinvsqtpc[20*10];
// AliFemtoChi2CorrFctn *cqinvchi2tpc[20];
AliFemtoTPCInnerCorrFctn *cqinvinnertpc[50];
// *** Third QA task - HBT analysis with all pair cuts off, TPC only ***
// *** Begin pion-pion (positive) analysis ***
int aniter = 0;
for (int imult = 0; imult < numOfMultBins; imult++) {
if (runmults[imult]) {
for (int ichg = 0; ichg < numOfChTypes; ichg++) {
if (runch[ichg]) {
for (int iepvzero = 0; iepvzero < numOfEPvzero; iepvzero++) {
if (runepvzero[iepvzero]) {
aniter = imult * numOfChTypes + ichg * numOfEPvzero + iepvzero;
// aniter = ichg * numOfMultBins + imult * numOfEPvzero + iepvzero;
// cout << "aniter = " << aniter << endl;
// aniter = ichg * numOfMultBins + imult;
// if (ichg == 2)
// runshlcms = 0;
// else
// runshlcms = 1;
//________________________
anetaphitpc[aniter] = new AliFemtoVertexMultAnalysis(8, -8.0, 8.0, 4, multbins[imult], multbins[imult+1]);
anetaphitpc[aniter]->SetNumEventsToMix(10);
anetaphitpc[aniter]->SetMinSizePartCollection(1);
anetaphitpc[aniter]->SetVerboseMode(kFALSE);
mecetaphitpc[aniter] = new AliFemtoBasicEventCut();
mecetaphitpc[aniter]->SetEventMult(0.001,100000);
mecetaphitpc[aniter]->SetVertZPos(-8,8);
if (iepvzero == 3)
mecetaphitpc[aniter]->SetEPVZERO(epvzerobins[0],epvzerobins[3]);
else
mecetaphitpc[aniter]->SetEPVZERO(epvzerobins[iepvzero],epvzerobins[iepvzero+1]);
// if (isrealdata)
// mecetaphitpc[aniter]->SetAcceptOnlyPhysics(kTRUE);
// cutPassEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutPass%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero));
// cutFailEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutFail%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero));
// mecetaphitpc[aniter]->AddCutMonitor(cutPassEvMetaphitpc[aniter], cutFailEvMetaphitpc[aniter]);
// cutPassEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutPass%stpcM%i", chrgs[ichg], imult));
// cutFailEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutFail%stpcM%i", chrgs[ichg], imult));
// mecetaphitpc[aniter]->AddCutMonitor(cutPassEvVetaphitpc[aniter], cutFailEvVetaphitpc[aniter]);
dtc1etaphitpc[aniter] = new AliFemtoESDTrackCut();
dtc2etaphitpc[aniter] = new AliFemtoESDTrackCut();
if (ichg == 0) {
dtc1etaphitpc[aniter]->SetCharge(1.0);
dtc1etaphitpc[aniter]->SetPt(0.7,4.0);
}
else if (ichg == 1) {
dtc1etaphitpc[aniter]->SetCharge(-1.0);
dtc1etaphitpc[aniter]->SetPt(0.7,4.0);
}
else if (ichg == 2) {
dtc1etaphitpc[aniter]->SetCharge(-1.0);
dtc2etaphitpc[aniter]->SetCharge(1.0);
dtc1etaphitpc[aniter]->SetPt(0.7,4.0);
dtc2etaphitpc[aniter]->SetPt(0.7,4.0);
}
dtc1etaphitpc[aniter]->SetEta(-0.8,0.8);
dtc1etaphitpc[aniter]->SetMass(ProtonMass);
dtc1etaphitpc[aniter]->SetMostProbableProton();
dtc1etaphitpc[aniter]->SetNsigma(3.0);
//dtc1etaphitpc[aniter]->SetNsigma(2.0);
dtc1etaphitpc[aniter]->SetNsigmaTPCTOF(kTRUE);
//dtc1etaphitpc[aniter]->SetNsigmaTPConly(kTRUE);
if (ichg == 2) {
dtc2etaphitpc[aniter]->SetEta(-0.8,0.8);
dtc2etaphitpc[aniter]->SetMass(ProtonMass);
dtc2etaphitpc[aniter]->SetMostProbableProton();
dtc2etaphitpc[aniter]->SetNsigma(3.0);
//dtc2etaphitpc[aniter]->SetNsigma(2.0);
dtc2etaphitpc[aniter]->SetNsigmaTPCTOF(kTRUE);
//dtc2etaphitpc[aniter]->SetNsigmaTPConly(kTRUE);
}
// Track quality cuts
if (runtype == 0) {
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetminTPCncls(80);
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
// dtc1etaphitpc[aniter]->SetMaxITSChiNdof(6.0);
dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(0.2);
// dtc1etaphitpc[aniter]->SetMaxImpactXYPtDep(0.0182, 0.0350, -1.01);
dtc1etaphitpc[aniter]->SetMaxImpactZ(0.15);
// dtc1etaphitpc[aniter]->SetMaxSigmaToVertex(6.0);
}
else if (runtype == 1) {
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit|AliESDtrack::kITSpureSA);
// dtc1etaphitpc[aniter]->SetminTPCncls(70);
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
// dtc1etaphitpc[aniter]->SetMaxITSChiNdof(6.0);
// dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(6.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(0.2);
dtc1etaphitpc[aniter]->SetMaxImpactZ(0.25);
// dtc1etaphitpc[aniter]->SetMaxSigmaToVertex(6.0);
}
else if (runtype == 2) {
//dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCin);
dtc1etaphitpc[aniter]->SetminTPCncls(80);
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(2.4); // 2.4 0.1
// dtc1etaphitpc[aniter]->SetMaxImpactXYPtDep(0.0205, 0.035, -1.1); // DCA xy
// dtc1etaphitpc[aniter]->SetMaxImpactXYPtDep(0.018, 0.035, -1.01); // DCA xy
dtc1etaphitpc[aniter]->SetMaxImpactZ(3.2); // 2.0 0.1
if (ichg == 2) {
//dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
dtc2etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCin);
dtc2etaphitpc[aniter]->SetminTPCncls(80);
dtc2etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc2etaphitpc[aniter]->SetLabel(kFALSE);
dtc2etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
dtc2etaphitpc[aniter]->SetMaxImpactXY(2.4); // 2.4 0.1
// dtc2etaphitpc[aniter]->SetMaxImpactXYPtDep(0.0205, 0.035, -1.1); // DCA xy
//dtc2etaphitpc[aniter]->SetMaxImpactXYPtDep(0.018, 0.035, -1.01); // DCA xy
dtc2etaphitpc[aniter]->SetMaxImpactZ(3.2); // 2.0 0.1
}
}
cutPass1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutPass1%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),ProtonMass);
cutFail1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutFail1%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),ProtonMass);
dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1YPtetaphitpc[aniter], cutFail1YPtetaphitpc[aniter]);
cutPass1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutPass1%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),2);//0-pion,1-kaon,2-proton
cutFail1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutFail1%stpcM%iPsi%i", chrgs[ichg], imult , iepvzero),2);
dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1PIDetaphitpc[aniter], cutFail1PIDetaphitpc[aniter]);
// if (ichg == 2){
// cutPass2PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutPass2%stpcM%i", chrgs[ichg], imult),2);//0-pion,1-kaon,2-proton
// cutFail2PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutFail2%stpcM%i", chrgs[ichg], imult),2);
// dtc2etaphitpc[aniter]->AddCutMonitor(cutPass2PIDetaphitpc[aniter], cutFail2PIDetaphitpc[aniter]);
// }
// sqpcetaphitpc[aniter] = new AliFemtoPairCutAntiGamma();
sqpcetaphitpc[aniter] = new AliFemtoPairCutRadialDistance();
if (runtype == 0) {
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(0.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
// sqpcetaphitpc[aniter]->SetMaxEEMinv(0.0);
// sqpcetaphitpc[aniter]->SetMaxThetaDiff(0.0);
// sqpcetaphitpc[aniter]->SetTPCEntranceSepMinimum(1.5);
//sqpcetaphitpc[aniter]->SetRadialDistanceMinimum(0.12, 0.03);
// sqpcetaphitpc[aniter]->SetEtaDifferenceMinimum(0.02);
}
else if (runtype == 1) {
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(1.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
// sqpcetaphitpc[aniter]->SetMaxEEMinv(0.002);
// sqpcetaphitpc[aniter]->SetMaxThetaDiff(0.008);
// sqpcetaphitpc[aniter]->SetTPCEntranceSepMinimum(5.0);
//sqpcetaphitpc[aniter]->SetRadialDistanceMinimum(1.2, 0.03);
// sqpcetaphitpc[aniter]->SetEtaDifferenceMinimum(0.02);
}
else if (runtype == 2) {
//sqpcetaphitpc[aniter]->SetUseAOD(kTRUE);
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(0.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
// if (gammacut == 0) {
//sqpcetaphitpc[aniter]->SetMaxEEMinv(0.0);
//sqpcetaphitpc[aniter]->SetMaxThetaDiff(0.0);
//}
//else if (gammacut == 1) {
//sqpcetaphitpc[aniter]->SetMaxEEMinv(0.002);
//sqpcetaphitpc[aniter]->SetMaxThetaDiff(0.008);
//}
// sqpcetaphitpc[aniter]->SetMagneticFieldSign(-1); // field1 -1, field3 +1
// sqpcetaphitpc[aniter]->SetMinimumRadius(0.8); // biggest inefficiency for R=1.1 m (checked on small sample)
sqpcetaphitpc[aniter]->SetMinimumRadius(1.2); //0.8
sqpcetaphitpc[aniter]->SetPhiStarMin(kFALSE);
sqpcetaphitpc[aniter]->SetPhiStarDifferenceMinimum(0.017); // 0.012 - pions, 0.017 - kaons, 0.018
sqpcetaphitpc[aniter]->SetEtaDifferenceMinimum(0.012); // 0.017 - pions, 0.015 - kaons
}
anetaphitpc[aniter]->SetEventCut(mecetaphitpc[aniter]);
if (ichg == 2) {
anetaphitpc[aniter]->SetFirstParticleCut(dtc1etaphitpc[aniter]);
anetaphitpc[aniter]->SetSecondParticleCut(dtc2etaphitpc[aniter]);
}
else {
anetaphitpc[aniter]->SetFirstParticleCut(dtc1etaphitpc[aniter]);
anetaphitpc[aniter]->SetSecondParticleCut(dtc1etaphitpc[aniter]);
}
anetaphitpc[aniter]->SetPairCut(sqpcetaphitpc[aniter]);
if (ichg == 2) {
ckstartpc[aniter] = new AliFemtoCorrFctnNonIdDR(Form("ckstar%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),nbinssh,0.0,shqmax);
anetaphitpc[aniter]->AddCorrFctn(ckstartpc[aniter]);
}
else {
cqinvtpc[aniter] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),2*nbinssh,0.0,2*shqmax);
anetaphitpc[aniter]->AddCorrFctn(cqinvtpc[aniter]);
}
cylmtpc[aniter] = new AliFemtoCorrFctnDirectYlm(Form("cylm%stpcM%i", chrgs[ichg], imult),2,nbinssh, 0.0,shqmax,runshlcms);
anetaphitpc[aniter]->AddCorrFctn(cylmtpc[aniter]);
// cAvgSeptpc[aniter] = new AliFemtoAvgSepCorrFctn(Form("cAvgSep%stpcM%iPsi%i", chrgs[ichg], imult, iepvzero),4*nbinssh,0.0,200);
// anetaphitpc[aniter]->AddCorrFctn(cAvgSeptpc[aniter]);
cqinvinnertpc[aniter] = new AliFemtoTPCInnerCorrFctn(Form("cqinvinner%stpcM%d", chrgs[ichg], imult),nbinssh,0.0,shqmax);
cqinvinnertpc[aniter]->SetRadius(1.2);
anetaphitpc[aniter]->AddCorrFctn(cqinvinnertpc[aniter]);
if (runktdep) {
int ktm;
for (int ikt=0; ikt<numOfkTbins; ikt++) {
ktm = aniter * numOfkTbins + ikt;
ktpcuts[ktm] = new AliFemtoKTPairCut(ktrng[ikt], ktrng[ikt+1]);
cylmkttpc[ktm] = new AliFemtoCorrFctnDirectYlm(Form("cylm%stpcM%ikT%i", chrgs[ichg], imult, ikt),2,nbinssh,0.0,shqmax,runshlcms);
cylmkttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
anetaphitpc[aniter]->AddCorrFctn(cylmkttpc[ktm]);
if (ichg == 2) {
ckstarkttpc[ktm] = new AliFemtoCorrFctnNonIdDR(Form("ckstar%stpcM%iPsi%ikT%i", chrgs[ichg], imult, iepvzero, ikt),nbinssh,0.0,shqmax);
ckstarkttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
anetaphitpc[aniter]->AddCorrFctn(ckstarkttpc[ktm]);
}
else {
cqinvkttpc[ktm] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%iPsi%ikT%i", chrgs[ichg], imult, iepvzero, ikt),2*nbinssh,0.0,2*shqmax);
cqinvkttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
anetaphitpc[aniter]->AddCorrFctn(cqinvkttpc[ktm]);
}
// cqinvsqtpc[ktm] = new AliFemtoShareQualityCorrFctn(Form("cqinvsq%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,shqmax);
// cqinvsqtpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
// anetaphitpc[aniter]->AddCorrFctn(cqinvsqtpc[ktm]);
// cqinvinnertpc[ktm] = new AliFemtoTPCInnerCorrFctn(Form("cqinvinner%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,shqmax);
// cqinvinnertpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
// cqinvinnertpc[ktm]->SetRadius(1.2);
// anetaphitpc[aniter]->AddCorrFctn(cqinvinnertpc[ktm]);
// if (run3d) {
// cq3dlcmskttpc[ktm] = new AliFemtoCorrFctn3DLCMSSym(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),60,(imult>3)?((imult>6)?((imult>7)?0.6:0.4):0.25):0.15);
// cq3dlcmskttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
// anetaphitpc[aniter]->AddCorrFctn(cq3dlcmskttpc[ktm]);
// }
}
}
// cdedpetaphi[aniter] = new AliFemtoCorrFctnDEtaDPhi(Form("cdedp%stpcM%i", chrgs[ichg], imult),240, 240);
// anetaphitpc[aniter]->AddCorrFctn(cdedpetaphi[aniter]);
Manager->AddAnalysis(anetaphitpc[aniter]);
}
}
}
}
}
}
// *** End pion-pion (positive) analysis
return Manager;
}<|fim▁end|> |
#if !defined(__CINT__) || defined(__MAKECINT_)
#include "AliFemtoManager.h" |
<|file_name|>nii2np.py<|end_file_name|><|fim▁begin|>import sys
from timer import Timer
import os
import pandas as pd
import nipy
import numpy as np
import re
import argparse
def get_images_list(path, regexp, number_images=None):
    """Return the image file names in ``path``, optionally filtered by regexp.

    path: directory to scan.
    regexp: regular expression used to select file names; the literal
        string "NO" disables filtering and returns every directory entry.
    number_images: optional expected number of selected images; when
        supplied, a mismatch raises an Exception.
    """
    dir_list = os.listdir(path)
    if regexp == "NO":
        return dir_list
    reg = re.compile(regexp)
    im_list = [i for i in dir_list for m in [reg.search(i)] if m]
    # Bug fix: the original condition was `isinstance(number_images, type(None))`,
    # i.e. it only validated when no count was given -- and then crashed on
    # `int(None)`. Validate only when a count was actually supplied.
    if number_images is not None:
        if len(im_list) != int(number_images):
            raise Exception("set numbers of images have to be the same with numbers images in directory!")
    return im_list
def delete_arrays(path_4d, region_code):
    """Delete the temporary "<region_code>_<part>.npy" chunk files.

    Parts are numbered from 1; deletion stops at the first missing part,
    so non-consecutive leftovers are intentionally kept.
    """
    part = 1
    while True:
        chunk_path = os.path.join(path_4d, str(region_code) + '_' + str(part) + ".npy")
        if not os.path.isfile(chunk_path):
            break
        os.remove(chunk_path)
        part += 1
def convert_array_for_regression(path_4d, region_code, split_size=1000):
    ''' merge region array to one and split it in (number images in study) x (voxels split_size) '''
    # Python 2 module (print statements, integer division).
    regression_data=[]
    p=1
    # Load every "<region_code>_<part>.npy" chunk until one is missing.
    # NOTE(review): the bare except also hides real I/O errors -- confirm
    # that treating any failure as "no more parts" is intended.
    while True:
        try:
            regression_data.append(np.load( os.path.join(path_4d, str(region_code) +'_'+str(p) + ".npy" ) ) )
            print str(region_code) +'_' +str(p) + ".npy"
            p+=1
        except:
            break
    regression_data=np.concatenate(regression_data)
    print "Region {}, regression data size {}, will be split by {} voxels chunks ".format(region_code,regression_data.shape, split_size)
    sample_size, number_voxels=regression_data.shape
    # d full chunks of split_size voxels plus a remainder of r voxels
    # (Python 2 integer division).
    d=number_voxels/split_size
    r=number_voxels-d*split_size
    if d!=0:
        l=[range(split_size*i,split_size*(i+1)) for i in range(0,d) ]
        for i,j in enumerate(l): # TODO start from 0, maybe change to 1
            save_np=regression_data[:,j]
            np.save(os.path.join(path_4d, 'reg' + str(region_code) + "_" + str(i)) , save_np )
        # Trailing partial chunk gets the next sequential index.
        if r!=0:
            save_np=regression_data[:,d*split_size:d*split_size+r]
            np.save(os.path.join(path_4d, 'reg' + str(region_code) + "_" + str(i+1)) , save_np )
    else:
        # Fewer voxels than split_size: save the whole region as chunk 0.
        np.save(os.path.join(path_4d, 'reg' + str(region_code) + "_" + str(0)) , regression_data )
def save_4d_data(Hammer_atlas, image_path, path_4d, image_names):
    '''produce nparrays (voxels in region) x (image in study)
    only if number of images less then 1000
    '''
    # All non-zero atlas labels become regions; 0 is background.
    region_codes=np.unique(Hammer_atlas._data)
    region_codes=region_codes[region_codes!=0]
    # Precompute the voxel coordinates of every region once.
    region_coodinates={i:np.where(Hammer_atlas._data==i) for i in region_codes}
    data_4d={i:[] for i in region_codes}
    for im in image_names:
        print im
        try:
            images_data=nipy.load_image(os.path.join(image_path, im ))._data
            # Collect this image's voxels for every region in one pass.
            for k in data_4d:
                data_4d[k].append(images_data[region_coodinates[k]])
        except:
            raise ValueError("Error during reading image {}".format(str(im)))
    for c in region_codes:
        c=int(c)
        np_4d=np.array(data_4d[c])
        print np_4d.shape
        # Single part (index 1), then re-split into regression-sized chunks
        # and remove the temporary part file.
        np.save(os.path.join(path_4d, str(c) +"_" + str(1)) , np_4d )
        convert_array_for_regression(path_4d, c)
        delete_arrays(path_4d, c)
def save_4d_data_region(logs_dir, atlas, image_path, path_4d, region_code, regexp='NO'):
image_names=get_images_list(image_path,regexp)
df=pd.DataFrame(image_names)
df.to_csv(os.path.join(logs_dir, str(region_code)+ '.csv'))
if len(image_names)<1000:
if int(region_code)!=0:
print 'FORCE MULTI JOBS SUBMISSION ( NOT EFFICIENT)'
elif int(region_code)==0:
save_4d_data(atlas, image_path, path_4d, image_names)
return 0
data_4d=[]
part=1
coordinate=np.where(atlas._data==int(region_code) )
if coordinate[0].shape[0]==0:
raise ValueError('Region code {} does not exist'.format(region_code))
count=0
for im in image_names:
# reading all images and dump nparrays by voxels in region by 1000 images
try:
images_data=nipy.load_image(os.path.join(image_path, im ))._data
count+=1
data=images_data[coordinate]
data_4d.append(data)
if count==1000:
np_4d=np.array(data_4d)
np.save(os.path.join(path_4d, str(region_code) + "_" + str(part)) , np_4d )<|fim▁hole|> data_4d=[]
np_4d=None
part+=1
count=0
except:
print ("Error during reading image {}".format(str(im)))
if count!=0:
np_4d=np.array(data_4d)
np.save(os.path.join(path_4d, str(region_code) +"_" + str(part)) , np_4d )
convert_array_for_regression(path_4d, region_code)
delete_arrays(path_4d, region_code)
def experiment_save_4d(logs_dir, atlas_path, image_path, path_4d, region_code, reg):
    """Load the atlas image and dump the selected region as npy chunks."""
    atlas_image = nipy.load_image(atlas_path)
    save_4d_data_region(logs_dir, atlas_image, image_path, path_4d, region_code, regexp=reg)
if __name__=="__main__":
    # Command line interface: convert a set of NIfTI images into npy chunks
    # grouped by atlas region code.
    parser = argparse.ArgumentParser(description='Convert nifti images to nparray files')
    parser.add_argument("-o",required=True, type=str, help="path to save result folder")
    parser.add_argument("-i",required=True, type=str, help="path to nifti images")
    parser.add_argument("-atlas",required=True, type=str, help="path to Atlas images to use to define voxel chunks")
    parser.add_argument("-code",required=True,type=int, help="Atlas chunk code")
    parser.add_argument("-regexp",type=str,default='NO', help="REGEXP to select images")
    parser.add_argument("-logs",type=str,required=True, help="path to save logs")
    args = parser.parse_args()
    print args
    # Time the whole conversion (Timer is a context manager from timer.py).
    with Timer() as t:
        experiment_save_4d(args.logs, args.atlas, args.i, args.o, args.code, args.regexp)
    print "save data for analysis %s s" %(t.secs)
<|file_name|>template-build.js<|end_file_name|><|fim▁begin|>! function(factory) {
if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') {
var target = module['exports'] || exports;
factory(target);
} else if (typeof define === 'function' && define['amd']) {
//define(['exports'],function(exports){
// exports.abc = function(){}
//});
define(['exports'], factory);
} else {
factory(window['NC'] = {});
}
}(function(exports) {
function reMarker(templ, data, type) {
    // Template engine entry point.
    // templ: template source string.
    // data:  optional data object to render with.
    // type:  optional target language, 'JavaScript' (default) or 'PHP'.
    var _type = type || 'JavaScript';
    // Compile-only mode: called with just a template, or with an explicit
    // type but no data. Returns the parsed intermediate source.
    if (arguments.length === 1 ||(!data && type)) {
        var _templ = reMarker[_type].parse(templ);
        return _templ;
        /* return function(data) {
            return reMarker[_type].proc(_templ, data);
        }*/
    }
    data = data || {};
    // Compile-and-render mode: parse, then evaluate against `data`.
    return reMarker[_type].proc(reMarker[_type].parse(templ), data);
}
/**
 * String helper utilities shared by the parsers.
 * @type {Object}
 */
var _utils = {
    // Strip leading and trailing whitespace.
    trim: function(str) {
        return str.replace(/(^\s*)|(\s*$)/g, "");
    },
    // Strip leading whitespace only.
    lTrim: function(str) {
        return str.replace(/(^\s*)/g, "");
    },
    // Strip trailing whitespace only.
    rTrim: function(str) {
        return str.replace(/(\s*$)/g, "");
    },
    // Drop empty entries from an array via a join/split round-trip on a
    // delimiter guaranteed absent from the content.
    removeEmpty: function(arr) {
        var splitStr = _separator(arr);
        var REMOVE_REGEX = new RegExp(splitStr + splitStr);
        var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr);
        return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr);
    },
    // NOTE(review): both replaces are no-ops ('<' -> '<'); the original
    // intent was probably HTML-escaping ('<' -> '&lt;') lost in an encoding
    // round-trip. Confirm before relying on this as an escape function.
    filter: function(str) {
        return str.replace('<', '<').replace('>', '>');
    }
};
/**
 * Pick a single character that does not occur in the given string, so it
 * can be used as a safe join/split delimiter.
 * @param {String} str source string the separator must avoid
 * @return {String} one-character separator
 */
function _separator(str) {
    var candidate = '';
    while (true) {
        // Characters from a high, rarely-used code-point range (255..355).
        candidate = String.fromCharCode(Math.random() * 100 + 255);
        if (str.indexOf(candidate) < 0) {
            break;
        }
    }
    return candidate;
}
/**
 * Normalize template source before parsing:
 *  - strip CR / LF / TAB characters,
 *  - collapse whitespace runs to a single space,
 *  - escape single quotes so the source can sit inside a '…' literal.
 *
 * Bug fix: the original character class /[\r|\n|\t]/ also matched the
 * literal '|' character (pipes do not act as alternation inside [...]),
 * silently destroying `||` operators in template expressions.
 * @param html template source
 * @returns {String} normalized source
 */
function removeUnsafe(html) {
    var _templ = html.replace(/[\r\n\t]/g, '').replace(/\s{2,}/g, ' ').replace(/'/g, "\\'");
    return _templ;
}
/**
 * Split command tokens into a flat key/value list,
 * e.g. ['a=1', 'b = 2'] -> ['a', '1', 'b', '2'].
 * @param {Array} value token array
 * @returns {Array} flat [key, value, key, value, …] list; empty when the
 *          input is not an array or the alternation does not line up
 */
function findPairs(value) {
    var cache = [];
    if (Object.prototype.toString.call(value) === '[object Array]') {
        // Matches `name=` so each key can be isolated from its value.
        var KEY_REGEX = /\b(\w+)\s*?=/g;
        var commandStr = value.join(' ');
        // Delimiter guaranteed absent from the joined string.
        var _sp = _separator(commandStr);
        commandStr = commandStr.replace(KEY_REGEX, _sp + "$1" + _sp);
        value = _utils.removeEmpty(commandStr.split(_sp));
        // Only accept a clean key/value alternation.
        if (value.length % 2 == 0) {
            for (var i = 0; i < value.length; i = i + 2) {
                var _pair = [value[i], value[i + 1]];
                cache = cache.concat(_pair);
            }
        }
    }
    return cache;
}
var VAR_REGEX = /^[a-zA-Z_][a-zA-Z0-9_]*$/im;
// Prefix every bare identifier token with the PHP '$' sigil; operators,
// numbers and anything that is not an identifier pass through unchanged.
function _setVarToken(arr) {
    var result = [];
    for (var i = 0; i < arr.length; i++) {
        var token = arr[i];
        result.push(VAR_REGEX.test(token) === true ? '$' + token : token);
    }
    return result;
}
reMarker.PHP = (function() {
var Ruler = {
guid: 0
};
/**
* 匹配语法规则处理
* @type {{ruler: Function, rulerAssign: Function, rulerEndSwitch: Function, rulerCase: Function, rulerDefault: Function, rulerSwitch: Function, rulerElseIf: Function, rulerBreak: Function, rulerElse: Function, rulerEndIf: Function, rulerIf: Function, rulerEndList: Function, rulerList: Function}}
*/
Ruler.regRuler = {
ruler: function(str) {
var listArr = Ruler.util.removeEmpty(str.split(' '));
//import,include
var ruler = {
"list": this.rulerList,
"if": this.rulerIf,
"break": this.rulerBreak,
'/#list': this.rulerEndList,
'else': this.rulerElse,
"/#if": this.rulerEndIf,
'elseif': this.rulerElseIf,
'switch': this.rulerSwitch,
'case': this.rulerCase,
'default': this.rulerDefault,
'/#switch': this.rulerEndSwitch,
'assign': this.rulerAssign,
'return': this.rulerReturn
};
return (ruler[listArr[0]]).call(this, listArr);
},
rulerReturn: function() {
return 'return;';
},
/**
* 定义变量
* @param arr
* @returns {string}
*/
rulerAssign: function(arr) {
var result = [],
count;
var rt = findPairs(arr.slice(1));
count = rt.length;
for (j = 0; j < count; j += 2) {
var name = rt[j];
result.push('$' + name + '=' + rt[j + 1] + ';');
}
return result.join('');
},
rulerEndSwitch: function(arr) {
return '}';
},
rulerCase: function(arr) {
return ('case ' + arr[1] + ':');
},
rulerDefault: function() {
return 'default:';
},
rulerSwitch: function(arr) {
arr= _setVarToken(arr);
return 'switch(' + arr.join('').replace('switch', '') + '){';
},
rulerElseIf: function(arr) {
if (arr.length < 2) {
return false;
}
arr=_setVarToken(arr.slice(1));
return '}else if(' + Ruler.util.filter(arr.join('')) + '){';
},
rulerBreak: function() {
return 'break;';
},
rulerElse: function(arr) {
return '}else{';
},
rulerEndIf: function(arr) {
return '}';
},
rulerIf: function(arr) {
if (arr.length < 2) {
return false;
}
arr=_setVarToken(arr.slice(1));
return 'if(' + Ruler.util.filter(arr.join('')) + '){';
},
rulerEndList: function(arr) {
return '}';
},
/**
* 循环列表方法
* @param arr
* @returns {string}
*/
rulerList: function(arr) {
var listName, loopName, loopIndexName, loopHasNextName, result = [];
if (arr.length != 4) {
return;
}
var _guid = Ruler.guid++;
loopName = arr[3];
listName = arr[1];
loopIndexName = loopName + '_index';
loopHasNextName = loopName + '_has_next';
//如果变量名不是传统的字母或数字
if (!/^\w+$/.test(listName)) {
if (listName.indexOf('$') !== 0) {
listName = '$' + listName;
}
var _listName = '$_list' + _guid;
result.push(_listName + '=' + listName + ';');
listName = _listName;
} else {
listName = '$' + listName;
}
loopName = '$' + loopName;
loopIndexName = '$' + loopIndexName;
loopHasNextName = '$' + loopHasNextName;
result.push([
'$_i{guid}=0',
'$count{guid}=count(' + listName + ')',
loopName,
loopIndexName,
loopHasNextName + ';'
].join(';'));
result.push('for(;$_i{guid}<$count{guid};$_i{guid}++){');
result.push(loopName + '=' + listName + '[$_i{guid}];');
result.push(loopIndexName + '=$_i{guid};');
result.push(loopHasNextName + '=$_i{guid}!==$count{guid}-1;');
return result.join('').replace(/\{guid\}/ig, _guid);
}
};
/**
* 内嵌函数,待扩展
* @type {{trim: Function, lTrim: Function, rTrim: Function, removeEmpty: Function, filter: Function}}
*/
Ruler.util = {
trim: function(str) {
return str.replace(/(^\s*)|(\s*$)/g, "");
},
lTrim: function(str) {
return str.replace(/(^\s*)/g, "");
},
rTrim: function(str) {
return str.replace(/(\s*$)/g, "");
},
removeEmpty: function(arr) {
var splitStr = _separator(arr);
var REMOVE_REGEX = new RegExp(splitStr + splitStr);
var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr);
return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr);
},
filter: function(str) {
return str.replace('<', '<').replace('>', '>');
}
};
/**
* 将模板语法解释为JS语法
* @param _templ 模板字符串
* @returns {String} 语法解析后的
* @private
*/
function _parse(_templ) {
var chunks = [],
replaced = [],<|fim▁hole|> var lastIndex = 0;
var ss = /<#.+?>|\${.+?}|<\/#.+?>|<@.+?>/ig;
/**
* 将模块中的匹配替换为相应语言的语法
* @param {String} str 输入
* @param {Number} type 0普通字符 1变量 2表达式
* @return {Null}
*/
function _pushStr(str, type) {
if (str !== '') {
if (type == 2) {
replaced.push(str)
} else {
if (type == 1) {
replaced.push(printPrefix + str + ';')
} else {
str = str.replace(/"/ig, "\\\"");
replaced.push(printPrefix + '"' + str + '";')
}
}
}
}
//移除不安全代码
_templ = removeUnsafe(_templ);
_templ.replace(ss, function repalceHandler(match, index) {
if (lastIndex != index) {
var _temp_ = _templ.substring(lastIndex, index);
if (Ruler.util.trim(_temp_) != '')
_pushStr(_templ.substring(lastIndex, index));
chunks.push(_temp_);
}
if (match[0] == '$') {
_pushStr('$' + match.substring(2, match.length - 1), 1);
} else {
//是注释,暂时不处理
if (match[0] == '<' && match[1] == '#' && match[2] == '-') {
} else {
if (match[0] == '<' && match[1] == '#') {
_pushStr(Ruler.regRuler.ruler(match.substring(2, match.length - 1)), 2);
} else if (match[1] == '/' && match[2] == '#') {
_pushStr(Ruler.regRuler.ruler(match.substring(1, match.length - 1)), 2);
}
chunks.push(match);
}
}
//set the last match index as current match index plus matched value length
lastIndex = index + match.length;
});
//add the end string for replaced string
if (lastIndex < _templ.length) {
_pushStr(_templ.substring(lastIndex));
}
//if no matched replace
if (!replaced.length) {
_pushStr(_templ);
}
replaced = ["$__buf__='';", replaced.join(''), ";echo($__buf__);"].join('');
return replaced;
}
function _proc(html, data) {
return html;
}
return {
parse: _parse,
proc: _proc
}
})();
reMarker.JavaScript = (function() {
var Ruler = {};
/**
* 匹配语法规则处理
* @type {{ruler: Function, rulerAssign: Function, rulerEndSwitch: Function, rulerCase: Function, rulerDefault: Function, rulerSwitch: Function, rulerElseIf: Function, rulerBreak: Function, rulerElse: Function, rulerEndIf: Function, rulerIf: Function, rulerEndList: Function, rulerList: Function}}
*/
Ruler.regRuler = {
ruler: function(str) {
var listArr = Ruler.util.removeEmpty(str.split(' '));
//import,include
var ruler = {
"list": this.rulerList,
"if": this.rulerIf,
"break": this.rulerBreak,
'/#list': this.rulerEndList,
'else': this.rulerElse,
"/#if": this.rulerEndIf,
'elseif': this.rulerElseIf,
'switch': this.rulerSwitch,
'case': this.rulerCase,
'default': this.rulerDefault,
'/#switch': this.rulerEndSwitch,
'assign': this.rulerAssign,
'return': this.rulerReturn
};
return (ruler[listArr[0]]).call(this, listArr);
},
rulerReturn: function() {
return 'return;';
},
/**
* 定义变量
* @param arr
* @returns {string}
*/
rulerAssign: function(arr) {
var result = [],
count;
var rt = findPairs(arr.slice(1));
count = rt.length;
for (j = 0; j < count; j += 2) {
var name = rt[j];
result.push('var ');
result.push(name + '=' + rt[j + 1] + ';');
}
return result.join('');
},
rulerEndSwitch: function(arr) {
return '}';
},
rulerCase: function(arr) {
return ('case ' + arr[1] + ':');
},
rulerDefault: function() {
return 'default:';
},
rulerSwitch: function(arr) {
return 'switch(' + arr.join('').replace('switch', '') + '){';
},
rulerElseIf: function(arr) {
if (arr.length < 2) {
return false;
}
return '}else if(' + Ruler.util.filter(arr.slice(1).join('')) + '){';
},
rulerBreak: function() {
return 'break;';
},
rulerElse: function(arr) {
return '}else{';
},
rulerEndIf: function(arr) {
return '}';
},
rulerIf: function(arr) {
if (arr.length < 2) {
return false;
}
return 'if(' + Ruler.util.filter(arr.slice(1).join('')) + '){';
},
rulerEndList: function(arr) {
return '}})();';
},
/**
* 循环列表方法
* @param arr
* @returns {string}
*/
rulerList: function(arr) {
var listName, loopName, loopIndexName, loopHasNextName, result = [];
if (arr.length != 4) {
return;
}
loopName = arr[3];
listName = arr[1];
loopIndexName = loopName + '_index';
loopHasNextName = loopName + '_has_next';
result.push('(function(){');
if (!/^\w+$/.test(listName)) {
result.push('var _list=' + listName + ';');
listName = '_list';
}
result.push([
'var _i=0',
'_count=' + listName + '.length',
loopName,
loopIndexName,
loopHasNextName + ';'
].join(','));
result.push('for(;_i<_count;_i++){');
result.push(loopName + '=' + listName + '[_i];');
result.push(loopIndexName + '=_i;');
result.push(loopHasNextName + '=_i!==_count-1;');
return result.join('');
}
};
/**
* 内嵌函数,待扩展
* @type {{trim: Function, lTrim: Function, rTrim: Function, removeEmpty: Function, filter: Function}}
*/
Ruler.util = {
trim: function(str) {
return str.replace(/(^\s*)|(\s*$)/g, "");
},
lTrim: function(str) {
return str.replace(/(^\s*)/g, "");
},
rTrim: function(str) {
return str.replace(/(\s*$)/g, "");
},
removeEmpty: function(arr) {
var splitStr = _separator(arr);
var REMOVE_REGEX = new RegExp(splitStr + splitStr);
var REMOVE_HEAD_REGEX = new RegExp('^' + splitStr);
return arr.join(splitStr).replace(REMOVE_REGEX, splitStr).replace(REMOVE_HEAD_REGEX, '').split(splitStr);
},
filter: function(str) {
return str.replace('<', '<').replace('>', '>');
}
};
/**
* 将模板语法解释为JS语法
* @param _templ 模板字符串
* @returns {String} 语法解析后的
* @private
*/
function _parse(_templ) {
var chunks = [],
replaced = [],
compiled;
var printPrefix = "__buf__.push(";
var lastIndex = 0;
var ss = /<#.+?>|\${.+?}|<\/#.+?>|<@.+?>/ig;
/**
* 将模块中的匹配替换为相应语言的语法
* @param {String} str 输入
* @param {Number} type 0普通字符 1变量 2表达式
* @return {Null}
*/
function _pushStr(str, type) {
str = str.replace(/'/g, "\\'");
if (str !== '') {
if (type == 1) {
replaced.push(printPrefix + str + ');')
} else if (type == 2) {
replaced.push(str)
} else {
replaced.push(printPrefix + '\'' + str + '\');')
}
}
}
//移除不安全代码
_templ = removeUnsafe(_templ);
_templ.replace(ss, function(match, index) {
//the last match index of all template
//上次匹配结束位置与当前匹配的位置之间可能会有一些字符,也要加进来
if (lastIndex != index) {
var _temp_ = _templ.substring(lastIndex, index);
if (Ruler.util.trim(_temp_) != '')
_pushStr(_templ.substring(lastIndex, index));
chunks.push(_temp_);
}
if (match[0] == '$') {
_pushStr(match.substring(2, match.length - 1), 1);
//replaced.push(printPrefix + match.substring(2, match.length - 1) + ');');
} else {
//是注释,暂时不处理
if (match[0] == '<' && match[1] == '#' && match[2] == '-') {
} else {
if (match[0] == '<' && match[1] == '#') {
_pushStr(Ruler.regRuler.ruler(match.substring(2, match.length - 1)), 2);
} else if (match[1] == '/' && match[2] == '#') {
_pushStr(Ruler.regRuler.ruler(match.substring(1, match.length - 1)), 2);
} else {}
chunks.push(match);
}
}
//set the last match index as current match index plus matched value length
lastIndex = index + match.length;
});
//add the end string for replaced string
if (lastIndex < _templ.length) {
_pushStr(_templ.substring(lastIndex));
}
//if no matched replace
if (!replaced.length) {
_pushStr(_templ);
}
replaced = ["var __buf__=[],$index=null;with($data){", replaced.join(''), "} return __buf__.join('');"].join('');
return replaced;
}
// Evaluate parsed template source against `data`.
// html: the JS source produced by _parse; data: plain data object.
function _proc(html, data) {
    // Copy the helper functions so templates can reach them via `$util`.
    var util = {};
    if (Ruler.util) {
        var _util = Ruler.util;
        for (var key in _util) {
            util[key] = _util[key];
        }
    }
    // Anything that is not a plain object is replaced by an empty one.
    if (Object.prototype.toString.call(data) !== '[object Object]') {
        data = {};
    }
    var replaced = html;
    // Bug fix: `compiled` was assigned without declaration, leaking an
    // implicit global (and throwing a ReferenceError in strict mode).
    var compiled;
    try {
        compiled = new Function("$data", "$util", replaced);
    } catch (e) {
        throw "template code error";
    }
    // NOTE(review): `window` makes this browser-only -- confirm the Node
    // (CommonJS) code path is never expected to call _proc.
    return compiled.call(window, data, util)
}
return {
parse: _parse,
proc: _proc
}
})();
/*
模板引擎,使用freemark语法,目前已知最快的
作者:陈鑫
*/
var nc = typeof exports !== 'undefined' ? exports : {};
nc.reMarker = {
/**
* 柯里化模板语法,二次传入
* @param templ
* @returns {Function}
*/
proc: reMarker,
parse:reMarker
};
});
//如果内嵌入web页面,则自动将模板导出为JS变量
! function(factory) {
if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') {
var target = module['exports'] || exports;
factory(target);
} else if (typeof define === 'function' && define['amd']) {
define(['exports'], factory);
} else {
var scriptTags = document.getElementsByTagName('script'),
templates = [];
for (var i = 0; i < scriptTags.length; i++) {
if (scriptTags[i].getAttribute('type') == 'remark-template') {
templates.push(scriptTags[i]);
}
}
for (var t = 0; t < templates.length; t++) {
var _id = '__' + templates[t].id + '__';
window[_id] = window.NC.reMarker.proc(templates[t].innerHTML);
}
}
}(function(exports) {});<|fim▁end|> | compiled;
var printPrefix = "$__buf__.="; |
<|file_name|>tabbar.ts<|end_file_name|><|fim▁begin|>import {Component, Inject} from "@angular/core";
import {HighLevelComponent, GENERIC_INPUTS, GENERIC_BINDINGS} from "../common/component";
import {REACT_NATIVE_WRAPPER} from "./../../renderer/renderer";
import {ReactNativeWrapper} from "../../wrapper/wrapper";
/**
* A component for displaying a tab bar.
*
* ```
@Component({
selector: 'sample',
template: `
<TabBar tintColor="white" barTintColor="darkslateblue">
<TabBarItem systemIcon="history" [selected]="selectedTab == 'one'" (select)="selectedTab='one'"><Text>Tab one</Text></TabBarItem>
<TabBarItem systemIcon="favorites" [selected]="selectedTab == 'two'" (select)="selectedTab='two'"><Text>Tab two</Text></TabBarItem>
<TabBarItem systemIcon="featured" badge="8" [selected]="selectedTab == 'three'" (select)="selectedTab='three'"><Text>Tab three</Text></TabBarItem>
</TabBar>
`
})
export class Sample {
selectedTab: string = 'one';
}
* ```
* @style https://facebook.github.io/react-native/docs/view.html#style
* @platform ios
*/
@Component({
selector: 'TabBar',
inputs: [
'barTintColor', 'itemPositioning', 'tintColor', 'translucent'
].concat(GENERIC_INPUTS),
template: `<native-tabbar [barTintColor]="_barTintColor" [itemPositioning]="_itemPositioning" [tintColor]="_tintColor" [translucent]="_translucent"
${GENERIC_BINDINGS}><ng-content></ng-content></native-tabbar>`
})
export class TabBar extends HighLevelComponent {
constructor(@Inject(REACT_NATIVE_WRAPPER) wrapper: ReactNativeWrapper) {
super(wrapper);
this.setDefaultStyle({flex: 1});
}
//Properties
public _barTintColor: number;
public _itemPositioning: string;
public _tintColor: number;
public _translucent: boolean;
/**<|fim▁hole|> /**
* To be documented
*/
set itemPositioning(value: string) {this._itemPositioning = this.processEnum(value, ['auto', 'fill', 'center']);}
/**
* To be documented
*/
set tintColor(value: string) {this._tintColor = this.processColor(value);}
/**
* To be documented
*/
set translucent(value: string) {this._translucent = this.processBoolean(value);}
}<|fim▁end|> | * To be documented
*/
set barTintColor(value: string) {this._barTintColor = this.processColor(value);} |
<|file_name|>component.js<|end_file_name|><|fim▁begin|>import React from 'react'
import { Link } from 'react-router'
import videos from './videos.json'
const video = videos[Math.floor(Math.random() * videos.length)]
export default ({ userLoaded, toSteps, toInfo }) => (
<div className='ext-home-cover' style={{
backgroundImage: `url("${video.image}")`
}}>
{window.innerWidth >= 768 && (
<div className='banner'>
<div className='video'>
<video
playsInline
autoPlay
muted
loop
poster={video.image}
id='bgvid'>
<source src={video.video} type='video/mp4' />
</video>
</div>
</div>
)}
<div className='container'>
<div<|fim▁hole|> <h2>Queremos que seas parte de la celebración de la bandera y su creador. Sumate a decidir cómo ponemos linda nuestra ciudad.</h2>
</div>
</div>
</div>
)<|fim▁end|> | className='ext-site-cover-isologo'
style={{ backgroundImage: `url('/ext/lib/site/home-multiforum/consultas.svg')` }} />
<div>
<h1>Consultas</h1> |
<|file_name|>webglbuffer.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use crate::dom::bindings::codegen::Bindings::WebGLBufferBinding;
use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::DomRoot;
use crate::dom::webglobject::WebGLObject;
use crate::dom::webglrenderingcontext::WebGLRenderingContext;
use canvas_traits::webgl::webgl_channel;
use canvas_traits::webgl::{WebGLBufferId, WebGLCommand, WebGLError, WebGLResult};
use dom_struct::dom_struct;
use ipc_channel::ipc;
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: WebGLBufferId,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
capacity: Cell<usize>,
marked_for_deletion: Cell<bool>,
attached_counter: Cell<u32>,
/// https://www.khronos.org/registry/OpenGL-Refpages/es2.0/xhtml/glGetBufferParameteriv.xml
usage: Cell<u32>,
}
impl WebGLBuffer {
fn new_inherited(context: &WebGLRenderingContext, id: WebGLBufferId) -> Self {
Self {
webgl_object: WebGLObject::new_inherited(context),
id,
target: Default::default(),
capacity: Default::default(),
marked_for_deletion: Default::default(),
attached_counter: Default::default(),
usage: Cell::new(WebGLRenderingContextConstants::STATIC_DRAW),
}
}
pub fn maybe_new(context: &WebGLRenderingContext) -> Option<DomRoot<Self>> {
let (sender, receiver) = webgl_channel().unwrap();
context.send_command(WebGLCommand::CreateBuffer(sender));
receiver
.recv()
.unwrap()
.map(|id| WebGLBuffer::new(context, id))
}
pub fn new(context: &WebGLRenderingContext, id: WebGLBufferId) -> DomRoot<Self> {
reflect_dom_object(
Box::new(WebGLBuffer::new_inherited(context, id)),
&*context.global(),
WebGLBufferBinding::Wrap,
)
}
}
impl WebGLBuffer {
pub fn id(&self) -> WebGLBufferId {
self.id
}
    /// Uploads `data` to the buffer's current bind target over an IPC bytes
    /// channel, recording the new capacity and usage hint.
    ///
    /// Returns `InvalidEnum` for any usage other than `STREAM_DRAW`,
    /// `STATIC_DRAW` or `DYNAMIC_DRAW`.
    ///
    /// # Panics
    /// Panics if the buffer has never been bound (`target` is `None`).
    pub fn buffer_data(&self, data: &[u8], usage: u32) -> WebGLResult<()> {
        match usage {
            WebGLRenderingContextConstants::STREAM_DRAW |
            WebGLRenderingContextConstants::STATIC_DRAW |
            WebGLRenderingContextConstants::DYNAMIC_DRAW => (),
            _ => return Err(WebGLError::InvalidEnum),
        }
        self.capacity.set(data.len());
        self.usage.set(usage);
        // The payload travels out-of-band: the command carries the receiver,
        // and the bytes are pushed through the sender afterwards.
        let (sender, receiver) = ipc::bytes_channel().unwrap();
        self.upcast::<WebGLObject>()
            .context()
            .send_command(WebGLCommand::BufferData(
                self.target.get().unwrap(),
                receiver,
                usage,
            ));
        sender.send(data).unwrap();
        Ok(())
    }
pub fn capacity(&self) -> usize {
self.capacity.get()
}
    /// Flags this buffer as deleted and, if nothing still holds an
    /// attachment to it, sends the GL-side delete command immediately.
    /// Idempotent: a second call is a no-op.
    pub fn mark_for_deletion(&self, fallible: bool) {
        if self.marked_for_deletion.get() {
            return;
        }
        self.marked_for_deletion.set(true);
        if self.is_deleted() {
            self.delete(fallible);
        }
    }

    /// Sends `DeleteBuffer` to the WebGL thread. Only valid once the buffer
    /// is fully deleted (marked and detached) -- asserted below.
    fn delete(&self, fallible: bool) {
        assert!(self.is_deleted());
        let context = self.upcast::<WebGLObject>().context();
        let cmd = WebGLCommand::DeleteBuffer(self.id);
        if fallible {
            // Ignorable send path -- used from Drop, presumably because the
            // command channel may already be closed there (confirm).
            context.send_command_ignored(cmd);
        } else {
            context.send_command(cmd);
        }
    }
pub fn is_marked_for_deletion(&self) -> bool {
self.marked_for_deletion.get()
}
<|fim▁hole|> self.marked_for_deletion.get() && !self.is_attached()
}
pub fn target(&self) -> Option<u32> {
self.target.get()
}
pub fn set_target(&self, target: u32) -> WebGLResult<()> {
if self.target.get().map_or(false, |t| t != target) {
return Err(WebGLError::InvalidOperation);
}
self.target.set(Some(target));
Ok(())
}
    /// True while at least one consumer still references this buffer.
    pub fn is_attached(&self) -> bool {
        self.attached_counter.get() != 0
    }

    /// Registers one more attachment; panics on refcount overflow.
    pub fn increment_attached_counter(&self) {
        self.attached_counter.set(
            self.attached_counter
                .get()
                .checked_add(1)
                .expect("refcount overflowed"),
        );
    }

    /// Releases one attachment; panics on underflow. If that was the last
    /// attachment and the buffer is already marked for deletion, the
    /// GL-side delete is performed now.
    pub fn decrement_attached_counter(&self) {
        self.attached_counter.set(
            self.attached_counter
                .get()
                .checked_sub(1)
                .expect("refcount underflowed"),
        );
        if self.is_deleted() {
            self.delete(false);
        }
    }

    /// Last usage hint stored for this buffer (STATIC_DRAW initially).
    pub fn usage(&self) -> u32 {
        self.usage.get()
    }
}
impl Drop for WebGLBuffer {
fn drop(&mut self) {
self.mark_for_deletion(true);
}
}<|fim▁end|> | pub fn is_deleted(&self) -> bool { |
<|file_name|>macro_import.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>use session::Session;
use metadata::creader::CrateReader;
use std::collections::{HashSet, HashMap};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::attr::AttrMetaMethods;
/// Walks a crate collecting exported macros from `extern crate` items.
struct MacroLoader<'a> {
    sess: &'a Session,
    // Spans of crate-root `extern crate` items; macro-loading attributes
    // are only legal on items whose span appears in this set.
    span_whitelist: HashSet<Span>,
    reader: CrateReader<'a>,
    // Accumulated macro definitions, in visit order.
    macros: Vec<ast::MacroDef>,
}

impl<'a> MacroLoader<'a> {
    /// Creates a loader with an empty whitelist and no collected macros.
    fn new(sess: &'a Session) -> MacroLoader<'a> {
        MacroLoader {
            sess: sess,
            span_whitelist: HashSet::new(),
            reader: CrateReader::new(sess),
            macros: vec![],
        }
    }
}
/// Read exported macros from every `extern crate` item in `krate`,
/// honoring the `#[macro_use]` / `#[macro_reexport]` attribute selections.
pub fn read_macro_defs(sess: &Session, krate: &ast::Crate) -> Vec<ast::MacroDef> {
    let mut loader = MacroLoader::new(sess);

    // We need to error on `#[macro_use] extern crate` when it isn't at the
    // crate root, because `$crate` won't work properly. Identify these by
    // spans, because the crate map isn't set up yet.
    for item in &krate.module.items {
        if let ast::ItemExternCrate(_) = item.node {
            loader.span_whitelist.insert(item.span);
        }
    }

    visit::walk_crate(&mut loader, krate);

    loader.macros
}
pub type MacroSelection = HashMap<token::InternedString, Span>;
// note that macros aren't expanded yet, and therefore macros can't add macro imports.
impl<'a, 'v> Visitor<'v> for MacroLoader<'a> {
    /// Inspects each `extern crate` item: parses its macro-related
    /// attributes (`#[macro_use]`, `#[macro_reexport]`, plus the deprecated
    /// `#[phase]` / `#[plugin]`) and then loads the selected macros.
    fn visit_item(&mut self, item: &ast::Item) {
        // We're only interested in `extern crate`.
        match item.node {
            ast::ItemExternCrate(_) => {}
            _ => {
                visit::walk_item(self, item);
                return;
            }
        }

        // Parse the attributes relating to macros.
        let mut import = Some(HashMap::new());  // None => load all
        let mut reexport = HashMap::new();

        for attr in &item.attrs {
            let mut used = true;
            match &attr.name()[..] {
                "phase" => {
                    self.sess.span_err(attr.span, "#[phase] is deprecated");
                }
                "plugin" => {
                    self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
                    self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
                                                            i.e. #![plugin({})]",
                                                           item.ident.as_str()));
                }
                "macro_use" => {
                    let names = attr.meta_item_list();
                    if names.is_none() {
                        // no names => load all
                        import = None;
                    }
                    if let (Some(sel), Some(names)) = (import.as_mut(), names) {
                        for attr in names {
                            if let ast::MetaWord(ref name) = attr.node {
                                sel.insert(name.clone(), attr.span);
                            } else {
                                self.sess.span_err(attr.span, "bad macro import");
                            }
                        }
                    }
                }
                "macro_reexport" => {
                    let names = match attr.meta_item_list() {
                        Some(names) => names,
                        None => {
                            self.sess.span_err(attr.span, "bad macro reexport");
                            continue;
                        }
                    };

                    for attr in names {
                        if let ast::MetaWord(ref name) = attr.node {
                            reexport.insert(name.clone(), attr.span);
                        } else {
                            self.sess.span_err(attr.span, "bad macro reexport");
                        }
                    }
                }
                _ => used = false,
            }
            if used {
                // Mark handled attributes so the unused-attribute lint is quiet.
                attr::mark_used(attr);
            }
        }

        self.load_macros(item, import, reexport)
    }
fn visit_mac(&mut self, _: &ast::Mac) {
// bummer... can't see macro imports inside macros.
// do nothing.
}
}
impl<'a> MacroLoader<'a> {
    /// Loads the exported macros of the crate referenced by `vi` (an
    /// `extern crate` item), honouring the `#[macro_use]` selection
    /// (`import`; `None` means "load all") and the `#[macro_reexport]`
    /// selection (`reexport`).
    fn load_macros<'b>(&mut self,
                       vi: &ast::Item,
                       import: Option<MacroSelection>,
                       reexport: MacroSelection) {
        if let Some(sel) = import.as_ref() {
            // Nothing selected and nothing reexported: skip reading metadata.
            if sel.is_empty() && reexport.is_empty() {
                return;
            }
        }
        // Macro-loading `extern crate`s are only permitted at the crate root
        // (see `read_macro_defs`: otherwise `$crate` would not work).
        if !self.span_whitelist.contains(&vi.span) {
            self.sess.span_err(vi.span, "an `extern crate` loading macros must be at \
the crate root");
            return;
        }
        let macros = self.reader.read_exported_macros(vi);
        // Names actually exported by the crate, used below to diagnose
        // selections that matched nothing.
        let mut seen = HashSet::new();
        for mut def in macros {
            let name = token::get_ident(def.ident);
            seen.insert(name.clone());
            // Usable locally when loading everything or when explicitly selected.
            def.use_locally = match import.as_ref() {
                None => true,
                Some(sel) => sel.contains_key(&name),
            };
            def.export = reexport.contains_key(&name);
            def.allow_internal_unstable = attr::contains_name(&def.attrs,
                                                              "allow_internal_unstable");
            debug!("load_macros: loaded: {:?}", def);
            self.macros.push(def);
        }
        // Report imported/reexported names that no exported macro matched.
        if let Some(sel) = import.as_ref() {
            for (name, span) in sel.iter() {
                if !seen.contains(name) {
                    self.sess.span_err(*span, "imported macro not found");
                }
            }
        }
        for (name, span) in reexport.iter() {
            if !seen.contains(name) {
                self.sess.span_err(*span, "reexported macro not found");
            }
        }
    }
}<|fim▁end|> |
//! Used by `rustc` when loading a crate with exported macros.
|
<|file_name|>spiderForTECHORANGE.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu ([email protected])
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
from selenium import webdriver
import os
import time
import logging
import re
import random
from cameo.utility import Utility
from cameo.localdb import LocalDbForTECHORANGE<|fim▁hole|>"""
class SpiderForTECHORANGE:
    """Selenium-based spider that downloads TECHORANGE
    (buzzorange.com/techorange) index, tag and news pages as raw HTML
    for later parsing."""
    # Constructor: set up output paths, the sub-command dispatch table,
    # helper objects and the (lazily created) selenium driver slot.
    def __init__(self):
        self.SOURCE_HTML_BASE_FOLDER_PATH = u"cameo_res\\source_html"
        self.PARSED_RESULT_BASE_FOLDER_PATH = u"cameo_res\\parsed_result"
        self.strWebsiteDomain = u"http://buzzorange.com/techorange"
        self.dicSubCommandHandler = {
            "index":self.downloadIndexPage,
            "tag":self.downloadTagPag,
            "news":self.downloadNewsPage
        }
        self.utility = Utility()
        self.db = LocalDbForTECHORANGE()
        self.driver = None
    # Return the usage/help text for this spider.
    def getUseageMessage(self):
        return ("- TECHORANGE -\n"
                "useage:\n"
                "index - download entry page of TECHORANGE \n"
                "tag - download not obtained tag page \n"
                "news [tag] - download not obtained news [of given tag] \n")
    # Create and return a selenium Chrome driver.
    def getDriver(self):
        chromeDriverExeFilePath = "cameo_res\\chromedriver.exe"
        driver = webdriver.Chrome(chromeDriverExeFilePath)
        return driver
    # Lazily initialize the selenium driver.
    def initDriver(self):
        if self.driver is None:
            self.driver = self.getDriver()
    # Quit the selenium driver and drop the reference.
    def quitDriver(self):
        self.driver.quit()
        self.driver = None
    # Run the spider: dispatch on lstSubcommand[0] with the optional
    # argument lstSubcommand[1].
    def runSpider(self, lstSubcommand=None):
        strSubcommand = lstSubcommand[0]
        strArg1 = None
        if len(lstSubcommand) == 2:
            strArg1 = lstSubcommand[1]
        self.initDriver() #init selenium driver
        self.dicSubCommandHandler[strSubcommand](strArg1)
        self.quitDriver() #quit selenium driver
    # Download the index (front) page.
    def downloadIndexPage(self, uselessArg1=None):
        logging.info("download index page")
        strIndexHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE"
        if not os.path.exists(strIndexHtmlFolderPath):
            os.mkdir(strIndexHtmlFolderPath) #mkdir source_html/TECHORANGE/
        # TECHORANGE front page
        self.driver.get("https://buzzorange.com/techorange/")
        # save html
        strIndexHtmlFilePath = strIndexHtmlFolderPath + u"\\index.html"
        self.utility.overwriteSaveAs(strFilePath=strIndexHtmlFilePath, unicodeData=self.driver.page_source)
    # Download every tag page not yet marked as obtained in the local DB.
    def downloadTagPag(self, uselessArg1=None):
        logging.info("download tag page")
        strTagHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\tag"
        if not os.path.exists(strTagHtmlFolderPath):
            os.mkdir(strTagHtmlFolderPath) #mkdir source_html/TECHORANGE/tag/
        strTagWebsiteDomain = self.strWebsiteDomain + u"/tag"
        # fetch tag names not yet downloaded from the local DB
        lstStrNotObtainedTagName = self.db.fetchallNotObtainedTagName()
        for strNotObtainedTagName in lstStrNotObtainedTagName:
            # skip tags whose name is too long (file-name safety)
            if len(strNotObtainedTagName) > 60:
                continue
            strTagUrl = strTagWebsiteDomain + u"/" + strNotObtainedTagName
            # tag page 0
            intPageNum = 0
            time.sleep(random.randint(2,5)) #sleep random time
            self.driver.get(strTagUrl)
            # save html
            strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html"%(intPageNum, strNotObtainedTagName)
            self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath, unicodeData=self.driver.page_source)
            # follow the "next page" link while one exists
            elesNextPageA = self.driver.find_elements_by_css_selector("div.nav-links a.next.page-numbers")
            while len(elesNextPageA) != 0:
                time.sleep(random.randint(2,5)) #sleep random time
                intPageNum = intPageNum+1
                strTagUrl = elesNextPageA[0].get_attribute("href")
                self.driver.get(strTagUrl)
                # save html
                strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html"%(intPageNum, strNotObtainedTagName)
                self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath, unicodeData=self.driver.page_source)
                # look for the following page, if any
                elesNextPageA = self.driver.find_elements_by_css_selector("div.nav-links a.next.page-numbers")
            # mark the tag as obtained in the DB (isGot = 1)
            self.db.updateTagStatusIsGot(strTagName=strNotObtainedTagName)
            logging.info("got tag %s"%strNotObtainedTagName)
    # Clamp a string for use in file names: strings longer than 128 chars
    # are truncated to 127 chars plus a trailing "_".
    def limitStrLessThen128Char(self, strStr=None):
        if len(strStr) > 128:
            logging.info("limit str less then 128 char")
            return strStr[:127] + u"_"
        else:
            return strStr
    # Download news pages. When strTagName is None, iterate over every tag
    # whose download completed (note: the tag pages must have been parsed
    # first, otherwise no news URLs exist in the DB even for completed tags).
    def downloadNewsPage(self, strTagName=None):
        if strTagName is None:
            # no tag given: process every completely obtained tag
            lstStrObtainedTagName = self.db.fetchallCompletedObtainedTagName()
            for strObtainedTagName in lstStrObtainedTagName:
                self.downloadNewsPageWithGivenTagName(strTagName=strObtainedTagName)
        else:
            # tag name given explicitly
            self.downloadNewsPageWithGivenTagName(strTagName=strTagName)
    # Download the not-yet-obtained news pages recorded for the given tag.
    def downloadNewsPageWithGivenTagName(self, strTagName=None):
        logging.info("download news page with tag %s"%strTagName)
        strNewsHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\news"
        if not os.path.exists(strNewsHtmlFolderPath):
            os.mkdir(strNewsHtmlFolderPath) #mkdir source_html/TECHORANGE/news/
        # news URLs recorded in the DB for this tag
        lstStrNewsUrl = self.db.fetchallNewsUrlByTagName(strTagName=strTagName)
        intDownloadedNewsCount = 0  # number of news pages downloaded so far
        timeStart = time.time()  # timing start point
        timeEnd = None  # timing end point
        for strNewsUrl in lstStrNewsUrl:
            # skip news pages already downloaded
            if not self.db.checkNewsIsGot(strNewsUrl=strNewsUrl):
                if intDownloadedNewsCount%10 == 0: # log the time spent per 10 news pages
                    timeEnd = time.time()
                    timeCost = timeEnd - timeStart
                    logging.info("download 10 news cost %f sec"%timeCost)
                    timeStart = timeEnd
                intDownloadedNewsCount = intDownloadedNewsCount+1
                time.sleep(random.randint(2,5)) #sleep random time
                self.driver.get(strNewsUrl)
                # save html; the news slug from the URL becomes the file name
                strNewsName = re.match("^https://buzzorange.com/techorange/[\d]{4}/[\d]{2}/[\d]{2}/(.*)/$", strNewsUrl).group(1)
                strNewsName = self.limitStrLessThen128Char(strStr=strNewsName) # clamp name to a file-name-safe length
                strNewsHtmlFilePath = strNewsHtmlFolderPath + u"\\%s_news.html"%strNewsName
                self.utility.overwriteSaveAs(strFilePath=strNewsHtmlFilePath, unicodeData=self.driver.page_source)
                # mark the news page as obtained in the DB (isGot = 1)
self.db.updateNewsStatusIsGot(strNewsUrl=strNewsUrl)<|fim▁end|> | """
抓取 科技報橘 html 存放到 source_html |
<|file_name|>completion.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This tool helps you to rebase package to the latest version
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hracek <[email protected]>
# Tomas Hozza <[email protected]>
import re
import sys
from rebasehelper.cli import CLI
from rebasehelper.archive import Archive
class Completion(object):
    @staticmethod
    def extensions():
        """Return the supported archive extensions without the leading dot."""
        archives = Archive.get_supported_archives()
        return [a.lstrip('.') for a in archives]
@staticmethod
def options():
def get_delimiter(parser, action):
if action.nargs == 0:
return None
fmt = parser._get_formatter() # pylint: disable=protected-access
usage = fmt._format_actions_usage([action], []) # pylint: disable=protected-access
option_string = action.option_strings[0]
idx = usage.find(option_string)
if idx == -1:
return None
return usage[idx + len(option_string)]<|fim▁hole|> for action in actions:
if not action.option_strings:
continue
delimiter = get_delimiter(parser, action) or ''
result.append(dict(
options=[o + delimiter.strip() for o in action.option_strings],
choices=action.choices or []))
return result
    @classmethod
    def dump(cls):
        """Build the placeholder values substituted into the completion
        script template (bash pattern/array literal syntax)."""
        options = cls.options()
        return {
            # pattern list of extensions
            'RH_EXTENSIONS': '@({})'.format('|'.join(cls.extensions())),
            # array of options
            'RH_OPTIONS': '({})'.format(' '.join(['"{}"'.format(' '.join(o['options'])) for o in options])),
            # array of choices of respective options
            'RH_CHOICES': '({})'.format(' '.join(['"{}"'.format(' '.join(o['choices'])) for o in options])),
        }
def replace_placeholders(s, **kwargs):
    """Substitute every ``@NAME@`` placeholder in *s*.

    Each placeholder is replaced with ``kwargs[NAME]`` when present,
    otherwise with the empty string.
    """
    placeholder_re = re.compile(r'@(\w+)@')
    return placeholder_re.sub(lambda match: kwargs.get(match.group(1), ''), s)
def main():
    """Render the completion-script template to a file.

    Usage: ``<script> TEMPLATE OUTPUT``. Reads TEMPLATE, replaces its
    ``@RH_...@`` placeholders with the values from ``Completion.dump()``
    and writes the result to OUTPUT.

    Returns 0 on success, 1 when the argument count is wrong.
    NOTE(review): the ``__main__`` guard below discards this return value,
    so a usage error still exits with status 0 — worth confirming intent.
    """
    if len(sys.argv) != 3:
        return 1
    with open(sys.argv[1]) as f:
        s = f.read()
    s = replace_placeholders(s, **Completion.dump())
    with open(sys.argv[2], 'w') as f:
        f.write(s)
    return 0
if __name__ == '__main__':
main()<|fim▁end|> | parser = CLI.build_parser()
result = []
actions = parser._get_optional_actions() + parser._get_positional_actions() # pylint: disable=protected-access |
<|file_name|>test_requestobject.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import unittest
import securetrading
from securetrading.test import abstract_test_stobjects
import six
class Test_Request(abstract_test_stobjects.Abstract_Test_StObjects):
    """Tests for securetrading.Request: construction defaults and
    cachetoken handling (raw tokens and base64-wrapped JSON tokens)."""

    def setUp(self):
        # Subclasses override self.class_ to run the same tests against
        # their own request type.
        super(Test_Request, self).setUp()
        self.class_ = securetrading.Request

    def test___init__(self):
        # A fresh request receives a unique reference of the form "A<alnum>".
        request = self.class_()
        six.assertRegex(self, request["requestreference"], "A[a-z0-9]+")
        self.assertEqual(securetrading.version_info, self.version_info)

    def test__set_cachetoken(self):
        # Expected results; the base64 inputs below decode to JSON carrying
        # datacenterurl/datacenterpath plus the raw cachetoken.
        exp1 = self.get_securetrading_request(
            {"datacenterurl": "https://webservices.securetrading.net",
             "datacenterpath": "/json/",
             "cachetoken":
             "17-ae7e511172a07c2fb45db4c73388087e4d850777386a5d72029aaf895\
87f3cf0"})
        exp2 = self.get_securetrading_request(
            {"datacenterurl": "https://webservices.securetrading.net",
             "cachetoken": "17-6a0287dd04497ba8dab257acbd983741f55410b5c709463\
7d8c3f0fb57bd25ec"})
        exp3 = self.get_securetrading_request(
            {"cachetoken": "17-6a0287dd04497ba8dab257acbd983741f55410b5c709463\
7d8c3f0fb57bd25ec"})
        # Test below treats invalid base64 string as cachetoken
        exp4 = self.get_securetrading_request(
            {"cachetoken": "eyJkYXRhY2VudGVydXJsIjogImh0dHBzOi8vd2Vic2VydmljZX\
Muc2VjdXJldHJhZGluZy5uZXQiLCAiY2FjaGV0b2tlbiI6ICIxNy1hZTdlNTExMTcy"})
        # (input cachetoken, expected resulting request) pairs
        tests = [('eyJkYXRhY2VudGVycGF0aCI6ICIvanNvbi8iLCAiZGF0YWNlbnRlcnVybCI\
6ICJodHRwczovL3dlYnNlcnZpY2VzLnNlY3VyZXRyYWRpbmcubmV0IiwgImNhY2hldG9rZW4iOiAiM\
TctYWU3ZTUxMTE3MmEwN2MyZmI0NWRiNGM3MzM4ODA4N2U0ZDg1MDc3NzM4NmE1ZDcyMDI5YWFmODk\
1ODdmM2NmMCJ9', exp1),
                 ('"eyJkYXRhY2VudGVydXJsIjogImh0dHBzOi8vd2Vic2VydmljZXMuc2VjdX\
JldHJhZGluZy5uZXQiLCAiY2FjaGV0b2tlbiI6ICIxNy02YTAyODdkZDA0NDk3YmE4ZGFiMjU3YWNi\
ZDk4Mzc0MWY1NTQxMGI1YzcwOTQ2MzdkOGMzZjBmYjU3YmQyNWVjIn0=', exp2),
                 ('17-6a0287dd04497ba8dab257acbd983741f55410b5c7094637d8c3f0fb\
57bd25ec', exp3),
                 ('eyJkYXRhY2VudGVydXJsIjogImh0dHBzOi8vd2Vic2VydmljZXMuc2VjdXJ\
ldHJhZGluZy5uZXQiLCAiY2FjaGV0b2tlbiI6ICIxNy1hZTdlNTExMTcy', exp4),
                 ]
        for cachetoken, expected in tests:
            request = self.class_()
            request._set_cachetoken(cachetoken)
            for obj in [expected, request]:
                del obj["requestreference"]  # Unique for every request object
            self.assertEqual(request, expected)
class Test_Requests(Test_Request):
    def setUp(self):
        # Re-run the Test_Request suite against the batch Requests type.
        super(Test_Requests, self).setUp()
        self.class_ = securetrading.Requests
def test_verify(self):
get_requests = self.get_securetrading_requests
get_request = self.get_securetrading_request
requests1 = get_requests([])
requests2 = get_requests(
[get_request({"a": "b"})])
<|fim▁hole|> get_request({"c": "d"})])
datacenter_url_dict = {"datacenterurl": "url"}
requests4 = get_requests(
[get_request({"a": "b"}),
get_request(datacenter_url_dict)])
datacenter_path_dict = {"datacenterpath": "path"}
requests5 = get_requests(
[get_request({"a": "b"}),
get_request(datacenter_path_dict)])
tests = [(requests1, None, None, None, None),
(requests2, None, None, None, None),
(requests3, None, None, None, None),
(requests4, securetrading.ApiError,
"10", "10 The key 'datacenterurl' must be specifed \
in the outer 'securetrading.Requests' object",
["The key 'datacenterurl' must be specifed in the \
outer 'securetrading.Requests' object"]),
(requests5, securetrading.ApiError,
"10", "10 The key 'datacenterpath' must be specifed \
in the outer 'securetrading.Requests' object",
["The key 'datacenterpath' must be specifed in the \
outer 'securetrading.Requests' object"]),
]
for requests, exp_exception, exp_code, exp_english, exp_data in tests:
if exp_exception is None:
requests.verify()
else:
self.check_st_exception(exp_exception, exp_data, exp_english,
exp_code, requests.verify)
    def test__validate_requests(self):
        """Only securetrading request objects may be nested; a plain dict
        must raise an AssertionError at construction time."""
        get_requests = self.get_securetrading_requests
        get_request = self.get_securetrading_request
        # (nested request list, expected exception, expected message)
        tests = [([], None, None),
                 ([get_request({"a": "b"})], None, None),
                 ([{"a": "b"}], AssertionError, "Invalid requests specified")
                 ]
        for requests_list, exp_exception, exp_message in tests:
            if exp_exception is None:
                requests = get_requests(requests_list)
            else:
                six.assertRaisesRegex(self, exp_exception, exp_message,
                                      get_requests,
                                      requests_list)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | requests3 = get_requests(
[get_request({"a": "b"}), |
<|file_name|>test_plane_fftw_wisdom_import_export.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <fftw3.h>
#include <omp.h>
#include <cstdlib>
#include <sweet/SimulationVariables.hpp>
class TestFFTPlans
{
public:
    /**
     * Load FFTW wisdom (precomputed plan data) from the file "sweet_fftw".
     *
     * When the file cannot be read, a warning is printed; if plan reuse is
     * mandatory (i_reuse_spectral_transformation_plans == 2), the process
     * exits instead of continuing without wisdom.
     */
    bool importWisdom(int i_reuse_spectral_transformation_plans)
    {
        static const char *wisdom_file = "sweet_fftw";
        std::cout << "fftw_import_wisdom_from_filename(" << wisdom_file << ")" << std::endl;
        // A zero return value signals the wisdom could not be loaded.
        int wisdom_plan_loaded = fftw_import_wisdom_from_filename(wisdom_file);
        if (wisdom_plan_loaded == 0)
        {
            std::cerr << "Failed to load FFTW wisdom from file '" << wisdom_file << "'" << std::endl;
            if (i_reuse_spectral_transformation_plans == 2)
                exit(1);
        }
        std::cout << "WISDOM: " << fftw_export_wisdom_to_string() << std::endl;
        return true;
    }
bool exportWisdom()
{
static const char *wisdom_file = "sweet_fftw";
std::cout << "fftw_export_wisdom_to_filename(" << wisdom_file << ")" << std::endl;
int wisdom_plan_stored = fftw_export_wisdom_to_filename(wisdom_file);
if (wisdom_plan_stored == 0)
{
std::cerr << "Failed to store FFTW wisdom to file " << wisdom_file << std::endl;
exit(1);
}
std::cout << "WISDOM: " << fftw_export_wisdom_to_string() << std::endl;
return true;
}
    /// Print the current FFTW wisdom string to stdout; always returns true.
    bool printWisdom()
    {
        std::cout << "WISDOM: " << fftw_export_wisdom_to_string() << std::endl;
        return true;
    }
int run(
int i_reuse_spectral_transformation_plans,
int i_res[2],
int i_nthreads
)
{
#if SWEET_THREADING
std::cout << "fftw_init_threads()" << std::endl;
int retval = fftw_init_threads();
if (retval == 0)
{
std::cerr << "ERROR: fftw_init_threads()" << std::endl;
exit(1);
}
std::cout << "fftw_plan_with_nthreads(" << i_nthreads << ")" << std::endl;
fftw_plan_with_nthreads(i_nthreads);
#endif
importWisdom(i_reuse_spectral_transformation_plans);
unsigned int num_cells = i_res[0]*i_res[1];
unsigned flags = 0;
if (i_reuse_spectral_transformation_plans == -1)
{
flags |= FFTW_ESTIMATE;
}
else
{
// estimation base don workload
if (num_cells < 32*32)
//flags |= FFTW_EXHAUSTIVE;
num_cells |= FFTW_MEASURE;
else if (num_cells < 128*128)
num_cells |= FFTW_MEASURE;
else
num_cells |= FFTW_PATIENT;
if (i_reuse_spectral_transformation_plans == 2)<|fim▁hole|> flags |= FFTW_WISDOM_ONLY;
}
}
// allocate more data than necessary for spectral space
fftw_complex *data_spectral = (fftw_complex*)fftw_malloc(sizeof(fftw_complex) * i_res[0]*i_res[1]);
// physical space data
double *data_physical = (double*)fftw_malloc(sizeof(double)*2 * i_res[0]*i_res[1]);
std::cout << "fftw_plan_dft_r2c_2d(...)" << std::endl;
fftw_plan fftw_plan_forward;
fftw_plan_forward =
fftw_plan_dft_r2c_2d(
i_res[1],
i_res[0],
data_physical,
(fftw_complex*)data_spectral,
flags
);
printWisdom();
if (fftw_plan_forward == nullptr)
{
std::cout << "Wisdom: " << fftw_export_wisdom_to_string() << std::endl;
std::cerr << "Failed to get forward plan dft_r2c fftw" << std::endl;
exit(-1);
}
exportWisdom();
#if SWEET_THREADING
fftw_cleanup_threads();
#endif
fftw_cleanup();
return 0;
}
};
/// Program entry: parse simulation parameters, determine the thread count
/// and run the FFTW wisdom import/export test.
int main(int i_argc, char **i_argv)
{
    SimulationVariables simVars;
    simVars.setupFromMainParameters(i_argc, i_argv, nullptr, true);
    simVars.outputConfig();
#if SWEET_THREADING
    // Use all available OpenMP threads for FFTW planning.
    int nthreads = omp_get_max_threads();
    std::cout << " + nthreads: " << nthreads << std::endl;
#else
    int nthreads = 0;
#endif
    TestFFTPlans t;
    t.run(simVars.misc.reuse_spectral_transformation_plans, simVars.disc.space_res_physical, nthreads);
    std::cout << "FIN" << std::endl;
    return 0;
}<|fim▁end|> | {
std::cout << "Enforcing to use Wisdom" << std::endl; |
<|file_name|>log.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
FORMAT = """%(asctime)s
:%(name)s:
%(levelname)s<|fim▁hole|>SELENIUM_REMOTE_CONNECTION = "selenium.webdriver.remote.remote_connection"<|fim▁end|> | :%(module)s
:%(funcName)
:%(lineno)s
:%(message)s""" |
<|file_name|>RemoveNthNodeFromEndOfList.java<|end_file_name|><|fim▁begin|>package org.luapp.practise.leetcode;
/**
* Created by lum on 2015/3/16.
*/
public class RemoveNthNodeFromEndOfList {
    /** Singly linked list node holding an int value. */
    public static class ListNode {
        int val;        // payload
        ListNode next;  // following node, or null at the tail
        ListNode(int x) {
            val = x;
            next = null;
        }
    }
public static ListNode removeNthFromEnd(ListNode head, int n) {
if (head == null) {
return head;
}
// 移动指示针
ListNode nPointer = head;
int i = 0;
while (nPointer != null && i < n) {
nPointer = nPointer.next;
i++;
}<|fim▁hole|> if (i != n) {
System.out.println("error");
return null;
}
ListNode pre = head;
ListNode lPointer = head;
while (nPointer != null) {
nPointer = nPointer.next;
pre = lPointer;
lPointer = lPointer.next;
}
if(lPointer == head) {
head = head.next;
} else {
pre.next = lPointer.next;
}
return head;
}
public static void print(ListNode head) {
if (head == null) {
System.out.println("null");
} else {
System.out.print(head.val);
ListNode temp = head.next;
while (temp != null) {
System.out.print("->" + temp.val);
temp = temp.next;
}
System.out.println();
}
}
    /**
     * Demo: build the list 1->2->3->4->5, print it, then print the result
     * of removing the -1th node from the end (with n=-1 the length check
     * fails, so "error" is printed and "null" is shown).
     */
    public static void main(String[] args) {
        ListNode head = new ListNode(1);
        ListNode temp = head;
        for (int i = 2; i <= 5; i++) {
            temp.next = new ListNode(i);
            temp = temp.next;
        }
        print(head);
        print(removeNthFromEnd(head, -1));
    }
}<|fim▁end|> | // 如果指定n大于队列长度,报错 |
<|file_name|>logstash_formatter.go<|end_file_name|><|fim▁begin|>package logrustash
import (
"encoding/json"
"fmt"
"strings"
"time"
"github.com/sirupsen/logrus"
)
// Formatter generates json in logstash format.<|fim▁hole|>// Logstash site: http://logstash.net/
// LogstashFormatter serializes log entries as logstash-style JSON,
// adding the "@version" and "@timestamp" fields on top of the entry data.
type LogstashFormatter struct {
	Type string // if not empty use for logstash type field.
	// TimestampFormat sets the format used for timestamps.
	TimestampFormat string
}
// Format renders the entry as logstash JSON with no field-name prefix.
// It is the logrus.Formatter entry point.
func (f *LogstashFormatter) Format(entry *logrus.Entry) ([]byte, error) {
	return f.FormatWithPrefix(entry, "")
}
// FormatWithPrefix renders the entry as logstash JSON. Field keys starting
// with prefix have that prefix stripped before being emitted; the returned
// byte slice is newline-terminated.
func (f *LogstashFormatter) FormatWithPrefix(entry *logrus.Entry, prefix string) ([]byte, error) {
	fields := make(logrus.Fields)
	for k, v := range entry.Data {
		// remove the prefix when sending the fields to logstash
		if prefix != "" && strings.HasPrefix(k, prefix) {
			k = strings.TrimPrefix(k, prefix)
		}
		switch v := v.(type) {
		case error:
			// Otherwise errors are ignored by `encoding/json`
			// https://github.com/Sirupsen/logrus/issues/377
			fields[k] = v.Error()
		default:
			fields[k] = v
		}
	}
	fields["@version"] = "1"
	timeStampFormat := f.TimestampFormat
	if timeStampFormat == "" {
		// Default to RFC3339 when no explicit format is configured.
		timeStampFormat = time.RFC3339
	}
	fields["@timestamp"] = entry.Time.Format(timeStampFormat)
	// set message field; a user-supplied "message" data entry is moved to
	// "fields.message" so the log message itself wins the key.
	v, ok := entry.Data["message"]
	if ok {
		fields["fields.message"] = v
	}
	fields["message"] = entry.Message
	// set level field (same collision-avoidance scheme as "message")
	v, ok = entry.Data["level"]
	if ok {
		fields["fields.level"] = v
	}
	fields["level"] = entry.Level.String()
	// set type field, only when a formatter type was configured
	if f.Type != "" {
		v, ok = entry.Data["type"]
		if ok {
			fields["fields.type"] = v
		}
		fields["type"] = f.Type
	}
	serialized, err := json.Marshal(fields)
	if err != nil {
		return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err)
	}
	return append(serialized, '\n'), nil
}<|fim▁end|> | |
<|file_name|>cardActionElement.js<|end_file_name|><|fim▁begin|>var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: forwards to Reflect.metadata when the
// reflect-metadata polyfill is present (consumed by the decorators below).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
define(["require", "exports", "aurelia-framework", "./../../config"], function (require, exports, aurelia_framework_1, config_1) {<|fim▁hole|> var CardActionElement = (function () {
function CardActionElement() {
}
CardActionElement.prototype.attached = function () {
this.element.classList.add("card-action");
};
CardActionElement.prototype.detached = function () {
this.element.classList.remove("card-action");
};
CardActionElement = __decorate([
aurelia_framework_1.customElement(config_1.config.cardAction),
aurelia_framework_1.containerless(),
aurelia_framework_1.inlineView("<template><div ref='element'><slot></slot></div></template>"),
__metadata('design:paramtypes', [])
], CardActionElement);
return CardActionElement;
}());
exports.CardActionElement = CardActionElement;
});
//# sourceMappingURL=cardActionElement.js.map<|fim▁end|> | "use strict"; |
<|file_name|>devicefarm.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "devicefarm")]
extern crate rusoto;
use rusoto::devicefarm::{DeviceFarmClient, ListDevicesRequest};
use rusoto::{DefaultCredentialsProvider, Region};
#[test]
pub fn should_list_devices() {
let credentials = DefaultCredentialsProvider::new().unwrap();
let client = DeviceFarmClient::new(credentials, Region::UsWest2);
let request = ListDevicesRequest::default();<|fim▁hole|><|fim▁end|> |
client.list_devices(&request).unwrap();
} |
<|file_name|>long.py<|end_file_name|><|fim▁begin|># http://rosalind.info/problems/long/
def superstring(arr, accumulator=''):
# We now have all strings
if len(arr) == 0:
return accumulator
# Initial call
elif len(accumulator) == 0:
accumulator = arr.pop(0)
return superstring(arr, accumulator)<|fim▁hole|> sample = arr[i]
l = len(sample)
for p in range(l / 2):
q = l - p
if accumulator.startswith(sample[p:]):
arr.pop(i)
return superstring(arr, sample[:p] + accumulator)
if accumulator.endswith(sample[:q]):
arr.pop(i)
return superstring(arr, accumulator + sample[q:])
f = open("rosalind_long.txt", "r")
dnas = {}
currentKey = ''
for content in f:
# Beginning of a new sample
if '>' in content:
key = content.rstrip().replace('>', '')
currentKey = key
dnas[currentKey] = ''
else:
dnas[currentKey] += content.rstrip()
print superstring(dnas.values())<|fim▁end|> |
# Recursive call
else:
for i in range(len(arr)): |
<|file_name|>test_calendar.py<|end_file_name|><|fim▁begin|>"""The tests for the google calendar component."""
# pylint: disable=protected-access
import logging
import unittest
from unittest.mock import patch, Mock
import pytest
import homeassistant.components.calendar as calendar_base
from homeassistant.components.google import calendar
import homeassistant.util.dt as dt_util
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers.template import DATE_STR_FORMAT
from tests.common import get_test_home_assistant, MockDependency
TEST_PLATFORM = {calendar_base.DOMAIN: {CONF_PLATFORM: 'test'}}
_LOGGER = logging.getLogger(__name__)
class TestComponentsGoogleCalendar(unittest.TestCase):
"""Test the Google calendar."""
hass = None # HomeAssistant
# pylint: disable=invalid-name
    def setUp(self):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        # The HTTP component is not under test; stub it out.
        self.hass.http = Mock()
        # Set our timezone to CST/Regina so we can check calculations
        # This keeps UTC-6 all year round
        dt_util.set_default_time_zone(dt_util.get_time_zone('America/Regina'))
# pylint: disable=invalid-name
    def tearDown(self):
        """Stop everything that was started."""
        # Restore the default timezone so later test modules are unaffected.
        dt_util.set_default_time_zone(dt_util.get_time_zone('UTC'))
        self.hass.stop()
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_all_day_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
week_from_today = dt_util.dt.date.today() \
+ dt_util.dt.timedelta(days=7)
event = {
'summary': 'Test All Day Event',
'start': {
'date': week_from_today.isoformat()
},
'end': {
'date': (week_from_today + dt_util.dt.timedelta(days=1))
.isoformat()
},
'location': 'Test Cases',
'description': 'We\'re just testing that all day events get setup '
'correctly',
'kind': 'calendar#event',
'created': '2016-06-23T16:37:57.000Z',
'transparency': 'transparent',
'updated': '2016-06-24T01:57:21.045Z',
'reminders': {'useDefault': True},
'organizer': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True
},
'sequence': 0,
'creator': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True
},
'id': '_c8rinwq863h45qnucyoi43ny8',
'etag': '"2933466882090000"',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
'iCalUID': '[email protected]',
'status': 'confirmed'
}
mock_next_event.return_value.event = event
device_name = 'Test All Day'
cal = calendar.GoogleCalendarEventDevice(self.hass, None,
'', {'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': True,
'offset_reached': False,
'start_time': '{} 00:00:00'.format(event['start']['date']),
'end_time': '{} 00:00:00'.format(event['end']['date']),
'location': event['location'],
'description': event['description'],
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_future_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
one_hour_from_now = dt_util.now() \
+ dt_util.dt.timedelta(minutes=30)
event = {
'start': {
'dateTime': one_hour_from_now.isoformat()
},
'end': {
'dateTime': (one_hour_from_now
+ dt_util.dt.timedelta(minutes=60))
.isoformat()
},
'summary': 'Test Event in 30 minutes',
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': '[email protected]',
'sequence': 0,
'creator': {
'email': '[email protected]',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Future Event'
device_id = 'test_future_event'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': False,
'offset_reached': False,
'start_time': one_hour_from_now.strftime(DATE_STR_FORMAT),
'end_time':
(one_hour_from_now + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_in_progress_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() \
- dt_util.dt.timedelta(minutes=30)
event = {
'start': {
'dateTime': middle_of_event.isoformat()
},
'end': {
'dateTime': (middle_of_event + dt_util.dt
.timedelta(minutes=60))
.isoformat()
},
'summary': 'Test Event in Progress',
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': '[email protected]',
'sequence': 0,
'creator': {
'email': '[email protected]',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Event in Progress'
device_id = 'test_event_in_progress'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_ON
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': False,
'offset_reached': False,
'start_time': middle_of_event.strftime(DATE_STR_FORMAT),
'end_time':
(middle_of_event + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_offset_in_progress_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() \
+ dt_util.dt.timedelta(minutes=14)
event_summary = 'Test Event in Progress'
event = {
'start': {
'dateTime': middle_of_event.isoformat()
},
'end': {
'dateTime': (middle_of_event + dt_util.dt
.timedelta(minutes=60))
.isoformat()
},
'summary': '{} !!-15'.format(event_summary),
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': '[email protected]',
'sequence': 0,
'creator': {
'email': '[email protected]',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Event in Progress'
device_id = 'test_event_in_progress'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert cal.offset_reached()
assert cal.device_state_attributes == {
'message': event_summary,
'all_day': False,
'offset_reached': True,
'start_time': middle_of_event.strftime(DATE_STR_FORMAT),
'end_time':
(middle_of_event + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
    @pytest.mark.skip
    # NOTE(review): test is skipped with no recorded reason — confirm
    # whether it is flaky/timezone-dependent or obsolete.
    @patch('homeassistant.components.google.calendar.GoogleCalendarData')
    def test_all_day_offset_in_progress_event(self, mock_next_event):
        """Test an all-day event whose -25h offset has been reached.

        The event is tomorrow; the ``!!-25:0`` marker moves the trigger
        point 25 hours earlier, which is already in the past, so
        ``offset_reached()`` is True while the state stays OFF.
        """
        tomorrow = dt_util.dt.date.today() \
            + dt_util.dt.timedelta(days=1)
        event_summary = 'Test All Day Event Offset In Progress'
        event = {
            # '!!-25:0' requests a 25 hour, 0 minute offset.
            'summary': '{} !!-25:0'.format(event_summary),
            'start': {
                'date': tomorrow.isoformat()
            },
            'end': {
                'date': (tomorrow + dt_util.dt.timedelta(days=1))
                .isoformat()
            },
            'location': 'Test Cases',
            'description': 'We\'re just testing that all day events get setup '
                           'correctly',
            'kind': 'calendar#event',
            'created': '2016-06-23T16:37:57.000Z',
            'transparency': 'transparent',
            'updated': '2016-06-24T01:57:21.045Z',
            'reminders': {'useDefault': True},
            'organizer': {
                'email': '[email protected]',
                'displayName': 'Organizer Name',
                'self': True
            },
            'sequence': 0,
            'creator': {
                'email': '[email protected]',
                'displayName': 'Organizer Name',
                'self': True
            },
            'id': '_c8rinwq863h45qnucyoi43ny8',
            'etag': '"2933466882090000"',
            'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
            'iCalUID': '[email protected]',
            'status': 'confirmed'
        }
        mock_next_event.return_value.event = event
        device_name = 'Test All Day Offset In Progress'
        device_id = 'test_all_day_offset_in_progress'
        cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
                                                 {'name': device_name})
        assert cal.name == device_name
        assert cal.state == STATE_OFF
        assert cal.offset_reached()
        assert cal.device_state_attributes == {
            'message': event_summary,
            'all_day': True,
            'offset_reached': True,
            # NOTE(review): the 06:00:00 time presumably reflects the test
            # runner's timezone conversion of the all-day date — confirm.
            'start_time': '{} 06:00:00'.format(event['start']['date']),
            'end_time': '{} 06:00:00'.format(event['end']['date']),
            'location': event['location'],
            'description': event['description'],
        }
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_all_day_offset_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
tomorrow = dt_util.dt.date.today() \
+ dt_util.dt.timedelta(days=2)
offset_hours = (1 + dt_util.now().hour)
event_summary = 'Test All Day Event Offset'
event = {
'summary': '{} !!-{}:0'.format(event_summary, offset_hours),
'start': {
'date': tomorrow.isoformat()
},
'end': {
'date': (tomorrow + dt_util.dt.timedelta(days=1))
.isoformat()
},
'location': 'Test Cases',
'description': 'We\'re just testing that all day events get setup '
'correctly',
'kind': 'calendar#event',
'created': '2016-06-23T16:37:57.000Z',
'transparency': 'transparent',
'updated': '2016-06-24T01:57:21.045Z',
'reminders': {'useDefault': True},
'organizer': {<|fim▁hole|> 'displayName': 'Organizer Name',
'self': True
},
'sequence': 0,
'creator': {
'email': '[email protected]',
'displayName': 'Organizer Name',
'self': True
},
'id': '_c8rinwq863h45qnucyoi43ny8',
'etag': '"2933466882090000"',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
'iCalUID': '[email protected]',
'status': 'confirmed'
}
mock_next_event.return_value.event = event
device_name = 'Test All Day Offset'
device_id = 'test_all_day_offset'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event_summary,
'all_day': True,
'offset_reached': False,
'start_time': '{} 00:00:00'.format(event['start']['date']),
'end_time': '{} 00:00:00'.format(event['end']['date']),
'location': event['location'],
'description': event['description'],
}
@MockDependency("httplib2")
def test_update_false(self, mock_httplib2):
"""Test that the update returns False upon Error."""
mock_service = Mock()
mock_service.get = Mock(
side_effect=mock_httplib2.ServerNotFoundError("unit test"))
cal = calendar.GoogleCalendarEventDevice(self.hass, mock_service, None,
{'name': "test"})
result = cal.data.update()
assert not result<|fim▁end|> | 'email': '[email protected]', |
<|file_name|>obj_glyph.js<|end_file_name|><|fim▁begin|>// start of file
/**
Object > Glyph
A single collection of outlines that could
either represent a character, or be used as<|fim▁hole|> part of another character through components.
The following objects are stored as Glyph
Objects:
Glyphs (Characters)
Ligatures
Components
**/
//-------------------------------------------------------
// GLYPH OBJECT
//-------------------------------------------------------
function Glyph(oa){
// debug('\n Glyph - START');
oa = oa || {};
this.objtype = 'glyph';
this.hex = oa.glyphhex || false;
this.name = oa.name || getGlyphName(oa.glyphhex) || false;
this.glyphhtml = oa.glyphhtml || hexToHTML(oa.glyphhex) || false;
this.isautowide = isval(oa.isautowide)? oa.isautowide : true;
this.glyphwidth = isval(oa.glyphwidth)? oa.glyphwidth : 0;
this.leftsidebearing = isval(oa.leftsidebearing)? oa.leftsidebearing : false;
this.rightsidebearing = isval(oa.rightsidebearing)? oa.rightsidebearing : false;
this.ratiolock = isval(oa.ratiolock)? oa.ratiolock : false;
this.maxes = oa.maxes || makeUIMins();
this.shapes = oa.shapes || [];
this.usedin = oa.usedin || [];
this.contextglyphs = '';
this.rotationreferenceshapes = false;
// debug('\t name: ' + this.name);
var lc = 0;
var cs = 0;
if(oa.shapes && oa.shapes.length){
for(var i=0; i<oa.shapes.length; i++) {
if(oa.shapes[i].objtype === 'componentinstance'){
// debug('\t hydrating ci ' + oa.shapes[i].name);
this.shapes[i] = new ComponentInstance(clone(oa.shapes[i]));
lc++;
} else {
// debug('\t hydrating sh ' + oa.shapes[i].name);
this.shapes[i] = new Shape(clone(oa.shapes[i]));
cs++;
}
}
}
if(this.getMaxes) this.getMaxes();
// cache
oa.cache = oa.cache || {};
this.cache = {};
this.cache.svg = oa.cache.svg || false;
// debug(' Glyph - END\n');
}
//-------------------------------------------------------
// TRANSFORM & MOVE
//-------------------------------------------------------
Glyph.prototype.setGlyphPosition = function(nx, ny, force){
// debug('Glyph.setGlyphPosition - START');
// debug('\t nx/ny/force: ' + nx + ' ' + ny + ' ' + force);
var m = this.getMaxes();
if(nx !== false) nx = parseFloat(nx);
if(ny !== false) ny = parseFloat(ny);
var dx = (nx !== false)? (nx - m.xmin) : 0;
var dy = (ny !== false)? (ny - m.ymax) : 0;
this.updateGlyphPosition(dx, dy, force);
// debug(' Glyph.setGlyphPosition - END\n');
};
Glyph.prototype.updateGlyphPosition = function(dx, dy, force){
// debug('\n Glyph.updateGlyphPosition - START ' + this.name);
// debug('\t dx/dy/force: ' + dx + ' ' + dy + ' ' + force);
// debug('\t number of shapes: ' + this.shapes.length);
dx = parseFloat(dx) || 0;
dy = parseFloat(dy) || 0;
var cs = this.shapes;
for(var i=0; i<cs.length; i++){
cs[i].updateShapePosition(dx, dy, force);
}
this.changed();
// debug(' Glyph.updateGlyphPosition - END ' + this.name + '\n\n');
};
Glyph.prototype.setGlyphSize = function(nw, nh, ratiolock){
// debug('SET GLYPHSIZE ---- nw/nh/ra:\t' + nw + '\t ' + nh + '\t ' + ratiolock);
// debug('\t maxes: ' + json(this.maxes));
var m = this.getMaxes();
if(nw !== false) nw = parseFloat(nw);
if(nh !== false) nh = parseFloat(nh);
var ch = (m.ymax - m.ymin);
var cw = (m.xmax - m.xmin);
var dw = (nw !== false)? (nw - cw) : 0;
var dh = (nh !== false)? (nh - ch) : 0;
if(ratiolock){
if(Math.abs(nh) > Math.abs(nw)) dw = (cw*(nh/ch)) - cw;
else dh = (ch*(nw/cw)) - ch;
}
this.updateGlyphSize(dw, dh, false);
};
Glyph.prototype.updateGlyphSize = function(dw, dh, ratiolock, dontscalecomponentinstances){
// debug('\n Glyph.updateGlyphSize - START ' + this.name);
// debug('\t number of shapes: ' + this.shapes.length);
// debug('\t dw dh rl:\t' + dw + '/' + dh + '/' + ratiolock);
var m = this.getMaxes();
if(dw !== false) dw = parseFloat(dw) || 0;
if(dh !== false) dh = parseFloat(dh) || 0;
// debug('\t adjust dw/dh:\t' + dw + '/' + dh);
var oldw = m.xmax - m.xmin;
var oldh = m.ymax - m.ymin;
var neww = (oldw + dw);
var newh = (oldh + dh);
if(Math.abs(neww) < 1) neww = 1;
if(Math.abs(newh) < 1) newh = 1;
// debug('\t new w/h:\t' + neww + '/' + newh);
var ratiodh = (newh/oldh);
var ratiodw = (neww/oldw);
// debug('\t ratio dw/dh:\t' + ratiodw + '/' + ratiodh);
if(ratiolock){
// Assuming only one will be nonzero
// if(Math.abs(ratiodh) > Math.abs(ratiodw)) ratiodw = ratiodh;
// else ratiodh = ratiodw;
if(dw !== 0 && dh === 0) ratiodh = ratiodw;
else ratiodw = ratiodh;
}
// debug('\t ratio dw/dh:\t' + ratiodw + '/' + ratiodh);
var cs = this.shapes;
var s, smaxes,
oldsw, oldsh, oldsx, oldsy,
newsw, newsh, newsx, newsy,
sdw, sdh, sdx, sdy;
// debug('\t Before Maxes ' + json(m, true));
for(var i=0; i<cs.length; i++){
s = cs[i];
// debug('\t >>> Updating ' + s.objtype + ' ' + i + '/' + cs.length + ' : ' + s.name);
smaxes = s.getMaxes();
// scale
oldsw = smaxes.xmax - smaxes.xmin;
newsw = oldsw * ratiodw;
if(ratiodw === 0) sdw = false;
else sdw = newsw - oldsw;
oldsh = smaxes.ymax - smaxes.ymin;
newsh = oldsh * ratiodh;
if(ratiodh === 0) sdh = false;
else sdh = newsh - oldsh;
// debug('\t Shape ' + i + ' dw dh ' + sdw + ' ' + sdh);
if(s.objtype === 'componentinstance' && dontscalecomponentinstances) {
// Special case skipping scaling of CIs for Global Actions
// debug(`\t Skipped this shape because it's a component instance`);
} else {
// It's a regular shape, or we're scaling everything
s.updateShapeSize(sdw, sdh, false);
}
// move
oldsx = smaxes.xmin - m.xmin;
newsx = oldsx * ratiodw;
if(ratiodw === 0) sdx = false;
else sdx = newsx - oldsx;
oldsy = smaxes.ymin - m.ymin;
newsy = oldsy * ratiodh;
if(ratiodh === 0) sdy = false;
else sdy = newsy - oldsy;
// debug('\t Shape Pos ' + i + ' dx dy ' + sdx + ' ' + sdy);
s.updateShapePosition(sdx, sdy, true);
}
this.changed();
// debug('\t Afters Maxes ' + json(this.maxes, true));
// debug(' Glyph.updateGlyphSize - END ' + this.name + '\n');
};
// Mirrors the glyph horizontally around the x coordinate `mid`.
// When `mid` is not supplied, the glyph's own horizontal center is used.
Glyph.prototype.flipEW = function(mid) {
    var maxes = this.getMaxes();
    if (!isval(mid)) {
        mid = ((maxes.xmax - maxes.xmin) / 2) + maxes.xmin;
    }
    this.shapes.forEach(function(shape) {
        shape.flipEW(mid);
    });
    this.changed();
};
// Mirrors the glyph vertically around the y coordinate `mid`.
// When `mid` is not supplied, the glyph's own vertical center is used.
Glyph.prototype.flipNS = function(mid) {
    var maxes = this.getMaxes();
    if (!isval(mid)) {
        mid = ((maxes.ymax - maxes.ymin) / 2) + maxes.ymin;
    }
    this.shapes.forEach(function(shape) {
        shape.flipNS(mid);
    });
    this.changed();
};
Glyph.prototype.startRotationPreview = function() {
// debug(`\n\n Glyph.startRotationPreview - START`);
// debug(`\t shapes ${this.shapes.length}`);
this.rotationreferenceshapes = [];
for(var i=0; i<this.shapes.length; i++) {
if(this.shapes[i].objtype === 'componentinstance'){
this.rotationreferenceshapes[i] = new ComponentInstance(this.shapes[i]);
} else {
this.rotationreferenceshapes[i] = new Shape(this.shapes[i]);
}
// debug(this.rotationreferenceshapes[i]);
}
// debug(` Glyph.startRotationPreview - END\n`);
};
Glyph.prototype.rotationPreview = function(deltaRad, about, snap) {
// debug(`\n\n Glyph.rotationPreview - START`);
var tempshape;
for(var i=0; i<this.shapes.length; i++) {
if(this.shapes[i].objtype === 'componentinstance'){
this.shapes[i].rotate(deltaRad - niceAngleToRadians(this.shapes[i].rotation), about, snap);
} else {
tempshape = new Shape(clone(this.rotationreferenceshapes[i]));
tempshape.rotate(deltaRad, about, snap);
this.shapes[i].path = tempshape.path;
}
this.shapes[i].changed();
}
// debug(` Glyph.rotationPreview - END\n`);
};
Glyph.prototype.endRotationPreview = function() {
this.rotationreferenceshapes = false;
};
Glyph.prototype.rotate = function(deltaRad, about, snap) {
about = about || this.getCenter();
for(var s=0; s < this.shapes.length; s++){
this.shapes[s].rotate(deltaRad, about, snap);
}
this.changed();
};
Glyph.prototype.reverseWinding = function() {
for(var s=0; s<this.shapes.length; s++){
this.shapes[s].reverseWinding();
}
this.changed();
};
Glyph.prototype.alignShapes = function(edge, target) {
// debug('\n Glyph.alignShapes - START');
// debug('\t edge: ' + edge);
var offset;
if(edge === 'top'){
target = -999999;
this.shapes.forEach(function(v) {
target = Math.max(target, v.getMaxes().ymax);
});
// debug('\t found TOP: ' + target);
this.shapes.forEach(function(v) {
v.setShapePosition(false, target);
});
} else if (edge === 'middle'){
target = this.getCenter().y;
// debug('\t found MIDDLE: ' + target);
this.shapes.forEach(function(v) {
offset = v.getCenter().y;
v.updateShapePosition(false, (target - offset));
});
} else if (edge === 'bottom'){
target = 999999;
this.shapes.forEach(function(v) {
target = Math.min(target, v.getMaxes().ymin);
});
// debug('\t found BOTTOM: ' + target);
this.shapes.forEach(function(v) {
offset = v.getMaxes().ymin;
v.updateShapePosition(false, (target - offset));
});
} else if (edge === 'left'){
target = 999999;
this.shapes.forEach(function(v) {
target = Math.min(target, v.getMaxes().xmin);
});
// debug('\t found LEFT: ' + target);
this.shapes.forEach(function(v) {
v.setShapePosition(target, false);
});
} else if (edge === 'center'){
target = this.getCenter().x;
// debug('\t found CENTER: ' + target);
this.shapes.forEach(function(v) {
offset = v.getCenter().x;
v.updateShapePosition((target - offset), false);
});
} else if (edge === 'right'){
target = -999999;
this.shapes.forEach(function(v) {
target = Math.max(target, v.getMaxes().xmax);
});
// debug('\t found RIGHT: ' + target);
this.shapes.forEach(function(v) {
offset = v.getMaxes().xmax;
v.updateShapePosition((target - offset), false);
});
}
this.changed();
// debug(' Glyph.alignShapes - END\n');
};
//-------------------------------------------------------
// GETTERS
//-------------------------------------------------------
// Returns the glyph's display name.
Glyph.prototype.getName = function() { return this.name; };
// Returns the name derived from this glyph's hex value.
Glyph.prototype.getChar = function() { return getGlyphName(this.hex); };
// Returns the HTML entity representation, or '' if none is set.
Glyph.prototype.getHTML = function() { return this.glyphhtml || ''; };
// Left side bearing: the per-glyph override, or the project default
// when leftsidebearing is the sentinel value `false`.
Glyph.prototype.getLSB = function() {
    if(this.leftsidebearing === false) return _GP.projectsettings.defaultlsb;
    else return this.leftsidebearing;
};
// Right side bearing: the per-glyph override, or the project default
// when rightsidebearing is the sentinel value `false`.
Glyph.prototype.getRSB = function() {
    if(this.rightsidebearing === false) return _GP.projectsettings.defaultrsb;
    else return this.rightsidebearing;
};
// Returns the {x, y} midpoint of the glyph's bounding box.
Glyph.prototype.getCenter = function() {
    var maxes = this.getMaxes();
    var halfWidth = (maxes.xmax - maxes.xmin) / 2;
    var halfHeight = (maxes.ymax - maxes.ymin) / 2;
    return {
        x: maxes.xmin + halfWidth,
        y: maxes.ymin + halfHeight,
    };
};
//-------------------------------------------------------
// CALCULATING SIZE
//-------------------------------------------------------
Glyph.prototype.calcGlyphMaxes = function(){
// debug('\n Glyph.calcGlyphMaxes - START ' + this.name);
this.maxes = makeUIMins();
var tm;
if(this.shapes.length > 0){
for(var jj=0; jj<this.shapes.length; jj++) {
// debug('\t ++++++ START shape ' + jj);
// debug(this.shapes[jj]);
if(this.shapes[jj].getMaxes){
tm = this.shapes[jj].getMaxes();
// debug('\t before ' + json(tm, true));
this.maxes = getOverallMaxes([tm, this.maxes]);
// debug('\t afters ' + json(tm, true));
// debug('\t ++++++ END shape ' + jj + ' - ' + this.shapes[jj].name);
}
}
} else {
this.maxes = { 'xmax': 0, 'xmin': 0, 'ymax': 0, 'ymin': 0 };
}
this.calcGlyphWidth();
// debug(' Glyph.calcGlyphMaxes - END ' + this.name + '\n');
return clone(this.maxes, 'Glyph.calcGlyphMaxes');
};
// Recomputes glyphwidth from the cached maxes; no-op for fixed-width glyphs.
Glyph.prototype.calcGlyphWidth = function() {
    if (this.isautowide) {
        this.glyphwidth = Math.max(this.maxes.xmax, 0);
    }
};

// Full horizontal advance: the glyph width, plus both side bearings
// when the glyph is auto-wide.
Glyph.prototype.getAdvanceWidth = function() {
    this.calcGlyphWidth();
    if (!this.isautowide) return this.glyphwidth;
    return this.glyphwidth + this.getLSB() + this.getRSB();
};
// Returns a clone of the cached bounding box, recalculating it first when
// the cache looks invalid (NaN values, or values still equal to the
// sentinel extremes from makeUIMins / _UI.maxes).
Glyph.prototype.getMaxes = function() {
    // debug('\n Glyph.getMaxes - START ' + this.name);
    // Recalculate if any cached component is NaN / missing.
    if(hasNonValues(this.maxes)){
        // debug('\t ^^^^^^ maxes found NaN, calculating...');
        this.calcGlyphMaxes();
        // debug('\t ^^^^^^ maxes found NaN, DONE calculating...');
    }
    // A cached value equal to a UI sentinel extreme means the cache was
    // never populated from real shape data — recalculate in that case too.
    if(this.shapes.length){
        if( this.maxes.xmin === _UI.maxes.xmin ||
            this.maxes.xmin === _UI.mins.xmin ||
            this.maxes.xmax === _UI.maxes.xmax ||
            this.maxes.xmax === _UI.mins.xmax ||
            this.maxes.ymin === _UI.maxes.ymin ||
            this.maxes.ymin === _UI.mins.ymin ||
            this.maxes.ymax === _UI.maxes.ymax ||
            this.maxes.ymax === _UI.mins.ymax
        ){
            this.calcGlyphMaxes();
        }
    }
    // debug('\t returning ' + json(this.maxes));
    // debug(' Glyph.getMaxes - END ' + this.name + '\n');
    return clone(this.maxes, 'Glyph.getMaxes');
};
// Returns true when obj is falsy or any of its own properties fails
// the isval() check (e.g. undefined / NaN-like values).
function hasNonValues(obj) {
    if (!obj) return true;
    for (var key in obj) {
        if (obj.hasOwnProperty(key) && !isval(obj[key])) {
            return true;
        }
    }
    return false;
}
//-------------------------------------------------------
// COMPONENT STUFF
//-------------------------------------------------------
// True when at least one member shape is a component instance rather
// than a plain shape.
Glyph.prototype.containsComponents = function() {
    var shapes = this.shapes;
    for (var i = 0; i < shapes.length; i++) {
        if (shapes[i].objtype === 'componentinstance') {
            return true;
        }
    }
    return false;
};
Glyph.prototype.canAddComponent = function(cid) {
// debug('\n Glyph.canAddComponent - START');
var myid = ''+getMyID(this);
// debug('\t adding ' + cid + ' to (me) ' + myid);
if(myid === cid) return false;
if(this.usedin.length === 0) return true;
var downlinks = this.collectAllDownstreamLinks([], true);
downlinks = downlinks.filter(function(elem, pos) { return downlinks.indexOf(elem) === pos;});
var uplinks = this.collectAllUpstreamLinks([]);
uplinks = uplinks.filter(function(elem, pos) { return uplinks.indexOf(elem) === pos;});
// debug('\t downlinks: ' + downlinks);
// debug('\t uplinks: ' + uplinks);
if(downlinks.indexOf(cid) > -1) return false;
if(uplinks.indexOf(cid) > -1) return false;
return true;
};
Glyph.prototype.collectAllDownstreamLinks = function(re, excludepeers) {
re = re || [];
for(var s=0; s<this.shapes.length; s++){
if(this.shapes[s].objtype === 'componentinstance'){
re = re.concat(getGlyph(this.shapes[s].link).collectAllDownstreamLinks(re));
if(!excludepeers) re.push(this.shapes[s].link);
}
}
return re;
};
Glyph.prototype.collectAllUpstreamLinks = function(re) {
re = re || [];
for(var g=0; g<this.usedin.length; g++){
re = re.concat(getGlyph(this.usedin[g]).collectAllUpstreamLinks(re));
re.push(this.usedin[g]);
}
return re;
};
// This method is called on Glyphs just before they are deleted
// to clean up all the component instance linking
Glyph.prototype.deleteLinks = function(thisid) {
// debug('\n Glyph.deleteLinks - START');
// debug(`\t passed this as id: ${thisid}`);
// Delete upstream Component Instances
var upstreamglyph;
for(var c=0; c<this.usedin.length; c++){
upstreamglyph = getGlyph(this.usedin[c]);
// debug(`\t removing upstream from ${upstreamglyph.name}`);
// debug(upstreamglyph.shapes);
for(var u=0; u<upstreamglyph.shapes.length; u++){
if(upstreamglyph.shapes[u].objtype === 'componentinstance' && upstreamglyph.shapes[u].link === thisid){
upstreamglyph.shapes.splice(u, 1);
upstreamglyph.changed();
u--;
}
}
// debug(upstreamglyph.shapes);
}
// Delete downstream usedin array values
for(var s=0; s<this.shapes.length; s++){
if(this.shapes[s].objtype === 'componentinstance'){
// debug(`\t removing downstream from ${this.shapes[s].link}`);
removeFromUsedIn(this.shapes[s].link, thisid);
}
}
// debug(` Glyph.deleteLinks - END\n\n`);
};
// Returns a boolean telling whether the specified component is
// instanced more than once within this single glyph.
Glyph.prototype.hasMultipleInstancesOf = function(linkID) {
    var matches = 0;
    var shapes = this.shapes;
    for (var i = 0; i < shapes.length; i++) {
        if (shapes[i].link && shapes[i].link === linkID) {
            matches++;
        }
    }
    return matches > 1;
};
//-------------------------------------------------------
// DRAWING AND EXPORTING
//-------------------------------------------------------
Glyph.prototype.drawGlyph = function(lctx, view, alpha, addLSB){
// debug('\n Glyph.drawGlyph - START ' + this.name);
// debug('\t view ' + json(view, true));
var sl = this.shapes;
var shape, drewshape;
if(isNaN(alpha) || alpha > 1 || alpha < 0) alpha = 1;
if(addLSB && this.isautowide) view.dx += (this.getLSB() * view.dz);
lctx.beginPath();
for(var j=0; j<sl.length; j++) {
shape = sl[j];
if(shape.visible) {
// debug('\t ' + this.name + ' drawing ' + shape.objtype + ' ' + j + ' ' + shape.name);
drewshape = shape.drawShape(lctx, view);
if(!drewshape){
console.warn('Could not draw shape ' + shape.name + ' in Glyph ' + this.name);
if(shape.objtype === 'componentinstance' && !getGlyph(shape.link)){
console.warn('>>> Component Instance has bad link: ' + shape.link);
var i = this.shapes.indexOf(shape);
if(i > -1){
this.shapes.splice(i, 1);
console.warn('>>> Deleted the Instance');
}
}
}
}
}
lctx.closePath();
// lctx.fillStyle = RGBAtoRGB(_GP.projectsettings.colors.glyphfill, alpha);
lctx.fillStyle = _GP.projectsettings.colors.glyphfill;
lctx.globalAlpha = alpha;
lctx.fill('nonzero');
lctx.globalAlpha = 1;
// debug(' Glyph.drawGlyph - END ' + this.name + '\n');
return (this.getAdvanceWidth()*view.dz);
};
Glyph.prototype.makeSVG = function(size, gutter) {
// debug('\n Glyph.makeSVG - START');
var ps = _GP.projectsettings;
size = size || _UI.thumbsize;
gutter = gutter || _UI.thumbgutter;
var emsquare = Math.max(ps.upm, (ps.ascent - ps.descent));
var desc = Math.abs(ps.descent);
var charscale = (size-(gutter*2)) / size;
var gutterscale = (gutter / size) * emsquare;
var vbsize = emsquare - (gutter*2);
var pathdata = this.getSVGpathData();
// Assemble SVG
var re = '<svg version="1.1" ';
re += 'xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ';
re += 'width="'+size+'" height="'+size+'" viewBox="0,0,'+vbsize+','+vbsize+'">';
re += '<g transform="translate('+(gutterscale)+','+(emsquare-desc-(gutterscale/2))+') scale('+charscale+',-'+charscale+')">';
// re += '<rect x="0" y="-'+desc+'" height="'+desc+'" width="1000" fill="lime"/>';
// re += '<rect x="0" y="0" height="'+(emsquare-desc)+'" width="1000" fill="cyan"/>';
re += '<path d="' + pathdata + '"/>';
re += '</g>';
re += '</svg>';
// debug(' Glyph.makeSVG - END\n');
return re;
};
// Returns the SVG path data for this glyph, computing and caching it
// on first use.
Glyph.prototype.getSVGpathData = function() {
    if (!this.cache.svg) {
        this.cache.svg = this.makeSVGpathData();
    }
    return this.cache.svg;
};
Glyph.prototype.makeSVGpathData = function() {
// SVG will not include LSB
var sl = this.shapes;
// Make Pathdata
var pathdata = '';
var shape;
for(var j=0; j<sl.length; j++) {
shape = sl[j];
if(shape.visible) {
pathdata += shape.getSVGpathData();
if(j < sl.length-1) pathdata += ' ';
}
}
if(trim(pathdata) === '') pathdata = 'M0,0Z';
this.cache.svg = pathdata;
return pathdata;
};
Glyph.prototype.makeOpenTypeJSpath = function(otpath) {
otpath = otpath || new opentype.Path();
for(var s=0; s < this.shapes.length; s++){
otpath = this.shapes[s].makeOpenTypeJSpath(otpath);
}
return otpath;
};
Glyph.prototype.draw_MultiSelectAffordances = function() {
var allpoints = [];
for(var s=0; s<this.shapes.length; s++){
if(this.shapes[s].objtype !== 'componentinstance'){
allpoints = allpoints.concat(this.shapes[s].path.pathpoints);
this.shapes[s].draw_PathOutline(_UI.colors.blue, 1);
}
}
draw_PathPoints(allpoints, _UI.colors.blue);
};
Glyph.prototype.isOverControlPoint = function(x, y, nohandles) {
var re = false;
for(var s=0; s<this.shapes.length; s++){
if(this.shapes[s].objtype !== 'componentinstance'){
re = this.shapes[s].path.isOverControlPoint(x, y, nohandles);
if(re) return re;
}
}
return false;
};
Glyph.prototype.findWinding = function() {
for(var s=0; s<this.shapes.length; s++){
this.shapes[s].findWinding();
}
};
Glyph.prototype.flattenGlyph = function() {
var reshapes = [];
var ts, tg;
for(var s=0; s<this.shapes.length; s++){
ts = this.shapes[s];
if(ts.objtype === 'shape'){
reshapes.push(new Shape(clone(ts, 'Glyph.flattenGlyph')));
} else if (ts.objtype === 'componentinstance'){
tg = ts.getTransformedGlyph();
tg = tg.flattenGlyph();
reshapes = reshapes.concat(tg.shapes);
} else {
// debug('\n Glyph.flattenGlyph - ERROR - none shape or ci in shapes array');
}
}
this.shapes = reshapes;
// this.calcGlyphMaxes();
return this;
};
Glyph.prototype.combineAllShapes = function(donttoast, dontresolveoverlaps) {
// debug('\n Glyph.combineAllShapes - START - ' + this.name);
if(this.shapes.length < 2) return this;
this.flattenGlyph();
var cs = combineShapes(this.shapes, donttoast, dontresolveoverlaps);
if(cs){
// debug('\t new shapes');
this.shapes = cs;
// debug(this.shapes);
this.changed();
}
// debug(this.name + ' \t\t ' + this.shapes.length);
// debug(' Glyph.combineAllShapes - END - ' + this.name + '\n');
return this;
};
Glyph.prototype.resolveOverlapsForAllShapes = function() {
var newshapes = [];
for(var ts=0; ts<this.shapes.length; ts++){
newshapes = newshapes.concat(this.shapes[ts].resolveSelfOverlaps());
}
this.shapes = newshapes;
this.changed();
};
//-------------------------------------------------------
// METHODS
//-------------------------------------------------------
Glyph.prototype.changed = function(descend, ascend) {
this.cache = {};
var usedGlyph;
if(ascend){
for(var g=0; g<this.usedin.length; g++){
usedGlyph = getGlyph(this.usedin[g]);
if(usedGlyph && usedGlyph.changed) usedGlyph.changed(descend, ascend);
}
}
if(descend){
for(var s=0; s<this.shapes.length; s++) this.shapes[s].changed(descend, ascend);
}
this.calcGlyphMaxes();
};
Glyph.prototype.map = function(indents) {
indents = indents || ' ';
var re = (indents + 'GLYPH ' + this.name + '\n');
var ts;
for(var s=0; s < this.shapes.length; s++){
ts = this.shapes[s];
if(ts.objtype === 'shape'){
re += (indents + '-' + s + '-' + ts.name + ' ' + json(ts.path.maxes, true) + '\n');
} else if(ts.objtype === 'componentinstance'){
re += (indents+ '~' + s + '~' + ts.name + '\n');
re += getGlyph(ts.link).map(indents + ' ');
}
}
return re;
};
Glyph.prototype.copyShapesTo = function(destinationID, copyGlyphAttributes, selectNewShapes) {
// debug('\n Glyph.copyShapesTo - START');
copyGlyphAttributes = copyGlyphAttributes || { srcAutoWidth: false, srcWidth: false, srcLSB: false, srcRSB: false};
selectNewShapes = selectNewShapes || false;
var destinationGlyph = getGlyph(destinationID, true);
var tc;
if(selectNewShapes) _UI.ms.shapes.clear();
for(var c=0; c<this.shapes.length; c++){
tc = this.shapes[c];
if(tc.objtype === 'componentinstance'){
addToUsedIn(tc.link, destinationID);
tc = new ComponentInstance(clone(tc, 'Glyph.copyShapesTo'));
} else if(tc.objtype === 'shape'){
tc = new Shape(clone(tc, 'Glyph.copyShapesTo'));
}
destinationGlyph.shapes.push(tc);
if(selectNewShapes) _UI.ms.shapes.add(tc);
}
if(copyGlyphAttributes.srcAutoWidth) destinationGlyph.isautowide = this.isautowide;
if(copyGlyphAttributes.srcWidth) destinationGlyph.glyphwidth = this.glyphwidth;
if(copyGlyphAttributes.srcLSB) destinationGlyph.leftsidebearing = this.leftsidebearing;
if(copyGlyphAttributes.srcRSB) destinationGlyph.rightsidebearing = this.rightsidebearing;
if(!selectNewShapes) showToast('Copied ' + this.shapes.length + ' shapes');
destinationGlyph.changed();
// debug('\t new shapes');
// debug(destinationGlyph.shapes);
// debug(' Glyph.copyShapesTo - END\n');
};
Glyph.prototype.isHere = function(x, y) {
for(var s=0; s < this.shapes.length; s++){
if(this.shapes[s].isHere(x, y)) return true;
}
return false;
};
Glyph.prototype.hasShapes = function() {
var tg;
for(var s=0; s<this.shapes.length; s++){
if(this.shapes[s].objtype !== 'componentinstance') return true;
else {
tg = this.shapes[s].getTransformedGlyph();
if(tg.hasShapes()) return true;
}
}
return false;
};
// Drops any shape whose path exists but contains zero path points.
// Iterates backwards so in-place splices don't skip elements.
Glyph.prototype.removeShapesWithZeroLengthPaths = function() {
    for (var i = this.shapes.length - 1; i >= 0; i--) {
        var path = this.shapes[i].path;
        if (path && path.pathpoints.length === 0) {
            this.shapes.splice(i, 1);
        }
    }
};
// Collects the path points of every shape into one flat array.
Glyph.prototype.getPathPoints = function() {
    var points = [];
    for (var i = 0; i < this.shapes.length; i++) {
        points = points.concat(this.shapes[i].path.pathpoints);
    }
    return points;
};

// Accessor for the raw shapes array.
Glyph.prototype.getShapes = function() {
    return this.shapes;
};
// Rounds every coordinate in every member shape to the given precision,
// then flags the glyph as changed.
Glyph.prototype.roundAll = function(precision) {
    this.shapes.forEach(function(shape) {
        shape.roundAll(precision);
    });
    this.changed();
};
//-------------------------------------------------------
// GLYPH FUNCTIONS
//-------------------------------------------------------
// GET
// Retrieves a Glyph object by ID. An ID with a second '0x' is a
// ligature, an ID starting with '0x' is a glyph, anything else is a
// component ID. When `create` is true, missing glyphs/ligatures are
// created on the fly; components are never created here.
// Returns false for unknown IDs or when the project isn't loaded.
function getGlyph(id, create) {
    if (!id) return false;

    // Guard against the global project object not being ready yet.
    // (The previous check `_GP === {}` compared object identity and was
    // always false, so it never actually guarded anything.)
    if (typeof _GP === 'undefined' || !_GP || !_GP.glyphs) return false;

    id = '' + id;
    var rechar;

    if (id.indexOf('0x', 2) > -1) {
        // Ligature: more than one hex value in the ID
        rechar = _GP.ligatures[id];
        if (rechar) return rechar;
        if (create) {
            _GP.ligatures[id] = new Glyph({'glyphhex': id});
            return _GP.ligatures[id];
        }
    } else if (id.indexOf('0x') > -1) {
        // Single glyph
        rechar = _GP.glyphs[id];
        if (rechar) return rechar;
        if (create) {
            _GP.glyphs[id] = new Glyph({'glyphhex': id});
            return _GP.glyphs[id];
        }
    } else {
        // Component: plain ID string
        return _GP.components[id] || false;
    }

    return false;
}
// Classifies an ID string: a second '0x' (at index >= 2) means a
// ligature, a leading '0x' means a glyph, anything else is a component.
function getGlyphType(id) {
    var hasSecondHex = id.indexOf('0x', 2) > -1;
    if (hasSecondHex) return 'ligature';
    return (id.indexOf('0x') > -1) ? 'glyph' : 'component';
}
// Returns a display name for the given glyph/ligature/component ID:
// the Unicode name when known, otherwise the stored glyph name, with
// hex-HTML (ligatures) or '[name not found]' (components) as fallback.
function getGlyphName(ch) {
    ch = '' + ch;
    // Not passed an ID
    if (!ch) return false;

    // Known Unicode names take priority
    var un = getUnicodeName(ch);
    if (un && un !== '[name not found]') {
        return escapeXMLValues(un);
    }

    var cobj = getGlyph(ch);
    // getGlyph returns false for unknown IDs; guard before reading
    // .name (the old code dereferenced cobj.name unconditionally and
    // threw a TypeError for unknown ligature/component IDs).
    if (ch.indexOf('0x', 2) > -1) {
        // Ligature
        return (cobj && escapeXMLValues(cobj.name)) || hexToHTML(ch);
    }
    // Component
    return (cobj && escapeXMLValues(cobj.name)) || '[name not found]';
}
// Returns capital A ('0x0041') when the project has it, otherwise the
// first glyph ID in the project.
function getFirstGlyphID() {
    var capitalA = '0x0041';
    return _GP.glyphs[capitalA] ? capitalA : getFirstID(_GP.glyphs);
}
// GET SELECTED
function getSelectedGlyphLeftSideBearing(){
//debug('getSelectedGlyphLeftSideBearing');
var sc = getSelectedWorkItem();
if(!sc) return 0;
if(sc.objtype === 'component') return 0;
if(!sc.isautowide) return 0;
if(sc.leftsidebearing === true) sc.leftsidebearing = _GP.projectsettings.defaultlsb;
return sc.leftsidebearing !== false? sc.leftsidebearing : _GP.projectsettings.defaultlsb;
}
function getSelectedGlyphRightSideBearing(){
//debug('getSelectedGlyphLeftSideBearing');
var sc = getSelectedWorkItem();
if(!sc) return 0;
if(sc.objtype === 'component') return 0;
if(!sc.isautowide) return 0;
if(sc.rightsidebearing === true) sc.rightsidebearing = _GP.projectsettings.defaultrsb;
return sc.rightsidebearing !== false? sc.rightsidebearing : _GP.projectsettings.defaultrsb;
}
function updateCurrentGlyphWidth() {
var sc = getSelectedWorkItem();
if(!sc) return;
if(_UI.current_page === 'glyph edit'){
sc.changed();
} else if (_UI.current_page === 'components' && sc) {
var lsarr = sc.usedin;
if(lsarr) for(var c=0; c<lsarr.length; c++) getGlyph(lsarr[c]).changed();
}
}
// Delete
// Deletes a glyph by ID after cleaning up its component-instance links.
// Returns true when a glyph was deleted, false otherwise.
function deleteGlyph(id) {
    if (!id) return false;

    // Guard against the global project object not being ready yet.
    // (The previous check `_GP === {}` compared object identity and was
    // always false, so it never actually guarded anything.)
    if (typeof _GP === 'undefined' || !_GP || !_GP.glyphs) return false;

    id = '' + id;

    if (_GP.glyphs[id]) {
        // Unlink upstream component instances and downstream usedin refs
        _GP.glyphs[id].deleteLinks(id);
        delete _GP.glyphs[id];
        return true;
    }

    return false;
}
// end of file<|fim▁end|> | |
<|file_name|>test_fn.py<|end_file_name|><|fim▁begin|># coding: utf8
from django.test import RequestFactory
from django.test import TestCase
from django.core.urlresolvers import resolve
from django.views.generic import ListView
import pytest
from selenium import webdriver
import time
from cms.test_utils.testcases import CMSTestCase
from django.test.utils import override_settings
from .. import views
from categories.models import Category
from mixer.backend.django import mixer
from django.contrib.auth.models import AnonymousUser
from bidders.models import Bidder
pytestmark = pytest.mark.django_db
class SmokeTest(TestCase):
def setUp(self):
self.browser = webdriver.Chrome()
def tearDown(self):
self.browser.quit()
def test_bad_maths(self):
self.assertEqual(1 + 1, 2)
self.browser.get('http://localhost:8000')
assert u'홈페이지' in self.browser.title
class TestViews(TestCase):
def setUp(self):
self.browser = webdriver.Chrome()
self.category = mixer.blend(Category, name=u'조달업체')
def tearDown(self):
self.browser.quit()<|fim▁hole|> 'queryset': Category.objects.filter(level=0)
}
user = AnonymousUser()
req.user = user
res = ListView.as_view(**categorytree_dict)(req)
assert res.status_code == 200
print(res.context_data)
@override_settings(ROOT_URLCONF='categories.tests.urls')
def test_url_resolves_to_categories_page(self):
found = resolve('/categories/')
assert found.view_name == 'categories_tree_list'
def test_home_page_returns_correct_html(self):
self.browser.get('http://localhost:8000/categories')
assert u'조달업체' in self.browser.page_source
def test_entry_listing_within_a_category(self):
self.browser.get('http://localhost:8000/categories')
self.browser.find_element_by_link_text(u'조달업체').click()
time.sleep(1)
assert u'견본 업체' in self.browser.page_source<|fim▁end|> |
def test_anonymous(self):
req = RequestFactory().get('/categories/')
categorytree_dict = { |
<|file_name|>websocket.go<|end_file_name|><|fim▁begin|>package input
import (
"github.com/Jeffail/benthos/v3/internal/component/input"
"github.com/Jeffail/benthos/v3/internal/component/metrics"<|fim▁hole|> "github.com/Jeffail/benthos/v3/internal/old/input/reader"
btls "github.com/Jeffail/benthos/v3/internal/tls"
)
//------------------------------------------------------------------------------
func init() {
Constructors[TypeWebsocket] = TypeSpec{
constructor: fromSimpleConstructor(NewWebsocket),
Summary: `
Connects to a websocket server and continuously receives messages.`,
Description: `
It is possible to configure an ` + "`open_message`" + `, which when set to a
non-empty string will be sent to the websocket server each time a connection is
first established.`,
FieldSpecs: append(docs.FieldSpecs{
docs.FieldCommon("url", "The URL to connect to.", "ws://localhost:4195/get/ws").HasType("string"),
docs.FieldAdvanced("open_message", "An optional message to send to the server upon connection."),
btls.FieldSpec(),
}, auth.FieldSpecs()...),
Categories: []Category{
CategoryNetwork,
},
}
}
//------------------------------------------------------------------------------
// NewWebsocket creates a new Websocket input type.
func NewWebsocket(conf Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (input.Streamed, error) {
ws, err := reader.NewWebsocket(conf.Websocket, log, stats)
if err != nil {
return nil, err
}
return NewAsyncReader("websocket", true, reader.NewAsyncPreserver(ws), log, stats)
}
//------------------------------------------------------------------------------<|fim▁end|> | "github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/internal/http/docs/auth"
"github.com/Jeffail/benthos/v3/internal/interop"
"github.com/Jeffail/benthos/v3/internal/log" |
<|file_name|>importer.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import time
from operator import itemgetter
from sqlalchemy.sql import func, select
from indico.core.db.sqlalchemy import db
from indico.core.db.sqlalchemy.protection import ProtectionMode
from indico.modules.groups import GroupProxy
from indico_migrate.logger import logger_proxy
from indico_migrate.util import convert_to_unicode
class Importer(object):
step_name = ''
#: Specify plugins that need to be loaded for the import (e.g. to access its .settings property)
plugins = frozenset()
print_info = logger_proxy('info')
print_success = logger_proxy('success')
print_warning = logger_proxy('warning')
print_error = logger_proxy('error')
print_log = logger_proxy('log')
def __init__(self, logger, app, sqlalchemy_uri, zodb_root, verbose, dblog, default_group_provider, tz, **kwargs):
self.sqlalchemy_uri = sqlalchemy_uri
self.quiet = not verbose
self.dblog = dblog
self.zodb_root = zodb_root
self.app = app
self.tz = tz
self.default_group_provider = default_group_provider
self.logger = logger
self.initialize_global_ns(Importer._global_ns)
def initialize_global_ns(self, g):
pass
@property
def log_prefix(self):
return '%[cyan]{:<14}%[reset]'.format('[%[grey!]{}%[cyan]]'.format(self.step_name))
@property
def makac_info(self):
return self.zodb_root['MaKaCInfo']['main']
@property
def global_ns(self):
return Importer._global_ns
def __repr__(self):
return '<{}({})>'.format(type(self).__name__, self.sqlalchemy_uri)
def flushing_iterator(self, iterable, n=5000):
"""Iterates over `iterable` and flushes the ZODB cache every `n` items.
:param iterable: an iterable object
:param n: number of items to flush after
"""
conn = self.zodb_root._p_jar
for i, item in enumerate(iterable, 1):
yield item
if i % n == 0:
conn.sync()
def convert_principal(self, old_principal):
"""Converts a legacy principal to PrincipalMixin style"""
if old_principal.__class__.__name__ == 'Avatar':
principal = self.global_ns.avatar_merged_user.get(old_principal.id)
if not principal and 'email' in old_principal.__dict__:
email = convert_to_unicode(old_principal.__dict__['email']).lower()
principal = self.global_ns.users_by_primary_email.get(
email, self.global_ns.users_by_secondary_email.get(email))
if principal is not None:
self.print_warning('Using {} for {} (matched via {})'.format(principal, old_principal, email))
if not principal:
self.print_error("User {} doesn't exist".format(old_principal.id))
return principal
elif old_principal.__class__.__name__ == 'Group':
assert int(old_principal.id) in self.global_ns.all_groups
return GroupProxy(int(old_principal.id))
elif old_principal.__class__.__name__ in {'CERNGroup', 'LDAPGroup', 'NiceGroup'}:
return GroupProxy(old_principal.id, self.default_group_provider)
def convert_principal_list(self, opt):
"""Convert ACL principals to new objects"""
return set(filter(None, (self.convert_principal(principal) for principal in opt._PluginOption__value)))
def fix_sequences(self, schema=None, tables=None):
for name, cls in sorted(db.Model._decl_class_registry.iteritems(), key=itemgetter(0)):
table = getattr(cls, '__table__', None)
if table is None:
continue
elif schema is not None and table.schema != schema:
continue
elif tables is not None and cls.__tablename__ not in tables:
continue
# Check if we have a single autoincrementing primary key
candidates = [col for col in table.c if col.autoincrement and col.primary_key]
if len(candidates) != 1 or not isinstance(candidates[0].type, db.Integer):
continue
serial_col = candidates[0]
sequence_name = '{}.{}_{}_seq'.format(table.schema, cls.__tablename__, serial_col.name)
query = select([func.setval(sequence_name, func.max(serial_col) + 1)], table)
db.session.execute(query)
db.session.commit()
def protection_from_ac(self, target, ac, acl_attr='acl', ac_attr='allowed', allow_public=False):
"""Convert AccessController data to ProtectionMixin style.
This needs to run inside the context of `patch_default_group_provider`.
:param target: The new object that uses ProtectionMixin
:param ac: The old AccessController
:param acl_attr: The attribute name for the acl of `target`
:param ac_attr: The attribute name for the acl in `ac`
:param allow_public: If the object allows `ProtectionMode.public`.
Otherwise, public is converted to inheriting.
"""
if ac._accessProtection == -1:
target.protection_mode = ProtectionMode.public if allow_public else ProtectionMode.inheriting
elif ac._accessProtection == 0:
target.protection_mode = ProtectionMode.inheriting
elif ac._accessProtection == 1:
target.protection_mode = ProtectionMode.protected
acl = getattr(target, acl_attr)
for principal in getattr(ac, ac_attr):
principal = self.convert_principal(principal)
assert principal is not None
acl.add(principal)
else:
raise ValueError('Unexpected protection: {}'.format(ac._accessProtection))
class TopLevelMigrationStep(Importer):
def run(self):
start = time.time()
self.pre_migrate()
try:
self.migrate()
finally:
self.post_migrate()
self.print_log('%[cyan]{:.06f} seconds%[reset]\a'.format((time.time() - start)))
def pre_migrate(self):
pass
def migrate(self):
raise NotImplementedError
<|fim▁hole|><|fim▁end|> | def post_migrate(self):
pass |
<|file_name|>methods.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, absolute_import, division
import numpy as np
import pandas as pd
from toolz import partition
def loc(df, ind):
return df.loc[ind]
def index_count(x):
# Workaround since Index doesn't implement `.count`
return pd.notnull(x).sum()
def mean_aggregate(s, n):
try:
return s / n
except ZeroDivisionError:
return np.nan<|fim▁hole|> result = (x2 / n) - (x / n)**2
if ddof != 0:
result = result * n / (n - ddof)
return result
except ZeroDivisionError:
return np.nan
def describe_aggregate(values):
assert len(values) == 6
count, mean, std, min, q, max = values
typ = pd.DataFrame if isinstance(count, pd.Series) else pd.Series
part1 = typ([count, mean, std, min],
index=['count', 'mean', 'std', 'min'])
q.index = ['25%', '50%', '75%']
part3 = typ([max], index=['max'])
return pd.concat([part1, q, part3])
def cummin_aggregate(x, y):
if isinstance(x, (pd.Series, pd.DataFrame)):
return x.where((x < y) | x.isnull(), y, axis=x.ndim - 1)
else: # scalar
return x if x < y else y
def cummax_aggregate(x, y):
if isinstance(x, (pd.Series, pd.DataFrame)):
return x.where((x > y) | x.isnull(), y, axis=x.ndim - 1)
else: # scalar
return x if x > y else y
def assign(df, *pairs):
kwargs = dict(partition(2, pairs))
return df.assign(**kwargs)
def unique(x, series_name=None):
# unique returns np.ndarray, it must be wrapped
return pd.Series(pd.Series.unique(x), name=series_name)
def value_counts_aggregate(x):
return x.groupby(level=0).sum().sort_values(ascending=False)
def nbytes(x):
return x.nbytes<|fim▁end|> |
def var_aggregate(x2, x, n, ddof):
try: |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import Component from './ImageSlider';<|fim▁hole|>export default StyledComponent(Component);<|fim▁end|> | import StyledComponent from './styles';
|
<|file_name|>0003_auto_20150423_1545.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connectors', '0002_parameter_description'),
]
operations = [
migrations.RenameField(<|fim▁hole|> old_name='type',
new_name='method',
),
]<|fim▁end|> | model_name='callback', |
<|file_name|>vault-keyring.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <[email protected]>
# (c) 2016, Justin Mayer <https://justinmayer.com/>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# =============================================================================
#
# This script is to be used with vault_password_file or --vault-password-file
# to retrieve the vault password via your OS's native keyring application.
#
# This file *MUST* be saved with executable permissions. Otherwise, Ansible
# will try to parse as a password file and display: "ERROR! Decryption failed"
#
# The `keyring` Python module is required: https://pypi.python.org/pypi/keyring
#
# By default, this script will store the specified password in the keyring of
# the user that invokes the script. To specify a user keyring, add a [vault]
# section to your ansible.cfg file with a 'username' option. Example:
#
# [vault]
# username = 'ansible-vault'
#
# Another optional setting is for the key name, which allows you to use this
# script to handle multiple project vaults with different passwords:
#
# [vault]
# keyname = 'ansible-vault-yourproject'
#
# You can configure the `vault_password_file` option in ansible.cfg:
#
# [defaults]
# ...<|fim▁hole|># ...
#
# To set your password, `cd` to your project directory and run:
#
# python /path/to/vault-keyring.py set
#
# If you choose not to configure the path to `vault_password_file` in
# ansible.cfg, your `ansible-playbook` command might look like:
#
# ansible-playbook --vault-password-file=/path/to/vault-keyring.py site.yml
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
import sys
import getpass
import keyring
import ansible.constants as C
def main():
(parser, config_path) = C.load_config_file()
if parser.has_option('vault', 'username'):
username = parser.get('vault', 'username')
else:
username = getpass.getuser()
if parser.has_option('vault', 'keyname'):
keyname = parser.get('vault', 'keyname')
else:
keyname = 'ansible'
if len(sys.argv) == 2 and sys.argv[1] == 'set':
intro = 'Storing password in "{}" user keyring using key name: {}\n'
sys.stdout.write(intro.format(username, keyname))
password = getpass.getpass()
confirm = getpass.getpass('Confirm password: ')
if password == confirm:
keyring.set_password(keyname, username, password)
else:
sys.stderr.write('Passwords do not match\n')
sys.exit(1)
else:
sys.stdout.write('{}\n'.format(keyring.get_password(keyname,
username)))
sys.exit(0)
if __name__ == '__main__':
main()<|fim▁end|> | # vault_password_file = /path/to/vault-keyring.py |
<|file_name|>fmt.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Deprecated fmt! syntax extension
use ast;
use codemap::Span;
use ext::base;
use ext::build::AstBuilder;<|fim▁hole|> _tts: &[ast::TokenTree]) -> base::MacResult {
ecx.span_err(sp, "`fmt!` is deprecated, use `format!` instead");
ecx.parse_sess.span_diagnostic.span_note(sp,
"see http://static.rust-lang.org/doc/master/std/fmt/index.html \
for documentation");
base::MRExpr(ecx.expr_uint(sp, 2))
}<|fim▁end|> |
pub fn expand_syntax_ext(ecx: &mut base::ExtCtxt, sp: Span, |
<|file_name|>add.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011, 2012 Free Software Foundation
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This project incorporates work covered by the following copyright and permission notice:
# Copyright (c) 2009, Julien Fache
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the author nor the names of other
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
# Copyright (c) 2011, 2012 Free Software Foundation
# This program is free software: you can redistribute it and/or modify<|fim▁hole|># published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Urls for Objectapp forms"""
from django.conf.urls.defaults import url
from django.conf.urls.defaults import patterns
urlpatterns = patterns('objectapp.views.add',
url(r'^gbobject/$', 'addgbobject',
name='objectapp_add_gbobject'),
url(r'^process/$', 'addprocess',
name='objectapp_add_gbobject'),
url(r'^system/$', 'addsystem',
name='objectapp_add_system'),
)<|fim▁end|> | # it under the terms of the GNU Affero General Public License as |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: iso-8859-2 -*-
#
# Copyright (C) 2007 Adam Folmert <[email protected]>
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
#
#
"""This is the module for models used in Mentor GUI"""
import release
__author__ = '%s <%s>' % \
( release.authors['afolmert'][0], release.authors['afolmert'][1])
__license__ = release.license
__version__ = release.version
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from cards import Card, Cards
from utils import isstring, log
from utils_qt import tr
class CardModel(QAbstractItemModel):
"""Model to be used for list and tree view."""
class InvalidIndexError(Exception): pass
class ModelNotActiveError(Exception): pass
def __init__(self, parent=None):
QAbstractListModel.__init__(self, parent)
self.cards = Cards()
def _checkIndex(self, index):
if index is None or not index.isValid() or index == QModelIndex():
raise CardModel.InvalidIndexError, "Invalid index given"
def _checkActive(self):
if not self.isActive():
raise CardModel.ModelNotActiveError, "Model is not active. Use open first."
def open(self, dbpath):
self.cards.open(str(dbpath))
# FIXME why these do not work??
self.reset()
# ^ self.emit(SIGNAL('modelReset()'))
def close(self):
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.close()
self.reset()
def filepath(self):
"""Returns path to currently open database"""
if self.cards.isOpen():
return self.cards.db_path
else:
return None
def isActive(self):
return self.cards.isOpen()
def parent(self, index):
return QModelIndex()
def rowCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
if self.cards.isOpen():
return self.cards.getCardsCount()
else:
return 0
def columnCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
if self.cards.isOpen():
return 5
else:
return 0
def index(self, row, column, parent=QModelIndex()):
if row < 0 or column < 0 or not self.cards.isOpen():
return QModelIndex()
else:
# returns index with given card id
header = self.cards.getCardHeaders('', row, row + 1)
if len(header) == 1:
return self.createIndex(row, column, int(header[0][0]))
else:
return QModelIndex()
# for display role only id+question in following columns will be
# for specific data , in the following columns
def data(self, index, role=Qt.DisplayRole):
self._checkIndex(index)
if role not in (Qt.DisplayRole, Qt.UserRole):
return QVariant()
card = self.cards.getCard(index.internalId())
if role == Qt.UserRole:
return card
else:
if index.column() == 0:
return QVariant('#%d %s' % (card.id, str(card.question).strip()))
elif index.column() == 1:
return QVariant('%s' % str(card.answer).strip())<|fim▁hole|> return QVariant('%s' % str(card.answer_hint).strip())
elif index.column() == 4:
return QVariant('%s' % str(card.score))
else:
return QVariant()
def flags(self, index):
return QAbstractListModel.flags(self, index) | Qt.ItemIsEnabled | Qt.ItemIsSelectable
def headerData(self, section, orientation, role=Qt.DisplayRole):
if role == Qt.DisplayRole:
if orientation == Qt.Horizontal:
if section == 0:
return QVariant("Question")
elif section == 1:
return QVariant("Answer")
elif section == 2:
return QVariant(tr("Question hint"))
elif section == 3:
return QVariant(tr("Answer hint"))
elif section == 4:
return QVariant(tr("Score"))
else:
return QVariant()
else:
return QVariant(str(section))
return QVariant()
def getPreviousIndex(self, index):
"""Returns previous index before given or given if it's first."""
self._checkIndex(index)
if index.row() == 0:
return index
else:
return self.index(index.row() - 1, 0)
# pointer , get row before
def getNextIndex(self, index):
"""Returns next index after given or given if it's last."""
self._checkIndex(index)
if index.row() == self.rowCount() - 1:
return index
else:
return self.index(index.row() + 1, 0)
# get row after ?
# TODO
# what about inserting rows
# and moving rows up and down ??
# must have parameter position or display position ??
# TODO
# add special handlers like rowsAboutToBeInserted etc .
# right now only model to be reset
def addNewCard(self):
"""Adds a new empty card."""
self.emit(SIGNAL('modelAboutToBeReset()'))
rowid = self.cards.addCard(Card())
# TODO is it ok to return it here?
result = self.createIndex(self.cards.getCardsCount(), 0, rowid)
# cards.addCard(Card())
# TODO
# why these do not work ?
self.reset()
# self.emit(SIGNAL('modelReset()'))
#
return result
def deleteCard(self, index):
self._checkIndex(index)
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.deleteCard(index.internalId())
# why these do not work??
self.reset()
# self.emit(SIGNAL('modelReset()'))
# cards - deleteCard card_id
# TODO question
# how to update card if peg is somewhere else ?
# maybe keep blob as well ?
# the items are then splitted
def updateCard(self, index, question, answer):
self._checkIndex(index)
card = Card(index.internalId(), question, answer)
self.cards.updateCard(card)
# update data in the model
self.emit(SIGNAL('dataChanged(QModelIndex)'), index)
# TODO model should not have any algorithms - it should be just as a proxy
# between database and any more advanced algorithm
# e.g. database importer
# btw. they should use the same classes with the probe program
# TODO progress bar for importing and possibility to cancel if is a long
# operatoin
def importQAFile(self, file, clean=True):
"""Import cards from given question&answer file.
@param file can be file name or file like object
"""
self.emit(SIGNAL('modelAboutToBeReset()'))
self._checkActive()
if isstring(file):
file = open(file, 'rt')
if clean:
self.cards.deleteAllCards()
prefix = ''
last_prefix = ''
card = Card()
for line in file.readlines():
if line.upper().startswith('Q:') or line.upper().startswith('A:'):
last_prefix = prefix
prefix = line[:2].upper()
line = line[3:]
# if new card then recreate
if prefix == 'Q:' and prefix != last_prefix:
if not card.isEmpty():
self.cards.addCard(card, False)
card = Card()
if line.strip() != '':
if prefix == 'Q:':
card.question += line
else: # prefix == a
card.answer += line
# add last card
if not card.isEmpty():
self.cards.addCard(card)
# TODO do it in a real transaction way
# in case of error do a rollback
self.cards.commit()
self.reset()
# FIXME
# How should I design it ?
# Right now it is just a container (stack) for a bunch of cards which get
# randomized
class DrillModel(QAbstractItemModel):
"""Model for drilling cards"""
# scores
Good, Bad = range(2)
def __init__(self, parent=None):
QAbstractItemModel.__init__(self, parent)
self.cards = []
def parent(self, index=QModelIndex()):
return QModelIndex()
def rowCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
return len(self.cards)
def columnCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
else:
return 1
def index(self, row, column, parent=QModelIndex()):
if parent.isValid():
return QModelIndex()
else:
if row >= 0 and row < len(self.cards) and column == 0:
return self.createIndex(row, column, None)
else:
return QModelIndex()
def data(self, index, role=Qt.DisplayRole):
if role not in (Qt.DisplayRole,):
return QVariant()
else:
if index.row() < len(self.cards):
card = self.cards[index.row()]
return QVariant("%d %s" % (card.id, card.question))
else:
return QVariant()
def headerData(self, section, orientation, role=Qt.DisplayRole):
return QVariant(str(section))
# return QAbstractItemModel.headerData(self, section, orientation, role)
def flags(self, index):
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
def addCard(self, card):
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.append(card)
self.reset()
def clear(self):
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.clear()
self.reset()
def selectNextCard(self):
# take from the stack and put it on top
if len(self.cards) > 0:
self.emit(SIGNAL('modelAboutToBeReset()'))
result = self.cards[0]
self.cards = self.cards[1:]
self.cards.append(result)
self.reset()
return result
else:
return Card()
def removeCard(self, card):
try:
self.emit(SIGNAL('modelAboutToBeReset()'))
self.cards.remove(card)
self.reset()
except:
pass
def scoreCard(self, card, score):
if score == DrillModel.Good:
log("Card: $card will be removed from drill.")
self.removeCard(card)
def shuffleCards(self):
from random import shuffle
self.emit(SIGNAL('modelAboutToBeReset()'))
shuffle(self.cards)
self.reset()
def printCards(self):
print "Printing cards..."
sys.stdout.flush()
i = 0
for card in self.cards:
print "%d %s\n" % (i, str(card))
sys.stdout.flush()
i += 1
print "Done."
sys.stdout.flush()
def main():
pass
if __name__ == '__main__':
main()<|fim▁end|> | elif index.column() == 2:
return QVariant('%s' % str(card.question_hint).strip())
elif index.column() == 3: |
<|file_name|>JobOrderListResolve.js<|end_file_name|><|fim▁begin|>define(function () {
return {
customerAccount: ['CustomerAccountService', function (CustomerAccountService) {<|fim▁hole|><|fim▁end|> | return CustomerAccountService.getCurrent();
}]
};
}); |
<|file_name|>health-check.ts<|end_file_name|><|fim▁begin|>export interface IHealthCheck {
check_interval_seconds?: number;<|fim▁hole|> protocol: string;
response_timeout_seconds?: number;
unhealthy_threshold?: number;
}<|fim▁end|> | healthy_threshold?: number;
path?: string;
port: number; |
<|file_name|>issue-15381.rs<|end_file_name|><|fim▁begin|>fn main() {
let values: Vec<u8> = vec![1,2,3,4,5,6,7,8];<|fim▁hole|> println!("y={}", y);
}
}<|fim▁end|> |
for &[x,y,z] in values.chunks(3).filter(|&xs| xs.len() == 3) {
//~^ ERROR refutable pattern in `for` loop binding: `&[]`, `&[_]`, `&[_, _]` and 1 more not |
<|file_name|>static-resource.js<|end_file_name|><|fim▁begin|>import { StaticResource } from 'iab-vast-model'
<|fim▁hole|> const res = new StaticResource()
res.creativeType = $staticResource.creativeType
res.uri = $staticResource._value
return res
}<|fim▁end|> | export default ($staticResource) => { |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
<|fim▁hole|><|fim▁end|> | class OntakAppConfig(AppConfig):
name = "oioioi.ontak" |
<|file_name|>configInterface.ts<|end_file_name|><|fim▁begin|>/**
* Config Interface
*/
"use strict";
/* Node modules */
/* Third-party modules */
<|fim▁hole|>/* Files */
export interface IConfigInterface {
url: string;
mongoOptions?: {
uri_decode_auth?: boolean;
db?: any;
server?: any;
replSet?: any;
mongos?: any;
promiseLibrary?: any;
};
poolOptions?: {
name?: string;
max?: number;
min?: number;
refreshIdle?: boolean;
idleTimeoutMillis?: number;
reapIntervalMillis?: number;
returnToHead?: boolean;
priorityRange?: number;
validate?: (client: any) => boolean;
validateAsync?: (client: any, callback: (remove: boolean) => void) => void;
log?: boolean | ((log: string, level: string) => void);
};
}<|fim▁end|> | |
<|file_name|>test_get_rest_mags.py<|end_file_name|><|fim▁begin|>import unittest
import ezgal.zf_grid
import numpy as np
import math
# I put the test data for the zf_grid tests in
# tests.zf_grid instead of in tests because
# there is a lot of data but it is all
# specific for this test.
import tests.zf_grid
class test_get_rest_mags(tests.zf_grid.test_zf_grid):
def test_get_rest_mags(self):
self.assertTrue(np.allclose(
self.zf_grid.get_rest_mags(tests.zf_grid.test_zs),
[0.275, 0.75, 1.25, 1.75, 2.25, 2.778], 1e-4))
def test_get_rest_mags_lower_bound(self):
# if we go lower than our lowest grided z then
# we should get a nan
vals = self.zf_grid.get_rest_mags([-1])
self.assertTrue(math.isnan(vals[0]))
def test_get_rest_mags_upper_bound(self):
# if we go lower than our lowest grided z then
# we should get a nan
vals = self.zf_grid.get_rest_mags([4])
<|fim▁hole|> self.assertTrue(math.isnan(vals[0]))
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>faceup.py<|end_file_name|><|fim▁begin|>import time
import maestro
# servo 0 is left/right
# servo 1 is up/down
try:
servo = maestro.Controller()
servo.setRange(1,4000,8000)
# about 5 clicks per full motion
# 1040 for left/right + is left, - is right.
# 800 for up/down + is up, - is down.<|fim▁hole|> servo.setTarget(1,x)
finally:
servo.close<|fim▁end|> | x = servo.getPosition(1) + 800
servo.setAccel(1,6) |
<|file_name|>RpcImporter.java<|end_file_name|><|fim▁begin|>package com.netwebx.hackerrank.rpc.client;
import java.io.ObjectInputStream;<|fim▁hole|>import java.net.InetSocketAddress;
import java.net.Socket;
/**
* Created by apple on 2017/2/26.
*/
public class RpcImporter<S> {
public S importer(final Class<?> serviceClass, final InetSocketAddress addr) {
return (S) Proxy.newProxyInstance(
serviceClass.getClassLoader(),
new Class<?>[]{serviceClass.getInterfaces()[0]},
new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
Socket socket = null;
ObjectOutputStream output = null;
ObjectInputStream input = null;
try {
socket = new Socket();
socket.connect(addr);
output = new ObjectOutputStream(socket.getOutputStream());
output.writeUTF(serviceClass.getName());
output.writeUTF(method.getName());
output.writeObject(method.getParameterTypes());
output.writeObject(args);
input = new ObjectInputStream(socket.getInputStream());
return input.readObject();
} finally {
if (socket != null) {
socket.close();
}
if (output != null) {
output.close();
}
if (input != null) {
input.close();
}
}
}
}
);
}
}<|fim▁end|> | import java.io.ObjectOutputStream;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy; |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# (c) 2015 ACSONE SA/NV, Dhinesh D
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "Inactive Sessions Timeout",
'summary': """
This module disable all inactive sessions since a given delay""",
'author': "ACSONE SA/NV, "
"Dhinesh D, "
"Jesse Morgan, "
"LasLabs, "<|fim▁hole|> "Odoo Community Association (OCA)",
'maintainer': 'Odoo Community Association (OCA)',
'website': "http://acsone.eu",
'category': 'Tools',
'version': '10.0.1.0.2',
'license': 'AGPL-3',
'data': [
'data/ir_config_parameter_data.xml'
],
'installable': True,
}<|fim▁end|> | |
<|file_name|>iword_embedding.py<|end_file_name|><|fim▁begin|>"""
Contains basic interface (abstract base class) for word embeddings.
"""
import os
from abc import ABCMeta, abstractmethod
class IWordEmbedding(object):
"""
Abstract base class for word embeddings
"""
__metaclass__ = ABCMeta
def __init__(self, path, vector_length):
self.model = None
self.path = path
self.vector_length = vector_length
self.already_built = False
@abstractmethod
def _build(self):
raise NotImplementedError
@abstractmethod
def __getitem__(self, word):
raise NotImplementedError
def build(self):
""" Loads word embedding from its file """
if not self.already_built:
print("Loading pre-trained word embedding from {0}...".format(self.path))
self._build()
self.already_built = True
print("Pre-trained word embedding from {0} loaded!".format(self.path))<|fim▁hole|> """ :return: absolute path to folder containing saved word embedding model """
return os.path.join(os.path.dirname(__file__), '../../../models/word_embeddings', self.path)
@staticmethod
def data_file_to_sentences(data_file_path):
"""
Converts a processed data file to generator of lists of words
:param data_file_path: path to data file
:return: iterator yielding sentences as lists of words
"""
with open(data_file_path, 'r') as f:
for line in f:
sentence = line.split(' ')[1]
yield map(lambda word: word.rstrip(), sentence.split(','))
def __str__(self):
return type(self).__name__<|fim▁end|> |
def get_embedding_model_path(self): |
<|file_name|>test_itemmotion_2.js<|end_file_name|><|fim▁begin|>/**
* Created by whobird on 17/4/10.
*/
var winResizeDuration=null;
$(document).ready(function(){
if(!IsPC()){
window.location="./index.html";
return;
}
//project init
//start
$("img.lazy").lazyload({
event : "sporty",
effect : "fadeIn"
});
setPage();
$('#preloader').delay(350).fadeOut(function(){
$("img.lazy").trigger("sporty");
});
var slideCount=$(".section-wrapper").find(".section-slide").length;
var curIndex=0;
var curIndexProgress=0;
var lastProgress=0;
var checkPositionWorker=null;
function setPosition(){
console.log("timeout=============action=================");
console.log(curIndex);
mySwiper.slideTo(curIndex);
setTimeout(function(){
if(curIndex<0){curIndex=0;}
if(curIndex>=(slideCount-1)){
curIndex=slideCount-1;
//console.log(">=slide count -1===================")
//最后一张动画要反向;
//console.log("translate3d("+(-20*(curIndex-2))+"%, 0px, 0px)");
$(".section-wrapper").css({
"transform": "translate3d("+(-50*(curIndex-2))+"%, 0px, 0px)",
});
tweenAnim.setBox(curIndex,-1);
for(i=0;i<slideCount;i++) {
if (i == (curIndex - 1) || i == (curIndex - 2)) {
$("#section-" + i).removeClass("active").css("width", "20%");
} else if (i == curIndex) {
$("#section-" + curIndex).addClass("active").css("width", "50%");
} else {
$("#section-" + i).removeClass("active").css("width", "50%");
}
}
}else{
//console.log("curIndex:"+curIndex);
//console.log("translate3d("+(-20*curIndex)+"%, 0px, 0px)");
$(".section-wrapper").css({
"transform": "translate3d("+(-50*curIndex)+"%, 0px, 0px)",
});
tweenAnim.setBox(curIndex,0);
for(i=0;i<slideCount;i++){
if(i>curIndex){
$("#section-"+i).removeClass("active").css("width","20%");
}else if(i<curIndex){
$("#section-"+i).removeClass("active").css("width","50%");
}else{
$("#section-"+curIndex).addClass("active").css("width","50%");
}
}
}
},100);
clearTimeout(checkPositionWorker);
checkPositionWorker=null;
};
var mySwiper = new Swiper('.swiper-container', {
// slidesPerView: 3.2,
freeMode:true,
freeModeMomentum:false,
/* freeModeMomentumBounce : true,
freeModeMomentumBounceRatio:10,*/
slidesPerView: 'auto',
observer:true,
observerParents:true,
mousewheelControl:true,
mousewheelSensitivity :1.8,
watchSlidesProgress : true,
touchRatio:3.8,
//shortSwipes:false,
//threshold : 100,<|fim▁hole|> grabCursor : true,
onProgress: function(swiper, progress){
//根据 progress 计算当前的index
var indexRate=100/(slideCount-1);
var index=Math.round(progress*100/indexRate)||0;
if(index<0){index=0;}
if(index>=(slideCount-1)){
index=slideCount-1;
//最后一张动画要反向;
var indexProgress=swiper.slides[index].progress;
$(".section-wrapper").css({
"transform": "translate3d("+((-50*(index-2))-(50*indexProgress) )+"%, 0px, 0px)",
});
}else if(index==(slideCount-2)){
var indexProgress=swiper.slides[index].progress;
//倒数第二张情况分两种
if(indexProgress>0){
//当前slide移走 时,最后一张进入,动画要反向
$(".section-wrapper").css({
"transform": "translate3d("+((-50*(index))+(50*indexProgress) )+"%, 0px, 0px)",
});
}else if(indexProgress<0){
//当前slide 移过来
$(".section-wrapper").css({
"transform": "translate3d("+((-50*(index))-(50*indexProgress) )+"%, 0px, 0px)",
});
}
}else{
var indexProgress=swiper.slides[index].progress;
$(".section-wrapper").css({
"transform": "translate3d("+((-50*(index))-(50*indexProgress) )+"%, 0px, 0px)",
});
}
curIndex=index;
if(index>0 && index<slideCount-1){
if(indexProgress>0){
//当前slide移走
var $targetSection=$("#section-"+(index+1));
$targetSection.css("width",(20+30*indexProgress)+"%");
tweenAnim.update(curIndex+1,indexProgress,1);
}else if(indexProgress<0){
//当前slide 移过来
var $curSection=$("#section-"+index);
$curSection.css("width",(50+30*indexProgress)+"%");
tweenAnim.update(curIndex,indexProgress,0);
}
}else if(index==0){
//index==0只处理slide移走情况
if(indexProgress>0){
var $targetSection=$("#section-"+(index+1));
$targetSection.css("width",(20+30*indexProgress)+"%");
tweenAnim.update(curIndex+1,indexProgress,1);
}
}else if(index==(slideCount-1)){
//最后一张考虑 slide 移过来
if(indexProgress<0){
//当前slide 移过来
var $curSection=$("#section-"+index);
var $prevSection=$("#section-"+(index-1));
var $prevSection2=$("#section-"+(index-2));
$curSection.css("width",(50+30*indexProgress)+"%");
$prevSection.css("width",(20-30*indexProgress)+"%");
$prevSection.css("width",(20-30*indexProgress)+"%");
tweenAnim.update(curIndex,indexProgress,0);
tweenAnim.update(curIndex-1,indexProgress,-2);
tweenAnim.update(curIndex-2,indexProgress,-3);
}
}
clearTimeout(checkPositionWorker);
checkPositionWorker=null;
if(!checkPositionWorker){
//console.log("timeout=============");
checkPositionWorker=setTimeout(setPosition,300);
}
},
onTransitionEnd: function(swiper){
console.log("transitionEnd======================");
}
});
$(".nav-btn").on("click",function(e){
$(".main").toggleClass("nav-active");
});
$(".swiper-slide > div.col-next").on("click",function(e){
e.preventDefault();
var index=$(this).data("index");
console.log("index============"+index);
var offset=parseInt($(".swiper-slide").css("width"))*(index-0.3);
console.log(offset);
mySwiper.setWrapperTranslate(-offset);
});
$(window).on("resize",function(e){
cancelAnimationFrame (winResizeDuration);
winResizeDuration=null;
if(!winResizeDuration){
winResizeDuration=requestAnimationFrame(setPage);
}
});
});
function setPage(){
setBottom();
setScale();
}
function setBottom(){
//计算底部比例,保证底部放大时正好触底
var w=parseFloat($(".section-slide.active").css("width"));
var h=parseFloat($("body").css("height"));
var imageRate=(1000/1980)//根据设计稿
var bottomPercent=(w-22)*imageRate/h;
$(".slide-content-set").css("height",bottomPercent*100+"%");
}
function setScale(){
var curW=parseFloat($(".section-slide.active").css("width"))*0.4;
var targetW=400;
var scaleRate=curW/targetW;
if(scaleRate>1){
scaleRate=1;
}
$(".slide-head").css({
"-webkit-transform":"scale("+scaleRate+")",
"transform":"scale("+scaleRate+")"
});
$(".slide-header-index").css({
"-webkit-transform":"scale("+scaleRate+")",
"transform":"scale("+scaleRate+")"
});
$(".slide-head-list").css({
"-webkit-transform":"scale("+scaleRate+")",
"transform":"scale("+scaleRate+")"
});
var fontScaleSize=14*scaleRate<10?(10/scaleRate):14;
$(".slide-head").find("p").css("font-size",(fontScaleSize)+"px");
$(".section-list-title").css("font-size",(fontScaleSize)+"px");
var contentScaleRate=fontScaleSize*scaleRate/12;
if(contentScaleRate>1){
contentScaleRate=1;
}
console.log("contentScaleRate============="+contentScaleRate);
var contentWidth=curW/contentScaleRate;
$(".content-main").css({
"min-width":contentWidth+"px",
"-webkit-transform":"scale("+contentScaleRate+")",
"transform":"scale("+contentScaleRate+")",
//"font-size":
});
$("#section-0 .slide-head").css({
"-webkit-transform":"scale("+contentScaleRate+")",
"transform":"scale("+contentScaleRate+")"
})
}<|fim▁end|> | /*freeModeSticky:true,
freeModeMomentumBounce : true,
freeModeMomentumBounceRatio:10,*/ |
<|file_name|>mod_pow.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::mod_pow::simple_binary_mod_pow;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;<|fim▁hole|>use malachite_base_test_util::bench::bucketers::triple_2_3_product_bit_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::{
unsigned_triple_gen_var_14, unsigned_triple_gen_var_15,
};
use malachite_base_test_util::num::arithmetic::mod_pow::naive_mod_pow;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_unsigned_demos!(runner, demo_mod_pow);
register_unsigned_demos!(runner, demo_mod_pow_assign);
register_unsigned_benches!(runner, benchmark_mod_pow_algorithms);
register_unsigned_benches!(runner, benchmark_mod_pow_naive_algorithms);
register_unsigned_benches!(runner, benchmark_mod_pow_assign);
register_unsigned_benches!(runner, benchmark_mod_pow_precomputed_algorithms);
}
fn demo_mod_pow<T: PrimitiveUnsigned>(gm: GenMode, config: GenConfig, limit: usize) {
for (x, exp, m) in unsigned_triple_gen_var_15::<T, u64>()
.get(gm, &config)
.take(limit)
{
println!("{}.pow({}) === {} mod {}", x, exp, x.mod_pow(exp, m), m);
}
}
fn demo_mod_pow_assign<T: PrimitiveUnsigned>(gm: GenMode, config: GenConfig, limit: usize) {
for (mut x, exp, m) in unsigned_triple_gen_var_15::<T, u64>()
.get(gm, &config)
.take(limit)
{
let old_x = x;
x.mod_pow_assign(exp, m);
println!(
"x := {}; x.mod_pow_assign({}, {}); x = {}",
old_x, exp, m, x
);
}
}
fn benchmark_mod_pow_algorithms<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.mod_pow(u64, {})", T::NAME, T::NAME),
BenchmarkType::Algorithms,
unsigned_triple_gen_var_15::<T, u64>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_2_3_product_bit_bucketer("exp", "m"),
&mut [
("default", &mut |(x, exp, m)| no_out!(x.mod_pow(exp, m))),
("simple binary", &mut |(x, exp, m)| {
no_out!(simple_binary_mod_pow(x, exp, m))
}),
],
);
}
fn benchmark_mod_pow_naive_algorithms<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.mod_pow(u64, {})", T::NAME, T::NAME),
BenchmarkType::Algorithms,
unsigned_triple_gen_var_14::<T, u64>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_2_3_product_bit_bucketer("exp", "m"),
&mut [
("default", &mut |(x, exp, m)| no_out!(x.mod_pow(exp, m))),
("naive", &mut |(x, exp, m)| {
no_out!(naive_mod_pow(x, exp, m))
}),
("simple binary", &mut |(x, exp, m)| {
no_out!(simple_binary_mod_pow(x, exp, m))
}),
],
);
}
fn benchmark_mod_pow_assign<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.mod_pow_assign(u64, {})", T::NAME, T::NAME),
BenchmarkType::Single,
unsigned_triple_gen_var_15::<T, u64>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_2_3_product_bit_bucketer("exp", "m"),
&mut [("Malachite", &mut |(mut x, exp, m)| x.mod_pow_assign(exp, m))],
);
}
fn benchmark_mod_pow_precomputed_algorithms<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.mod_pow(u64, {})", T::NAME, T::NAME),
BenchmarkType::Algorithms,
unsigned_triple_gen_var_15::<T, u64>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_2_3_product_bit_bucketer("exp", "m"),
&mut [
("default", &mut |(x, exp, m)| {
for _ in 0..10 {
x.mod_pow(exp, m);
}
}),
("precomputed", &mut |(x, exp, m)| {
let data = T::precompute_mod_pow_data(&m);
for _ in 0..10 {
x.mod_pow_precomputed(exp, m, &data);
}
}),
],
);
}<|fim▁end|> | |
<|file_name|>amp-instagram.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Embeds an instagram photo. The data-shortcode attribute can be
* easily copied from a normal instagram URL. Example: <code> <amp-instagram
* data-shortcode="fBwFP" data-captioned data-default-framing alt="Fastest page
* in the west." width="320" height="392" layout="responsive"> </amp-instagram>
* </code>
*
* For responsive embedding the width and height can be left unchanged from the
* example above and should produce the correct aspect ratio. amp-instagram will
* attempt to resize on load based on the height reported by the embedded frame.
* If captions are specified (data-captioned) then a resize will be requested
* every time due to the fact that it's not possible to know the height of the
* caption in advance.
*
* If captions are included it is stringly reccomended that an overflow element
* is also included. See description of overflow in amp-iframe.
*
* If data-default-framing is present will apply the default instagram frame
* style without changing the layout/size.
*/
import {CSS} from '../../../build/amp-instagram-0.1.css';
import {getData, listen} from '../../../src/event-helper';
import {isLayoutSizeDefined} from '../../../src/layout';
import {isObject} from '../../../src/types';
import {removeElement} from '../../../src/dom';
import {setStyles} from '../../../src/style';
import {startsWith} from '../../../src/string';
import {tryParseJson} from '../../../src/json';
import {user} from '../../../src/log';
class AmpInstagram extends AMP.BaseElement {
/** @param {!AmpElement} element */
constructor(element) {<|fim▁hole|> this.iframe_ = null;
/** @private {?string} */
this.shortcode_ = '';
/** @private {?Function} */
this.unlistenMessage_ = null;
/** @private {string} */
this.captioned_ = '';
/**
* @private {?Promise}
* @visibleForTesting
*/
this.iframePromise_ = null;
}
/**
* @param {boolean=} opt_onLayout
* @override
*/
preconnectCallback(opt_onLayout) {
// See
// https://instagram.com/developer/embedding/?hl=en
this.preconnect.url('https://www.instagram.com', opt_onLayout);
// Host instagram used for image serving. While the host name is
// funky this appears to be stable in the post-domain sharding era.
this.preconnect.url('https://instagram.fsnc1-1.fna.fbcdn.net',
opt_onLayout);
}
/** @override */
renderOutsideViewport() {
return false;
}
/** @override */
buildCallback() {
this.shortcode_ = user().assert(
(this.element.getAttribute('data-shortcode') ||
this.element.getAttribute('shortcode')),
'The data-shortcode attribute is required for <amp-instagram> %s',
this.element);
this.captioned_ = this.element.hasAttribute('data-captioned') ?
'captioned/' : '';
}
/** @override */
createPlaceholderCallback() {
const placeholder = this.win.document.createElement('div');
placeholder.setAttribute('placeholder', '');
const image = this.win.document.createElement('amp-img');
image.setAttribute('noprerender', '');
// This will redirect to the image URL. By experimentation this is
// always the same URL that is actually used inside of the embed.
image.setAttribute('src', 'https://www.instagram.com/p/' +
encodeURIComponent(this.shortcode_) + '/media/?size=l');
image.setAttribute('layout', 'fill');
image.setAttribute('referrerpolicy', 'origin');
this.propagateAttributes(['alt'], image);
/*
* Add instagram default styling
*/
if (this.element.hasAttribute('data-default-framing')) {
this.element.classList.add('amp-instagram-default-framing');
}
// This makes the non-iframe image appear in the exact same spot
// where it will be inside of the iframe.
setStyles(image, {
'top': '0 px',
'bottom': '0 px',
'left': '0 px',
'right': '0 px',
});
placeholder.appendChild(image);
return placeholder;
}
/** @override */
isLayoutSupported(layout) {
return isLayoutSizeDefined(layout);
}
/** @override */
layoutCallback() {
const iframe = this.element.ownerDocument.createElement('iframe');
this.iframe_ = iframe;
this.unlistenMessage_ = listen(
this.win,
'message',
this.handleInstagramMessages_.bind(this)
);
iframe.setAttribute('scrolling', 'no');
iframe.setAttribute('frameborder', '0');
iframe.setAttribute('allowtransparency', 'true');
//Add title to the iframe for better accessibility.
iframe.setAttribute('title', 'Instagram: ' +
this.element.getAttribute('alt'));
iframe.src = 'https://www.instagram.com/p/' +
encodeURIComponent(this.shortcode_) + '/embed/' +
this.captioned_ + '?cr=1&v=7';
this.applyFillContent(iframe);
this.element.appendChild(iframe);
setStyles(iframe, {
'opacity': 0,
});
return this.iframePromise_ = this.loadPromise(iframe).then(() => {
this.getVsync().mutate(() => {
setStyles(iframe, {
'opacity': 1,
});
});
});
}
/**
* @param {!Event} event
* @private
*/
handleInstagramMessages_(event) {
if (event.origin != 'https://www.instagram.com' ||
event.source != this.iframe_.contentWindow) {
return;
}
const eventData = getData(event);
if (!eventData || !(isObject(eventData)
|| startsWith(/** @type {string} */ (eventData), '{'))) {
return; // Doesn't look like JSON.
}
const data = isObject(eventData) ? eventData : tryParseJson(eventData);
if (data === undefined) {
return; // We only process valid JSON.
}
if (data['type'] == 'MEASURE' && data['details']) {
const height = data['details']['height'];
this.getVsync().measure(() => {
if (this.iframe_ && this.iframe_./*OK*/offsetHeight !== height) {
this./*OK*/changeHeight(height);
}
});
}
}
/** @override */
unlayoutOnPause() {
return true;
}
/** @override */
unlayoutCallback() {
if (this.iframe_) {
removeElement(this.iframe_);
this.iframe_ = null;
this.iframePromise_ = null;
}
if (this.unlistenMessage_) {
this.unlistenMessage_();
}
return true; // Call layoutCallback again.
}
}
AMP.extension('amp-instagram', '0.1', AMP => {
AMP.registerElement('amp-instagram', AmpInstagram, CSS);
});<|fim▁end|> | super(element);
/** @private {?Element} */ |
<|file_name|>hand.go<|end_file_name|><|fim▁begin|>package mahjong
type Hand []Pai
func remove(list []Pai, p Pai) []Pai {<|fim▁hole|> if e == p && !removed {
removed = true
} else {
result = append(result, e)
}
}
return result
}
func contain(list []Pai, p Pai) bool {
for _, a := range list {
if a == p {
return true
}
}
return false
}
func contain2(list []Pai, p Pai) bool {
count := 0
for _, a := range list {
if a == p {
count += 1
}
}
return count >= 2
}
func createCandidates(list []Pai, cand [][][]Pai) [][][]Pai {
if len(list) <= 0 {
return cand
}
current := list[0]
remain := list[1:]
nextOne := current + 1
nextTwo := current + 2
if current.IsNumber() {
if current.Suit() == nextOne.Suit() && current.Suit() == nextTwo.Suit() &&
contain(remain, nextOne) && contain(remain, nextTwo) {
idx := len(cand) - 1
tmp := make([][]Pai, len(cand[idx]))
copy(tmp, cand[idx])
cand[idx] = append(cand[idx], []Pai{current, nextOne, nextTwo})
_remain := remove(remove(remain, nextOne), nextTwo)
cand = createCandidates(_remain, cand)
cand = append(cand, tmp)
}
if current.Suit() == nextOne.Suit() && contain(remain, nextOne) {
idx := len(cand) - 1
tmp := make([][]Pai, len(cand[idx]))
copy(tmp, cand[idx])
cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextOne})
_remain := remove(remain, nextOne)
cand = createCandidates(_remain, cand)
cand = append(cand, tmp)
}
if current.Suit() == nextTwo.Suit() && contain(remain, nextTwo) {
idx := len(cand) - 1
tmp := make([][]Pai, len(cand[idx]))
copy(tmp, cand[idx])
cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextTwo})
_remain := remove(remain, nextTwo)
cand = createCandidates(_remain, cand)
cand = append(cand, tmp)
}
}
if contain2(remain, current) {
idx := len(cand) - 1
tmp := make([][]Pai, len(cand[idx]))
copy(tmp, cand[idx])
cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current, current})
_remain := remove(remove(remain, current), current)
cand = createCandidates(_remain, cand)
cand = append(cand, tmp)
}
if contain(remain, current) {
idx := len(cand) - 1
tmp := make([][]Pai, len(cand[idx]))
copy(tmp, cand[idx])
cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current})
_remain := remove(remain, current)
cand = createCandidates(_remain, cand)
cand = append(cand, tmp)
}
cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current})
return createCandidates(remain, cand)
}
func isUnique(list []Pai) bool {
result := []Pai{}
for _, p := range list {
if contain(result, p) {
// nothing to do
} else {
result = append(result, p)
}
}
return len(list) == len(result)
}
func isSevenPairs(list [][]Pai) bool {
if len(list) != 7 {
return false
}
stack := []Pai{}
for _, pair := range list {
if len(pair) == 2 && pair[0] != pair[1] {
return false
}
stack = append(stack, pair[0])
}
return isUnique(stack)
}
func isThirteenOrphans(list [][]Pai) bool {
if len(list) == 12 || len(list) == 13 {
for _, pair := range list {
for _, pai := range pair {
if !pai.IsOrphan() {
return false
}
}
}
return true
}
return false
}
func (hand *Hand) IsTenpai() bool {
_hand := *hand
cand := [][][]Pai{[][]Pai{}}
cand = createCandidates(_hand, cand)
for _, a := range cand {
// regular type
if len(a) == 5 {
return true
}
// seven pairs
if isSevenPairs(a) {
return true
}
if isThirteenOrphans(a) {
return true
}
}
return false
}<|fim▁end|> | var result []Pai
removed := false
for _, e := range list { |
<|file_name|>length_expr.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(custom_attribute, plugin, slice_bytes, vec_push_all)]
#![plugin(pnet_macros_plugin)]
extern crate pnet;
#[packet]
pub struct Key {
banana: u8,
#[length = "banana"]
#[payload]
payload: Vec<u8>
}
#[packet]
pub struct AnotherKey {
banana: u8,
#[length = "banana + 7"]
#[payload]
payload: Vec<u8>
}
fn main() {}<|fim▁end|> | // Copyright (c) 2015 Robert Clipsham <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
<|file_name|>context.rs<|end_file_name|><|fim▁begin|>//! Manage OpenGL context and window.
use std::string::ToString;
use std::rc::Rc;
use std::ops::Deref;
use std::cell::{UnsafeCell, RefCell, RefMut, Ref};
use glium::{Display, DisplayBuild};
use glium::glutin::WindowBuilder;
pub use glium::{Frame, Surface, SwapBuffersError, DrawError};
/// Just a reference of `Context`.
#[derive(Clone)]
pub struct Gfx(Rc<Context>);
impl Gfx {
pub fn new(ctx: Context) -> Gfx {
Gfx(Rc::new(ctx))
}
}
impl Deref for Gfx {
type Target = Context;
fn deref(&self) -> &Context {
&*self.0
}
}
/// Context handle object.
///
/// Manage `glium::Display` context and current frame.
pub struct Context {
pub display: Display,
frame: UnsafeCell<Option<RefCell<Frame>>>,
clear_color: (f32, f32, f32, f32),
}
impl Context {
/// Builds OpenGL context and create a window.
pub fn create<T: ToString>(title: T, (width, height): (u32, u32)) -> Context {
let display = WindowBuilder::new()
.with_title(title.to_string())
.with_dimensions(width, height)
.with_depth_buffer(24)
.with_vsync()
.build_glium()
.unwrap();
Context {
display: display,
frame: UnsafeCell::new(None),
clear_color: (0.0, 0.0, 0.0, 0.0),
}
}
/// Sets clear color.
pub fn clear_color(self, r: f32, g: f32, b: f32, a: f32) -> Context {
Context { clear_color: (r, g, b, a), ..self }
}
/// Into be a reference.
pub fn gfx(self) -> Gfx {
Gfx::new(self)
}
unsafe fn get_cell(&self) -> &mut Option<RefCell<Frame>> {
self.frame.get().as_mut().unwrap()
}
/// Start a new frame.
fn start_frame(&self) {
unsafe {
let mut cell = self.get_cell();
if cell.is_some() {
println!("Frame has already started.");
} else {
let mut frame = self.display.draw();
frame.clear_color_and_depth(self.clear_color, 1.0);
*cell = Some(RefCell::new(frame));
}
}
}
/// Get frame immutable reference.
/// # Panics
/// Panic if frame not created or something is mutable borrowing the frame.
pub fn get_frame(&self) -> Ref<Frame> {
unsafe {
let cell = self.get_cell();
if cell.is_none() {
panic!("Frame not exist.")
}
cell.as_ref().unwrap().borrow()
}
}
/// Get frame mutable reference.
/// # Panics
/// Panic if something is borrowing the frame.
pub fn get_frame_mut(&self) -> RefMut<Frame> {
unsafe {
let cell = self.get_cell();
if cell.is_none() {
panic!("Frame not exist.")
}
cell.as_ref().unwrap().borrow_mut()
}
}
/// End the frame.
fn end_frame(&self) -> Result<(), SwapBuffersError> {
unsafe {
let mut cell = self.get_cell();
if cell.is_none() {
println!("Frame has already ended.");
Ok(())
} else {
// Test whether the frame is borrowed.
// TODO: Waiting `borrow_state` stabilization:
// https://github.com/rust-lang/rust/issues/27733
let _ = cell.as_ref().unwrap().borrow_mut();
let frame = cell.take().unwrap().into_inner();
frame.finish()
}
}
}
/// Start a new frame and auto end it.
pub fn frame<F>(&self, f: F) -> Result<(), SwapBuffersError>
where F: FnOnce()
{
// TODO: Refactor and error handling.
self.start_frame();
f();
self.end_frame()<|fim▁hole|> /// and the window size in screen pixels.
pub fn hidpi_factor(&self) -> f32 {
self.display.get_window().unwrap().hidpi_factor()
}
}<|fim▁end|> | }
/// Returns the ratio between the backing framebuffer resolution |
<|file_name|>imagedata.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::codegen::Bindings::ImageDataBinding;
use dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use dom::bindings::js::Root;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use euclid::size::Size2D;
use js::jsapi::{Heap, JSContext, JSObject};
use js::rust::Runtime;
use js::typedarray::Uint8ClampedArray;
use std::default::Default;
use std::ptr;
use std::vec::Vec;
#[dom_struct]
pub struct ImageData {
reflector_: Reflector,
width: u32,
height: u32,
data: Heap<*mut JSObject>,
}
impl ImageData {
#[allow(unsafe_code)]
pub fn new(global: &GlobalScope, width: u32, height: u32, data: Option<Vec<u8>>) -> Root<ImageData> {
let imagedata = box ImageData {
reflector_: Reflector::new(),
width: width,
height: height,
data: Heap::default(),
};
unsafe {
let cx = global.get_cx();
rooted!(in (cx) let mut js_object = ptr::null_mut());
let data = data.as_ref().map(|d| &d[..]);
Uint8ClampedArray::create(cx, width * height * 4, data, js_object.handle_mut()).unwrap();
(*imagedata).data.set(js_object.get());
}
reflect_dom_object(imagedata,
global, ImageDataBinding::Wrap)
}
#[allow(unsafe_code)]<|fim▁hole|> pub fn get_data_array(&self) -> Vec<u8> {
unsafe {
assert!(!self.data.get().is_null());
let cx = Runtime::get();
assert!(!cx.is_null());
typedarray!(in(cx) let array: Uint8ClampedArray = self.data.get());
let vec = array.unwrap().as_slice().to_vec();
vec
}
}
pub fn get_size(&self) -> Size2D<i32> {
Size2D::new(self.Width() as i32, self.Height() as i32)
}
}
impl ImageDataMethods for ImageData {
// https://html.spec.whatwg.org/multipage/#dom-imagedata-width
fn Width(&self) -> u32 {
self.width
}
// https://html.spec.whatwg.org/multipage/#dom-imagedata-height
fn Height(&self) -> u32 {
self.height
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
unsafe fn Data(&self, _: *mut JSContext) -> NonZero<*mut JSObject> {
assert!(!self.data.get().is_null());
NonZero::new(self.data.get())
}
}<|fim▁end|> | |
<|file_name|>SiteActivationNotification.py<|end_file_name|><|fim▁begin|>##
##
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/10/14 #3623 randerso Manually created, do not regenerate
#
##
class SiteActivationNotification(object):
def __init__(self):
self.type = None
self.status = None
self.primarySite = None
self.modifiedSite = None
self.runMode = None
self.serverName = None<|fim▁hole|> self.pluginName = None
def getType(self):
return self.type
def setType(self, type):
self.type = type
def getStatus(self):
return self.status
def setStatus(self, status):
self.status = status
def getPrimarySite(self):
return self.primarysite
def setPrimarySite(self, primarysite):
self.primarysite = primarysite
def getModifiedSite(self):
return self.modifiedSite
def setModifiedSite(self, modifiedSite):
self.modifiedSite = modifiedSite
def getRunMode(self):
return self.runMode
def setRunMode(self, runMode):
self.runMode = runMode
def getServerName(self):
return self.serverName
def setServerName(self, serverName):
self.serverName = serverName
def getPluginName(self):
return self.pluginName
def setPluginName(self, pluginName):
self.pluginName = pluginName
def __str__(self):
return self.pluginName.upper() + ":" \
+ self.status + ":" \
+ self.type + " " \
+ self.modifiedSite.upper() + " on " \
+ self.serverName + ":" \
+ self.runMode<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
#![feature(never_type)]
use std::sync::Arc;
use anyhow::Result;
use async_trait::async_trait;
use context::CoreContext;
use futures::future::{BoxFuture, FutureExt};
use futures::stream::{BoxStream, TryStreamExt};
use mononoke_types::ChangesetId;
<|fim▁hole|>mod cache;
mod log;
mod subscription;
mod transaction;
pub use bookmarks_types::{
Bookmark, BookmarkKind, BookmarkName, BookmarkPagination, BookmarkPrefix, BookmarkPrefixRange,
Freshness,
};
pub use cache::CachedBookmarks;
pub use log::{
ArcBookmarkUpdateLog, BookmarkUpdateLog, BookmarkUpdateLogArc, BookmarkUpdateLogEntry,
BookmarkUpdateLogRef, BookmarkUpdateReason, BundleReplay, RawBundleReplayData,
};
pub use subscription::BookmarksSubscription;
pub use transaction::{BookmarkTransaction, BookmarkTransactionError, BookmarkTransactionHook};
#[facet::facet]
#[async_trait]
pub trait Bookmarks: Send + Sync + 'static {
/// Get the current value of a bookmark.
///
/// Returns `Some(ChangesetId)` if the bookmark exists, or `None` if doesn't
fn get(
&self,
ctx: CoreContext,
name: &BookmarkName,
) -> BoxFuture<'static, Result<Option<ChangesetId>>>;
/// List bookmarks that match certain parameters.
///
/// `prefix` requires that bookmark names begin with a certain prefix.
///
/// `kinds` requires that the bookmark is of a certain kind.
///
/// `pagination` limits bookmarks to those lexicographically after the
/// named bookmark for pagination purposes.
///
/// `limit` limits the total number of bookmarks returned.
///
/// Bookmarks are returned in lexicographic order. If a request
/// hits the limit, then a subsequent request with `pagination`
/// set to `BookmarkPagination::After(name)` will allow listing
/// of the remaining bookmarks.
fn list(
&self,
ctx: CoreContext,
freshness: Freshness,
prefix: &BookmarkPrefix,
kinds: &[BookmarkKind],
pagination: &BookmarkPagination,
limit: u64,
) -> BoxStream<'static, Result<(Bookmark, ChangesetId)>>;
/// Create a transaction to modify bookmarks.
fn create_transaction(&self, ctx: CoreContext) -> Box<dyn BookmarkTransaction>;
/// Create a subscription to efficiently observe changes to publishing & pull default
/// bookmarks.
async fn create_subscription(
&self,
ctx: &CoreContext,
freshness: Freshness,
) -> Result<Box<dyn BookmarksSubscription>>;
/// Drop any caches held by this instance of Bookmarks.
fn drop_caches(&self) {
// No-op by default.
}
}
/// Construct a heads fetcher (function that returns all the heads in the
/// repo) that uses the publishing bookmarks as all heads.
pub fn bookmark_heads_fetcher(
bookmarks: ArcBookmarks,
) -> Arc<dyn Fn(&CoreContext) -> BoxFuture<'static, Result<Vec<ChangesetId>>> + Send + Sync> {
Arc::new({
move |ctx: &CoreContext| {
bookmarks
.list(
ctx.clone(),
Freshness::MaybeStale,
&BookmarkPrefix::empty(),
BookmarkKind::ALL_PUBLISHING,
&BookmarkPagination::FromStart,
std::u64::MAX,
)
.map_ok(|(_, cs_id)| cs_id)
.try_collect()
.boxed()
}
})
}<|fim▁end|> | |
<|file_name|>run.go<|end_file_name|><|fim▁begin|>// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package harness
import (
"fmt"
"os"
"path/filepath"
"github.com/coreos/mantle/harness"
"github.com/coreos/mantle/platform"
"github.com/coreos/mantle/platform/machine/gcloud"
"github.com/coreos/mantle/pluton"
"github.com/coreos/mantle/pluton/spawn"
)
// Call this from main after setting all the global options. Tests are filtered
// by name based on the glob pattern given.
func RunSuite(pattern string) {
Opts.GCEOptions.Options = &Opts.PlatformOptions
tests, err := filterTests(Tests, pattern)
if err != nil {
fmt.Printf("Error filtering glob pattern: %v", err)
os.Exit(1)
}
opts := harness.Options{
OutputDir: Opts.OutputDir,
Parallel: Opts.Parallel,
Verbose: true,
}
suite := harness.NewSuite(opts, tests)
if err := suite.Run(); err != nil {
fmt.Fprintln(os.Stderr, err)
fmt.Println("FAIL")
os.Exit(1)
}
fmt.Println("PASS")
os.Exit(0)
}
func filterTests(tests harness.Tests, pattern string) (harness.Tests, error) {
var filteredTests = make(harness.Tests)
for name, t := range tests {
match, err := filepath.Match(pattern, name)
if err != nil {
return nil, err
}
if !match {
continue
}
filteredTests[name] = t
}
return filteredTests, nil
}
// RunTest is called inside the closure passed into the harness. Currently only
// GCE is supported, no reason this can't change
func runTest(t pluton.Test, h *harness.H) {
h.Parallel()
var cloud platform.Cluster
var err error
switch Opts.CloudPlatform {
case "gce":
cloud, err = gcloud.NewCluster(&Opts.GCEOptions, h.OutputDir())
default:
err = fmt.Errorf("invalid cloud platform %v", Opts.CloudPlatform)
}
if err != nil {
h.Fatalf("Cluster failed: %v", err)
}
defer func() {
if err := cloud.Destroy(); err != nil {
h.Logf("cluster.Destroy(): %v", err)
}
}()
config := spawn.BootkubeConfig{
ImageRepo: Opts.BootkubeRepo,
ImageTag: Opts.BootkubeTag,
ScriptDir: Opts.BootkubeScriptDir,
InitialWorkers: t.Options.InitialWorkers,
InitialMasters: t.Options.InitialMasters,
SelfHostEtcd: t.Options.SelfHostEtcd,
}
c, err := spawn.MakeBootkubeCluster(cloud, config)
if err != nil {
h.Fatalf("creating cluster: %v", err)
}
// TODO(pb): evidence that harness and spawn should be the same package?
c.H = h
t.Run(c)
}<|fim▁end|> | |
<|file_name|>home-unit-spec.js<|end_file_name|><|fim▁begin|>describe('controllers/home', function () {
var di,
Core,
Home,
Type,
contentModel = {
findOne: function() {
}
},
widgetHooks = [],
widgetHook = {
load: function (a, b, c) {
widgetHooks.push({
name: a,
alias: b,
method: c
});
},
handle: function () {
}
};
beforeEach(function () {
di = require('mvcjs');
di.setAlias('cp', __dirname + '/../../app/controllers/');
Type = di.load('typejs');
Core = di.mock('@{cp}/core', {
'typejs': Type,
'core/controller': {
inherit: function () {
return Type.create.apply(Type, arguments);
}
},
'@{core}/widget-hook': widgetHook
});
Home = di.mock('@{cp}/home', {
'typejs': Type,
'promise': di.load('promise'),
'@{controllersPath}/core': Core,
'@{modelsPath}/content': contentModel
});
});
it('construct', function () {
var api = {};
var controller = new Home(api);
expect(controller.locals.scripts.length).toBe(0);
expect(controller.locals.brand).toBe('MVCJS');
expect(controller.locals.pageTitle).toBe('Mvcjs nodejs framework');
expect(controller.locals.pageDesc).toBe('Mvcjs fast, opinionated lightweight mvc framework for Node.js inspired by Yii framework');
expect(controller.menu.length).toBe(0);
});
it('action_index', function () {
var api = {
locals: {
scripts: []
},
renderFile: function(route, locals) {
return 'RENDERED';
}
};
spyOn(api, 'renderFile').and.callThrough();
di.setAlias('basePath', __dirname + '/../../');
var controller = new Home(api);
var result = controller.action_index.call(api);
expect(api.renderFile).toHaveBeenCalledWith( 'home/index', {
scripts : [ {
src : 'https://buttons.github.io/buttons.js',
id : 'github-bjs',
async : true
} ],
version : '0.1.0-beta-15'
});
expect(result).toBe('RENDERED');
expect(api.locals.scripts.length).toBe(1);
});
it('action_content', function () {
var api = {
locals: {
content: '',
pageTitle: '',
pageDesc: ''
},
renderFile: function(route, locals) {
return 'RENDERED';
}
};<|fim▁hole|> var result = controller.action_content.call(api, {}, {
text: 'TEXT',
pageTitle: 'TITLE',
pageDesc: 'DESC'
});
expect(api.renderFile).toHaveBeenCalledWith( 'home/content', {
pageTitle: 'TITLE',
pageDesc: 'DESC',
content : 'TEXT'
});
expect(result).toBe('RENDERED');
});
it('before_content', function (done) {
var api = {
getParsedUrl: function(route, locals) {
return {
pathname: '/home/index'
};
}
};
contentModel.findOne = function(data, callback) {
expect(data.url).toBe('/home/index');
callback(null, {
id: 1,
text: 'yes'
});
};
spyOn(api, 'getParsedUrl').and.callThrough();
spyOn(contentModel, 'findOne').and.callThrough();
di.setAlias('basePath', __dirname + '/../../');
var controller = new Home(api);
var result = controller.before_content.call(api);
result.then(function(data) {
expect(api.getParsedUrl).toHaveBeenCalled();
expect(contentModel.findOne).toHaveBeenCalled();
expect(data.id).toBe(1);
expect(data.text).toBe('yes');
done();
});
});
it('before_content error', function (done) {
var api = {
getParsedUrl: function(route, locals) {
return {
pathname: '/home/index'
};
}
};
contentModel.findOne = function(data, callback) {
expect(data.url).toBe('/home/index');
callback(true, {
id: 1,
text: 'yes'
});
};
spyOn(api, 'getParsedUrl').and.callThrough();
spyOn(contentModel, 'findOne').and.callThrough();
di.setAlias('basePath', __dirname + '/../../');
var controller = new Home(api);
var result = controller.before_content.call(api);
result.then(null, function(error) {
console.log('error', error);
done();
});
});
it('beforeEach', function () {
var api = {};
widgetHook.handle = function(hooks) {
expect(hooks.indexOf('menu-hook')).toBe(0);
return hooks.shift();
};
var controller = new Home(api);
expect(controller.beforeEach()).toBe('menu-hook');
expect(controller.locals.scripts.length).toBe(1);
});
it('action_error', function () {
var api = {
locals: {},
setStatusCode: function(code) {
expect(code).toBe(500);
},
renderFile: function(name, locals) {
expect(name).toBe('home/error');
expect(locals.pageTitle).toBe('Error - mvcjs nodejs framework');
expect(locals.text).toBe('ERROR');
return 'RENDER';
}
};
spyOn(api, 'setStatusCode').and.callThrough();
spyOn(api, 'renderFile').and.callThrough();
var controller = new Home({});
var response = controller.action_error.call(api, {
code: 500,
toString: function() {
return "ERROR";
}
});
expect(api.setStatusCode).toHaveBeenCalled();
expect(api.renderFile).toHaveBeenCalled();
expect(response).toBe('RENDER');
});
});<|fim▁end|> | spyOn(api, 'renderFile').and.callThrough();
di.setAlias('basePath', __dirname + '/../../');
var controller = new Home(api); |
<|file_name|>component-checkbox.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit, Input} from '@angular/core';
import { AppService } from '../../app.service';
@Component({
selector: 'lk-component-checkbox',
templateUrl: './component-checkbox.component.html',
styleUrls: [
'./component-checkbox.component.scss'
]
})
export class ComponentCheckboxComponent implements OnInit {
checked = false;
indeterminate = false;
align = 'start';
disabled = false;
constructor(private appService: AppService) {
appService.getState().topnavTitle = 'Checkbox';
}<|fim▁hole|>
ngOnInit() {
}
}<|fim▁end|> | |
<|file_name|>FormTests.ts<|end_file_name|><|fim▁begin|>import * as exprTree from 'src/expressionTree';
import * as textstream from 'src/textstream';
import { TestContext, setupTests } from './UnitTests';
import { IForm, DiFunction, BindingCollection, InjectedControllerBuilder } from 'src/controller';
import { IViewDataFormatter, Extractor } from 'src/view';
import { JsonSchema } from 'src/schema';
var runner = setupTests();<|fim▁hole|>runner.beginTestSection("Form");
//This test won't work until a form builder is loaded. It was relying on runners.
runner.runTest("Simple Form", c => {
class TestController {
private form: IForm<any>;
public static get InjectorArgs(): DiFunction<any>[] {
return [BindingCollection, TestContext];
}
constructor(bindings: BindingCollection, testContext: TestContext) {
this.form = bindings.getForm("form");
var schema: JsonSchema = createInputSchema();
this.form.setSchema(schema);
}
public submit(evt: Event): void {
evt.preventDefault();
alert(JSON.stringify(this.form.getData()));
}
}
var builder = new InjectedControllerBuilder();
builder.Services.addShared(TestContext, s => c);
builder.Services.addShared(TestController, TestController);
var result = builder.create("simpleForm", TestController);
c.assert(result.length > 0, "No Controller Created");
});
runner.endTestSection();
function createInputSchema(): any {
return {
"title": "Input",
"type": "object",
"additionalProperties": false,
"properties": {
"field": {
"type": [
"null",
"string"
]
}
}
}
}
function createComplexSchema(): any {
return {
"title": "Title of Input",
"type": "object",
"additionalProperties": false,
"properties": {
"first": {
"type": [
"null",
"string"
],
"x-ui-order": 18
},
"middle": {
"type": [
"null",
"string"
],
"x-ui-order": 21
},
"last": {
"type": [
"null",
"string"
],
"x-ui-order": 24
},
"stringArray": {
"type": ["array", "null"],
"items": { "type": "string" },
"x-ui-order": 1,
},
"complexArray": {
"type": ["array", "null"],
"items": {
"type": "object",
"properties": {
"first": {
"type": [
"null",
"string"
],
"x-ui-order": 18
},
"middle": {
"type": [
"null",
"string"
],
"x-ui-order": 21
},
"last": {
"type": [
"null",
"string"
],
"x-ui-order": 24
}
}
},
"x-ui-order": 2,
},
"multiChoice": {
"title": "Multi Choice",
"type": [
"array",
"null"
],
"items": {
"type": "integer",
"format": "int32"
},
"x-ui-type": "select",
"x-ui-order": 1,
"x-values": [
{
"label": "Choice 1",
"value": 1
},
{
"label": "Choice 2",
"value": 2
}
]
},
"checktest": {
"type": [
"boolean"
],
"x-ui-order": 24
},
"comboTest": {
"title": "Site",
"type": "integer",
"format": "int32",
"x-ui-order": 27,
"x-values": [
{
"label": "Choice 1",
"value": "one"
},
{
"label": "Choice 2",
"value": "two"
}
]
},
"enumTest": {
"type": "string",
"description": "",
"x-enumNames": [
"Name 1",
"Name 2",
"Name 3"
],
"enum": [
"Name1",
"Name2",
"Name3"
],
"x-ui-order": 38
},
"dateTest": {
"type": "date",
"format": "date-time",
"x-ui-order": 50
},
"address": {
"type": [
"null",
"string"
],
"x-ui-order": 53
},
"city": {
"type": [
"null",
"string"
],
"x-ui-order": 56
},
"state": {
"type": [
"null",
"string"
],
"x-ui-order": 59
},
"zipcode": {
"type": [
"null",
"string"
],
"x-ui-order": 62
}
}
}
}<|fim▁end|> | |
<|file_name|>updatemarras.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | db.groups.update(
{lname: "marrasputki"},
{$set:{users: ["Jörö"], description: "Marrasputki 2018"}}) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import pyaudio
import wave
#CHUNK = 1024
CHUNK = 1
FORMAT = pyaudio.paInt16
#CHANNELS = 2
CHANNELS = 1
#RATE = 44100
RATE = 10025
RECORD_SECONDS = 5
WAVE_OUTPUT_FILENAME = "output.wav"
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK)<|fim▁hole|>
print("* recording, CHUNK=%d" % CHUNK)
for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
data = stream.read(CHUNK)
print('data=%s, len=%d' % (str(data), len(data)))
# print(str(data))
# print('%d' % ord(data))
print("* done recording")
stream.stop_stream()
stream.close()
p.terminate()<|fim▁end|> | |
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>use ::Disruption;
use rustc_serialize::{Encodable};
use rustc_serialize::json::{self};
use std::sync::Arc;
use std::sync::atomic::Ordering;
use time::PreciseTime;
use hyper::Client;
pub fn disruption_to_usize(d: &Option<&(Disruption, usize)>) -> usize {
match *d {<|fim▁hole|> Some(&(Disruption::Metric(_), _)) => 3
}
}
pub fn send_bulk<T: Encodable>(url: &str, client: &Arc<Client>, bulk: Vec<T>) {
::ACTIVE_THREADS.fetch_add(1, Ordering::SeqCst);
debug!("......");
let mut s = String::new();
let size = bulk.len();
for b in bulk {
s.push_str("{\"index\":{}}\n");
s.push_str(&json::encode(&b).unwrap());
s.push_str("\n");
}
debug!(" >>>>> Bulk: {} mb ({} elements)", s.len() / 1024 / 1024, size);
let start = PreciseTime::now();
let _ = client.post(url)
.body(&s)
.send()
.unwrap();
let end = PreciseTime::now();
let _ = PreciseTime::to(&start, end);
::ACTIVE_THREADS.fetch_sub(1, Ordering::SeqCst);
}<|fim▁end|> | None => 0,
Some(&(Disruption::Node(_), _)) => 1,
Some(&(Disruption::Query(_), _)) => 2, |
<|file_name|>android_ip_webcam.py<|end_file_name|><|fim▁begin|>"""
Support for IP Webcam settings.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.android_ip_webcam/
"""
from homeassistant.components.switch import SwitchDevice
from homeassistant.components.android_ip_webcam import (
KEY_MAP, ICON_MAP, DATA_IP_WEBCAM, AndroidIPCamEntity, CONF_HOST,
CONF_NAME, CONF_SWITCHES)
DEPENDENCIES = ['android_ip_webcam']
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up the IP Webcam switch platform."""
if discovery_info is None:
return
host = discovery_info[CONF_HOST]
name = discovery_info[CONF_NAME]
switches = discovery_info[CONF_SWITCHES]
ipcam = hass.data[DATA_IP_WEBCAM][host]
all_switches = []
for setting in switches:
all_switches.append(IPWebcamSettingsSwitch(name, host, ipcam, setting))
async_add_entities(all_switches, True)
class IPWebcamSettingsSwitch(AndroidIPCamEntity, SwitchDevice):
"""An abstract class for an IP Webcam setting."""
def __init__(self, name, host, ipcam, setting):
"""Initialize the settings switch."""
super().__init__(host, ipcam)
self._setting = setting
self._mapped_name = KEY_MAP.get(self._setting, self._setting)
self._name = '{} {}'.format(name, self._mapped_name)
self._state = False
@property
def name(self):
"""Return the name of the node."""
return self._name
async def async_update(self):
"""Get the updated status of the switch."""
self._state = bool(self._ipcam.current_settings.get(self._setting))
@property
def is_on(self):
"""Return the boolean response if the node is on."""
return self._state
async def async_turn_on(self, **kwargs):
"""Turn device on."""
if self._setting == 'torch':
await self._ipcam.torch(activate=True)
elif self._setting == 'focus':
await self._ipcam.focus(activate=True)
elif self._setting == 'video_recording':
await self._ipcam.record(record=True)
else:
await self._ipcam.change_setting(self._setting, True)
self._state = True
self.async_schedule_update_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn device off."""
if self._setting == 'torch':
await self._ipcam.torch(activate=False)
elif self._setting == 'focus':<|fim▁hole|> await self._ipcam.change_setting(self._setting, False)
self._state = False
self.async_schedule_update_ha_state()
@property
def icon(self):
"""Return the icon for the switch."""
return ICON_MAP.get(self._setting, 'mdi:flash')<|fim▁end|> | await self._ipcam.focus(activate=False)
elif self._setting == 'video_recording':
await self._ipcam.record(record=False)
else: |
<|file_name|>email-address.js<|end_file_name|><|fim▁begin|>module.exports = function(EmailAddress) {<|fim▁hole|><|fim▁end|> |
}; |
<|file_name|>SqlQueries.go<|end_file_name|><|fim▁begin|>package main
import (
"encoding/json"
"fmt"
"log"
)
func GetAllPlatforms(requestBody []byte) (string, error) {
queryString := "SELECT RowId, Name, Description FROM Platform"
ndrs := GetAllNameDescriptionRows(queryString)
var pmf PlatformsMasterFile
for _, v := range(ndrs) {
var p Platform
p.RowId = v.RowId
p.Name = v.Name
p.Description = v.Description
pmf.Platforms = append(pmf.Platforms, p)
}
j, err := json.Marshal(pmf)
if (err!=nil) {
log.Printf("Error while marshalling platform json: %s", err)
return "", err
}
return fmt.Sprintf("%s", j), err
}
func GetAllGenres(requestBody[] byte) (string, error) {
queryString := "SELECT RowId, Name, Description FROM Genre"
ndrs := GetAllNameDescriptionRows(queryString)
var gmf GenresMasterFile
for _, v := range(ndrs) {
var genre Genre
genre.RowId = v.RowId
genre.Name = v.Name
genre.Description = v.Description
gmf.Genres = append(gmf.Genres, genre)
}
j, err := json.Marshal(gmf)
if err!=nil {
log.Print("Error while marshalling genres: %s", err)
return "", err
}
return fmt.Sprintf("%s", j), err
}
func GetAllHardwareTypes(requestBody[] byte) (string, error) {
queryString := "SELECT RowId, Name, Description FROM HardwareType"
ndrs := GetAllNameDescriptionRows(queryString)
var htmf HardwareTypesMasterFile
for _, v := range(ndrs) {
var ht HardwareType
ht.RowId = v.RowId
ht.Name = v.Name
ht.Description = v.Description
htmf.HardwareTypes = append(htmf.HardwareTypes, ht)
}
j, err := json.Marshal(htmf)
if err!=nil {
log.Printf("Error while marshalling hardware types json: %s", err)
return "", err
}
return fmt.Sprintf("%s", j), err
}
func GetAllNameDescriptionRows(queryString string) []NameDescriptionTable {
db := getDbConnection()
defer db.Close()
tx := getDbTransaction(db)
defer tx.Commit()
stmt := prepareQuery(tx, queryString)
defer stmt.Close()
rs, err := stmt.Query()
if err!=nil {
log.Fatal(err)
}
var items []NameDescriptionTable
for rs.Next() {
var item NameDescriptionTable
rs.Scan(&item.RowId, &item.Name, &item.Description)
items = append(items, item)
}
return items
}
func SaveGameFromJson(requestBody []byte) (string, error) {
g := Game{}
err := json.Unmarshal(requestBody, &g)
if err!=nil {
return "", err
}
return SaveGame(g)
}
func SaveGame(g Game) (string, error) {
/* At this point I could check to see if an identical game has been saved (e.g. same title/platform) and raise
* an error if that is the case, but at this point I won't do that. It would be possible to have this situation. For
* instance there are two different versions of "Tetris" for the NES (by Nintendo and Tengen).
*
* Maybe at a later date we could have something in place that will go back and ask the user for confirmation if this
* is the case.
*/
j, err := GetGameById(fmt.Sprintf("%d",g.RowId))
if err!=nil {
log.Printf("Error while retrieving game %s during save operation: %s", g, err)
return "", err
}
existingGame := Game{}
err = json.Unmarshal([]byte(j), &existingGame)
if err!=nil {
log.Printf("Error unmrashaling json %s: %s", j, err)
return "", err
}
if existingGame.RowId==g.RowId && g.RowId!=0 {
return updateGame(g)
}
return addGame(g)
}
func addGame(g Game) (string, error) {
log.Printf("Adding new game %s", g)
insertString := "INSERT INTO Game (Title, Genre, Platform, NumberOwned, NumberBoxed, NumberOfManuals, DatePurchased, "
insertString += "ApproximatePurchaseDate, Notes) "
insertString += "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
stmt, closerFunc := GetQuery(insertString)
defer closerFunc()
approximatePurchaseDate := 0
if g.ApproximatePurchaseDate {
approximatePurchaseDate = 1
}
_, err := stmt.Exec(g.Title, g.Genre, g.Platform, g.NumberOwned, g.NumberBoxed, g.NumberOfManuals, g.DatePurchased,
approximatePurchaseDate, g.Notes)
if err!=nil {
log.Printf("Error while adding new game %s: %s", g, err)
}
return "", err
}
func updateGame(g Game) (string, error) {
log.Printf("Updating game %s with row id of %d", g, g.RowId)
updateString := "UPDATE GAME "
updateString += "SET Title=?, Genre=?, Platform=?, NumberOwned=?, NumberBoxed=?, NumberOfManuals=?, DatePurchased=?, "
updateString += "ApproximatePurchaseDate=?, Notes=? "
updateString += "WHERE RowId=?"
stmt, closerFunc := GetQuery(updateString)
defer closerFunc()
approximatePurchaseDate := 0
if g.ApproximatePurchaseDate {
approximatePurchaseDate = 1
}
_, err := stmt.Exec(g.Title, g.Genre, g.Platform, g.NumberOwned, g.NumberBoxed, g.NumberOfManuals, g.DatePurchased,
approximatePurchaseDate, g.Notes, g.RowId)
if err!=nil {
log.Printf("Error while updating existing game %s: %s", g, err)
}
return "", err
}
func GetAllGames(requestBody []byte) (string, error) {
queryString := "SELECT g.RowId, g.Title, g.Genre, COALESCE(p.Name, ''), g.NumberOwned, g.NumberBoxed, "
queryString += "g.NumberOfManuals, g.DatePurchased, g.ApproximatePurchaseDate, g.Notes "
queryString += "FROM Game g "
queryString += "LEFT JOIN Platform p ON g.Platform=p.RowId "
queryString += "LEFT JOIN Genre gen on g.Genre=gen.RowId "
var gameList GameList
var err error
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
rs, err := stmt.Query()
if err!=nil {
log.Printf("Error while running sql query to retrieve games: \n%s\n%s", queryString, err)
return "", err
}
for rs.Next() {
g := Game{}
err = rs.Scan(&g.RowId, &g.Title, &g.Genre, &g.Platform, &g.NumberOwned, &g.NumberBoxed, &g.NumberOfManuals,
&g.DatePurchased, &g.ApproximatePurchaseDate, &g.Notes)
if (err!=nil) {
log.Printf("Error while scanning games resultset: %s", err)
return "", err
}
gameList.Games = append(gameList.Games, g)
}
j, err := json.Marshal(gameList)
if err!=nil {
log.Printf("Error while unmarshalling games json: %s", err)
}
return string(j), err
}
func GetGamesByNameAndPlatform(g Game) ([]Game, error) {
queryString := "SELECT g.RowId, g.Title, g.Genre, COALESCE(p.Name, ''), g.NumberOwned, g.NumberBoxed, "
queryString += "g.NumberOfManuals, g.DatePurchased, g.ApproximatePurchaseDate, g.Notes "
queryString += "FROM Game g "
queryString += "LEFT JOIN Platform p ON g.Platform=p.RowId "
queryString += "LEFT JOIN Genre gen on g.Genre=gen.RowId "
queryString += "WHERE g.Title = ? AND g.Platform = ?"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
var gs []Game
rs, err := stmt.Query(g.Title, g.Platform)
if err!=nil {
return gs, err
}
for rs.Next() {
g := Game{}
err = rs.Scan(&g.RowId, &g.Title, &g.Genre, &g.Platform, &g.NumberOwned, &g.NumberBoxed, &g.NumberOfManuals,
&g.DatePurchased, &g.ApproximatePurchaseDate, &g.Notes)
if err!=nil {
return gs, err
}
gs = append(gs, g)
}
return gs, err
}
type GameResult struct {
Game
PlatformId string
GenreId string
}
func GetGameById(gameId string) (string, error) {
var err error
queryString := "SELECT g.RowId, g.Title, g.Genre, COALESCE(gen.Name, ''), g.Platform, COALESCE(p.Name, ''), "
queryString += "g.NumberOwned, g.NumberBoxed, g.NumberOfManuals, g.DatePurchased, g.ApproximatePurchaseDate, g.Notes "
queryString += "FROM Game g "
queryString += "LEFT JOIN Platform p ON g.Platform=p.RowId "
queryString += "LEFT JOIN Genre gen on g.Genre=gen.RowId "
queryString += "WHERE g.RowId=? "
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
rs, err := stmt.Query(gameId)
if err!=nil {
return "", err
}
g := GameResult{}
for rs.Next() {
err = rs.Scan(&g.RowId, &g.Title, &g.GenreId, &g.Genre, &g.PlatformId, &g.Platform, &g.NumberOwned,
&g.NumberBoxed, &g.NumberOfManuals, &g.DatePurchased, &g.ApproximatePurchaseDate, &g.Notes)
if err!=nil {
return "", err
}
}
j, err := json.Marshal(g)
return string(j), err
}
func DeleteGame(requestBody []byte) (string, error) {
var err error
var g Game
err = json.Unmarshal(requestBody, &g)
if err!=nil {
return "", err
}
deleteString := "DELETE FROM GAME WHERE RowId=?"
stmt, closerFunc := GetQuery(deleteString)
defer closerFunc()
_, err = stmt.Exec(g.RowId)
return "", err
}
func GetPlatformByName(name string) (Platform, error) {
var err error
var p Platform
queryString := "SELECT RowId, * FROM Platform WHERE Name=?"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
rs, err := stmt.Query(name)
if err!=nil {
return p, err
}
for rs.Next() {
err = rs.Scan(&p.RowId, &p.Name, &p.Description)
if err!=nil {
return p, err
}<|fim▁hole|>
return p, err
}
func AddPlatform(platform Platform) error {
var err error
insertString := "INSERT INTO Platform (Name, Description) VALUES (?, ?)"
stmt, closerFunc := GetQuery(insertString)
defer closerFunc()
_, err = stmt.Exec(platform.Name, platform.Description)
return err
}
func GetGenreByName(name string) (Genre, error) {
var err error
var g Genre
queryString := "SELECT RowId, * FROM Genre WHERE Name=?"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
rs, err := stmt.Query(name)
if err!=nil {
return g, err
}
for rs.Next() {
err = rs.Scan(&g.RowId, &g.Name, &g.Description)
if err!=nil {
return g, err
}
}
return g, err
}
func AddGenre(genre Genre) error {
var err error
queryString := "INSERT INTO Genre (Name, Description) VALUES (?, ?)"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
_, err = stmt.Exec(genre.Name, genre.Description)
return err
}
func GetHardwareTypeByName(name string) (HardwareType, error) {
var err error
var ht HardwareType
queryString := "SELECT RowId, * From HardwareType WHERE Name=?"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
rs, err := stmt.Query(name)
if err!=nil {
return ht, err
}
for rs.Next() {
err = rs.Scan(&ht.RowId, &ht.Name, &ht.Description)
if err!=nil {
return ht, err
}
}
return ht, err
}
func AddHardwareType(ht HardwareType) error {
var err error
queryString := "INSERT INTO HardwareType (Name, Description) VALUES (?, ?)"
stmt, closerFunc := GetQuery(queryString)
defer closerFunc()
_, err = stmt.Exec(ht.Name, ht.Description)
return err
}<|fim▁end|> | } |
<|file_name|>splice.controls.pageloader.js<|end_file_name|><|fim▁begin|>$js.module({
prerequisite:[
'/{$jshome}/modules/splice.module.extensions.js'
],
imports:[
{ Inheritance : '/{$jshome}/modules/splice.inheritance.js' },
{'SpliceJS.UI':'../splice.ui.js'},
'splice.controls.pageloader.html'
],
definition:function(){
var scope = this;
var
imports = scope.imports
;
var
Class = imports.Inheritance.Class
, UIControl = imports.SpliceJS.UI.UIControl
;<|fim▁hole|> }).extend(UIControl);
scope.exports(
PageLoader
);
}
})<|fim▁end|> |
var PageLoader = Class(function PageLoaderController(){
this.base(); |
<|file_name|>ClientAuthenticator.java<|end_file_name|><|fim▁begin|>package com.rideon.web.security;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.DefaultHttpClient;
import org.springframework.web.client.RestTemplate;
public class ClientAuthenticator {
public ClientAuthenticator() {
super();
}
// API
public static void setAuthentication(final RestTemplate restTemplate, final String username, final String password) {
basicAuth(restTemplate, username, password);
}
private static void basicAuth(final RestTemplate restTemplate, final String username, final String password) {
final HttpComponentsClientHttpRequestFactoryBasicAuth requestFactory =
((HttpComponentsClientHttpRequestFactoryBasicAuth) restTemplate.getRequestFactory());
DefaultHttpClient httpClient = (DefaultHttpClient) requestFactory.getHttpClient();
CredentialsProvider prov = httpClient.getCredentialsProvider();<|fim▁hole|>}<|fim▁end|> | prov.setCredentials(requestFactory.getAuthScope(), new UsernamePasswordCredentials(username, password));
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub use self::arch::{CODESET};
pub use self::arch::{D_T_FMT};<|fim▁hole|>pub use self::arch::{D_FMT};
pub use self::arch::{T_FMT};
pub use self::arch::{T_FMT_AMPM};
pub use self::arch::{AM_STR};
pub use self::arch::{PM_STR};
pub use self::arch::{DAY_1};
pub use self::arch::{DAY_2};
pub use self::arch::{DAY_3};
pub use self::arch::{DAY_4};
pub use self::arch::{DAY_5};
pub use self::arch::{DAY_6};
pub use self::arch::{DAY_7};
pub use self::arch::{ABDAY_1};
pub use self::arch::{ABDAY_2};
pub use self::arch::{ABDAY_3};
pub use self::arch::{ABDAY_4};
pub use self::arch::{ABDAY_5};
pub use self::arch::{ABDAY_6};
pub use self::arch::{ABDAY_7};
pub use self::arch::{MON_1};
pub use self::arch::{MON_2};
pub use self::arch::{MON_3};
pub use self::arch::{MON_4};
pub use self::arch::{MON_5};
pub use self::arch::{MON_6};
pub use self::arch::{MON_7};
pub use self::arch::{MON_8};
pub use self::arch::{MON_9};
pub use self::arch::{MON_10};
pub use self::arch::{MON_11};
pub use self::arch::{MON_12};
pub use self::arch::{ABMON_1};
pub use self::arch::{ABMON_2};
pub use self::arch::{ABMON_3};
pub use self::arch::{ABMON_4};
pub use self::arch::{ABMON_5};
pub use self::arch::{ABMON_6};
pub use self::arch::{ABMON_7};
pub use self::arch::{ABMON_8};
pub use self::arch::{ABMON_9};
pub use self::arch::{ABMON_10};
pub use self::arch::{ABMON_11};
pub use self::arch::{ABMON_12};
pub use self::arch::{ERA};
pub use self::arch::{ERA_D_FMT};
pub use self::arch::{ERA_D_T_FMT};
pub use self::arch::{ERA_T_FMT};
pub use self::arch::{ALT_DIGITS};
pub use self::arch::{RADIXCHAR};
pub use self::arch::{THOUSEP};
pub use self::arch::{YESEXPR};
pub use self::arch::{NOEXPR};
pub use self::arch::{CRNCYSTR};
#[cfg(target_arch = "x86_64")]
#[path = "x86_64.rs"]
mod arch;
#[cfg(target_arch = "x86")]
#[path = "x86.rs"]
mod arch;<|fim▁end|> | |
<|file_name|>_legendgroup.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class LegendgroupValidator(_plotly_utils.basevalidators.StringValidator):<|fim▁hole|> def __init__(self, plotly_name="legendgroup", parent_name="choropleth", **kwargs):
super(LegendgroupValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs
)<|fim▁end|> | |
<|file_name|>apu.cpp<|end_file_name|><|fim▁begin|>#include "apu.h"
#include "emulator.h"
constexpr byte_t length_table[] = { 11, 254, 20, 2, 40, 4, 80, 6, 160, 8, 60, 10, 14, 12, 26, 14,
12, 16, 24, 18, 48, 20, 96, 22, 192, 24, 72, 26, 16, 28, 32, 30 };
constexpr uint16_t dmc_rate_table[] = { 428, 380, 340, 320, 286, 254, 226, 214, 190, 160, 142, 128, 106, 84, 72, 54 };
enum : uint32_t
{
SEQ_STEP_1 = 3728,
SEQ_STEP_2 = 7456,
SEQ_STEP_3 = 11185,
SEQ_STEP_4 = 14914,
SEQ_STEP_5 = 18640,
};
void APU::reset()
{
cycle_ = 0;
odd_cycle_ = false;
five_steps_sequence_ = false;
irq_inhibit_ = false;
frame_irq_ = false;
sequencer_change_delay_ = 0;
pulse1_.on_ctrl(false);
pulse2_.on_ctrl(false);
triangle_.on_ctrl(false);
noise_.on_ctrl(false);
dmc_.on_ctrl(false);
}
void APU::step()
{
bool quarter = false;
bool half = false;
if (!five_steps_sequence_ && cycle_ == SEQ_STEP_4)
frame_irq_ = !irq_inhibit_;
if (odd_cycle_)
{
if (!five_steps_sequence_ && cycle_ >= SEQ_STEP_4 + 1)
cycle_ = 0;
else if (five_steps_sequence_ && cycle_ >= SEQ_STEP_5 + 1)
cycle_ = 0;
switch (cycle_)
{
case SEQ_STEP_2:
case SEQ_STEP_5:
half = true;
break;
case SEQ_STEP_1:
case SEQ_STEP_3:
quarter = true;
break;
case SEQ_STEP_4:
if (!five_steps_sequence_)
{
half = true;
if (frame_irq_)
Emulator::instance()->get_cpu()->interrupt(false);
}
break;
}
++cycle_;
}
if (sequencer_change_delay_ > 0 && --sequencer_change_delay_ == 0)
{
half |= five_steps_sequence_;
cycle_ = 0;
}
if (quarter || half)
{
pulse1_.on_clock(half);
pulse2_.on_clock(half);
triangle_.on_clock(half);
noise_.on_clock(half);
dmc_.on_clock(half);
}
odd_cycle_ = !odd_cycle_;
}
bool APU::on_write(address_t addr, byte_t value)
{
if (addr >= 0x4000 && addr <= 0x4003)
{
pulse1_.on_write(addr, value);
return true;
}
if (addr >= 0x4004 && addr <= 0x4007)
{
pulse2_.on_write(addr, value);
return true;
}
if (addr >= 0x4008 && addr <= 0x400B)
{
triangle_.on_write(addr, value);
return true;
}
if (addr >= 0x400C && addr <= 0x400F)
{
noise_.on_write(addr, value);
return true;
}
if (addr >= 0x4010 && addr <= 0x4013)
{
dmc_.on_write(addr, value);
return true;
}
if (addr == 0x4015)
{
Control ctrl;
ctrl.set(value);
pulse1_.on_ctrl(ctrl.pulse1);
pulse2_.on_ctrl(ctrl.pulse2);
triangle_.on_ctrl(ctrl.triangle);
noise_.on_ctrl(ctrl.noise);
dmc_.on_ctrl(ctrl.dmc);
return true;
}
if (addr == 0x4017)
{
// Write is delayed by 3 or 4 CPU cycle depending on odd_cycle_.
five_steps_sequence_ = value & 0xB0;
irq_inhibit_ = value & 0x40;
if (irq_inhibit_)
frame_irq_ = false;
sequencer_change_delay_ = 3 + (odd_cycle_) ? 1 : 0;
return true;
}
return false;
}
bool APU::on_read(address_t addr, byte_t& value)
{
if (addr == 0x4015)
{
Control status;
status.pulse1 = (pulse1_.len_counter_ > 0);
status.pulse2 = (pulse2_.len_counter_ > 0);
status.triangle = (triangle_.len_counter_ > 0);
status.noise = (noise_.len_counter_ > 0);
status.dmc = dmc_.enabled_;
status.frame_int = frame_irq_;
frame_irq_ = false;
value = status.get();
return true;
}
return false;
}
void PulseChannel::on_clock(bool half_frame)
{
if (half_frame)
{
if (len_counter_ > 0 && !len_halted_)
--len_counter_;
}
}
void PulseChannel::on_write(address_t addr, byte_t value)
{
switch (addr & 0x3)
{
case 0:
len_halted_ = value & 0x20;
break;
case 1:
break;
case 2:
break;
case 3:
if (len_enabled_)
len_counter_ = length_table[value >> 3];
break;
}
}
void PulseChannel::on_ctrl(bool enable)
{
len_enabled_ = enable;
if (!enable)
len_counter_ = 0;
}
void TriangleChannel::on_clock(bool half_frame)
{
if (half_frame)
{
if (len_counter_ > 0 && !len_halted_)
--len_counter_;
}
if (timer_ > 0)
--timer_;
}
void TriangleChannel::on_write(address_t addr, byte_t value)
{
switch (addr - 0x4008)
{
case 0:
len_halted_ = value & 0x80;
break;
case 1:
timer_ = (timer_ & 0xFF00) + value;
break;
case 2:
timer_ = (timer_ & 0x00FF) + (static_cast<uint16_t>(value & 0x07) << 8);
break;
case 3:
if (len_enabled_)
len_counter_ = length_table[value >> 3];
break;
}
}
void TriangleChannel::on_ctrl(bool enable)
{<|fim▁hole|> if (!enable)
len_counter_ = 0;
}
void NoiseChannel::on_clock(bool half_frame)
{
if (half_frame)
{
if (len_counter_ > 0 && !len_halted_)
--len_counter_;
}
}
void NoiseChannel::on_write(address_t addr, byte_t value)
{
switch (addr - 0x400C)
{
case 0:
len_halted_ = value & 0x20;
break;
case 1:
break;
case 2:
break;
case 3:
if (len_enabled_)
len_counter_ = length_table[value >> 3];
break;
}
}
void NoiseChannel::on_ctrl(bool enable)
{
len_enabled_ = enable;
if (!enable)
len_counter_ = 0;
}
void DMChannel::on_clock(bool half_frame)
{
}
void DMChannel::on_write(address_t addr, byte_t value)
{
switch (addr - 0x4010)
{
case 0:
irq_enabled_ = value & 0x80;
loop_ = value & 0x40;
rate_idx_ = value & 0x0F;
break;
case 1:
output_level_ = value & 0x7F;
break;
case 2:
sample_addr_ = 0xC000 + static_cast<address_t>(value) * 64;
break;
case 3:
sample_len_ = (value << 4) + 1;
break;
}
}
void DMChannel::on_ctrl(bool enable)
{
enabled_ = enable;
}<|fim▁end|> | len_enabled_ = enable; |
<|file_name|>FBInventory.java<|end_file_name|><|fim▁begin|>package com.bukkit.gemo.FalseBook.Cart.utils;
import net.minecraft.server.v1_6_R3.Container;
import net.minecraft.server.v1_6_R3.EntityHuman;
import net.minecraft.server.v1_6_R3.InventoryCrafting;
import net.minecraft.server.v1_6_R3.ItemStack;
public class FBInventory extends InventoryCrafting {
private ItemStack[] items = new ItemStack[9];
public FBInventory(Container container, int i, int j) {
super(container, i, j);
}
public FBInventory() {
super((Container)null, 3, 3);
}
public ItemStack[] getContents() {
return this.items;
}
public int getSize() {
return 1;
}
public ItemStack getItem(int i) {
return this.items[i];<|fim▁hole|> }
public String getName() {
return "Result";
}
public ItemStack splitStack(int i, int j) {
if(this.items[i] != null) {
ItemStack itemstack = this.items[i];
this.items[i] = null;
return itemstack;
} else {
return null;
}
}
public void setItem(int i, ItemStack itemstack) {
this.items[i] = itemstack;
}
public int getMaxStackSize() {
return 64;
}
public void update() {}
public boolean a(EntityHuman entityhuman) {
return true;
}
public void f() {}
public void g() {}
}<|fim▁end|> | |
<|file_name|>test_qgslayoutmap.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutItemMap.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2017 Nyall Dawson'
__date__ = '20/10/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import QFileInfo, QRectF, QDir
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtGui import QPainter, QColor
from qgis.core import (QgsLayoutItemMap,
QgsRectangle,
QgsRasterLayer,
QgsVectorLayer,
QgsLayout,
QgsMapSettings,
QgsProject,
QgsMultiBandColorRenderer,
QgsCoordinateReferenceSystem,
QgsTextFormat,
QgsFontUtils,
QgsPalLayerSettings,
QgsNullSymbolRenderer,
QgsPoint,
QgsFeature,
QgsVectorLayerSimpleLabeling,
QgsLabelingEngineSettings,
QgsLayoutMeasurement,
QgsUnitTypes,
QgsLayoutObject,
QgsProperty,
QgsReadWriteContext)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
from qgslayoutchecker import QgsLayoutChecker
from test_qgslayoutitem import LayoutItemTestCase
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsLayoutMap(unittest.TestCase, LayoutItemTestCase):
@classmethod
def setUpClass(cls):
cls.item_class = QgsLayoutItemMap
def setUp(self):
self.report = "<h1>Python QgsLayoutItemMap Tests</h1>\n"
def tearDown(self):
report_file_path = "%s/qgistest.html" % QDir.tempPath()
with open(report_file_path, 'a') as report_file:
report_file.write(self.report)
def __init__(self, methodName):
"""Run once on class initialization."""
unittest.TestCase.__init__(self, methodName)
myPath = os.path.join(TEST_DATA_DIR, 'rgb256x256.png')
rasterFileInfo = QFileInfo(myPath)
self.raster_layer = QgsRasterLayer(rasterFileInfo.filePath(),
rasterFileInfo.completeBaseName())
rasterRenderer = QgsMultiBandColorRenderer(
self.raster_layer.dataProvider(), 1, 2, 3)
self.raster_layer.setRenderer(rasterRenderer)
myPath = os.path.join(TEST_DATA_DIR, 'points.shp')
vector_file_info = QFileInfo(myPath)
self.vector_layer = QgsVectorLayer(vector_file_info.filePath(),
vector_file_info.completeBaseName(), 'ogr')
assert self.vector_layer.isValid()
# pipe = mRasterLayer.pipe()
# assert pipe.set(rasterRenderer), 'Cannot set pipe renderer'
QgsProject.instance().addMapLayers([self.raster_layer, self.vector_layer])
# create layout with layout map
self.layout = QgsLayout(QgsProject.instance())
self.layout.initializeDefaults()
self.map = QgsLayoutItemMap(self.layout)
self.map.attemptSetSceneRect(QRectF(20, 20, 200, 100))
self.map.setFrameEnabled(True)
self.map.setLayers([self.raster_layer])
self.layout.addLayoutItem(self.map)
def testMapCrs(self):
# create layout with layout map
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
# check that new maps inherit project CRS
QgsProject.instance().setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 200, 100))
map.setFrameEnabled(True)
rectangle = QgsRectangle(-13838977, 2369660, -8672298, 6250909)
map.setExtent(rectangle)
map.setLayers([self.vector_layer])
layout.addLayoutItem(map)
self.assertEqual(map.crs().authid(), 'EPSG:4326')
self.assertFalse(map.presetCrs().isValid())
# overwrite CRS
map.setCrs(QgsCoordinateReferenceSystem('EPSG:3857'))
self.assertEqual(map.crs().authid(), 'EPSG:3857')
self.assertEqual(map.presetCrs().authid(), 'EPSG:3857')
checker = QgsLayoutChecker('composermap_crs3857', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# overwrite CRS
map.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
self.assertEqual(map.presetCrs().authid(), 'EPSG:4326')
self.assertEqual(map.crs().authid(), 'EPSG:4326')
rectangle = QgsRectangle(-124, 17, -78, 52)
map.zoomToExtent(rectangle)
checker = QgsLayoutChecker('composermap_crs4326', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# change back to project CRS
map.setCrs(QgsCoordinateReferenceSystem())
self.assertEqual(map.crs().authid(), 'EPSG:4326')
self.assertFalse(map.presetCrs().isValid())
def testContainsAdvancedEffects(self):
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
map = QgsLayoutItemMap(layout)
self.assertFalse(map.containsAdvancedEffects())
self.vector_layer.setBlendMode(QPainter.CompositionMode_Darken)
result = map.containsAdvancedEffects()
self.vector_layer.setBlendMode(QPainter.CompositionMode_SourceOver)
self.assertTrue(result)
def testRasterization(self):
map_settings = QgsMapSettings()
map_settings.setLayers([self.vector_layer])
layout = QgsLayout(QgsProject.instance())
map = QgsLayoutItemMap(layout)
self.assertFalse(map.requiresRasterization())
self.vector_layer.setBlendMode(QPainter.CompositionMode_Darken)
self.assertFalse(map.requiresRasterization())
self.assertTrue(map.containsAdvancedEffects())
map.setBackgroundEnabled(False)
self.assertTrue(map.requiresRasterization())
map.setBackgroundEnabled(True)
map.setBackgroundColor(QColor(1, 1, 1, 1))
self.assertTrue(map.requiresRasterization())
self.vector_layer.setBlendMode(QPainter.CompositionMode_SourceOver)
def testLabelMargin(self):
"""
Test rendering map item with a label margin set
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, False)
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
layout.addLayoutItem(map)
checker = QgsLayoutChecker('composermap_label_nomargin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setLabelMargin(QgsLayoutMeasurement(15, QgsUnitTypes.LayoutMillimeters))
checker = QgsLayoutChecker('composermap_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setLabelMargin(QgsLayoutMeasurement(3, QgsUnitTypes.LayoutCentimeters))
checker = QgsLayoutChecker('composermap_label_cm_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
map.setMapRotation(45)
map.zoomToExtent(vl.extent())
map.setScale(map.scale() * 1.2)
checker = QgsLayoutChecker('composermap_rotated_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# data defined
map.setMapRotation(0)
map.zoomToExtent(vl.extent())
map.dataDefinedProperties().setProperty(QgsLayoutObject.MapLabelMargin, QgsProperty.fromExpression('1+3'))
map.refresh()
checker = QgsLayoutChecker('composermap_dd_label_margin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
def testPartialLabels(self):
"""
Test rendering map item with a show partial labels flag
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.UsePartialCandidates, False)
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
layout.addLayoutItem(map)
# default should always be to hide partial labels
self.assertFalse(map.mapFlags() & QgsLayoutItemMap.ShowPartialLabels)
# hiding partial labels (the default)
map.setMapFlags(QgsLayoutItemMap.MapItemFlags())
checker = QgsLayoutChecker('composermap_label_nomargin', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
# showing partial labels
map.setMapFlags(QgsLayoutItemMap.ShowPartialLabels)
checker = QgsLayoutChecker('composermap_show_partial_labels', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
def testBlockingItems(self):
"""
Test rendering map item with blocking items
"""
format = QgsTextFormat()
format.setFont(QgsFontUtils.getStandardTestFont("Bold"))
format.setSize(20)
format.setNamedStyle("Bold")
format.setColor(QColor(0, 0, 0))
settings = QgsPalLayerSettings()
settings.setFormat(format)
settings.fieldName = "'X'"
settings.isExpression = True
settings.placement = QgsPalLayerSettings.OverPoint
vl = QgsVectorLayer("Point?crs=epsg:4326&field=id:integer", "vl", "memory")
vl.setRenderer(QgsNullSymbolRenderer())
f = QgsFeature(vl.fields(), 1)
for x in range(15):
for y in range(15):
f.setGeometry(QgsPoint(x, y))
vl.dataProvider().addFeature(f)
vl.setLabeling(QgsVectorLayerSimpleLabeling(settings))
vl.setLabelsEnabled(True)
p = QgsProject()
engine_settings = QgsLabelingEngineSettings()
engine_settings.setFlag(QgsLabelingEngineSettings.DrawLabelRectOnly, True)
p.setLabelingEngineSettings(engine_settings)
<|fim▁hole|> p.addMapLayer(vl)
layout = QgsLayout(p)
layout.initializeDefaults()
p.setCrs(QgsCoordinateReferenceSystem('EPSG:4326'))
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(10, 10, 180, 180))
map.setFrameEnabled(True)
map.zoomToExtent(vl.extent())
map.setLayers([vl])
map.setId('map')
layout.addLayoutItem(map)
map2 = QgsLayoutItemMap(layout)
map2.attemptSetSceneRect(QRectF(0, 5, 50, 80))
map2.setFrameEnabled(True)
map2.setBackgroundEnabled(False)
map2.setId('map2')
layout.addLayoutItem(map2)
map3 = QgsLayoutItemMap(layout)
map3.attemptSetSceneRect(QRectF(150, 160, 50, 50))
map3.setFrameEnabled(True)
map3.setBackgroundEnabled(False)
map3.setId('map3')
layout.addLayoutItem(map3)
map.addLabelBlockingItem(map2)
map.addLabelBlockingItem(map3)
map.setMapFlags(QgsLayoutItemMap.MapItemFlags())
checker = QgsLayoutChecker('composermap_label_blockers', layout)
checker.setControlPathPrefix("composer_map")
result, message = checker.testLayout()
self.report += checker.report()
self.assertTrue(result, message)
doc = QDomDocument("testdoc")
elem = layout.writeXml(doc, QgsReadWriteContext())
l2 = QgsLayout(p)
self.assertTrue(l2.readXml(elem, doc, QgsReadWriteContext()))
map_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map'][0]
map2_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map2'][0]
map3_restore = [i for i in l2.items() if isinstance(i, QgsLayoutItemMap) and i.id() == 'map3'][0]
self.assertTrue(map_restore.isLabelBlockingItem(map2_restore))
self.assertTrue(map_restore.isLabelBlockingItem(map3_restore))
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>files.py<|end_file_name|><|fim▁begin|>from markupsafe import escape
import re
from pymongo.objectid import ObjectId
from pymongo.errors import InvalidId
from app.people.people_model import People
from app.board.board_model import BoardTopic, BoardNode
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from lib.filter import none2string,mentions,video, urlink
from lib.utils import html_escape, br_escape
cache_opts = {
'cache.type': 'file',
'cache.data_dir': '/tmp/caches/data',
'cache.lock_dir': '/tmp/caches/lock',
'cache.regions': 'short_term, long_term',
#'cache.short_term.type': 'ext:memcached',
#'cache.short_term.url': '127.0.0.1.11211',
'cache.short_term.type': 'file',
'cache.short_term.expire': '1200',
'cache.long_term.type': 'file',
'cache.long_term.expire': '3600',
}
cache = CacheManager(**parse_cache_config_options(cache_opts))
@cache.region('short_term', 'cached_people')
def get_cached_people(people_id):
try:
people = People.objects.with_id(people_id)
return people
except InvalidId, error:
pass<|fim▁hole|>
return None
def fetch_cached_people(people_id, reflush=False):
if reflush:
cache.region_invalidate(get_cached_people, None, 'cached_people', people_id)
return get_cached_people(people_id)
@cache.region('long_term', 'cached_board_topic')
def get_cached_board_topic(topic_id):
try:
topic = BoardTopic.objects.with_id(topic_id)
if topic is None:
return None
if topic.content:
topic.html_content = urlink(escape(topic.content)) #urlink((mentions(youku(escape(topic.content)) ) ) , trim_url_limit=30)
else:
topic.html_content = ''
return topic
except Exception, error:
return None
return None
def fetch_cached_board_topic(topic_id, reflush=False):
if reflush:
cache.region_invalidate(get_cached_board_topic, None, 'cached_board_topic', topic_id)
return get_cached_board_topic(topic_id)
@cache.region('long_term', 'cached_board_topic_morecontent')
def get_cached_board_topic_morecontent(topic_id):
try:
topic = fetch_cached_board_topic(topic_id)
if topic is None:
return None
html_more_content = ''
if topic.more_content:
html_more_content = br_escape(urlink(escape(topic.more_content))) #urlink((mentions(youku(escape(topic.content)) ) ) , trim_url_limit=30)
extra_content = ''
if topic.video_urls:
video_html = '<p></p>'
for url in topic.video_urls:
video_html += video(url)
extra_content = video_html
return html_more_content + extra_content
except Exception, error:
return None
return None
def fetch_cached_board_topic_morecontent(topic_id, reflush=False):
if reflush:
cache.region_invalidate(get_cached_board_topic, None, 'cached_board_topic_morecontent', topic_id)
return get_cached_board_topic_morecontent(topic_id)
@cache.region('long_term', 'cached_board_nodelist')
def get_cached_board_nodelist(cache='board_nodelist'):
try:
nodelist = BoardNode.get_top_nodes()
return list(nodelist)
except InvalidId, error:
pass
return None
def fetch_cached_board_nodelist(reflush=False):
if reflush:
cache.region_invalidate(get_cached_board_nodelist, None, 'cached_board_nodelist', 'board_nodelist')
return get_cached_board_nodelist('board_nodelist')<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
var express = require("express");
var http = require("http");
var app = express();
var httpServer = http.Server(app);
var io = require('socket.io')(httpServer);
// Users array.
var users = [];
// Channels pre-defined array.
var channels = [
'Angular',
'React',
'Laravel',
'Symfony'
];
// Start http server.
httpServer.listen(3000, function () {
});
// Use static files 'app' folder for '/' path.
app.use(express.static(__dirname + '/app/'));
// Channels endpoint.
app.get('/channels', function (req, res) {
res.send(channels);
});
// On connection event.
io.on('connection', function (socket) {
// Join event.
socket.on('join', function (data) {
// Join socket to channel.
socket.join(data.channel);
// Add user to users lists.
users.push({id: socket.id, name: data.user});
// Bind username to socket object.
socket.username = data.user;
<|fim▁hole|> socket.leave(socket.channel);
}
// Bind channel to socket.
socket.channel = data.channel;
});
// Message event.
socket.on('message', function (data) {
// Send to selected channel user's message.
io.sockets.in(data.channel).emit('message', {message: data.message, user: data.username});
});
// Private message event.
socket.on('private', function (data) {
// Split message to take receiver name.
var message = data.message.split(" ");
// Get username from message array.
var to_user = message[0].slice(1);
// Filter users to find user's socket id and send message.
users.filter(function (user) {
if (user.name == to_user) {
// Format message.
var private_message = "(private) " + data.message.slice(to_user.length + 2);
// Send message to user who sent the message.
io.sockets.connected[socket.id].emit('message', {message: private_message, user: "me -> " + to_user});
// Send message to receiver.
io.sockets.connected[user.id].emit('message', {message: private_message, user: data.username});
}
});
});
// Disconnect event.
socket.on('disconnect', function () {
// Check if user joined any room and clean users array.
users = users.filter(function (user) {
if (user.id == socket.id) {
return false;
}
return true
});
});
});<|fim▁end|> | // If socket already exists in a channel, leave.
if (typeof socket.channel != 'undefined') { |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>/**
* utility library
*/
var basicAuth = require('basic-auth');
var fs = require('fs');
/**
* Simple basic auth middleware for use with Express 4.x.
*
* @example
* app.use('/api-requiring-auth', utils.basicAuth('username', 'password'));
*
* @param {string} username Expected username
* @param {string} password Expected password
* @returns {function} Express 4 middleware requiring the given credentials
*/
exports.basicAuth = function(username, password) {
return function(req, res, next) {
var user = basicAuth(req);
if (!user || user.name !== username || user.pass !== password) {
res.set('WWW-Authenticate', 'Basic realm=Authorization Required');
return res.sendStatus(401);
}
next();
};
};
exports.cpuTemp = function() {
var cputemp = Math.round(((parseFloat(fs.readFileSync("/sys/class/thermal/thermal_zone0/temp"))/1000) * (9/5) + 32)*100)/100;
return cputemp;
};
exports.w1Temp = function(serial) {
var temp;
var re=/t=(\d+)/;
try {
var text=fs.readFileSync('/sys/bus/w1/devices/' + serial + '/w1_slave','utf8');
if (typeof(text) != "undefined") {
if (text.indexOf("YES") > -1) {
var temptext=text.match(re);
if (typeof(temptext) != "undefined") {
temp = Math.round(((parseFloat(temptext[1])/1000) * (9/5) + 32)*100)/100;
}
}
}
} catch (e) {
console.log(e);
}<|fim▁hole|> return temp;
};<|fim▁end|> | |
<|file_name|>AltRange.py<|end_file_name|><|fim▁begin|>from exterminate.Utilities import builtins
<|fim▁hole|>def alt_range(start, stop, step=1):
return _range(start-2, stop+2, max(1, int(step/2)))
builtins.range = alt_range<|fim▁end|> | _range = range
|
<|file_name|>SizeAnimation.java<|end_file_name|><|fim▁begin|>package UserInterface.Animation;
/**
* Makes shit get big, makes shit get small.
*/
public class SizeAnimation extends Animation {
protected int iW, iH, fW, fH, cW, cH;
public SizeAnimation(long period, int paceType, boolean loop, int iW, int iH, int fW, int fH) {
super(period, paceType, loop);
this.iW = iW;
this.iH = iH;
this.fW = fW;<|fim▁hole|> }
@Override
protected void updateAnimation(double p) {
cW = (int)Math.round((fW - iW)*p) + iW;
cH = (int)Math.round((fH - iH)*p) + iH;
}
public int getWidth() {
return cW;
}
public int getHeight() {
return cH;
}
}<|fim▁end|> | this.fH = fH;
this.cW = iW;
this.cH = iH; |
<|file_name|>register_all_kernels.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2020 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/<|fim▁hole|>
import {nonMaxSuppressionV5Config} from './kernels/NonMaxSuppressionV5';
import {squareConfig} from './kernels/Square';
import {squaredDifferenceConfig} from './kernels/SquaredDifference';
// List all kernel configs here
const kernelConfigs: KernelConfig[] = [
nonMaxSuppressionV5Config,
squareConfig,
squaredDifferenceConfig,
];
for (const kernelConfig of kernelConfigs) {
registerKernel(kernelConfig);
}<|fim▁end|> | // We explicitly import the modular kernels so they get registered in the
// global registry when we compile the library. A modular build would replace
// the contents of this file and import only the kernels that are needed.
import {KernelConfig, registerKernel} from '../../kernel_registry'; |
<|file_name|>gwiki.py<|end_file_name|><|fim▁begin|>"""
Google Code Wiki translator.
Syntax defined by http://code.google.com/p/support/wiki/WikiSyntax
Here called gwiki to make the dialect clear (g for google).
"""
import re, os, commands, sys
from common import default_movie, plain_exercise, insert_code_and_tex, \
fix_ref_section_chapter
from plaintext import plain_quiz
from misc import _abort
from doconce import errwarn
def gwiki_code(filestr, code_blocks, code_block_types,
tex_blocks, format):
filestr = insert_code_and_tex(filestr, code_blocks, tex_blocks, format)
c = re.compile(r'^!bc(.*?)\n', re.MULTILINE)
filestr = c.sub(r'{{{\n', filestr)
filestr = re.sub(r'!ec\n', r'}}}\n', filestr)
c = re.compile(r'^!bt\n', re.MULTILINE)
filestr = c.sub(r'{{{\n', filestr)
filestr = re.sub(r'!et\n', r'}}}\n', filestr)
return filestr
def gwiki_figure(m):
filename = m.group('filename')
link = filename if filename.startswith('http') else None
if not link and not os.path.isfile(filename):
raise IOError('no figure file %s' % filename)
<|fim▁hole|> if link is None:
if not ext in '.png .gif .jpg .jpeg'.split():
# try to convert image file to PNG, using
# convert from ImageMagick:
cmd = 'convert %s png:%s' % (filename, root+'.png')
failure, output = commands.getstatusoutput(cmd)
if failure:
errwarn('\n**** Warning: could not run ' + cmd)
errwarn('Convert %s to PNG format manually' % filename)
_abort()
filename = root + '.png'
caption = m.group('caption')
# keep label if it's there:
caption = re.sub(r'label\{(.+?)\}', '(\g<1>)', caption)
errwarn("""
NOTE: Place %s at some place on the web and edit the
.gwiki page, either manually (seach for 'Figure: ')
or use the doconce script:
doconce gwiki_figsubst.py mydoc.gwiki URL
""" % filename)
result = r"""
---------------------------------------------------------------
Figure: %s
(the URL of the image file %s must be inserted here)
<wiki:comment>
Put the figure file %s on the web (e.g., as part of the
googlecode repository) and substitute the line above with the URL.
</wiki:comment>
---------------------------------------------------------------
""" % (caption, filename, filename)
return result
from common import table_analysis
def gwiki_table(table):
"""Native gwiki table."""
# add 2 chars for column width since we add boldface _..._
# in headlines:
column_width = [c+2 for c in table_analysis(table['rows'])]
# Does column and heading alignment matter?
# Not according to http://code.google.com/p/support/wiki/WikiSyntax#Tables
# but it is possible to use HTML code in gwiki (i.e., html_table)
# (think this was tried without success...)
s = '\n'
for i, row in enumerate(table['rows']):
if row == ['horizontal rule']:
continue
if i == 1 and \
table['rows'][i-1] == ['horizontal rule'] and \
table['rows'][i+1] == ['horizontal rule']:
headline = True
else:
headline = False
empty_row = max([len(column.strip())
for column in row]) == 0
if empty_row:
continue
for column, w in zip(row, column_width):
if headline:
if column:
c = ' %s ' % (('_'+ column + '_').center(w))
else:
c = ''
else:
c = ' %s ' % column.ljust(w)
s += ' || %s ' % c
s += ' ||\n'
s += '\n\n'
return s
def gwiki_author(authors_and_institutions, auth2index,
                 inst2index, index2inst, auth2email):
    """Typeset the author list for gwiki output.

    Institutions are intentionally dropped; emails are rendered in the
    obfuscated "(user at domain)" form.  Returns '' when there are no
    authors.
    """
    names = []
    for author, i, email in authors_and_institutions:
        if email is None:
            suffix = ''
        else:
            user, domain = email.split('@')
            suffix = ' (%s at %s)' % (user, domain)
        names.append('_%s_%s' % (author, suffix))
    if not names:
        # no authors:
        return ''
    if len(names) > 2:
        # Oxford-comma style list: "A, B, and C"
        names[-1] = 'and ' + names[-1]
        author_text = ', '.join(names)
    elif len(names) == 2:
        author_text = ' and '.join(names)
    else:
        author_text = names[0]
    # we skip institutions in gwiki
    return '\n\nBy ' + author_text + '\n\n'
def wiki_ref_and_label_common(section_label2title, format, filestr):
    """Resolve labels and cross-references for wiki-family output formats.

    section_label2title maps label names to section titles; format is the
    output format name; filestr is the whole document text.  Returns the
    transformed document text.
    """
    filestr = fix_ref_section_chapter(filestr, format)
    # Strip every remaining label{...} command from the output
    # (anchors inside titles do not work in these wiki dialects).
    filestr = re.sub(r'label\{.+?\}', '', filestr)
    # Turn section references into wiki-internal [#Title] links.
    for label in section_label2title:
        title = section_label2title[label]
        wiki_link = '[#%s]' % title.replace(' ', '_')
        filestr = filestr.replace('ref{%s}' % label, wiki_link)
    from common import ref2equations
    filestr = ref2equations(filestr)
    # Any leftover ref{x} degrades gracefully to plain x.
    filestr = re.sub(r'ref\{(.+?)\}', '\g<1>', filestr)
    return filestr
def gwiki_ref_and_label(section_label2title, format, filestr):
    # Cross-reference/label handling for the gwiki format: gwiki needs no
    # special treatment beyond the shared wiki-family logic, so delegate.
    return wiki_ref_and_label_common(section_label2title, format, filestr)
def define(FILENAME_EXTENSION,
BLANKLINE,
INLINE_TAGS_SUBST,
CODE,
LIST,
ARGLIST,
TABLE,
EXERCISE,
FIGURE_EXT,
CROSS_REFS,
INDEX_BIB,
TOC,
ENVIRS,
QUIZ,
INTRO,
OUTRO,
filestr):
# all arguments are dicts and accept in-place modifications (extensions)
FILENAME_EXTENSION['gwiki'] = '.gwiki' # output file extension
BLANKLINE['gwiki'] = '\n'
# replacement patterns for substitutions of inline tags
INLINE_TAGS_SUBST['gwiki'] = {
# use verbatim mode for math:
'math': r'\g<begin>`\g<subst>`\g<end>',
'math2': r'\g<begin>`\g<puretext>`\g<end>',
'emphasize': r'\g<begin>_\g<subst>_\g<end>',
'bold': r'\g<begin>*\g<subst>*\g<end>',
'verbatim': r'\g<begin>`\g<subst>`\g<end>',
#'linkURL': r'\g<begin>[\g<url> \g<link>]\g<end>',
'linkURL2': r'[\g<url> \g<link>]',
'linkURL3': r'[\g<url> \g<link>]',
'linkURL2v': r"[\g<url> `\g<link>`]",
'linkURL3v': r"[\g<url> `\g<link>`]",
'plainURL': r'\g<url>',
'colortext': r'<font color="\g<color>">\g<text></font>',
'chapter': r'= \g<subst> =',
'section': r'== \g<subst> ==',
'subsection': r'=== \g<subst> ===',
'subsubsection': r'==== \g<subst> ====\n',
# 'section': r'++++ \g<subst> ++++',
# 'subsection': r'++++++ \g<subst> ++++++',
# 'subsubsection': r'++++++++ \g<subst> ++++++++',
'paragraph': r'*\g<subst>*\g<space>',
#'title': r'#summary \g<subst>\n<wiki:toc max_depth="2" />',
'title': r'#summary \g<subst>\n',
'date': r'===== \g<subst> =====',
'author': gwiki_author, #r'===== \g<name>, \g<institution> =====',
# 'figure': r'<\g<filename>>',
'figure': gwiki_figure,
'movie': default_movie, # will not work for HTML movie player
'comment': '<wiki:comment> %s </wiki:comment>',
'abstract': r'\n*\g<type>.* \g<text>\g<rest>',
'linebreak': r'\g<text>' + '\n',
'non-breaking-space': ' ',
'ampersand2': r' \g<1>&\g<2>',
}
CODE['gwiki'] = gwiki_code
from html import html_table
#TABLE['gwiki'] = html_table
TABLE['gwiki'] = gwiki_table
# native list:
LIST['gwiki'] = {
'itemize': {'begin': '\n', 'item': '*', 'end': '\n\n'},
'enumerate': {'begin': '\n', 'item': '#', 'end': '\n\n'},
'description': {'begin': '\n', 'item': '* %s ', 'end': '\n\n'},
'separator': '\n'}
# (the \n\n for end is a hack because doconce.py avoids writing
# newline at the end of lists until the next paragraph is hit)
#LIST['gwiki'] = LIST['HTML'] # does not work well
# how to typeset description lists for function arguments, return
# values, and module/class variables:
ARGLIST['gwiki'] = {
'parameter': '*argument*',
'keyword': '*keyword argument*',
'return': '*return value(s)*',
'instance variable': '*instance variable*',
'class variable': '*class variable*',
'module variable': '*module variable*',
}
FIGURE_EXT['gwiki'] = {
'search': ('.png', '.gif', '.jpg', '.jpeg'),
'convert': ('.png', '.gif', '.jpg')}
CROSS_REFS['gwiki'] = gwiki_ref_and_label
from plaintext import plain_index_bib
EXERCISE['gwiki'] = plain_exercise
INDEX_BIB['gwiki'] = plain_index_bib
TOC['gwiki'] = lambda s, f: '<wiki: toc max_depth="2" />'
QUIZ['gwiki'] = plain_quiz
# document start:
INTRO['gwiki'] = ''
#INTRO['gwiki'] = '#summary YourOneLineSummary\n<wiki:toc max_depth="1" />\n'<|fim▁end|> | basename = os.path.basename(filename)
stem, ext = os.path.splitext(basename)
root, ext = os.path.splitext(filename)
|
<|file_name|>readonly-implicit-fields.spec.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2022 Imply Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ESLintUtils } from '@typescript-eslint/utils';
import { readonlyImplicitFields } from './readonly-implicit-fields';
const ruleTester = new ESLintUtils.RuleTester({
parser: '@typescript-eslint/parser',
});
ruleTester.run('readonly-implicit-fields', readonlyImplicitFields, {
valid: [
// Various valid cases inside of Immutable Classes
`class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public declare readonly foo: string;
private readonly baz: number;
public readonly qux: () => void;
public getBaz = () => this.baz;
public changeBaz = (baz: number) => this;
public doStuff(): string { return 'stuff'; }
}`,
`class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare readonly foo: string;
public declare readonly foo: string;
private declare readonly foo: string;
}`,
`class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare readonly getFoo: () => string;
public declare readonly getFoo: () => string;
private declare readonly getFoo: () => string;
}`,
`class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare readonly changeFoo: (foo: string) => MyClass;
public declare readonly changeFoo: (foo: string) => MyClass;
private declare readonly changeFoo: (foo: string) => MyClass;
}`,
// Invalid cases but not inside of BaseImmutable inheritors
`class MyClass extends NotImmutable {
declare foo: string;
}`,
`class MyClass extends NotImmutable {
public declare foo: string;
}`,
`class MyClass extends NotImmutable {
private declare foo: string;
}`,
],
invalid: [
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public declare foo: string;
}`,
errors: [{ messageId: 'useReadonlyForProperty', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public declare readonly foo: string;<|fim▁hole|> },
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare foo: string;
}`,
errors: [{ messageId: 'useReadonlyForProperty', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare readonly foo: string;
}`,
},
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
private declare foo: string;
}`,
errors: [{ messageId: 'useReadonlyForProperty', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
private declare readonly foo: string;
}`,
},
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public declare getFoo: () => string;
}`,
errors: [{ messageId: 'useReadonlyForAccessor', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public declare readonly getFoo: () => string;
}`,
},
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare getFoo: () => string;
}`,
errors: [{ messageId: 'useReadonlyForAccessor', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
declare readonly getFoo: () => string;
}`,
},
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
private declare getFoo: () => string;
}`,
errors: [{ messageId: 'useReadonlyForAccessor', line: 3, column: 11 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
private declare readonly getFoo: () => string;
}`,
},
// Weird spacing
{
code: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public
declare foo : string;
}`,
errors: [{ messageId: 'useReadonlyForProperty', line: 4, column: 13 }],
output: `
class MyClass extends BaseImmutable<MyClassValue, MyClassJS> {
public
declare readonly foo : string;
}`,
},
],
});<|fim▁end|> | }`, |
<|file_name|>jquery.validation.settings.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function($){
var nameDefault = 'Your name...';
var emailDefault = 'Your email...';
var messageDefault = 'Your message...';
// Setting up existing forms
setupforms();
function setupforms() {
// Applying default values
setupDefaultText('#name',nameDefault);
setupDefaultText('#email',emailDefault);
setupDefaultText('#message',messageDefault);
// Focus / Blur check against defaults
focusField('#name');
focusField('#email');
focusField('#message');
}
function setupDefaultText(fieldID,fieldDefault) {
$(fieldID).val(fieldDefault);
$(fieldID).attr('data-default', fieldDefault);
}
function evalDefault(fieldID) {
if($(fieldID).val() != $(fieldID).attr('data-default')) {
return false;
}
else { return true; }
}
function hasDefaults(formType) {
switch (formType)
{
case "contact" :
<|fim▁hole|> return false;
}
}
function focusField(fieldID) {
$(fieldID).focus(function(evaluation) {
if(evalDefault(fieldID)) { $(fieldID).val(''); }
}).blur(function(evaluation) {
if(evalDefault(fieldID) || $(fieldID).val() === '') { $(fieldID).val($(fieldID).attr('data-default')); }
});
}
$('.button-submit').click(function(event) {
event.preventDefault();
});
$('#submit-contact').bind('click', function(){
if(!hasDefaults('contact')) { $('#form-contact').submit(); }
});
$("#form-contact").validate({
rules: {
name: {
required: true,
minlength: 3
},
email: {
required: true,
email: true
},
message: {
required: true,
minlength: 10
}
},
messages: {
name: {
required: "Please enter your name.",
minlength: "Name must have at least 3 characters."
},
email: {
required: "Please enter your email address.",
email: "This is not a valid email address format."
},
message: {
required: "Please enter a message.",
minlength: "Message must have at least 10 characters."
}
}
});
function validateContact() {
if(!$('#form-contact').valid()) { return false; }
else { return true; }
}
$("#form-contact").ajaxForm({
beforeSubmit: validateContact,
type: "POST",
url: "assets/php/contact-form-process.php",
data: $("#form-contact").serialize(),
success: function(msg){
$("#form-message").ajaxComplete(function(event, request, settings){
if(msg == 'OK') // Message Sent? Show the 'Thank You' message
{
result = '<span class="form-message-success"><i class="icon-thumbs-up"></i> Your message was sent. Thank you!</span>';
clear = true;
}
else
{
result = '<span class="form-message-error"><i class="icon-thumbs-down"></i> ' + msg +'</span>';
clear = false;
}
$(this).html(result);
if(clear == true) {
$('#name').val('');
$('#email').val('');
$('#message').val('');
}
});
}
});
});<|fim▁end|> | if(evalDefault('#name') && evalDefault('#email') && evalDefault('#message')) { return true; }
else { return false; }
default :
|
<|file_name|>dice.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import string
import random
# Simple recursive descent parser for dice rolls, e.g. '3d6+1d8+4'.
#
# roll := die {('+' | '-') die} ('+' | '-') modifier
# die := number 'd' number
# modifier := number
class StringBuf(object):<|fim▁hole|> self.pos = 0
def peek(self):
return self.s[self.pos]
def getc(self):
c = self.peek()
self.pos += 1
return c
def ungetc(self):
self.pos -= 1
def tell(self):
return self.pos
class Symbol(object):
    """A lexical token for the dice-roll grammar: kind, position, value."""
    NUMBER = 0  # integer literal
    D = 1       # the 'd' die separator, as in '3d6'
    PLUS = 2    # '+'
    MINUS = 3   # '-'

    def __init__(self, type_, pos, value):
        # type_: one of the class constants above.
        # pos: index in the input string where the token starts.
        # value: token payload (the integer for NUMBER, the lexeme otherwise).
        # NOTE(review): the original line was missing the colon and body
        # (a syntax error); attribute names chosen here are new.
        self.type_ = type_
        self.pos = pos
        self.value = value
def next_symbol(s):
    """Scan and return the next Symbol from the StringBuf ``s``.

    Leading whitespace is skipped.  Raises ValueError on unrecognized
    input; reading past the end of input propagates IndexError from
    StringBuf.getc().  (The original body was an incomplete stub with an
    unterminated tuple literal and a str+int concatenation in the error
    message; this completes it per the grammar documented above.)
    """
    c = s.getc()
    while c in string.whitespace:
        c = s.getc()
    pos = s.tell() - 1  # start position of the token just begun
    if c in string.digits:
        # start of a number: consume the maximal run of digits
        literal = c
        try:
            c = s.getc()
            while c in string.digits:
                literal += c
                c = s.getc()
            s.ungetc()
        except IndexError:
            pass  # the number ran to the very end of the input
        return Symbol(Symbol.NUMBER, pos, int(literal))
    elif c == 'd':
        # die indicator
        return Symbol(Symbol.D, pos, c)
    elif c == '+':
        # plus sign
        return Symbol(Symbol.PLUS, pos, c)
    elif c == '-':
        # minus sign
        return Symbol(Symbol.MINUS, pos, c)
    else:
        # unrecognized input
        raise ValueError('Syntax error at position %d' % s.tell())
self.s = s |
<|file_name|>YoutubeCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import operator
import os
import re
import subprocess
import time
import urllib
from xml.dom.minidom import parseString as parse_xml
from module.network.CookieJar import CookieJar
from module.network.HTTPRequest import HTTPRequest
from ..internal.Hoster import Hoster
from ..internal.misc import exists, isexecutable, json, reduce, renice, replace_patterns, which
from ..internal.Plugin import Abort, Skip
class BIGHTTPRequest(HTTPRequest):
    """
    HTTPRequest variant that replaces load()'s built-in size cap with a
    configurable limit, so very big web pages can be fetched; works by
    overriding HTTPRequest's write() hook.
    """
    # @TODO: Add 'limit' parameter to HTTPRequest in v0.4.10
    def __init__(self, cookies=None, options=None, limit=2000000):
        self.limit = limit
        HTTPRequest.__init__(self, cookies=cookies, options=options)

    def write(self, buf):
        """Append a response chunk, bailing out past the size limit."""
        over_limit = self.limit and self.rep.tell() > self.limit
        if over_limit or self.abort:
            rep = self.getResponse()
            if self.abort:
                raise Abort()
            # Keep the oversized payload on disk for post-mortem debugging.
            with open("response.dump", "wb") as f:
                f.write(rep)
            raise Exception("Loaded Url exceeded limit")
        self.rep.write(buf)
class Ffmpeg(object):
    # Thin wrapper around the external ffmpeg binary: locates the
    # executable, collects input streams, and performs a lossless
    # copy/mux pass while reporting progress to the owning plugin.
    _RE_DURATION = re.compile(r'Duration: (\d{2}):(\d{2}):(\d{2})\.(\d{2}),')
    _RE_TIME = re.compile(r'time=(\d{2}):(\d{2}):(\d{2})\.(\d{2})')
    _RE_VERSION = re.compile((r'ffmpeg version (.+?) '))
    # CMD caches the resolved ffmpeg path at class level so find() runs
    # the probe subprocess only once per process.
    CMD = None
    priority = 0
    streams = []
    start_time = (0, 0)
    output_filename = None
    error_message = ""
    def __init__(self, priority, plugin=None):
        # priority: niceness applied to the spawned ffmpeg process.
        # plugin: owning hoster plugin, used for progress reporting (optional).
        self.plugin = plugin
        self.priority = priority
        # Re-initialize per-instance state (shadows the class attributes above).
        self.streams = []
        self.start_time = (0, 0)
        self.output_filename = None
        self.error_message = ""
        self.find()
    @classmethod
    def find(cls):
        """
        Check for ffmpeg: locate the executable, record its version in
        cls.VERSION and its path in cls.CMD.  Returns True when found.
        """
        if cls.CMD is not None:
            return True
        try:
            if os.name == "nt":
                ffmpeg = os.path.join(pypath, "ffmpeg.exe") if isexecutable(os.path.join(pypath, "ffmpeg.exe")) \
                    else "ffmpeg.exe"
            else:
                ffmpeg = "ffmpeg"
            cmd = which(ffmpeg) or ffmpeg
            p = subprocess.Popen([cmd, "-version"],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            out, err = (_r.strip() if _r else "" for _r in p.communicate())
        except OSError:
            # ffmpeg binary not present / not runnable
            return False
        m = cls._RE_VERSION.search(out)
        if m is not None:
            cls.VERSION = m.group(1)
        cls.CMD = cmd
        return True
    @property
    def found(self):
        # True once find() has successfully resolved the ffmpeg binary.
        return self.CMD is not None
    def add_stream(self, streams):
        # Accept either a single stream tuple or a list of them.
        # A stream is ('v'|'a'|'s', input_path[, language]) — see run().
        if isinstance(streams, list):
            self.streams.extend(streams)
        else:
            self.streams.append(streams)
    def set_start_time(self, start_time):
        # start_time: (minutes, seconds) tuple used for the -ss option.
        self.start_time = start_time
    def set_output_filename(self, output_filename):
        self.output_filename = output_filename
    def run(self):
        """Mux the collected streams into output_filename (codec copy).

        Returns True on success; on failure stores the last ffmpeg status
        line in self.error_message and returns False.
        """
        if self.CMD is None or self.output_filename is None:
            return False
        maps = []
        args = []
        meta = []
        for i, stream in enumerate(self.streams):
            args.extend(["-i", stream[1]])
            maps.extend(["-map", "%s:%s:0" % (i, stream[0])])
            if stream[0] == 's':
                # Subtitle streams carry a language tag in stream[2].
                meta.extend(["-metadata:s:s:0:%s" % i, "language=%s" % stream[2]])
        args.extend(maps)
        args.extend(meta)
        args.extend(["-y",
                     "-vcodec", "copy",
                     "-acodec", "copy",
                     "-scodec", "copy",
                     "-ss", "00:%s:%s.00" % (self.start_time[0], self.start_time[1]),
                     "-sub_charenc", "utf8"])
        call = [self.CMD] + args + [self.output_filename]
        p = subprocess.Popen(
            call,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        renice(p.pid, self.priority)
        # Parse total duration first, then stream progress lines.
        duration = self._find_duration(p)
        if duration:
            last_line = self._progress(p, duration)
        else:
            last_line = ""
        out, err = (_r.strip() if _r else "" for _r in p.communicate())
        if err or p.returncode:
            self.error_message = last_line
            return False
        else:
            self.error_message = ""
            return True
    def _find_duration(self, process):
        # Read ffmpeg's banner until the "Duration:" line appears and
        # return the total duration in centiseconds (0 if never found).
        duration = 0
        while True:
            line = process.stderr.readline() #: ffmpeg writes to stderr
            #: Quit loop on eof
            if not line:
                break
            m = self._RE_DURATION.search(line)
            if m is not None:
                duration = sum(int(v) * [60 * 60 * 100, 60 * 100, 100, 1][i]
                               for i, v in enumerate(m.groups()))
                break
        return duration
    def _progress(self, process, duration):
        # Consume ffmpeg's \r-terminated status lines character by
        # character, pushing percentage progress to the plugin.
        # duration is in centiseconds (as produced by _find_duration).
        line = ""
        last_line = ""
        while True:
            c = process.stderr.read(1) #: ffmpeg writes to stderr
            #: Quit loop on eof
            if not c:
                break
            elif c == "\r":
                last_line = line.strip('\r\n')
                line = ""
                m = self._RE_TIME.search(last_line)
                if m is not None:
                    current_time = sum(int(v) * [60 * 60 * 100, 60 * 100, 100, 1][i]
                                       for i, v in enumerate(m.groups()))
                    if self.plugin:
                        progress = current_time * 100 / duration
                        self.plugin.pyfile.setProgress(progress)
            else:
                line += c
                continue
        return last_line #: Last line may contain error message
class YoutubeCom(Hoster):
__name__ = "YoutubeCom"
__type__ = "hoster"
__version__ = "0.68"
__status__ = "testing"
__pattern__ = r'https?://(?:[^/]*\.)?(?:youtu\.be/|youtube\.com/watch\?(?:.*&)?v=)[\w\-]+'
__config__ = [("activated", "bool", "Activated", True),
("quality", "sd;hd;fullhd;240p;360p;480p;720p;1080p;1440p;2160p;3072p;4320p", "Quality Setting", "hd"),
("vfmt", "int", "Video FMT/ITAG Number (0 for auto)", 0),
("afmt", "int", "Audio FMT/ITAG Number (0 for auto)", 0),
(".mp4", "bool", "Allow .mp4", True),
(".flv", "bool", "Allow .flv", True),
(".webm", "bool", "Allow .webm", True),
(".mkv", "bool", "Allow .mkv", True),
(".3gp", "bool", "Allow .3gp", False),
("aac", "bool", "Allow aac audio (DASH video only)", True),
("vorbis", "bool", "Allow vorbis audio (DASH video only)", True),
("opus", "bool", "Allow opus audio (DASH video only)", True),
("ac3", "bool", "Allow ac3 audio (DASH video only)", True),
("dts", "bool", "Allow dts audio (DASH video only)", True),
("3d", "bool", "Prefer 3D", False),
("subs_dl", "off;all_specified;first_available", "Download subtitles", "off"),
("subs_dl_langs", "str", "Subtitle language codes (ISO639-1) to download (comma separated)", ""),
("subs_embed", "bool", "Embed subtitles inside the output file (.mp4 and .mkv only)", False),
("priority", "int", "ffmpeg process priority", 0)]
__description__ = """Youtube.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("spoob", "[email protected]"),
("zoidberg", "[email protected]"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
URL_REPLACEMENTS = [(r'youtu\.be/', 'youtube.com/watch?v=')]
#: Invalid characters that must be removed from the file name
invalid_chars = u'\u2605:?><"|\\'
#: name, width, height, quality ranking, 3D, type
formats = {
# 3gp
17: {'ext': ".3gp", 'width': 176, 'height': 144, 'qi': 0, '3d': False, 'type': "av"},
36: {'ext': ".3gp", 'width': 400, 'height': 240, 'qi': 1, '3d': False, 'type': "av"},
# flv
5: {'ext': ".flv", 'width': 400, 'height': 240, 'qi': 1, '3d': False, 'type': "av"},
6: {'ext': ".flv", 'width': 640, 'height': 400, 'qi': 4, '3d': False, 'type': "av"},
34: {'ext': ".flv", 'width': 640, 'height': 360, 'qi': 4, '3d': False, 'type': "av"},
35: {'ext': ".flv", 'width': 854, 'height': 480, 'qi': 6, '3d': False, 'type': "av"},
# mp4
83: {'ext': ".mp4", 'width': 400, 'height': 240, 'qi': 1, '3d': True, 'type': "av"},
18: {'ext': ".mp4", 'width': 480, 'height': 360, 'qi': 2, '3d': False, 'type': "av"},
82: {'ext': ".mp4", 'width': 640, 'height': 360, 'qi': 3, '3d': True, 'type': "av"},
22: {'ext': ".mp4", 'width': 1280, 'height': 720, 'qi': 8, '3d': False, 'type': "av"},
136: {'ext': ".mp4", 'width': 1280, 'height': 720, 'qi': 8, '3d': False, 'type': "v"},
84: {'ext': ".mp4", 'width': 1280, 'height': 720, 'qi': 8, '3d': True, 'type': "av"},
37: {'ext': ".mp4", 'width': 1920, 'height': 1080, 'qi': 9, '3d': False, 'type': "av"},
137: {'ext': ".mp4", 'width': 1920, 'height': 1080, 'qi': 9, '3d': False, 'type': "v"},
85: {'ext': ".mp4", 'width': 1920, 'height': 1080, 'qi': 9, '3d': True, 'type': "av"},
264: {'ext': ".mp4", 'width': 2560, 'height': 1440, 'qi': 10, '3d': False, 'type': "v"},
266: {'ext': ".mp4", 'width': 3840, 'height': 2160, 'qi': 11, '3d': False, 'type': "v"},
38: {'ext': ".mp4", 'width': 4096, 'height': 3072, 'qi': 12 , '3d': False, 'type': "av"},
# webm
43: {'ext': ".webm", 'width': 640, 'height': 360, 'qi': 3, '3d': False, 'type': "av"},
100: {'ext': ".webm", 'width': 640, 'height': 360, 'qi': 3, '3d': True, 'type': "av"},
101: {'ext': ".webm", 'width': 640, 'height': 360, 'qi': 4, '3d': True, 'type': "av"},
44: {'ext': ".webm", 'width': 854, 'height': 480, 'qi': 5, '3d': False, 'type': "av"},
45: {'ext': ".webm", 'width': 1280, 'height': 720, 'qi': 7, '3d': False, 'type': "av"},
247: {'ext': ".webm", 'width': 1280, 'height': 720, 'qi': 7, '3d': False, 'type': "v"},
102: {'ext': ".webm", 'width': 1280, 'height': 720, 'qi': 8, '3d': True, 'type': "av"},
46: {'ext': ".webm", 'width': 1920, 'height': 1080, 'qi': 9, '3d': False, 'type': "av"},
248: {'ext': ".webm", 'width': 1920, 'height': 1080, 'qi': 9, '3d': False, 'type': "v"},
271: {'ext': ".webm", 'width': 2560, 'height': 1440, 'qi': 10, '3d': False, 'type': "v"},
313: {'ext': ".webm", 'width': 3840, 'height': 2160, 'qi': 11, '3d': False, 'type': "v"},
272: {'ext': ".webm", 'width': 7680, 'height': 4320, 'qi': 13, '3d': False, 'type': "v"},
# audio
139: {'ext': ".mp4", 'qi': 1, 'acodec': "aac", 'type': "a"},
140: {'ext': ".mp4", 'qi': 2, 'acodec': "aac", 'type': "a"},
141: {'ext': ".mp4", 'qi': 3, 'acodec': "aac", 'type': "a"},
256: {'ext': ".mp4", 'qi': 4, 'acodec': "aac", 'type': "a"},
258: {'ext': ".mp4", 'qi': 5, 'acodec': "aac", 'type': "a"},
325: {'ext': ".mp4", 'qi': 6, 'acodec': "dts", 'type': "a"},
328: {'ext': ".mp4", 'qi': 7, 'acodec': "ac3", 'type': "a"},
171: {'ext': ".webm", 'qi': 1, 'acodec': "vorbis", 'type': 'a'},
172: {'ext': ".webm", 'qi': 2, 'acodec': "vorbis", 'type': 'a'},
249: {'ext': ".webm", 'qi': 3, 'acodec': "opus", 'type': 'a'},
250: {'ext': ".webm", 'qi': 4, 'acodec': "opus", 'type': 'a'},
251: {'ext': ".webm", 'qi': 5, 'acodec': "opus", 'type': 'a'}
}
    def _decrypt_signature(self, encrypted_sig):
        """Turn the encrypted 's' field into a working signature.

        Downloads the Youtube player JS, finds its scramble function and
        runs it through JSInterpreter; the resulting character
        permutation is cached per player URL for 24 hours.
        """
        # try:
        #     player_url = json.loads(re.search(r'"assets":.+?"js":\s*("[^"]+")',self.data).group(1))
        # except (AttributeError, IndexError):
        #     self.fail(_("Player URL not found"))
        player_url = self.player_config['assets']['js']
        if player_url.startswith("//"):
            # Protocol-relative URL: force https
            player_url = 'https:' + player_url
        if not player_url.endswith(".js"):
            self.fail(_("Unsupported player type %s") % player_url)
        # Cache layout: {'version': plugin version, 'cache': {player_url:
        # {'decrypt_map': [int, ...], 'time': timestamp}}}; the whole
        # cache is discarded when the plugin version changes.
        cache_info = self.db.retrieve("cache")
        cache_dirty = False
        if cache_info is None or 'version' not in cache_info or cache_info[
                'version'] != self.__version__:
            cache_info = {'version': self.__version__,
                          'cache': {}}
            cache_dirty = True
        if player_url in cache_info['cache'] and time.time() < cache_info['cache'][player_url]['time'] + 24 * 60 * 60:
            # Cache hit: replay the stored character permutation.
            self.log_debug("Using cached decode function to decrypt the URL")
            decrypt_func = lambda s: ''.join(s[_i] for _i in cache_info['cache'][player_url]['decrypt_map'])
            decrypted_sig = decrypt_func(encrypted_sig)
        else:
            player_data = self.load(self.fixurl(player_url))
            # Locate the name of the scramble function inside the player JS.
            m = re.search(r'\.sig\|\|(?P<sig>[a-zA-Z0-9$]+)\(', player_data) or \
                re.search(r'(["\'])signature\1\s*,\s*(?P<sig>[a-zA-Z0-9$]+)\(', player_data)
            try:
                function_name = m.group('sig')
            except (AttributeError, IndexError):
                self.fail(_("Signature decode function name not found"))
            try:
                # NOTE(review): JSInterpreter/JSInterpreterError are not
                # imported in the visible header — presumably provided by
                # a module-level import elsewhere in this file; confirm.
                jsi = JSInterpreter(player_data)
                decrypt_func = lambda s: jsi.extract_function(function_name)([s])
                #: Since Youtube just scrambles the order of the characters in the signature
                #: and does not change any byte value, we can store just a transformation map as a cached function
                decrypt_map = [ord(c) for c in decrypt_func(''.join(map(unichr, range(len(encrypted_sig)))))]
                cache_info['cache'][player_url] = {'decrypt_map': decrypt_map,
                                                   'time': time.time()}
                cache_dirty = True
                decrypted_sig = decrypt_func(encrypted_sig)
            except (JSInterpreterError, AssertionError), e:
                self.log_error(_("Signature decode failed"), e)
                self.fail(e.message)
        #: Remove old records from cache
        for _k in list(cache_info['cache'].keys()):
            if time.time() >= cache_info['cache'][_k]['time'] + 24 * 60 * 60:
                cache_info['cache'].pop(_k, None)
                cache_dirty = True
        if cache_dirty:
            self.db.store("cache", cache_info)
        return decrypted_sig
    def _handle_video(self):
        """Select and download the best-matching video stream.

        Chooses an itag from self.streams according to the user's quality
        /3D/container preferences (falling back to the nearest quality
        index), then downloads it.  Returns (filename, chosen_itag).
        """
        use3d = self.config.get('3d')
        # Map the symbolic quality setting to a default itag (3D vs 2D tables).
        if use3d:
            quality = {'sd': 82, 'hd': 84, 'fullhd': 85, '240p': 83, '360p': 82, '480p': 82, '720p': 84,
                       '1080p': 85, '1440p': 85, '2160p': 85, '3072p': 85, '4320p': 85}
        else:
            quality = {'sd': 18, 'hd': 22, 'fullhd': 37, '240p': 5, '360p': 18, '480p': 35, '720p': 22,
                       '1080p': 37, '1440p': 264, '2160p': 266, '3072p': 38, '4320p': 272}
        # An explicit 'vfmt' config overrides the quality-derived default.
        desired_fmt = self.config.get('vfmt') or quality.get(self.config.get('quality'), 0)
        is_video = lambda x: 'v' in self.formats[x]['type']
        if desired_fmt not in self.formats or not is_video(desired_fmt):
            self.log_warning(_("VIDEO ITAG %d unknown, using default") % desired_fmt)
            desired_fmt = 22
        #: Build dictionary of supported itags (3D/2D)
        allowed_suffix = lambda x: self.config.get(self.formats[x]['ext'])
        video_streams = dict([(_s[0], _s[1:]) for _s in self.streams
                              if _s[0] in self.formats and allowed_suffix(_s[0]) and
                              is_video(_s[0]) and self.formats[_s[0]]['3d'] == use3d])
        if not video_streams:
            self.fail(_("No available video stream meets your preferences"))
        self.log_debug("DESIRED VIDEO STREAM: ITAG:%d (%s %dx%d Q:%d 3D:%s) %sfound, %sallowed" %
                       (desired_fmt, self.formats[desired_fmt]['ext'], self.formats[desired_fmt]['width'],
                        self.formats[desired_fmt]['height'], self.formats[desired_fmt]['qi'],
                        self.formats[desired_fmt]['3d'], "" if desired_fmt in video_streams else "NOT ",
                        "" if allowed_suffix(desired_fmt) else "NOT "))
        #: Return fmt nearest to quality index
        if desired_fmt in video_streams and allowed_suffix(desired_fmt):
            chosen_fmt = desired_fmt
        else:
            quality_index = lambda x: self.formats[x]['qi'] #: Select quality index
            quality_distance = lambda x, y: abs(quality_index(x) - quality_index(y))
            self.log_debug("Choosing nearest stream: %s" % [(_s, allowed_suffix(_s), quality_distance(_s, desired_fmt))
                                                            for _s in video_streams.keys()])
            # Pick the stream closest in quality; ties resolved toward the
            # higher quality index.
            chosen_fmt = reduce(lambda x, y: x if quality_distance(x, desired_fmt) <= quality_distance(y, desired_fmt)
                                and quality_index(x) > quality_index(y) else y, video_streams.keys())
        self.log_debug("CHOSEN VIDEO STREAM: ITAG:%d (%s %dx%d Q:%d 3D:%s)" %
                       (chosen_fmt, self.formats[chosen_fmt]['ext'], self.formats[chosen_fmt]['width'],
                        self.formats[chosen_fmt]['height'], self.formats[chosen_fmt]['qi'],
                        self.formats[chosen_fmt]['3d']))
        # Stream tuple layout here: (url, signature, signature_encrypted?).
        url = video_streams[chosen_fmt][0]
        if video_streams[chosen_fmt][1]:
            if video_streams[chosen_fmt][2]:
                signature = self._decrypt_signature(video_streams[chosen_fmt][1])
            else:
                signature = video_streams[chosen_fmt][1]
            url += "&signature=" + signature
        if "&ratebypass=" not in url:
            url += "&ratebypass=yes"
        file_suffix = self.formats[chosen_fmt]['ext'] if chosen_fmt in self.formats else ".flv"
        # Video-only (DASH) streams get a ".video" marker so _postprocess
        # can tell them apart from muxed audio+video files.
        if 'a' not in self.formats[chosen_fmt]['type']:
            file_suffix = ".video" + file_suffix
        self.pyfile.name = self.file_name + file_suffix
        try:
            filename = self.download(url, disposition=False)
        except Skip, e:
            # Already downloaded: reconstruct the on-disk path instead.
            filename = os.path.join(self.pyload.config.get("general", "download_folder"),
                                    self.pyfile.package().folder,
                                    self.pyfile.name)
            self.log_info(_("Download skipped: %s due to %s") % (self.pyfile.name, e.message))
        return filename, chosen_fmt
    def _handle_audio(self, video_fmt):
        """Select and download the audio stream for a DASH video.

        video_fmt is the itag chosen by _handle_video; the audio
        container must match it (or .mkv must be allowed, which can hold
        any combination).  Returns (filename, chosen_itag).
        """
        # 141 (256kbit AAC) is the fallback default.
        desired_fmt = self.config.get('afmt') or 141
        is_audio = lambda x: self.formats[x]['type'] == "a"
        if desired_fmt not in self.formats or not is_audio(desired_fmt):
            self.log_warning(_("AUDIO ITAG %d unknown, using default") % desired_fmt)
            desired_fmt = 141
        #: Build dictionary of supported audio itags
        allowed_codec = lambda x: self.config.get(self.formats[x]['acodec'])
        allowed_suffix = lambda x: self.config.get(".mkv") or \
            self.config.get(self.formats[x]['ext']) and \
            self.formats[x]['ext'] == self.formats[video_fmt]['ext']
        audio_streams = dict([(_s[0], _s[1:]) for _s in self.streams
                              if _s[0] in self.formats and is_audio(_s[0]) and
                              allowed_codec(_s[0]) and allowed_suffix(_s[0])])
        if not audio_streams:
            self.fail(_("No available audio stream meets your preferences"))
        if desired_fmt in audio_streams and allowed_suffix(desired_fmt):
            chosen_fmt = desired_fmt
        else:
            quality_index = lambda x: self.formats[x]['qi'] #: Select quality index
            quality_distance = lambda x, y: abs(quality_index(x) - quality_index(y))
            self.log_debug("Choosing nearest stream: %s" % [(_s, allowed_suffix(_s), quality_distance(_s, desired_fmt))
                                                            for _s in audio_streams.keys()])
            # Pick the stream closest in quality; ties resolved toward the
            # higher quality index (same strategy as _handle_video).
            chosen_fmt = reduce(lambda x, y: x if quality_distance(x, desired_fmt) <= quality_distance(y, desired_fmt)
                                and quality_index(x) > quality_index(y) else y, audio_streams.keys())
        self.log_debug("CHOSEN AUDIO STREAM: ITAG:%d (%s %s Q:%d)" %
                       (chosen_fmt, self.formats[chosen_fmt]['ext'], self.formats[chosen_fmt]['acodec'],
                        self.formats[chosen_fmt]['qi']))
        # Stream tuple layout here: (url, signature, signature_encrypted?).
        url = audio_streams[chosen_fmt][0]
        if audio_streams[chosen_fmt][1]:
            if audio_streams[chosen_fmt][2]:
                signature = self._decrypt_signature(audio_streams[chosen_fmt][1])
            else:
                signature = audio_streams[chosen_fmt][1]
            url += "&signature=" + signature
        if "&ratebypass=" not in url:
            url += "&ratebypass=yes"
        # ".audio" marker lets _postprocess recognize the separate track.
        file_suffix = ".audio" + self.formats[chosen_fmt]['ext'] if chosen_fmt in self.formats else ".m4a"
        self.pyfile.name = self.file_name + file_suffix
        try:
            filename = self.download(url, disposition=False)
        except Skip, e:
            # Already downloaded: reconstruct the on-disk path instead.
            filename = os.path.join(self.pyload.config.get("general", "download_folder"),
                                    self.pyfile.package().folder,
                                    self.pyfile.name)
            self.log_info(_("Download skipped: %s due to %s") % (self.pyfile.name, e.message))
        return filename, chosen_fmt
def _handle_subtitles(self):
def timedtext_to_srt(timedtext):
def _format_srt_time(millisec):
sec, milli = divmod(millisec, 1000)
m, s = divmod(int(sec), 60)
h, m = divmod(m, 60)
return "%02d:%02d:%02d,%s" % (h, m, s, milli)
i = 1
srt = ""
dom = parse_xml(timedtext)
body = dom.getElementsByTagName("body")[0]
paras = body.getElementsByTagName("p")
for para in paras:
srt += str(i) + "\n"
srt += _format_srt_time(int(para.attributes['t'].value)) + ' --> ' + \
_format_srt_time(int(para.attributes['t'].value) + int(para.attributes['d'].value)) + "\n"
for child in para.childNodes:
if child.nodeName == 'br':
srt += "\n"
elif child.nodeName == '#text':
srt += unicode(child.data)
srt += "\n\n"
i += 1
return srt
srt_files =[]
try:
subs = json.loads(self.player_config['args']['player_response'])['captions']['playerCaptionsTracklistRenderer']['captionTracks']
subtitles_urls = dict([(_subtitle['languageCode'],
urllib.unquote(_subtitle['baseUrl']).decode('unicode-escape') + "&fmt=3")
for _subtitle in subs])
self.log_debug("AVAILABLE SUBTITLES: %s" % subtitles_urls.keys() or "None")
except KeyError:
self.log_debug("AVAILABLE SUBTITLES: None")
return srt_files
subs_dl = self.config.get('subs_dl')
if subs_dl != "off":
subs_dl_langs = [_x.strip() for _x in self.config.get('subs_dl_langs', "").split(',') if _x.strip()]
if subs_dl_langs:
# Download only listed subtitles (`subs_dl_langs` config gives the priority)
for _lang in subs_dl_langs:
if _lang in subtitles_urls:
srt_filename = os.path.join(self.pyload.config.get("general", "download_folder"),
self.pyfile.package().folder,
os.path.splitext(self.file_name)[0] + "." + _lang + ".srt")
if self.pyload.config.get('download', 'skip_existing') and \
exists(srt_filename) and os.stat(srt_filename).st_size != 0:
self.log_info("Download skipped: %s due to File exists" % os.path.basename(srt_filename))
srt_files.append((srt_filename, _lang))
continue
timed_text = self.load(subtitles_urls[_lang], decode=False)
srt = timedtext_to_srt(timed_text)
with open(srt_filename, "w") as f:
f.write(srt.encode('utf-8'))
self.set_permissions(srt_filename)
self.log_debug("Saved subtitle: %s" % os.path.basename(srt_filename))
srt_files.append((srt_filename, _lang))
if subs_dl == "first_available":
break
else:
# Download any available subtitle
for _subtitle in subtitles_urls.items():
srt_filename = os.path.join(self.pyload.config.get("general", "download_folder"),
self.pyfile.package().folder,
os.path.splitext(self.file_name)[0] + "." + _subtitle[0] + ".srt")
if self.pyload.config.get('download', 'skip_existing') and \
exists(srt_filename) and os.stat(srt_filename).st_size != 0:
self.log_info("Download skipped: %s due to File exists" % os.path.basename(srt_filename))
srt_files.append((srt_filename, _subtitle[0]))
continue
timed_text = self.load(_subtitle[1], decode=False)
srt = timedtext_to_srt(timed_text)
with open(srt_filename, "w") as f:
f.write(srt.encode('utf-8'))
self.set_permissions(srt_filename)
self.log_debug("Saved subtitle: %s" % os.path.basename(srt_filename))
srt_files.append((srt_filename, _lang))
if subs_dl == "first_available":
break
return srt_files
def _postprocess(self, video_filename, audio_filename, subtitles_files):
    """Merge audio/video streams, embed subtitles and honour the start time.

    :param video_filename: path of the downloaded video stream
    :param audio_filename: path of the separate audio stream, or None when
                           the video stream already contains audio
    :param subtitles_files: list of (srt_path, language) tuples
    :return: path of the final media file
    """
    final_filename = video_filename
    subs_embed = self.config.get("subs_embed")
    self.pyfile.setCustomStatus("postprocessing")
    self.pyfile.setProgress(0)
    if self.ffmpeg.found:
        if audio_filename is not None:
            #: Merge the separate A/V streams; keep the video container when
            #: both suffixes match, otherwise fall back to .mkv
            video_suffix = os.path.splitext(video_filename)[1]
            final_filename = os.path.join(os.path.dirname(video_filename),
                                          self.file_name +
                                          (video_suffix if video_suffix == os.path.splitext(audio_filename)[1]
                                           else ".mkv"))
            self.ffmpeg.add_stream(('v', video_filename))
            self.ffmpeg.add_stream(('a', audio_filename))
            if subtitles_files and subs_embed:
                for subtitle in subtitles_files:
                    self.ffmpeg.add_stream(('s',) + subtitle)
            self.ffmpeg.set_start_time(self.start_time)
            self.ffmpeg.set_output_filename(final_filename)
            self.pyfile.name = os.path.basename(final_filename)
            self.pyfile.size = os.path.getsize(video_filename) + \
                               os.path.getsize(audio_filename)  #: Just an estimate
            if self.ffmpeg.run():
                self.remove(video_filename, trash=False)
                self.remove(audio_filename, trash=False)
                if subtitles_files and subs_embed:
                    for subtitle in subtitles_files:
                        self.remove(subtitle[0])
            else:
                self.log_warning(_("ffmpeg error"), self.ffmpeg.error_message)
                final_filename = video_filename
        elif self.start_time[0] != 0 or self.start_time[1] != 0 or subtitles_files and subs_embed:
            #: Single stream, but ffmpeg is still needed to cut at the start
            #: time and/or embed subtitles; operate on a renamed input copy
            inputfile = video_filename + "_"
            final_filename = video_filename
            os.rename(video_filename, inputfile)
            # NOTE(review): the input was just renamed to `inputfile`; verify
            # whether add_stream should reference inputfile instead.
            self.ffmpeg.add_stream(('v', video_filename))
            self.ffmpeg.set_start_time(self.start_time)
            if subtitles_files and subs_embed:
                for subtitle in subtitles_files:
                    # BUGFIX: pass a flat ('s', path, lang) tuple as in the
                    # merge branch above, not a nested ('s', (path, lang))
                    self.ffmpeg.add_stream(('s',) + subtitle)
            self.pyfile.name = os.path.basename(final_filename)
            self.pyfile.size = os.path.getsize(inputfile)  #: Just an estimate
            if self.ffmpeg.run():
                self.remove(inputfile, trash=False)
                if subtitles_files and subs_embed:
                    for subtitle in subtitles_files:
                        self.remove(subtitle[0])
            else:
                self.log_warning(_("ffmpeg error"), self.ffmpeg.error_message)
    else:
        if audio_filename is not None:
            self.log_warning("ffmpeg is not installed, video and audio files will not be merged")
        if subtitles_files and self.config.get("subs_embed"):
            self.log_warning("ffmpeg is not installed, subtitles files will not be embedded")
    self.pyfile.setProgress(100)
    self.set_permissions(final_filename)
    return final_filename
def setup(self):
    """Prepare the plugin: enable resumable, parallel downloads and swap in
    a large-buffer HTTP request object."""
    self.multiDL = True
    self.resume_download = True
    #: Best-effort close of the stock HTTP handler before replacing it
    try:
        self.req.http.close()
    except Exception:
        pass
    options = self.pyload.requestFactory.getOptions()
    self.req.http = BIGHTTPRequest(cookies=CookieJar(None),
                                   options=options,
                                   limit=2500000)
def process(self, pyfile):
    """Main entry point: load the YouTube page, parse the player config,
    select and download streams, then post-process into the final file."""
    pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
    self.data = self.load(pyfile.url)
    #: Unavailable/removed video markers in the page -> report offline
    if re.search(r'<div id="player-unavailable" class="\s*player-width player-height\s*(?:player-unavailable\s*)?">',
                 self.data) or '"playabilityStatus":{"status":"ERROR"' in self.data:
        self.offline()
    #: Rate-limiting wall from YouTube -> report temporarily offline
    if "We have been receiving a large volume of requests from your network." in self.data:
        self.temp_offline()
    #: The player configuration is embedded as a JSON object in the page
    m = re.search(r'ytplayer.config = ({.+?});', self.data)
    if m is None:
        self.fail(_("Player config pattern not found"))
    self.player_config = json.loads(m.group(1))
    self.ffmpeg = Ffmpeg(self.config.get('priority') ,self)
    #: Set file name
    self.file_name = self.player_config['args']['title']
    #: Check for start time (e.g. '...&t=1m30s' in the URL)
    self.start_time = (0, 0)
    m = re.search(r't=(?:(\d+)m)?(\d+)s', pyfile.url)
    if self.ffmpeg and m:
        self.start_time = tuple(map(lambda _x: 0 if _x is None else int(_x), m.groups()))
        self.file_name += " (starting at %sm%ss)" % (self.start_time[0], self.start_time[1])
    #: Cleaning invalid characters from the file name
    self.file_name = self.file_name.encode('ascii', 'replace')
    for c in self.invalid_chars:
        self.file_name = self.file_name.replace(c, '_')
    #: Parse available streams
    streams_keys = ['url_encoded_fmt_stream_map']
    if 'adaptive_fmts' in self.player_config['args']:
        streams_keys.append('adaptive_fmts')
    self.streams = []
    for streams_key in streams_keys:
        streams = self.player_config['args'][streams_key]
        #: Records are comma-separated; each record is '&'-joined key=value pairs
        streams = [_s.split('&') for _s in streams.split(',')]
        streams = [dict((_x.split('=', 1)) for _x in _s) for _s in streams]
        #: Keep (itag, url, signature-or-None, signature_is_encrypted)
        streams = [(int(_s['itag']),
                    urllib.unquote(_s['url']),
                    _s.get('s', _s.get('sig', None)),
                    True if 's' in _s else False)
                   for _s in streams]
        self.streams += streams
    self.log_debug("AVAILABLE STREAMS: %s" % [_s[0] for _s in self.streams])
    video_filename, video_itag = self._handle_video()
    #: Formats whose type lacks 'a' are video-only -> fetch a separate audio stream
    has_audio = 'a' in self.formats[video_itag]['type']
    if not has_audio:
        audio_filename, audio_itag = self._handle_audio(video_itag)
    else:
        audio_filename = None
    subtitles_files = self._handle_subtitles()
    final_filename = self._postprocess(video_filename,
                                       audio_filename,
                                       subtitles_files)
    #: Everything is finished and final name can be set
    pyfile.name = os.path.basename(final_filename)
    pyfile.size = os.path.getsize(final_filename)
    self.last_download = final_filename
"""Credit to this awesome piece of code below goes to the 'youtube_dl' project, kudos!"""
class JSInterpreterError(Exception):
    """Raised by JSInterpreter when JS code cannot be parsed or evaluated."""
    pass
class JSInterpreter(object):
def __init__(self, code, objects=None):
    """Create an interpreter for `code`; `objects` optionally maps already
    extracted JS object names to their Python representations."""
    self.code = code
    self._functions = {}
    self._objects = {} if objects is None else objects
    # Binary operators; interpret_expression tries them in this order when
    # splitting an expression.
    self._OPERATORS = [
        ('|', operator.or_),
        ('^', operator.xor),
        ('&', operator.and_),
        ('>>', operator.rshift),
        ('<<', operator.lshift),
        ('-', operator.sub),
        ('+', operator.add),
        ('%', operator.mod),
        ('/', operator.truediv),
        ('*', operator.mul),
    ]
    # Compound assignments ('|=', '+=', ...) reuse the table above; a plain
    # '=' simply yields the right-hand value.
    self._ASSIGN_OPERATORS = [(sym + '=', fn) for sym, fn in self._OPERATORS]
    self._ASSIGN_OPERATORS.append(('=', lambda cur, right: right))
    self._VARNAME_PATTERN = r'[a-zA-Z_$][a-zA-Z_$0-9]*'
def interpret_statement(self, stmt, local_vars, allow_recursion=100):
    """Evaluate one JS statement and return (value, should_abort), where
    should_abort is True for `return` statements."""
    if allow_recursion < 0:
        raise JSInterpreterError('Recursion limit reached')
    stmt = stmt.lstrip()
    should_abort = False
    var_decl = re.match(r'var\s', stmt)
    if var_decl is not None:
        # Variable declaration: evaluate whatever follows the 'var' keyword
        expr = stmt[var_decl.end():]
    else:
        ret_kw = re.match(r'return(?:\s+|$)', stmt)
        if ret_kw is not None:
            expr = stmt[ret_kw.end():]
            should_abort = True
        else:
            # Fall back to treating the whole statement as an expression
            expr = stmt
    value = self.interpret_expression(expr, local_vars, allow_recursion)
    return value, should_abort
def interpret_expression(self, expr, local_vars, allow_recursion):
expr = expr.strip()
if expr == '': # Empty expression
return None
<|fim▁hole|> if m.group(0) == '(':
parens_count += 1
else:
parens_count -= 1
if parens_count == 0:
sub_expr = expr[1:m.start()]
sub_result = self.interpret_expression(sub_expr, local_vars, allow_recursion)
remaining_expr = expr[m.end():].strip()
if not remaining_expr:
return sub_result
else:
expr = json.dumps(sub_result) + remaining_expr
break
else:
raise JSInterpreterError('Premature end of parens in %r' % expr)
for op, opfunc in self._ASSIGN_OPERATORS:
m = re.match(r'(?x)(?P<out>%s)(?:\[(?P<index>[^\]]+?)\])?\s*%s(?P<expr>.*)$' %
(self._VARNAME_PATTERN, re.escape(op)), expr)
if m is None:
continue
right_val = self.interpret_expression(m.group('expr'), local_vars, allow_recursion - 1)
if m.groupdict().get('index'):
lvar = local_vars[m.group('out')]
idx = self.interpret_expression(m.group('index'), local_vars, allow_recursion)
assert isinstance(idx, int)
cur = lvar[idx]
val = opfunc(cur, right_val)
lvar[idx] = val
return val
else:
cur = local_vars.get(m.group('out'))
val = opfunc(cur, right_val)
local_vars[m.group('out')] = val
return val
if expr.isdigit():
return int(expr)
var_m = re.match(r'(?!if|return|true|false)(?P<name>%s)$' % self._VARNAME_PATTERN, expr)
if var_m:
return local_vars[var_m.group('name')]
try:
return json.loads(expr)
except ValueError:
pass
m = re.match(r'(?P<var>%s)\.(?P<member>[^(]+)(?:\(+(?P<args>[^()]*)\))?$' % self._VARNAME_PATTERN, expr)
if m is not None:
variable = m.group('var')
member = m.group('member')
arg_str = m.group('args')
if variable in local_vars:
obj = local_vars[variable]
else:
if variable not in self._objects:
self._objects[variable] = self.extract_object(variable)
obj = self._objects[variable]
if arg_str is None:
# Member access
if member == 'length':
return len(obj)
return obj[member]
assert expr.endswith(')')
# Function call
if arg_str == '':
argvals = tuple()
else:
argvals = tuple(self.interpret_expression(v, local_vars, allow_recursion) for v in arg_str.split(','))
if member == 'split':
assert argvals == ('',)
return list(obj)
if member == 'join':
assert len(argvals) == 1
return argvals[0].join(obj)
if member == 'reverse':
assert len(argvals) == 0
obj.reverse()
return obj
if member == 'slice':
assert len(argvals) == 1
return obj[argvals[0]:]
if member == 'splice':
assert isinstance(obj, list)
index, howMany = argvals
res = []
for i in range(index, min(index + howMany, len(obj))):
res.append(obj.pop(index))
return res
return obj[member](argvals)
m = re.match(r'(?P<in>%s)\[(?P<idx>.+)\]$' % self._VARNAME_PATTERN, expr)
if m is not None:
val = local_vars[m.group('in')]
idx = self.interpret_expression(m.group('idx'), local_vars, allow_recursion - 1)
return val[idx]
for op, opfunc in self._OPERATORS:
m = re.match(r'(?P<x>.+?)%s(?P<y>.+)' % re.escape(op), expr)
if m is None:
continue
x, abort = self.interpret_statement(m.group('x'), local_vars, allow_recursion - 1)
if abort:
raise JSInterpreterError('Premature left-side return of %s in %r' % (op, expr))
y, abort = self.interpret_statement(m.group('y'), local_vars, allow_recursion - 1)
if abort:
raise JSInterpreterError('Premature right-side return of %s in %r' % (op, expr))
return opfunc(x, y)
m = re.match(r'^(?P<func>%s)\((?P<args>[a-zA-Z0-9_$,]+)\)$' % self._VARNAME_PATTERN, expr)
if m is not None:
fname = m.group('func')
argvals = tuple(int(v) if v.isdigit() else local_vars[v]
for v in m.group('args').split(','))
if fname not in self._functions:
self._functions[fname] = self.extract_function(fname)
return self._functions[fname](argvals)
raise JSInterpreterError('Unsupported JS expression %r' % expr)
def extract_object(self, objname):
    """Locate the JS object literal named `objname` in self.code and return
    a dict mapping its field names to Python callables.

    Raises:
        JSInterpreterError: if no matching object definition is found.
    """
    obj = {}
    obj_m = re.search(r'(?:var\s+)?%s\s*=\s*\{\s*(?P<fields>([a-zA-Z$0-9]+\s*:\s*function\(.*?\)\s*\{.*?\}(?:,\s*)?)*)\}\s*;'
                      % re.escape(objname), self.code)
    if obj_m is None:
        # Fail loudly, mirroring extract_function(); the original code raised
        # an opaque AttributeError on obj_m.group() instead.
        raise JSInterpreterError('Could not find JS object %r' % objname)
    fields = obj_m.group('fields')
    # Currently, it only supports function definitions
    fields_m = re.finditer(r'(?P<key>[a-zA-Z$0-9]+)\s*:\s*function\((?P<args>[a-z,]+)\){(?P<code>[^}]+)}', fields)
    for f in fields_m:
        argnames = f.group('args').split(',')
        obj[f.group('key')] = self.build_function(argnames, f.group('code'))
    return obj
def extract_function(self, function_name):
    """Find the definition of `function_name` in self.code and wrap its body
    in a Python callable; raises JSInterpreterError when it is absent."""
    pattern = (r'(?x)(?:function\s+%s|[{;,]\s*%s\s*=\s*function|var\s+%s\s*=\s*function)\s*\((?P<args>[^)]*)\)\s*\{(?P<code>[^}]+)\}'
               % (re.escape(function_name), re.escape(function_name), re.escape(function_name)))
    func_m = re.search(pattern, self.code)
    if func_m is None:
        raise JSInterpreterError('Could not find JS function %r' % function_name)
    return self.build_function(func_m.group('args').split(','), func_m.group('code'))
def call_function(self, function_name, *args):
    """Extract `function_name` from the code and invoke it with `args`."""
    return self.extract_function(function_name)(args)
def build_function(self, argnames, code):
def resf(argvals):
local_vars = dict(zip(argnames, argvals))
for stmt in code.split(';'):
res, abort = self.interpret_statement(stmt, local_vars)
if abort:
break
return res
return resf<|fim▁end|> | if expr.startswith('('):
parens_count = 0
for m in re.finditer(r'[()]', expr): |
<|file_name|>JS_divergence.cpp<|end_file_name|><|fim▁begin|>#include <Rcpp.h>
using namespace Rcpp;
// underflow prevention: if the argument to log is so small<|fim▁hole|> return (lg == R_NegInf) ? 0 : x * lg;
}
// [[Rcpp::export]]
double jsdiv_v(NumericVector P, NumericVector Q) {
    // Jensen-Shannon divergence between two equal-length vectors; components
    // that are zero on one side contribute nothing to that side's sum.
    const int n = P.size();
    if (Q.size() != n) {
        stop("P and Q must be of same length");
    }
    double total = 0;
    for (int i = 0; i < n; i++) {
        const double mid = (P[i] + Q[i]) / 2;
        if (P[i] != 0) {
            total += xlogy(P[i], P[i] / mid);
        }
        if (Q[i] != 0) {
            total += xlogy(Q[i], Q[i] / mid);
        }
    }
    return total / 2;
}
// [[Rcpp::export]]
NumericMatrix jsdiv_m(NumericMatrix x, NumericMatrix y) {
    // Pairwise divergences: result(i, j) compares row i of x with row j of y.
    const int n = x.nrow();
    const int m = y.nrow();
    if (y.ncol() != x.ncol()) {
        stop("x and y must have the same number of columns");
    }
    NumericMatrix result(n, m);
    for (int i = 0; i < n; ++i) {
        for (int j = 0; j < m; ++j) {
            result(i, j) = jsdiv_v(x(i, _), y(j, _));
        }
    }
    return result;
}
// TODO an RcppEigen version for sparse matrices would be nice<|fim▁end|> | // we get -Inf, we just give back 0.
inline double xlogy(double x, double y) {
double lg = log(y); |
<|file_name|>ARCComputingElement.py<|end_file_name|><|fim▁begin|>########################################################################
# File : ARCComputingElement.py
# Author : A.T.
########################################################################
""" ARC Computing Element
"""
__RCSID__ = "58c42fc (2013-07-07 22:54:57 +0200) Andrei Tsaregorodtsev <[email protected]>"
import os
import stat
import tempfile
from types import StringTypes
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.Computing.ComputingElement import ComputingElement
from DIRAC.Core.Utilities.Grid import executeGridCommand
CE_NAME = 'ARC'
MANDATORY_PARAMETERS = [ 'Queue' ]
class ARCComputingElement( ComputingElement ):
#############################################################################
def __init__( self, ceUniqueID ):
    """Standard constructor: initialise ARC-specific defaults and pick up
    the optional 'Host' and 'GridEnv' entries from the CE parameters."""
    ComputingElement.__init__( self, ceUniqueID )
    self.ceType = CE_NAME
    self.submittedJobs = 0
    self.mandatoryParameters = MANDATORY_PARAMETERS
    self.pilotProxy = ''
    self.queue = ''
    self.outputURL = 'gsiftp://localhost'
    #: 'Host' overrides the CE name; 'GridEnv' defaults to an empty string
    self.ceHost = self.ceParameters['Host'] if 'Host' in self.ceParameters else self.ceName
    self.gridEnv = self.ceParameters['GridEnv'] if 'GridEnv' in self.ceParameters else ''
#############################################################################
def _addCEConfigDefaults( self ):
    """Method to make sure all necessary Configuration Parameters are defined
    """
    # First assure that any global parameters are loaded
    ComputingElement._addCEConfigDefaults( self )
    # No ARC-specific defaults are added beyond the generic ones
def __writeXRSL( self, executableFile ):
    """Write a temporary xRSL job description for `executableFile`.

    Returns (xrsl_file_path, dirac_stamp); the stamp is the unique part of
    the temp file name, later used to identify the job's output files.
    """
    workingDirectory = self.ceParameters['WorkingDirectory']
    fd, name = tempfile.mkstemp( suffix = '.xrsl', prefix = 'ARC_', dir = workingDirectory )
    diracStamp = os.path.basename( name ).replace( '.xrsl', '' ).replace( 'ARC_', '' )
    xrsl = """
&(executable="%(executable)s")
(inputFiles=(%(executable)s "%(executableFile)s"))
(stdout="%(diracStamp)s.out")
(stderr="%(diracStamp)s.err")
(outputFiles=("%(diracStamp)s.out" "") ("%(diracStamp)s.err" ""))
""" % {
        'executableFile': executableFile,
        'executable': os.path.basename( executableFile ),
        'diracStamp': diracStamp
    }
    with os.fdopen( fd, 'w' ) as xrslFile:
        xrslFile.write( xrsl )
    return name, diracStamp
def _reset( self ):
    # Re-read queue and grid environment from the (possibly updated) CE parameters
    self.queue = self.ceParameters['Queue']
    self.gridEnv = self.ceParameters['GridEnv']
#############################################################################
def submitJob( self, executableFile, proxy, numberOfJobs = 1 ):
""" Method to submit job
"""
self.log.verbose( "Executable file path: %s" % executableFile )
if not os.access( executableFile, 5 ):
os.chmod( executableFile, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH + stat.S_IXOTH )
batchIDList = []
stampDict = {}
i = 0
while i < numberOfJobs:
i += 1
xrslName, diracStamp = self.__writeXRSL( executableFile )
cmd = ['arcsub', '-j', self.ceParameters['JobListFile'],
'-c', '%s' % self.ceHost, '%s' % xrslName ]
result = executeGridCommand( self.proxy, cmd, self.gridEnv )
os.unlink( xrslName )
if not result['OK']:
break
if result['Value'][0] != 0:
break
pilotJobReference = result['Value'][1].strip()
if pilotJobReference and pilotJobReference.startswith('Job submitted with jobid:'):
pilotJobReference = pilotJobReference.replace('Job submitted with jobid:', '').strip()
batchIDList.append( pilotJobReference )
stampDict[pilotJobReference] = diracStamp
else:
break <|fim▁hole|> result = S_OK( batchIDList )
result['PilotStampDict'] = stampDict
else:
result = S_ERROR('No pilot references obtained from the glite job submission')
return result
def killJob( self, jobIDList ):
    """ Kill the specified jobs

    :param jobIDList: a single job reference string or a list of references
    :return: S_OK() on success, S_ERROR otherwise
    """
    workingDirectory = self.ceParameters['WorkingDirectory']
    fd, name = tempfile.mkstemp( suffix = '.list', prefix = 'KillJobs_', dir = workingDirectory )
    jobListFile = os.fdopen( fd, 'w' )
    jobList = list( jobIDList )
    if type( jobIDList ) in StringTypes:
        jobList = [ jobIDList ]
    for job in jobList:
        jobListFile.write( job+'\n' )
    # Flush the job list to disk before arckill reads it (the original code
    # never closed the file, so buffered references could be lost)
    jobListFile.close()
    cmd = ['arckill', '-c', self.ceHost, '-i', name]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    os.unlink( name )
    if not result['OK']:
        return result
    if result['Value'][0] != 0:
        # result['Value'] is (status, stdout, stderr); report stderr.  The
        # original indexed the integer status (result['Value'][0][1]), which
        # raised TypeError instead of returning the error message.
        return S_ERROR( 'Failed kill job: %s' % result['Value'][2] )
    return S_OK()
#############################################################################
def getCEStatus( self ):
    """Report the numbers of running and waiting jobs on this CE.

    Returns S_OK with 'RunningJobs', 'WaitingJobs' and 'SubmittedJobs'
    entries, or S_ERROR when arcstat could not be interrogated.
    """
    cmd = ['arcstat', '-c', self.ceHost, '-j', self.ceParameters['JobListFile'] ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    if not result['OK']:
        return result
    # arcstat exits with status 1 and prints "No jobs" when the job list is
    # empty -- that is a valid, zero-job answer rather than an error.
    noJobs = result['Value'][0] == 1 and result['Value'][1] == "No jobs\n"
    if not noJobs and result['Value'][0]:
        return S_ERROR( result['Value'][2] if result['Value'][2] else 'Error while interrogating CE status' )
    statusDict = {}
    if not noJobs and result['Value'][1]:
        statusDict = self.__parseJobStatus( result['Value'][1] )
    result = S_OK()
    result['RunningJobs'] = sum( 1 for _s in statusDict.values() if _s == 'Running' )
    result['WaitingJobs'] = sum( 1 for _s in statusDict.values() if _s == 'Scheduled' )
    result['SubmittedJobs'] = 0
    return result
def __parseJobStatus( self, commandOutput ):
    """Translate arcstat textual output into a {jobRef: DIRAC status} dict.
    """
    resultDict = {}
    lines = commandOutput.split('\n')
    ln = 0
    while ln < len( lines ):
        # Each job record starts with a 'Job: <ref>' line, followed by
        # 'State: ...' and optionally 'Exit Code: ...' lines
        if lines[ln].startswith( 'Job:' ):
            jobRef = lines[ln].split()[1]
            ln += 1
            line = lines[ln].strip()
            stateARC = ''
            if line.startswith( 'State' ):
                stateARC = line.replace( 'State:','' ).strip()
                line = lines[ln+1].strip()
            exitCode = None
            if line.startswith( 'Exit Code' ):
                line = line.replace( 'Exit Code:','' ).strip()
                exitCode = int( line )
            # Evaluate state now
            if stateARC in ['Accepted','Preparing','Submitting','Queuing','Hold']:
                resultDict[jobRef] = "Scheduled"
            elif stateARC in ['Running','Finishing']:
                resultDict[jobRef] = "Running"
            elif stateARC in ['Killed','Deleted']:
                resultDict[jobRef] = "Killed"
            elif stateARC in ['Finished','Other']:
                # Completed jobs: the exit code (when present) decides success
                if exitCode is not None:
                    if exitCode == 0:
                        resultDict[jobRef] = "Done"
                    else:
                        resultDict[jobRef] = "Failed"
                else:
                    resultDict[jobRef] = "Failed"
            elif stateARC in ['Failed']:
                resultDict[jobRef] = "Failed"
            else:
                self.log.warn( "Unknown state %s for job %s" % ( stateARC, jobRef ) )
        elif lines[ln].startswith( "WARNING: Job information not found:" ):
            # Jobs unknown to the info system yet are treated as Scheduled
            jobRef = lines[ln].replace( 'WARNING: Job information not found:', '' ).strip()
            resultDict[jobRef] = "Scheduled"
        ln += 1
    return resultDict
def getJobStatus( self, jobIDList ):
    """ Get the status information for the given list of jobs

    :param jobIDList: single job reference (optionally 'ref:::stamp') or a list
    :return: S_OK({jobRef: status}) or S_ERROR
    """
    workingDirectory = self.ceParameters['WorkingDirectory']
    fd, name = tempfile.mkstemp( suffix = '.list', prefix = 'StatJobs_', dir = workingDirectory )
    jobListFile = os.fdopen( fd, 'w' )
    jobTmpList = list( jobIDList )
    if type( jobIDList ) in StringTypes:
        jobTmpList = [ jobIDList ]
    jobList = []
    for j in jobTmpList:
        # References may carry a ':::stamp' suffix; strip it for arcstat
        if ":::" in j:
            job = j.split(":::")[0]
        else:
            job = j
        jobList.append( job )
        jobListFile.write( job+'\n' )
    # Flush the job list to disk before arcstat reads it (the original code
    # never closed the file, so buffered references could be lost)
    jobListFile.close()
    cmd = ['arcstat', '-c', self.ceHost, '-i', name, '-j', self.ceParameters['JobListFile']]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    os.unlink( name )
    resultDict = {}
    if not result['OK']:
        self.log.error( 'Failed to get job status', result['Message'] )
        return result
    if result['Value'][0]:
        if result['Value'][2]:
            return S_ERROR(result['Value'][2])
        else:
            return S_ERROR('Error while interrogating job statuses')
    if result['Value'][1]:
        resultDict = self.__parseJobStatus( result['Value'][1] )
    if not resultDict:
        return S_ERROR('No job statuses returned')
    # If CE does not know about a job, set the status to Unknown
    for job in jobList:
        # 'not in' replaces the Python-2-only dict.has_key()
        if job not in resultDict:
            resultDict[job] = 'Unknown'
    return S_OK( resultDict )
def getJobOutput( self, jobID, localDir = None ):
    """ Get the specified job standard output and error files. If the localDir is provided,
        the output is returned as file in this directory. Otherwise, the output is returned
        as strings.
    """
    # NOTE(review): localDir is currently never used -- output is always
    # returned as strings; confirm whether the file mode should be implemented
    # The stamp after ':::' identifies the job's .out/.err file names
    if jobID.find( ':::' ) != -1:
        pilotRef, stamp = jobID.split( ':::' )
    else:
        pilotRef = jobID
        stamp = ''
    if not stamp:
        return S_ERROR( 'Pilot stamp not defined for %s' % pilotRef )
    arcID = os.path.basename(pilotRef)
    # arcget places the retrieved files in a per-job directory
    if "WorkingDirectory" in self.ceParameters:
        workingDirectory = os.path.join( self.ceParameters['WorkingDirectory'], arcID )
    else:
        workingDirectory = arcID
    outFileName = os.path.join( workingDirectory, '%s.out' % stamp )
    errFileName = os.path.join( workingDirectory, '%s.err' % stamp )
    cmd = ['arcget', '-j', self.ceParameters['JobListFile'], pilotRef ]
    result = executeGridCommand( self.proxy, cmd, self.gridEnv )
    output = ''
    if result['OK']:
        if not result['Value'][0]:
            # Zero exit status: read the retrieved files, then delete them
            outFile = open( outFileName, 'r' )
            output = outFile.read()
            outFile.close()
            os.unlink( outFileName )
            errFile = open( errFileName, 'r' )
            error = errFile.read()
            errFile.close()
            os.unlink( errFileName )
        else:
            # Non-zero exit status: arcget's stdout/stderr describe the problem
            error = '\n'.join( result['Value'][1:] )
            return S_ERROR( error )
    else:
        return S_ERROR( 'Failed to retrieve output for %s' % jobID )
    return S_OK( ( output, error ) )
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#<|fim▁end|> |
#os.unlink( executableFile )
if batchIDList: |
<|file_name|>MedicationOrderItem.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Cognitive Medical Systems, Inc (http://www.cognitivemedciine.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.socraticgrid.hl7.services.orders.model.types.orderitems;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.socraticgrid.hl7.services.orders.model.OrderItem;
import org.socraticgrid.hl7.services.orders.model.primatives.Code;
import org.socraticgrid.hl7.services.orders.model.primatives.Identifier;
import org.socraticgrid.hl7.services.orders.model.primatives.Period;
import org.socraticgrid.hl7.services.orders.model.primatives.Quantity;
import org.socraticgrid.hl7.services.orders.model.primatives.Ratio;
public class MedicationOrderItem extends OrderItem
{
/**
*
*/
private static final long serialVersionUID = 1L;
private String additionalDosageIntructions;
private String comment;
private Quantity dispenseQuantity= new Quantity();
private String dosageInstructions;
private Code dosageMethod;
private Quantity dosageQuantity = new Quantity();
private Ratio dosageRate = new Ratio();
private Code dosageSite = new Code();
private Date dosageTiming;
private Period dosageTimingPeriod = new Period();
private List<Identifier> drug = new ArrayList<Identifier>(0);
private Date endDate;
private Quantity expectedSupplyDuration = new Quantity();
private Ratio maxDosePerPeriod = new Ratio();
private List<Identifier> medication = new ArrayList<Identifier>(0);
private int numberOfRepeatsAllowed=0;
private Code prescriber;
private Code route = new Code();
private String schedule;
private Date startDate;
/**
* @return the additionalDosageIntructions
*/
public String getAdditionalDosageIntructions() {
return additionalDosageIntructions;
}
/**
* @return the comment
*/
public String getComment() {
return comment;
}
/**
* @return the dispenseQuantity
*/
public Quantity getDispenseQuantity() {
return dispenseQuantity;
}
/**
* @return the dosageInstructions
*/
public String getDosageInstructions() {
return dosageInstructions;
}
/**
* @return the dosageMethod
*/
public Code getDosageMethod() {
return dosageMethod;
}
/**
* @return the dosageQuantity
*/
public Quantity getDosageQuantity() {
return dosageQuantity;
}
/**
* @return the dosageRate
*/
public Ratio getDosageRate() {
return dosageRate;
}
/**
* @return the dosageSite
*/
public Code getDosageSite() {
return dosageSite;
}
/**
* @return the dosageTiming
*/
public Date getDosageTiming() {
return dosageTiming;
}
/**
* @return the dosageTimingPeriod
*/
public Period getDosageTimingPeriod() {
return dosageTimingPeriod;
}
/**
* @return the drug
*/
public List<Identifier> getDrug() {
return drug;
}
/**
* @return the endDate
*/
public Date getEndDate() {
return endDate;
}
/**
* @return the expectedSupplyDuration
*/
public Quantity getExpectedSupplyDuration() {
return expectedSupplyDuration;
}
/**
* @return the maxDosePerPeriod
*/
public Ratio getMaxDosePerPeriod() {
return maxDosePerPeriod;
}
/**
* @return the medication
*/
public List<Identifier> getMedication() {
return medication;
}
/**
* @return the numberOfRepeatsAllowed
*/
public int getNumberOfRepeatsAllowed() {
return numberOfRepeatsAllowed;
}
/**
* @return the prescriber
*/<|fim▁hole|> public Code getPrescriber() {
return prescriber;
}
/**
* @return the route
*/
public Code getRoute() {
return route;
}
/**
* @return the schedule
*/
public String getSchedule() {
return schedule;
}
/**
* @return the startDate
*/
public Date getStartDate() {
return startDate;
}
/**
* @param additionalDosageIntructions the additionalDosageIntructions to set
*/
public void setAdditionalDosageIntructions(String additionalDosageIntructions) {
this.additionalDosageIntructions = additionalDosageIntructions;
}
/**
* @param comment the comment to set
*/
public void setComment(String comment) {
this.comment = comment;
}
/**
* @param dispenseQuantity the dispenseQuantity to set
*/
public void setDispenseQuantity(Quantity dispenseQuantity) {
this.dispenseQuantity = dispenseQuantity;
}
/**
* @param dosageInstructions the dosageInstructions to set
*/
public void setDosageInstructions(String dosageInstructions) {
this.dosageInstructions = dosageInstructions;
}
/**
* @param dosageMethod the dosageMethod to set
*/
public void setDosageMethod(Code dosageMethod) {
this.dosageMethod = dosageMethod;
}
/**
* @param dosageQuantity the dosageQuantity to set
*/
public void setDosageQuantity(Quantity dosageQuantity) {
this.dosageQuantity = dosageQuantity;
}
/**
* @param dosageRate the dosageRate to set
*/
public void setDosageRate(Ratio dosageRate) {
this.dosageRate = dosageRate;
}
/**
* @param dosageSite the dosageSite to set
*/
public void setDosageSite(Code dosageSite) {
this.dosageSite = dosageSite;
}
/**
* @param dosageTiming the dosageTiming to set
*/
public void setDosageTiming(Date dosageTiming) {
this.dosageTiming = dosageTiming;
}
/**
* @param dosageTimingPeriod the dosageTimingPeriod to set
*/
public void setDosageTimingPeriod(Period dosageTimingPeriod) {
this.dosageTimingPeriod = dosageTimingPeriod;
}
/**
* @param drug the drug to set
*/
public void setDrug(List<Identifier> drug) {
this.drug = drug;
}
/**
* @param endDate the endDate to set
*/
public void setEndDate(Date endDate) {
this.endDate = endDate;
}
/**
* @param expectedSupplyDuration the expectedSupplyDuration to set
*/
public void setExpectedSupplyDuration(Quantity expectedSupplyDuration) {
this.expectedSupplyDuration = expectedSupplyDuration;
}
/**
* @param maxDosePerPeriod the maxDosePerPeriod to set
*/
public void setMaxDosePerPeriod(Ratio maxDosePerPeriod) {
this.maxDosePerPeriod = maxDosePerPeriod;
}
/**
* @param medication the medication to set
*/
public void setMedication(List<Identifier> medication) {
this.medication = medication;
}
/**
* @param numberOfRepeatsAllowed the numberOfRepeatsAllowed to set
*/
public void setNumberOfRepeatsAllowed(int numberOfRepeatsAllowed) {
this.numberOfRepeatsAllowed = numberOfRepeatsAllowed;
}
/**
* @param prescriber the prescriber to set
*/
public void setPrescriber(Code prescriber) {
this.prescriber = prescriber;
}
/**
* @param route the route to set
*/
public void setRoute(Code route) {
this.route = route;
}
/**
* @param schedule the schedule to set
*/
public void setSchedule(String schedule) {
this.schedule = schedule;
}
/**
* @param startDate the startDate to set
*/
public void setStartDate(Date startDate) {
this.startDate = startDate;
}
}<|fim▁end|> | |
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>// Copyright 2020 NetApp, Inc. All Rights Reserved.
// Code generated by informer-gen. DO NOT EDIT.
package v1
import (
internalinterfaces "github.com/netapp/trident/operator/controllers/provisioner/client/informers/externalversions/internalinterfaces"
)
// Interface provides access to all the informers in this group version.
type Interface interface {
// TridentProvisioners returns a TridentProvisionerInformer.
TridentProvisioners() TridentProvisionerInformer
}
type version struct {
factory internalinterfaces.SharedInformerFactory
namespace string<|fim▁hole|>// New returns a new Interface.
func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface {
return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions}
}
// TridentProvisioners returns a TridentProvisionerInformer.
func (v *version) TridentProvisioners() TridentProvisionerInformer {
return &tridentProvisionerInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}<|fim▁end|> | tweakListOptions internalinterfaces.TweakListOptionsFunc
}
|
<|file_name|>scrape.py<|end_file_name|><|fim▁begin|># Functions to download yesterday's races and associated raceforms from host
import configparser
import requests
import re
import sys
import time
import pymongo
import random
from datetime import datetime, timedelta
from hracing.db import parse_racesheet
from hracing.db import mongo_insert_race
from hracing.tools import delay_scraping
from hracing.tools import shuffle_ids
def download_list_of_races(header,pastdays=3,datestr=None):
""" Fetch a list of all raceIDs and raceURLs listed on host for a given day.
Date is selected either as:
a) pastdays (e.g. pastdays=1 means yesterday).
OR
b) by specifying a datestr of the format YYYY-MM-DD.
Default is to download races from THREE DAYS AGO, which is useful for
data-base building since this avoids unfinished US/CAN races
Returns a lists of raceids and a lists of raceid_urls,
nested in a list of race-locations"""
# Compose URL
if datestr == None:
d = datetime.today()-timedelta(days=int(pastdays))
datestr = d.strftime('%Y-%m-%d')
yesterdayurl = '/races?date=' + datestr
baseurl = 'https://' + header['host']
url = baseurl + yesterdayurl
# Actual download
tpage=requests.get(url) <|fim▁hole|> print("Time: " + d.strftime(('%Y-%m-%d-%H-%M')))
print("Import race IDs for " + datestr)
print("From " + url)
#Get list of race-locations (TR)
tr_urls_raw=re.split('\<div class\=\"dayHeader\"\>',tpage.text)
tr_urls=re.findall(
'\<div class\=\"meetingRaces\" '
'data-url\=\"(/meetings/meeting\?id\=\d+)\">',
tr_urls_raw[1])
# Loop through race-locations, get raceIDs and urls
raceid_urls=[]
raceids=[]
for tr_url in tr_urls:
url=baseurl+tr_url
temp_race=requests.get(url)
raceid_urls.extend(
re.findall(
'\<li\sclass\=\"raceli\s*status_.*\s*clearfix\"\s*data-url\=\"'
'(\/race\?id\=\d*\&country\=.+\&track\=.*\&date=\d\d\d\d-\d\d-\d\d)\"',
temp_race.text))
raceids.extend(
re.findall(
'\<li\sclass\=\"raceli\s*status_.*\s*clearfix\"\s*data-url\=\"'
'\/race\?id\=(\d*)\&country\=.+\&track\=.*\&date=\d\d\d\d-\d\d-\d\d\"',
temp_race.text))
print("Finished importing raceIDs: " + d.strftime(('%Y-%m-%d-%H-%M')))
return raceids, raceid_urls
def scrape_races(raceids,raceid_urls,header,payload):
    """Download and store every race in ``raceids`` via an authenticated session.

    Logs in to the host with ``payload``, fetches each race sheet plus the form
    sheets of its horses, parses them with ``parse_racesheet`` and inserts the
    result into MongoDB via ``mongo_insert_race``. Downloads are throttled
    (``race_min_dur`` / ``form_min_dur``) and the session is re-authenticated
    whenever the login marker disappears from the last response. Gives up after
    10 request exceptions.

    NOTE(review): the previous docstring described the date-selection behaviour
    of download_list_of_races(); this function neither takes a date nor returns
    anything.

    Parameters:
        raceids (list): race IDs to download (numeric strings).
        raceid_urls (list): relative race URLs matching ``raceids``.
        header (dict): request headers; must contain the target 'host'.
        payload (dict): login form data for /auth/validatepostajax.
    """
    baseurl='https://'+header['host']
    race_min_dur=40 # minimum time(s)/race download to avoid getting kicked
    form_min_dur=10 # minimum time(s)/form download to avoid getting kicked
    reconnect_dur=500 # seconds to wait before reconnecting after a lost connection
                      # NOTE(review): 500 s is ~8.3 min, but the log message
                      # below says "10 mins" -- confirm which is intended.
    d=datetime.today()   # timestamp used only for console feedback
    a=time.monotonic()   # start time for the running-total printout
    tries=1              # download-exception counter (loop aborts after 10)
    #Shuffle order of races
    raceids,raceid_urls=shuffle_ids(raceids,raceid_urls)
    #Open new session and authenticate
    with requests.Session() as s:
        p = s.post(baseurl+'/auth/validatepostajax',
                   headers = header,
                   data=payload)
        #For each race location...
        for (i, raceid_url) in enumerate(raceid_urls):
            # Re-authenticate if the last response no longer confirms login.
            if not re.search('"login":true',p.text):
                with requests.Session() as s:
                    p = s.post(baseurl+'/auth/validatepostajax',
                               headers = header,
                               data=payload)
            try:
                #For each single race...
                print("Start downloading race_ID: "+raceids[i]+
                      " ("+str(i) +"/"+str(len(raceid_urls))+")")
                #Check current time (used to throttle this race download)
                start_time=time.monotonic()
                #Get current racesheet
                racesheet=s.get(baseurl+raceid_url,
                                headers = header,
                                cookies=s.cookies)
                #Get horseforms urls for that race
                horseform_urls=(re.findall("window.open\(\'(.+?)', \'Formguide\'",
                                           racesheet.text))
                forms=[]
                #Get horseforms-sheets for that race (throttled per form)
                for (k, horseform_url) in enumerate(horseform_urls):
                    start_time_2=time.monotonic()
                    forms.append(s.get(baseurl+horseform_url,
                                       headers = header,
                                       cookies=s.cookies))
                    delay_scraping(start_time_2,form_min_dur)
                # Try parsing current race and add to mogodb. If something fails
                # Save race as .txt in folder for troubleshooting.
                # UNCOMMENT TRY/EXCEPT WHEN UP AND RUNNING
                #try:
                race=parse_racesheet(racesheet,forms)
                mongo_insert_race(race)
                # except Exception as e:
                #     #Save raw html text to file for debugging purposes, overwrite every time
                #     errordump='../hracing_private/failed_parsing/'
                #     rawtextFilename=errordump+str(raceids[i][j])+'.txt'
                #     print('Error when parsing race_ID: '+str(raceids[i][j])+'. Page saved in '+errordump)
                #     print('Error msg for '+str(raceids[i][j])+': \n'+str(e))
                #
                #     with open(rawtextFilename, 'wb') as text_file:
                #         text_file.write(racesheet.content)
                delay_scraping(start_time,race_min_dur)# Slow scraping to avoid getting kicked from server.
                # Print current runtime, current race, and number of forms extracted
                print("Finished: " +str(time.monotonic()-a))
                # +" n forms: "+str(len(curr_forms)))
            #Exception of Request
            except requests.exceptions.RequestException as e:
                print(e)
                tries=tries+1
                time.sleep(reconnect_dur) # wait before the next try (see NOTE above on duration)
                print("Download exception, trying to continue in 10 mins"
                      +d.strftime('%Y-%m-%d-%H-%M'))
                if tries > 10:
                    print(str(tries) + "Download exceptions, exiting loop")
                    break
    print("Finished: Download race xmls: "
          + d.strftime('%Y-%m-%d-%H-%M'))
def get_races_IDs_not_in_db(raceids, raceid_urls):
    """Filter out races that are already stored in the local MongoDB.

    Parameters:
        raceids (list): race IDs as scraped (numeric strings).
        raceid_urls (list): relative race URLs matching ``raceids``.

    Returns:
        (novel_raceids, novel_raceid_urls): the subset of the inputs whose
        IDs are not yet present in the ``races`` collection. Both lists are
        empty when every race is already stored (the original code raised
        ValueError here by unpacking an empty zip).
    """
    client = pymongo.MongoClient()
    db = client.races
    # Collect the IDs already stored; project only the race_ID field.
    # A set makes the membership test below O(1) instead of O(n).
    race_IDs_db = set()
    for race in db.races.find({}, {'race_ID': 1, '_id': 0}):
        race_IDs_db.add(race['race_ID'])
    # Keep only id/url pairs whose numeric ID is unknown to the database.
    novel_pairs = [(rid, url) for rid, url in zip(raceids, raceid_urls)
                   if int(rid) not in race_IDs_db]
    if not novel_pairs:
        return [], []
    novel_raceids, novel_raceid_urls = zip(*novel_pairs)
    return list(novel_raceids), list(novel_raceid_urls)
def main():
    """Entry point: read scraping config, list races, drop known ones, scrape."""
    # Scraping target and login info live outside the repo for privacy.
    config = configparser.ConfigParser()
    config.read('../hracing_private/scraping_payload.ini')
    header = dict(config['header'])
    payload = dict(config['payload'])
    raceids, raceid_urls = download_list_of_races(header)
    new_ids, new_urls = get_races_IDs_not_in_db(raceids, raceid_urls)
    scrape_races(new_ids, new_urls, header, payload)


if __name__ == "__main__":
    main()
<|file_name|>test_orm.py<|end_file_name|><|fim▁begin|>"""Testing for ORM"""
from unittest import TestCase
import nose
from nose.tools import eq_
from sets import Set
from mdcorpus.orm import *
class ORMTestCase(TestCase):
    """Shared fixture: an in-memory SQLite store pre-populated with one movie,
    three characters, one conversation and four lines.

    NOTE(review): this module is Python 2 code (print statement in tearDown,
    deprecated `sets` import above).
    """

    def setUp(self):
        # Fresh in-memory database per test; create every table the ORM needs.
        self.store = Store(create_database("sqlite:"))
        self.store.execute(MovieTitlesMetadata.CREATE_SQL)
        self.store.execute(MovieCharactersMetadata.CREATE_SQL)
        self.store.execute(RawScriptUrl.CREATE_SQL)
        self.store.execute(MovieConversation.CREATE_SQL)
        self.store.execute(MovieLine.CREATE_SQL)

        # One movie row (id, title, year, rating, votes).
        movie = self.store.add(MovieTitlesMetadata(0,
                                                   u"10 things i hate about you",
                                                   1999,
                                                   6.90,
                                                   62847))
        # Three character rows. NOTE(review): bianca gets numeric 4 while
        # bruce/cameron get the strings "?" / "3" for the last field --
        # presumably a credit position; confirm the intended type.
        bianca = self.store.add(MovieCharactersMetadata(0,
                                                        "BIANCA",
                                                        "f",
                                                        4))
        bruce = self.store.add(MovieCharactersMetadata(1,
                                                       "BRUCE",
                                                       "?",
                                                       "?"))
        cameron = self.store.add(MovieCharactersMetadata(2,
                                                         "CAMERON",
                                                         "m",
                                                         "3"))
        url = self.store.add(RawScriptUrl("http://www.dailyscript.com/scripts/10Things.html"))
        # Conversation between characters 0 and 2 in movie 0.
        conversation = self.store.add(MovieConversation(0, 2, 0))
        line194 = self.store.add(MovieLine(
            194, "Can we make this quick?  Roxanne Korrine and Andrew Barrett are having an incredibly horrendous public break- up on the quad.  Again."))
        line195 = self.store.add(MovieLine(
            195, "Well, I thought we'd start with pronunciation, if that's okay with you."))
        line196 = self.store.add(MovieLine(
            196, "Not the hacking and gagging and spitting part.  Please."))
        line197 = self.store.add(MovieLine(
            197, "Okay... then how 'bout we try out some French cuisine.  Saturday?  Night?"))
        # Flush so ids exist before wiring up the relations below.
        self.store.flush()

        movie.characters.add(bianca)
        movie.characters.add(bruce)
        movie.characters.add(cameron)
        url.movie = movie

        # Attach the four lines to the conversation in order (number 1..4).
        line_id_list = [194, 195, 196, 197]
        for (i, line_id) in enumerate(line_id_list):
            line = self.store.find(MovieLine, MovieLine.id == line_id).one()
            line.number = i + 1
            conversation.lines.add(line)
        self.store.commit()

    def tearDown(self):
        print "done"
class MovieTitlesMetadataTestCase(ORMTestCase):
    """Checks for the MovieTitlesMetadata row created by the shared fixture."""

    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_url(self):
        # The raw-script URL attached in setUp must be reachable via url().
        record = self.store.find(
            MovieTitlesMetadata, MovieTitlesMetadata.id == 0).one()
        eq_(record.url(), "http://www.dailyscript.com/scripts/10Things.html")
class MovieCharactersMetadataTestCase(ORMTestCase):
    """Checks for the MovieCharactersMetadata rows created by the fixture."""

    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_gender(self):
        # Each character keeps the gender string it was inserted with;
        # "?" marks an unknown gender.
        bianca = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 0).one()
        bruce = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 1).one()
        cameron = self.store.find(MovieCharactersMetadata, MovieCharactersMetadata.id == 2).one()
        eq_(bianca.gender(), "f")
        eq_(bruce.gender(), "?")
        eq_(cameron.gender(), "m")


class MovieConversationTestCase(ORMTestCase):
    """Checks for the MovieConversation row created by the fixture.

    Restored from a corrupted span: the class header, decorator and
    test_consistency signature had been displaced out of order.
    """

    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_consistency(self):
        # Both participating characters must belong to the same movie as
        # the conversation itself.
        conversation = self.store.find(MovieConversation, MovieConversation.id == 1).one()
        eq_(conversation.first_character.movie.title, conversation.movie.title)
        eq_(conversation.second_character.movie.title, conversation.movie.title)

    @nose.with_setup(ORMTestCase.setUp, ORMTestCase.tearDown)
    def test_line_list(self):
        # line_list() must return the lines in conversation order.
        conversation = self.store.find(MovieConversation, MovieConversation.id == 1).one()
        line_ids = [line.id for line in conversation.line_list()]
        eq_(line_ids, [194, 195, 196, 197])
<|file_name|>AdminTownsController.js<|end_file_name|><|fim▁begin|>'use strict';
// Admin screen listing towns. Shows the global loading indicator while the
// catalog request is in flight and notifies on failure.
angular.module('adsApp').controller('AdminTownsController', ['$scope', '$rootScope', 'catalog', 'config', 'notify',
    function ($scope, $rootScope, catalog, config, notify) {

        $rootScope.pageTitle = 'Towns';

        // Paging defaults are shared with the users admin screen.
        var usersConfig = config.users;

        var townsParams = {
            startPage: usersConfig.startPage,
            pageSize: usersConfig.pageSize
        };

        $scope.getTowns = function () {
            $rootScope.loading = true;
            catalog.get('admin/towns', townsParams).then(function (towns) {
                $scope.towns = towns;
            }, function (error) {
                // Fixed message: previously read "Users filed to load!"
                // (wrong entity and a typo).
                notify.message('Towns failed to load!', error);
            }).finally(function () {
                // Restored line lost in the corrupted span: clear the loading
                // flag whether the request succeeded or failed.
                $rootScope.loading = false;
            });
        };

        $scope.getTowns();
    }
]);
<|file_name|>plugin.rs<|end_file_name|><|fim▁begin|>//! Plugin specific structures.
use std::{mem, ptr};
use std::os::raw::c_void;
use channels::ChannelInfo;
use host::{self, Host};
use api::{AEffect, HostCallbackProc, Supported};
use api::consts::VST_MAGIC;
use buffer::AudioBuffer;
use editor::Editor;
use event::Event;
/// Plugin type. Generally either Effect or Synth.
///
/// Other types are not necessary to build a plugin and are only useful for the host to categorize
/// the plugin.
///
/// The host queries this via the `GetCategory` opcode; `impl_clike!` below
/// provides the `usize` <-> variant conversions used for that reply.
/// NOTE(review): variant order presumably mirrors the VST SDK `kPlugCateg*`
/// constants -- confirm before reordering.
#[repr(usize)]
#[derive(Clone, Copy, Debug)]
pub enum Category {
    /// Unknown / not implemented
    Unknown,
    /// Any effect
    Effect,
    /// VST instrument
    Synth,
    /// Scope, tuner, spectrum analyser, etc.
    Analysis,
    /// Dynamics, etc.
    Mastering,
    /// Panners, etc.
    Spacializer,
    /// Delays and Reverbs
    RoomFx,
    /// Dedicated surround processor.
    SurroundFx,
    /// Denoiser, etc.
    Restoration,
    /// Offline processing.
    OfflineProcess,
    /// Contains other plugins.
    Shell,
    /// Tone generator, etc.
    Generator
}

impl_clike!(Category);
#[repr(usize)]
#[derive(Clone, Copy, Debug)]
#[doc(hidden)]
pub enum OpCode {
/// Called when plugin is initialized.
Initialize,
/// Called when plugin is being shut down.
Shutdown,
/// [value]: preset number to change to.
ChangePreset,
/// [return]: current preset number.
GetCurrentPresetNum,
/// [ptr]: char array with new preset name, limited to `consts::MAX_PRESET_NAME_LEN`.
SetCurrentPresetName,
/// [ptr]: char buffer for current preset name, limited to `consts::MAX_PRESET_NAME_LEN`.
GetCurrentPresetName,
/// [index]: parameter
/// [ptr]: char buffer, limited to `consts::MAX_PARAM_STR_LEN` (e.g. "db", "ms", etc)
GetParameterLabel,
/// [index]: paramter
/// [ptr]: char buffer, limited to `consts::MAX_PARAM_STR_LEN` (e.g. "0.5", "ROOM", etc).
GetParameterDisplay,
/// [index]: parameter
/// [ptr]: char buffer, limited to `consts::MAX_PARAM_STR_LEN` (e.g. "Release", "Gain")
GetParameterName,
/// Deprecated.
_GetVu,
/// [opt]: new sample rate.
SetSampleRate,
/// [value]: new maximum block size.
SetBlockSize,
/// [value]: 1 when plugin enabled, 0 when disabled.
StateChanged,
/// [ptr]: Rect** receiving pointer to editor size.
EditorGetRect,
/// [ptr]: system dependent window pointer, eg HWND on Windows.
EditorOpen,
/// Close editor. No arguments.
EditorClose,
/// Deprecated.
_EditorDraw,
/// Deprecated.
_EditorMouse,
/// Deprecated.
_EditorKey,
/// Idle call from host.
EditorIdle,
/// Deprecated.
_EditorTop,
/// Deprecated.
_EditorSleep,
/// Deprecated.
_EditorIdentify,
/// [ptr]: pointer for chunk data address (void**).
/// [index]: 0 for bank, 1 for program
GetData,
/// [ptr]: data (void*)
    /// [value]: data size in bytes
    /// [index]: 0 for bank, 1 for program
SetData,
/// [ptr]: VstEvents* TODO: Events
ProcessEvents,
/// [index]: param index
/// [return]: 1=true, 0=false
CanBeAutomated,
/// [index]: param index
/// [ptr]: parameter string
/// [return]: true for success
StringToParameter,
/// Deprecated.
_GetNumCategories,
/// [index]: program name
/// [ptr]: char buffer for name, limited to `consts::MAX_PRESET_NAME_LEN`
/// [return]: true for success
GetPresetName,
/// Deprecated.
_CopyPreset,
/// Deprecated.
_ConnectIn,
/// Deprecated.
_ConnectOut,
/// [index]: input index
/// [ptr]: `VstPinProperties`
/// [return]: 1 if supported
GetInputInfo,
/// [index]: output index
/// [ptr]: `VstPinProperties`
/// [return]: 1 if supported
GetOutputInfo,
/// [return]: `PluginCategory` category.
GetCategory,
/// Deprecated.
_GetCurrentPosition,
/// Deprecated.
_GetDestinationBuffer,
/// [ptr]: `VstAudioFile` array
/// [value]: count
/// [index]: start flag
OfflineNotify,
/// [ptr]: `VstOfflineTask` array
/// [value]: count
OfflinePrepare,
/// [ptr]: `VstOfflineTask` array
/// [value]: count
OfflineRun,
/// [ptr]: `VstVariableIo`
/// [use]: used for variable I/O processing (offline e.g. timestretching)
ProcessVarIo,
/// TODO: implement
/// [value]: input `*mut VstSpeakerArrangement`.
/// [ptr]: output `*mut VstSpeakerArrangement`.
SetSpeakerArrangement,
/// Deprecated.
_SetBlocksizeAndSampleRate,
/// Soft bypass (automatable).
/// [value]: 1 = bypass, 0 = nobypass.
SoftBypass,
// [ptr]: buffer for effect name, limited to `kVstMaxEffectNameLen`
GetEffectName,
/// Deprecated.
_GetErrorText,
/// [ptr]: buffer for vendor name, limited to `consts::MAX_VENDOR_STR_LEN`.
GetVendorName,
/// [ptr]: buffer for product name, limited to `consts::MAX_PRODUCT_STR_LEN`.
GetProductName,
/// [return]: vendor specific version.
GetVendorVersion,
/// no definition, vendor specific.
VendorSpecific,
/// [ptr]: "Can do" string.
/// [return]: 1 = yes, 0 = maybe, -1 = no.
CanDo,
/// [return]: tail size (e.g. reverb time). 0 is defualt, 1 means no tail.
GetTailSize,
/// Deprecated.
_Idle,
/// Deprecated.
_GetIcon,
/// Deprecated.
_SetVewPosition,
/// [index]: param index
/// [ptr]: `*mut VstParamInfo` //TODO: Implement
/// [return]: 1 if supported
GetParamInfo,
/// Deprecated.
_KeysRequired,
/// [return]: 2400 for vst 2.4.
GetApiVersion,
/// [index]: ASCII char.
/// [value]: `Key` keycode.
/// [opt]: `flags::modifier_key` bitmask.
/// [return]: 1 if used.
EditorKeyDown,
/// [index]: ASCII char.
/// [value]: `Key` keycode.
/// [opt]: `flags::modifier_key` bitmask.
/// [return]: 1 if used.
EditorKeyUp,
/// [value]: 0 = circular, 1 = circular relative, 2 = linear.
EditorSetKnobMode,
/// [index]: MIDI channel.
/// [ptr]: `*mut MidiProgramName`. //TODO: Implement
/// [return]: number of used programs, 0 = unsupported.
GetMidiProgramName,
/// [index]: MIDI channel.
/// [ptr]: `*mut MidiProgramName`. //TODO: Implement
/// [return]: index of current program.
GetCurrentMidiProgram,
/// [index]: MIDI channel.
/// [ptr]: `*mut MidiProgramCategory`. //TODO: Implement
/// [return]: number of used categories.
GetMidiProgramCategory,
/// [index]: MIDI channel.
/// [return]: 1 if `MidiProgramName` or `MidiKeyName` has changed. //TODO: Implement
HasMidiProgramsChanged,
/// [index]: MIDI channel.
/// [ptr]: `*mut MidiKeyName`. //TODO: Implement
/// [return]: 1 = supported 0 = not.
GetMidiKeyName,
/// Called before a preset is loaded.
BeginSetPreset,
/// Called after a preset is loaded.
EndSetPreset,
/// [value]: inputs `*mut VstSpeakerArrangement` //TODO: Implement
/// [ptr]: Outputs `*mut VstSpeakerArrangement`
GetSpeakerArrangement,
/// [ptr]: buffer for plugin name, limited to `consts::MAX_PRODUCT_STR_LEN`.
/// [return]: next plugin's uniqueID.
ShellGetNextPlugin,
/// No args. Called once before start of process call. This indicates that the process call
/// will be interrupted (e.g. Host reconfiguration or bypass when plugin doesn't support
/// SoftBypass)
StartProcess,
/// No arguments. Called after stop of process call.
StopProcess,
/// [value]: number of samples to process. Called in offline mode before process.
SetTotalSampleToProcess,
/// [value]: pan law `PanLaw`. //TODO: Implement
/// [opt]: gain.
SetPanLaw,
/// [ptr]: `*mut VstPatchChunkInfo`. //TODO: Implement
/// [return]: -1 = bank cant be loaded, 1 = can be loaded, 0 = unsupported.
BeginLoadBank,
/// [ptr]: `*mut VstPatchChunkInfo`. //TODO: Implement
/// [return]: -1 = bank cant be loaded, 1 = can be loaded, 0 = unsupported.
BeginLoadPreset,
/// [value]: 0 if 32 bit, anything else if 64 bit.
SetPrecision,
/// [return]: number of used MIDI Inputs (1-15).
GetNumMidiInputs,
/// [return]: number of used MIDI Outputs (1-15).
GetNumMidiOutputs,
}
impl_clike!(OpCode);
/// A structure representing static plugin information.
#[derive(Clone, Debug)]
pub struct Info {
/// Plugin Name.
pub name: String,
/// Plugin Vendor.
pub vendor: String,
/// Number of different presets.
pub presets: i32,
/// Number of parameters.
pub parameters: i32,
/// Number of inputs.
pub inputs: i32,
/// Number of outputs.
pub outputs: i32,
/// Unique plugin ID. Can be registered with Steinberg to prevent conflicts with other plugins.
///
/// This ID is used to identify a plugin during save and load of a preset and project.
pub unique_id: i32,
/// Plugin version (e.g. 0001 = `v0.0.0.1`, 1283 = `v1.2.8.3`).
pub version: i32,
/// Plugin category. Possible values are found in `enums::PluginCategory`.
pub category: Category,
/// Latency of the plugin in samples.
///
/// This reports how many samples it takes for the plugin to create an output (group delay).
pub initial_delay: i32,
/// Indicates that preset data is handled in formatless chunks.
///
/// If false, host saves and restores plugin by reading/writing parameter data. If true, it is
/// up to the plugin to manage saving preset data by implementing the
/// `{get, load}_{preset, bank}_chunks()` methods. Default is `false`.
pub preset_chunks: bool,
/// Indicates whether this plugin can process f64 based `AudioBuffer` buffers.
///
/// Default is `true`.
pub f64_precision: bool,
/// If this is true, the plugin will not produce sound when the input is silence.
///
/// Default is `false`.
pub silent_when_stopped: bool,
}
impl Default for Info {
fn default() -> Info {
Info {
name: "VST".to_string(),
vendor: String::new(),
presets: 1, // default preset
parameters: 0,
inputs: 2, // Stereo in,out
outputs: 2,
unique_id: 0, // This must be changed.
version: 0001, // v0.0.0.1
category: Category::Effect,
initial_delay: 0,
preset_chunks: false,
f64_precision: true,
silent_when_stopped: false,
}
}
}
/// Features which are optionally supported by a plugin. These are queried by the host at run time.
///
/// Converted to and from the host's capability strings by the `FromStr` and
/// `Into<String>` impls below; unrecognised strings round-trip through
/// [`CanDo::Other`].
#[derive(Debug)]
#[allow(missing_docs)]
pub enum CanDo {
    SendEvents,
    SendMidiEvent,
    ReceiveEvents,
    ReceiveMidiEvent,
    ReceiveTimeInfo,
    Offline,
    MidiProgramNames,
    Bypass,
    ReceiveSysExEvent,

    //Bitwig specific?
    MidiSingleNoteTuningChange,
    MidiKeyBasedInstrumentControl,

    /// Any capability string not recognised above, preserved verbatim.
    Other(String)
}
use std::str::FromStr;

impl FromStr for CanDo {
    type Err = String;

    /// Map a host capability query string to its `CanDo` variant.
    ///
    /// Unrecognised strings are preserved verbatim in `CanDo::Other`, so this
    /// conversion never actually fails even though the signature allows it.
    fn from_str(s: &str) -> Result<CanDo, String> {
        use self::CanDo::*;

        let can_do = match s {
            "sendVstEvents" => SendEvents,
            "sendVstMidiEvent" => SendMidiEvent,
            "receiveVstEvents" => ReceiveEvents,
            "receiveVstMidiEvent" => ReceiveMidiEvent,
            "receiveVstTimeInfo" => ReceiveTimeInfo,
            "offline" => Offline,
            "midiProgramNames" => MidiProgramNames,
            "bypass" => Bypass,
            "receiveVstSysexEvent" => ReceiveSysExEvent,
            "midiSingleNoteTuningChange" => MidiSingleNoteTuningChange,
            "midiKeyBasedInstrumentControl" => MidiKeyBasedInstrumentControl,
            otherwise => Other(otherwise.to_string()),
        };
        Ok(can_do)
    }
}
/// Render the capability as the string the VST host expects.
///
/// This is the inverse of the `FromStr` impl above. Implementing `From`
/// (rather than `Into` directly) is the idiomatic direction and still provides
/// `CanDo: Into<String>` through the standard blanket impl, so existing
/// `.into()` call sites keep working.
impl From<CanDo> for String {
    fn from(can_do: CanDo) -> String {
        use self::CanDo::*;
        match can_do {
            SendEvents => "sendVstEvents".to_string(),
            SendMidiEvent => "sendVstMidiEvent".to_string(),
            ReceiveEvents => "receiveVstEvents".to_string(),
            ReceiveMidiEvent => "receiveVstMidiEvent".to_string(),
            ReceiveTimeInfo => "receiveVstTimeInfo".to_string(),
            Offline => "offline".to_string(),
            MidiProgramNames => "midiProgramNames".to_string(),
            Bypass => "bypass".to_string(),
            ReceiveSysExEvent => "receiveVstSysexEvent".to_string(),
            MidiSingleNoteTuningChange => "midiSingleNoteTuningChange".to_string(),
            MidiKeyBasedInstrumentControl => "midiKeyBasedInstrumentControl".to_string(),
            Other(other) => other
        }
    }
}
/// Must be implemented by all VST plugins.
///
/// All methods except `get_info` provide a default implementation which does nothing and can be
/// safely overridden.
#[allow(unused_variables)]
pub trait Plugin {
/// This method must return an `Info` struct.
fn get_info(&self) -> Info;
/// Called during initialization to pass a `HostCallback` to the plugin.
///
/// This method can be overriden to set `host` as a field in the plugin struct.
///
/// # Example
///
/// ```
/// // ...
/// # extern crate vst2;
/// # #[macro_use] extern crate log;
/// # use vst2::plugin::{Plugin, Info};
/// use vst2::plugin::HostCallback;
///
/// # #[derive(Default)]
/// struct ExamplePlugin {
/// host: HostCallback
/// }
///
/// impl Plugin for ExamplePlugin {
/// fn new(host: HostCallback) -> ExamplePlugin {
/// ExamplePlugin {
/// host: host
/// }
/// }
///
/// fn init(&mut self) {
/// info!("loaded with host vst version: {}", self.host.vst_version());
/// }
///
/// // ...
/// # fn get_info(&self) -> Info {
/// # Info {
/// # name: "Example Plugin".to_string(),
/// # ..Default::default()
/// # }
/// # }
/// }
///
/// # fn main() {}
/// ```
fn new(host: HostCallback) -> Self where Self: Sized + Default {
Default::default()
}
/// Called when plugin is fully initialized.
fn init(&mut self) { trace!("Initialized vst plugin."); }
/// Set the current preset to the index specified by `preset`.
fn change_preset(&mut self, preset: i32) { }
/// Get the current preset index.
fn get_preset_num(&self) -> i32 { 0 }
/// Set the current preset name.
fn set_preset_name(&mut self, name: String) { }
/// Get the name of the preset at the index specified by `preset`.
fn get_preset_name(&self, preset: i32) -> String { "".to_string() }
/// Get parameter label for parameter at `index` (e.g. "db", "sec", "ms", "%").
fn get_parameter_label(&self, index: i32) -> String { "".to_string() }
/// Get the parameter value for parameter at `index` (e.g. "1.0", "150", "Plate", "Off").
fn get_parameter_text(&self, index: i32) -> String {
format!("{:.3}", self.get_parameter(index))
}
/// Get the name of parameter at `index`.
fn get_parameter_name(&self, index: i32) -> String { format!("Param {}", index) }
/// Get the value of paramater at `index`. Should be value between 0.0 and 1.0.
fn get_parameter(&self, index: i32) -> f32 { 0.0 }
/// Set the value of parameter at `index`. `value` is between 0.0 and 1.0.
fn set_parameter(&mut self, index: i32, value: f32) { }
/// Return whether parameter at `index` can be automated.
fn can_be_automated(&self, index: i32) -> bool { false }
/// Use String as input for parameter value. Used by host to provide an editable field to
/// adjust a parameter value. E.g. "100" may be interpreted as 100hz for parameter. Returns if
/// the input string was used.
fn string_to_parameter(&mut self, index: i32, text: String) -> bool { false }
/// Called when sample rate is changed by host.
fn set_sample_rate(&mut self, rate: f32) { }
/// Called when block size is changed by host.
fn set_block_size(&mut self, size: i64) { }
/// Called when plugin is turned on.
fn resume(&mut self) { }
/// Called when plugin is turned off.
fn suspend(&mut self) { }
/// Vendor specific handling.
fn vendor_specific(&mut self, index: i32, value: isize, ptr: *mut c_void, opt: f32) -> isize { 0 }
/// Return whether plugin supports specified action.
fn can_do(&self, can_do: CanDo) -> Supported {
info!("Host is asking if plugin can: {:?}.", can_do);
Supported::Maybe
}
/// Get the tail size of plugin when it is stopped. Used in offline processing as well.
fn get_tail_size(&self) -> isize { 0 }
/// Process an audio buffer containing `f32` values.
///
/// # Example
/// ```no_run
/// # use vst2::plugin::{Info, Plugin};
/// # use vst2::buffer::AudioBuffer;
/// #
/// # struct ExamplePlugin;
/// # impl Plugin for ExamplePlugin {
/// # fn get_info(&self) -> Info { Default::default() }
/// #
/// // Processor that clips samples above 0.4 or below -0.4:
/// fn process(&mut self, buffer: AudioBuffer<f32>){
/// let (inputs, mut outputs) = buffer.split();
///
/// for (channel, ibuf) in inputs.iter().enumerate() {
/// for (i, sample) in ibuf.iter().enumerate() {
/// outputs[channel][i] = if *sample > 0.4 {
/// 0.4
/// } else if *sample < -0.4 {
/// -0.4
/// } else {
/// *sample
/// };
/// }
/// }
/// }
/// # }
/// ```
fn process(&mut self, buffer: AudioBuffer<f32>) {
// For each input and output
for (input, output) in buffer.zip() {
// For each input sample and output sample in buffer
for (in_frame, out_frame) in input.into_iter().zip(output.into_iter()) {
*out_frame = *in_frame;
}
}
}
/// Process an audio buffer containing `f64` values.
///
/// # Example
/// ```no_run
/// # use vst2::plugin::{Info, Plugin};
/// # use vst2::buffer::AudioBuffer;
/// #
/// # struct ExamplePlugin;
/// # impl Plugin for ExamplePlugin {
/// # fn get_info(&self) -> Info { Default::default() }
/// #
/// // Processor that clips samples above 0.4 or below -0.4:
/// fn process_f64(&mut self, buffer: AudioBuffer<f64>){
/// let (inputs, mut outputs) = buffer.split();
///
/// for (channel, ibuf) in inputs.iter().enumerate() {
/// for (i, sample) in ibuf.iter().enumerate() {
/// outputs[channel][i] = if *sample > 0.4 {
/// 0.4
/// } else if *sample < -0.4 {
/// -0.4
/// } else {
/// *sample
/// };
/// }
/// }
/// }
/// # }
/// ```
fn process_f64(&mut self, buffer: AudioBuffer<f64>) {
// For each input and output
for (input, output) in buffer.zip() {
// For each input sample and output sample in buffer
for (in_frame, out_frame) in input.into_iter().zip(output.into_iter()) {
*out_frame = *in_frame;
}
}
}
/// Handle incoming events sent from the host.
///
/// This is always called before the start of `process` or `process_f64`.
fn process_events(&mut self, events: Vec<Event>) {}
/// Return handle to plugin editor if supported.
fn get_editor(&mut self) -> Option<&mut Editor> { None }
/// If `preset_chunks` is set to true in plugin info, this should return the raw chunk data for
/// the current preset.
fn get_preset_data(&mut self) -> Vec<u8> { Vec::new() }
/// If `preset_chunks` is set to true in plugin info, this should return the raw chunk data for
/// the current plugin bank.
fn get_bank_data(&mut self) -> Vec<u8> { Vec::new() }
/// If `preset_chunks` is set to true in plugin info, this should load a preset from the given
/// chunk data.
fn load_preset_data(&mut self, data: &[u8]) {}
/// If `preset_chunks` is set to true in plugin info, this should load a preset bank from the
/// given chunk data.
fn load_bank_data(&mut self, data: &[u8]) {}
/// Get information about an input channel. Only used by some hosts.
fn get_input_info(&self, input: i32) -> ChannelInfo {
ChannelInfo::new(format!("Input channel {}", input),
Some(format!("In {}", input)),
true, None)
}
/// Get information about an output channel. Only used by some hosts.
fn get_output_info(&self, output: i32) -> ChannelInfo {
ChannelInfo::new(format!("Output channel {}", output),
Some(format!("Out {}", output)),
true, None)
}
}
/// A reference to the host which allows the plugin to call back and access information.
///
/// # Panics
///
/// All methods in this struct will panic if the plugin has not yet been initialized. In practice,
/// this can only occur if the plugin queries the host for information when `Default::default()` is
/// called.
///
/// ```should_panic
/// # use vst2::plugin::{Info, Plugin, HostCallback};
/// struct ExamplePlugin;
///
/// impl Default for ExamplePlugin {
/// fn default() -> ExamplePlugin {
/// // Will panic, don't do this. If needed, you can query
/// // the host during initialization via Vst::new()
/// let host: HostCallback = Default::default();
/// let version = host.vst_version();
///
/// // ...
/// # ExamplePlugin
/// }
/// }
/// #
/// # impl Plugin for ExamplePlugin {
/// # fn get_info(&self) -> Info { Default::default() }
/// # }
/// # fn main() { let plugin: ExamplePlugin = Default::default(); }
/// ```
pub struct HostCallback {
    // `None` until the host hands us its callback in `wrap()`; every call
    // through `callback()` panics while this is still `None`.
    callback: Option<HostCallbackProc>,
    // Raw pointer to the plugin's `AEffect` instance, passed back to the host
    // on every callback. Null until initialized (see `Default` below).
    effect: *mut AEffect,
}

/// `HostCallback` implements `Default` so that the plugin can implement `Default` and have a
/// `HostCallback` field.
impl Default for HostCallback {
    fn default() -> HostCallback {
        // An un-wired callback: any host query on this value panics, as
        // documented on the struct above.
        HostCallback {
            callback: None,
            effect: ptr::null_mut(),
        }
    }
}
impl HostCallback {
    /// Wrap callback in a function to avoid using fn pointer notation.
    ///
    /// Panics with "Host not yet initialized." if `wrap()` has not been
    /// called (i.e. `callback` is still `None`).
    #[doc(hidden)]
    fn callback(&self,
                effect: *mut AEffect,
                opcode: host::OpCode,
                index: i32,
                value: isize,
                ptr: *mut c_void,
                opt: f32)
                -> isize {

        let callback = self.callback.unwrap_or_else(|| panic!("Host not yet initialized."));
        callback(effect, opcode.into(), index, value, ptr, opt)
    }

    /// Check whether the plugin has been initialized.
    #[doc(hidden)]
    fn is_effect_valid(&self) -> bool {
        // Check whether `effect` points to a valid AEffect struct.
        // NOTE(review): this dereferences `self.effect`, assuming it points at
        // a live AEffect whose first 4 bytes hold the magic number; a null or
        // stale pointer here is UB -- confirm callers guarantee initialization.
        unsafe { *mem::transmute::<*mut AEffect, *mut i32>(self.effect) == VST_MAGIC }
    }

    /// Create a new Host structure wrapping a host callback.
    #[doc(hidden)]
    pub fn wrap(callback: HostCallbackProc, effect: *mut AEffect) -> HostCallback {
        HostCallback {
            callback: Some(callback),
            effect: effect,
        }
    }

    /// Get the VST API version supported by the host e.g. `2400 = VST 2.4`.
    pub fn vst_version(&self) -> i32 {
        self.callback(self.effect, host::OpCode::Version,
                      0, 0, ptr::null_mut(), 0.0) as i32
    }

    /// Read a host-provided string with no index/value arguments.
    fn read_string(&self, opcode: host::OpCode, max: usize) -> String {
        self.read_string_param(opcode, 0, 0, 0.0, max)
    }

    /// Ask the host to fill a `max`-byte buffer for `opcode`, then decode it.
    /// The host writes a NUL-terminated C string: we truncate at the first NUL
    /// and replace invalid UTF-8 lossily.
    fn read_string_param(&self,
                         opcode: host::OpCode,
                         index: i32,
                         value: isize,
                         opt: f32,
                         max: usize)
                         -> String {
        let mut buf = vec![0; max];
        self.callback(self.effect, opcode, index, value, buf.as_mut_ptr() as *mut c_void, opt);
        String::from_utf8_lossy(&buf).chars().take_while(|c| *c != '\0').collect()
    }
}
impl Host for HostCallback {
    /// Notify the host that parameter `index` changed to `value` (e.g. so the
    /// host can record automation).
    fn automate(&mut self, index: i32, value: f32) {
        if self.is_effect_valid() { // TODO: Investigate removing this check, should be up to host
            self.callback(self.effect, host::OpCode::Automate,
                          index, 0, ptr::null_mut(), value);
        }
    }

    /// Ask the host for the unique ID of the plugin it is currently loading.
    fn get_plugin_id(&self) -> i32 {
        self.callback(self.effect, host::OpCode::CurrentId,
                      0, 0, ptr::null_mut(), 0.0) as i32
    }

    /// Tell the host the plugin is idle.
    fn idle(&self) {
        self.callback(self.effect, host::OpCode::Idle,
                      0, 0, ptr::null_mut(), 0.0);
    }

    /// Query host version, vendor name and product name.
    ///
    /// NOTE(review): the "version" slot is filled via `OpCode::CurrentId`,
    /// which elsewhere in this impl returns the plugin ID -- this looks like
    /// it was meant to be a host-version opcode; confirm before relying on it.
    fn get_info(&self) -> (isize, String, String) {
        use api::consts::*;
        let version = self.callback(self.effect, host::OpCode::CurrentId, 0, 0, ptr::null_mut(), 0.0) as isize;
        let vendor_name = self.read_string(host::OpCode::GetVendorString, MAX_VENDOR_STR_LEN);
        let product_name = self.read_string(host::OpCode::GetProductString, MAX_PRODUCT_STR_LEN);
        (version, vendor_name, product_name)
    }

    /// Send events to the host.
    ///
    /// This should only be called within [`process`] or [`process_f64`]. Calling `process_events`
    /// anywhere else is undefined behaviour and may crash some hosts.
    ///
    /// [`process`]: trait.Plugin.html#method.process
    /// [`process_f64`]: trait.Plugin.html#method.process_f64
    fn process_events(&mut self, events: Vec<Event>) {
        use interfaces;
        // `interfaces::process_events` packs the events into the FFI layout
        // and hands the resulting pointer to the host via the callback below.
        interfaces::process_events(
            events,
            |ptr| {
                self.callback(
                    self.effect,
                    host::OpCode::ProcessEvents,
                    0,
                    0,
                    ptr,
                    0.0
                );
            }
        );
    }
}
#[cfg(test)]
mod tests {
use std::ptr;
use plugin;
/// Create a plugin instance.
///
/// This is a macro to allow you to specify attributes on the created struct.
macro_rules! make_plugin {
($($attr:meta) *) => {
use std::os::raw::c_void;
use main;
use api::AEffect;
use host::{Host, OpCode};
use plugin::{HostCallback, Info, Plugin};
$(#[$attr]) *
struct TestPlugin {
host: HostCallback
}
impl Plugin for TestPlugin {
fn get_info(&self) -> Info {
Info {
name: "Test Plugin".to_string(),
..Default::default()
}
}
fn new(host: HostCallback) -> TestPlugin {
TestPlugin {
host: host
}
}
fn init(&mut self) {
info!("Loaded with host vst version: {}", self.host.vst_version());
assert_eq!(2400, self.host.vst_version());
assert_eq!(9876, self.host.get_plugin_id());
// Callback will assert these.
self.host.automate(123, 12.3);
self.host.idle();
}
}
#[allow(dead_code)]
fn instance() -> *mut AEffect {
fn host_callback(_effect: *mut AEffect,
opcode: i32,
index: i32,
_value: isize,
_ptr: *mut c_void,
opt: f32)
-> isize {
let opcode = OpCode::from(opcode);
match opcode {
OpCode::Automate => {
assert_eq!(index, 123);
assert_eq!(opt, 12.3);
0
}
OpCode::Version => 2400,
OpCode::CurrentId => 9876,
OpCode::Idle => 0,
_ => 0
}
}
main::<TestPlugin>(host_callback)
}
}
}
make_plugin!(derive(Default));
#[test]
#[should_panic]
fn null_panic() {
make_plugin!(/* no `derive(Default)` */);
impl Default for TestPlugin {
fn default() -> TestPlugin {
let plugin = TestPlugin { host: Default::default() };
// Should panic
let version = plugin.host.vst_version();
info!("Loaded with host vst version: {}", version);
plugin
}
}
TestPlugin::default();
}
#[test]
fn host_callbacks() {
let aeffect = instance();
(unsafe { (*aeffect).dispatcher })(aeffect, plugin::OpCode::Initialize.into(),
0, 0, ptr::null_mut(), 0.0);
}
}<|fim▁end|> | |
from unittest import mock

from django.db import connection, migrations

try:
    from django.contrib.postgres.operations import (
        BloomExtension, BtreeGinExtension, BtreeGistExtension, CITextExtension,
        CreateExtension, CryptoExtension, HStoreExtension, TrigramExtension,
        UnaccentExtension,
    )
except ImportError:
    # contrib.postgres may be unavailable (no psycopg2); stub the operations
    # so this migration module can still be imported.
    BloomExtension = mock.Mock()
    BtreeGinExtension = mock.Mock()
    BtreeGistExtension = mock.Mock()
    CITextExtension = mock.Mock()
    CreateExtension = mock.Mock()
    CryptoExtension = mock.Mock()
    HStoreExtension = mock.Mock()
    TrigramExtension = mock.Mock()
    UnaccentExtension = mock.Mock()


class Migration(migrations.Migration):
    # Restored class header that was lost in the corrupted span; the
    # operations list below was orphaned at module level.

    operations = [
        (
            BloomExtension()
            if getattr(connection.features, 'has_bloom_index', False)
            else mock.Mock()
        ),
        BtreeGinExtension(),
        BtreeGistExtension(),
        CITextExtension(),
        # Ensure CreateExtension quotes extension names by creating one with a
        # dash in its name.
        CreateExtension('uuid-ossp'),
        CryptoExtension(),
        HStoreExtension(),
        TrigramExtension(),
        UnaccentExtension(),
    ]
class Migration(migrations.Migration): |
<|file_name|>unsized-trait-impl-trait-arg.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> fn foo(&self, z: Z);
}
struct S4<Y: ?Sized>(Box<Y>);
impl<X: ?Sized> T2<X> for S4<X> {
//~^ ERROR the size for values of type
}
fn main() { }<|fim▁end|> | // Test sized-ness checking in substitution in impls.
// impl - unbounded
trait T2<Z> { |
<|file_name|>es6-block-scope.js<|end_file_name|><|fim▁begin|>// -*- coding: utf-8 -*-
// Copyright (C) 2014 Yusuke Suzuki <[email protected]>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import { expect } from 'chai';
import { parse } from '../third_party/esprima';
import { analyze } from '..';
describe('ES6 block scope', function() {
it('let is materialized in ES6 block scope#1', function() {
const ast = parse(`
{
let i = 20;
i;
}
`);
const scopeManager = analyze(ast, {ecmaVersion: 6});
expect(scopeManager.scopes).to.have.length(2); // Program and BlcokStatement scope.
let scope = scopeManager.scopes[0];
expect(scope.type).to.be.equal('global');
expect(scope.variables).to.have.length(0); // No variable in Program scope.
scope = scopeManager.scopes[1];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1); // `i` in block scope.
expect(scope.variables[0].name).to.be.equal('i');
expect(scope.references).to.have.length(2);
expect(scope.references[0].identifier.name).to.be.equal('i');
expect(scope.references[1].identifier.name).to.be.equal('i');
});
it('let is materialized in ES6 block scope#2', function() {
const ast = parse(`
{
let i = 20;
var i = 20;
i;
}
`);
const scopeManager = analyze(ast, {ecmaVersion: 6});
expect(scopeManager.scopes).to.have.length(2); // Program and BlcokStatement scope.
let scope = scopeManager.scopes[0];<|fim▁hole|> expect(scope.variables[0].name).to.be.equal('i');
scope = scopeManager.scopes[1];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1); // `i` in block scope.
expect(scope.variables[0].name).to.be.equal('i');
expect(scope.references).to.have.length(3);
expect(scope.references[0].identifier.name).to.be.equal('i');
expect(scope.references[1].identifier.name).to.be.equal('i');
expect(scope.references[2].identifier.name).to.be.equal('i');
});
it('function delaration is materialized in ES6 block scope', function() {
const ast = parse(`
{
function test() {
}
test();
}
`);
const scopeManager = analyze(ast, {ecmaVersion: 6});
expect(scopeManager.scopes).to.have.length(3);
let scope = scopeManager.scopes[0];
expect(scope.type).to.be.equal('global');
expect(scope.variables).to.have.length(0);
scope = scopeManager.scopes[1];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1);
expect(scope.variables[0].name).to.be.equal('test');
expect(scope.references).to.have.length(1);
expect(scope.references[0].identifier.name).to.be.equal('test');
scope = scopeManager.scopes[2];
expect(scope.type).to.be.equal('function');
expect(scope.variables).to.have.length(1);
expect(scope.variables[0].name).to.be.equal('arguments');
expect(scope.references).to.have.length(0);
});
it('let is not hoistable#1', function() {
const ast = parse(`
var i = 42; (1)
{
i; // (2) ReferenceError at runtime.
let i = 20; // (2)
i; // (2)
}
`);
const scopeManager = analyze(ast, {ecmaVersion: 6});
expect(scopeManager.scopes).to.have.length(2);
const globalScope = scopeManager.scopes[0];
expect(globalScope.type).to.be.equal('global');
expect(globalScope.variables).to.have.length(1);
expect(globalScope.variables[0].name).to.be.equal('i');
expect(globalScope.references).to.have.length(1);
const scope = scopeManager.scopes[1];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1);
expect(scope.variables[0].name).to.be.equal('i');
expect(scope.references).to.have.length(3);
expect(scope.references[0].resolved).to.be.equal(scope.variables[0]);
expect(scope.references[1].resolved).to.be.equal(scope.variables[0]);
expect(scope.references[2].resolved).to.be.equal(scope.variables[0]);
});
it('let is not hoistable#2', function() {
const ast = parse(`
(function () {
var i = 42; // (1)
i; // (1)
{
i; // (3)
{
i; // (2)
let i = 20; // (2)
i; // (2)
}
let i = 30; // (3)
i; // (3)
}
i; // (1)
}());
`);
const scopeManager = analyze(ast, {ecmaVersion: 6});
expect(scopeManager.scopes).to.have.length(4);
const globalScope = scopeManager.scopes[0];
expect(globalScope.type).to.be.equal('global');
expect(globalScope.variables).to.have.length(0);
expect(globalScope.references).to.have.length(0);
let scope = scopeManager.scopes[1];
expect(scope.type).to.be.equal('function');
expect(scope.variables).to.have.length(2);
expect(scope.variables[0].name).to.be.equal('arguments');
expect(scope.variables[1].name).to.be.equal('i');
const v1 = scope.variables[1];
expect(scope.references).to.have.length(3);
expect(scope.references[0].resolved).to.be.equal(v1);
expect(scope.references[1].resolved).to.be.equal(v1);
expect(scope.references[2].resolved).to.be.equal(v1);
scope = scopeManager.scopes[2];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1);
expect(scope.variables[0].name).to.be.equal('i');
const v3 = scope.variables[0];
expect(scope.references).to.have.length(3);
expect(scope.references[0].resolved).to.be.equal(v3);
expect(scope.references[1].resolved).to.be.equal(v3);
expect(scope.references[2].resolved).to.be.equal(v3);
scope = scopeManager.scopes[3];
expect(scope.type).to.be.equal('block');
expect(scope.variables).to.have.length(1);
expect(scope.variables[0].name).to.be.equal('i');
const v2 = scope.variables[0];
expect(scope.references).to.have.length(3);
expect(scope.references[0].resolved).to.be.equal(v2);
expect(scope.references[1].resolved).to.be.equal(v2);
expect(scope.references[2].resolved).to.be.equal(v2);
});
});
// vim: set sw=4 ts=4 et tw=80 :<|fim▁end|> | expect(scope.type).to.be.equal('global');
expect(scope.variables).to.have.length(1); // No variable in Program scope. |
<|file_name|>ProductList.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
const ProductList = (props) => {
const pl = props.productList;
const products = Object.keys(pl);
return (<ul>
{products.map(key => <li key={key}><a href={`#/product/${key}`} >{pl[key].name}</a></li>)}
</ul>);
};
ProductList.propTypes = {
productList: PropTypes.object.isRequired
};
export default ProductList;<|fim▁end|> | import React, { PropTypes } from 'react'; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|><|fim▁end|> | # See the License for the specific language governing permissions and
# limitations under the License. |
<|file_name|>EventListTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 Stephan D. Cote' - All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the MIT License which accompanies this distribution, and is
* available at http://creativecommons.org/licenses/MIT/
*
* Contributors:
* Stephan D. Cote
* - Initial concept and implementation
*/
package coyote.i13n;
//import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.AfterClass;
import org.junit.Test;
/**
*
*/
public class EventListTest {
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDownAfterClass() throws Exception {}
/**
* Test method for {@link coyote.i13n.EventList#lastSequence()}.
*/
//@Test
public void testLastSequence() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#EventList()}.
*/
//@Test
public void testEventList() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#getMaxEvents()}.
*/
@Test
public void testGetMaxEvents() {
EventList list = new EventList();
list.setMaxEvents( 5 );
AppEvent alert0 = list.createEvent( "Zero" );
AppEvent alert1 = list.createEvent( "One" );
AppEvent alert2 = list.createEvent( "Two" );
AppEvent alert3 = list.createEvent( "Three" );
AppEvent alert4 = list.createEvent( "Four" );
AppEvent alert5 = list.createEvent( "Five" );
<|fim▁hole|> // should result in the list being trimmed immediately
list.setMaxEvents( 2 );
assertTrue( list._list.size() == 2 );
list.add( alert0 );
list.add( alert1 );
list.add( alert2 );
list.add( alert3 );
list.add( alert4 );
list.add( alert5 );
list.add( alert6 );
// should still only contain 2 events
assertTrue( list._list.size() == 2 );
// Check the first and last event in the list
assertEquals( alert5, list.getFirst() );
assertEquals( alert6, list.getLast() );
}
/**
* Test method for {@link coyote.i13n.EventList#setMaxEvents(int)}.
*/
//@Test
public void testSetMaxEvents() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#add(coyote.i13n.AppEvent)}.
*/
//@Test
public void testAdd() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#remove(coyote.i13n.AppEvent)}.
*/
//@Test
public void testRemove() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#get(long)}.
*/
//@Test
public void testGet() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#getFirst()}.
*/
//@Test
public void testGetFirst() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#getLast()}.
*/
//@Test
public void testGetLast() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#getSize()}.
*/
//@Test
public void testGetSize() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#createEvent(java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, int, int, java.lang.String)}.
*/
//@Test
public void testCreateEventStringStringStringStringIntIntIntString() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#createEvent(java.lang.String)}.
*/
//@Test
public void testCreateEventString() {
fail( "Not yet implemented" );
}
/**
* Test method for {@link coyote.i13n.EventList#createEvent(java.lang.String, int, int)}.
*/
//@Test
public void testCreateEventStringIntInt() {
fail( "Not yet implemented" );
}
}<|fim▁end|> | AppEvent alert6 = list.createEvent( "Six" );
//System.out.println( "Max="+list.getMaxEvents()+" Size=" + list.getSize() );
assertTrue( list._list.size() == 5 );
|
<|file_name|>import_media.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import aaf2
import traceback
import subprocess
import json
import os
import datetime
import sys
import tempfile
import shutil
import time
import fractions
from aaf2 import auid
from pprint import pprint
FFMPEG_EXEC = "ffmpeg"
FFPROBE_EXEC = "ffprobe"
Audio_Profiles = aaf2.audio.pcm_profiles
Video_Profiles = aaf2.video.dnx_profiles
# FFMPEG_EXEC = "/Users/mark/Dev/ffmpeg/ffmpeg_g"
# FFPROBE_EXEC = "/Users/mark/Dev/ffmpeg/ffprobe_g"
def probe(path, show_packets=False):
cmd = [FFPROBE_EXEC, '-of','json','-show_format','-show_streams', path]
if show_packets:
cmd.extend(['-show_packets',])
print(subprocess.list2cmdline(cmd))<|fim▁hole|>
if p.returncode != 0:
raise subprocess.CalledProcessError(p.returncode, subprocess.list2cmdline(cmd), stderr)
return json.loads(stdout)
def timecode_to_seconds(time_string):
try:
return float(time_string)
except:
pass
for format in ("%H:%M:%S.%f", "%H:%M:%S", "%M:%S.%f","%M:%S"):
try:
t = datetime.datetime.strptime(time_string, format)
seconds = 0
if t.minute:
seconds += 60*t.minute
if t.hour:
seconds += 60 * 60 * t.hour
seconds += t.second
seconds += float(t.strftime(".%f"))
return seconds
except:
#print traceback.format_exc()
pass
raise ValueError("invalid time format: %s" % time_string)
def seconds_to_timecode(seconds):
format = "%S.%f"
t = datetime.timedelta(seconds=float(seconds))
return str(t)
def has_alpha(stream):
if stream['pix_fmt'] in ('yuva444p10le','rgba'):
return True
return False
def conform_media(path,
output_dir,
start=None,
end=None,
duration=None,
width=None,
height=None,
frame_rate=None,
video_profile_name=None,
audio_profile_name=None,
ignore_alpha=False):
if not video_profile_name:
video_profile_name = 'dnx_1080p_36_23.97'
if not audio_profile_name:
audio_profile_name = 'pcm_48000_s16le'
video_profile = Video_Profiles[video_profile_name]
audio_profile = Audio_Profiles[audio_profile_name]
format = probe(path)
out_files = []
cmd = [FFMPEG_EXEC,'-y', '-nostdin']
# cmd.extend(['-loglevel', 'debug'])
if end:
duration = timecode_to_seconds(end) - timecode_to_seconds(start)
duration = seconds_to_timecode(duration)
end = None
if start:
start_seconds = timecode_to_seconds(start)
fast_start = max(0,int(start_seconds-30))
if fast_start:
start = seconds_to_timecode(start_seconds - fast_start)
cmd.extend(['-ss', seconds_to_timecode(fast_start)])
frame_rate = video_profile['frame_rate']
pix_fmt = video_profile['pix_fmt']
bitrate = video_profile['bitrate']
dnxhd_profile = video_profile.get("video_profile", None)
if format['format']['format_name'] == "image2":
frame_rate = frame_rate or "24000/1001"
cmd.extend([ '-framerate', frame_rate])
cmd.extend(['-i', path,])
if video_profile['size']:
width, height = video_profile['size']
else:
width = None
height = None
interlaced = video_profile['interlaced']
#sample_rate =44100
sample_rate = audio_profile['sample_rate']
for stream in format['streams']:
#pprint(stream)
stream_index = stream['index']
if stream['codec_type'] == 'video':
out_meta = {}
# pprint(stream)
alpha = has_alpha(stream)
passes = 1
if alpha and not ignore_alpha:
passes = 2
for i in range(passes):
if i == 1:
cmd.extend(['-an', '-f', 'rawvideo', '-pix_fmt', 'gray'])
if frame_rate:
cmd.extend(['-r', frame_rate])
else:
cmd.extend(['-an','-vcodec', 'dnxhd', '-pix_fmt', pix_fmt])
if dnxhd_profile:
cmd.extend(['-profile:v', dnxhd_profile])
if bitrate:
cmd.extend(['-vb', '%dM' % bitrate])
if frame_rate:
cmd.extend(['-r', frame_rate])
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
vfilter = []
if i == 1:
vfilter.append("alphaextract")
if width and height:
out_width = width
out_height = height
input_width = stream['width']
input_height = stream['height']
max_width = width
max_height = height
scale = min(max_width/ float(input_width), max_height/float(input_height) )
scale_width = int(input_width*scale)
scale_height = int(input_height*scale)
padding_ofs_x = (max_width - scale_width)//2
padding_ofs_y = (max_height - scale_height)//2
vfilter.append("scale=%d:%d,pad=%d:%d:%d:%d" % (scale_width,scale_height,
max_width,max_height, padding_ofs_x,padding_ofs_y))
else:
out_width = stream['width']
out_height = stream['height']
if vfilter:
cmd.extend(['-vf', ','.join(vfilter)])
# cmd.extend(['-s', "%dx%d" % (width, height)])
if i == 1:
out_file = os.path.join(output_dir, 'out_%d.alpha' % (stream_index))
out_meta['path_alpha'] = out_file
else:
out_rate = frame_rate or str(stream['avg_frame_rate'])
out_file = os.path.join(output_dir, 'out_%d.dnxhd' % (stream_index))
out_meta = {'path':out_file, 'frame_rate':out_rate, 'type': 'video', 'profile':video_profile_name}
out_meta['width'] = out_width
out_meta['height'] = out_height
cmd.extend([out_file])
#pprint(stream)
print("USING FRAMREATE", out_rate, str(stream['avg_frame_rate']))
out_files.append(out_meta)
elif stream['codec_type'] == 'audio':
input_sample_rate = int(stream['sample_rate'])
channels = stream['channels']
cmd.extend(['-vn', '-acodec', 'pcm_s16le', '-ar', str(sample_rate)])
# afilter = ['-af', "aresample=async=1:first_pts=0"]
# cmd.extend(afilter)
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
out_file = os.path.join(output_dir, 'out_%d_%d_%d.wav' % (stream_index, sample_rate, channels))
cmd.extend([out_file])
out_files.append({'path':out_file, 'sample_rate':sample_rate, 'channels':channels,'type': 'audio'})
print(subprocess.list2cmdline(cmd))
subprocess.check_call(cmd)
return out_files
def create_matte_key_definition(f):
opdef = f.create.OperationDef(auid.AUID("0c864774-e428-3b2d-8115-1c736806191a"), 'MatteKey_2')
opdef['IsTimeWarp'].value = False
opdef['OperationCategory'].value = 'OperationCategory_Effect'
opdef['NumberInputs'].value = 3
opdef['Bypass'].value = 2
opdef.media_kind = "picture"
f.dictionary.register_def(opdef)
return opdef
def import_video_essence(f, mastermob, stream, compmob=None, tapemob=None):
tape = None
edit_rate = stream['frame_rate']
if tapemob:
timecode_fps= int(round(float(fractions.Fraction(edit_rate))))
start_time = timecode_fps * 60 * 60
tape = tapemob.create_source_clip(1, start=start_time)
alpha_path = stream.get("path_alpha", None)
color_slot = mastermob.import_dnxhd_essence(stream['path'], edit_rate, tape=tape)
if alpha_path:
pixel_layout = [{u'Code': u'CompAlpha', u'Size': 8}]
width = stream['width']
height = stream['height']
source_mob = f.create.SourceMob()
f.content.mobs.append(source_mob)
if tapemob:
tape = tapemob.create_source_clip(1, start=start_time)
source_slot = source_mob.import_rawvideo_essence(alpha_path, edit_rate, width, height, pixel_layout, tape=tape)
length = source_slot.segment.length
essence_group = f.create.EssenceGroup()
alpha_slot = mastermob.create_picture_slot(edit_rate)
alpha_slot.segment = essence_group
source_clip = source_mob.create_source_clip(source_slot.slot_id)
source_clip.length = length
essence_group['Choices'].append(source_clip)
essence_group.length = length
opdef = create_matte_key_definition(f)
slot = compmob.create_picture_slot(edit_rate)
op_group = f.create.OperationGroup(opdef)
slot.segment = op_group
scope = f.create.ScopeReference()
scope['RelativeScope'].value = 1
scope['RelativeSlot'].value = 1
scope.length = length
sequence = f.create.Sequence(length=length)
sequence.components.append(scope)
op_group.segments.append(sequence)
op_group.segments.append(mastermob.create_source_clip(color_slot.slot_id, length=length))
op_group.segments.append(mastermob.create_source_clip(alpha_slot.slot_id, length=length))
def create_aaf(path, media_streams, mobname, tape_name=None, start_timecode=None):
with aaf2.open(path, 'w') as f:
mastermob = f.create.MasterMob(mobname)
f.content.mobs.append(mastermob)
edit_rate = None
for stream in media_streams:
if stream['type'] == 'video':
edit_rate =fractions.Fraction(stream['frame_rate'])
break
alpha = False
compmob = None
for stream in media_streams:
if stream.get('path_alpha', False):
alpha = True
compmob = f.create.CompositionMob(mastermob.name)
compmob.usage = 'Usage_Template'
f.content.mobs.append(compmob)
# this hides the mastermob in avid bin
mastermob['AppCode'].value = 1
mastermob.usage = "Usage_LowerLevel"
break
tapemob = None
timecode_fps= int(round(float(edit_rate)))
if tape_name:
tapemob = f.create.SourceMob()
tapemob.create_tape_slots(tape_name, edit_rate, timecode_fps)
f.content.mobs.append(tapemob)
for stream in media_streams:
if stream['type'] == 'video':
print("importing video...")
start = time.time()
import_video_essence(f, mastermob, stream, compmob, tapemob)
print("imported video in %f secs" % (time.time()- start))
for stream in media_streams:
if stream['type'] == 'audio':
print("importing audio...")
start = time.time()
sample_rate = stream['sample_rate']
slot = mastermob.import_audio_essence(stream['path'], edit_rate)
if compmob:
sound_slot = compmob.create_sound_slot(edit_rate)
sound_slot.segment = mastermob.create_source_clip(slot.slot_id, length = slot.segment.length)
print("imported audio in %f secs" % (time.time()- start))
if __name__ == "__main__":
from optparse import OptionParser
usage = "usage: %prog [options] output_aaf_file media_file"
parser = OptionParser(usage=usage)
parser.add_option('-s', '--start', type="string", dest="start",default=None,
help = "start recording at, in timecode or seconds")
parser.add_option('-e', '--end', type="string", dest='end',default=None,
help = "end recording at in timecode or seconds")
parser.add_option('-d', '--duration', type="string", dest='duration',default=None,
help = "record duration in timecode or seconds")
parser.add_option('--tape', type="string", dest="tape_name",default=None,
help = "tape name")
parser.add_option('--start_timecode', type="string", dest="start_timecode", default=None,
help = "start timecode [default 01:00:00:00]")
parser.add_option('--ignore_alpha', action='store_true', dest="ignore_alpha", default=False,
help = "ignore alpha channel if present")
parser.add_option("-v", '--video-profile', type='string', dest = 'video_profile', default="dnx_1080p_36_23.97",
help = "encoding profile for video [default: 1080p_36_23.97]")
parser.add_option("-a", '--audio-profile', type='string', dest = 'audio_profile',default='pcm_48000_s16le',
help = 'encoding profile for audio [default: pcm_48000]')
parser.add_option("--size", type='string', dest='size', default=None,
help = "video resolution for dnxhr [default: src size]")
parser.add_option("--framerate", type='string', dest='framerate',
help = "video framerate for dnxhr [default: use src rate]")
parser.add_option('--list-profiles', dest='list_profiles',
action="store_true",default=False,
help = "lists profiles")
(options, args) = parser.parse_args()
if options.list_profiles:
titles = ['Audio Profile', 'Sample Rate', 'Sample Fmt']
row_format ="{:<25}{:<15}{:<15}"
print("")
print(row_format.format( *titles))
print("")
for key,value in sorted(Audio_Profiles.items()):
print(row_format.format(key, value['sample_rate'], value['sample_format']))
titles = ['Video Profile', "Size", 'Frame Rate', "Bitrate", "Pix Fmt", "Codec"]
row_format ="{:<25}{:<15}{:<15}{:<10}{:<12}{:<10}"
print("")
print(row_format.format( *titles))
print("")
for key, value in sorted(Video_Profiles.items()):
codec = 'dnxhd'
if key.startswith("dnxhr"):
codec = 'dnxhr'
print(row_format.format(key, value['size'],
value['frame_rate'], value['bitrate'], value['pix_fmt'], codec))
sys.exit()
if len(args) < 2:
parser.error("not enough args")
details = probe(args[1])
#if not os.path.exists(args[1]):
#parser.error("No such file or directory: %s" % args[1])
if options.end and options.duration:
parser.error("Can only use --duration or --end not both")
print(options.audio_profile)
if not options.audio_profile in Audio_Profiles:
parser.error("No such audio profile: %s" % options.audio_profile)
if not options.video_profile.lower() in Video_Profiles:
parser.error("No such video profile: %s" % options.video_profile)
aaf_file = args[0]
# tempdir = os.path.join(os.getcwd(), 'samples', 'convert')
# if not os.path.exists(tempdir):
# os.makedirs(tempdir)
tempdir = tempfile.mkdtemp("-aaf_import")
print(tempdir)
media_streams = []
width = None
height = None
if options.size and options.video_profile.lower().startswith("dnxhr"):
width,height = options.size.split("x")
width = int(width)
height = int(height)
try:
for src in args[1:]:
media_streams.extend(conform_media(src,
output_dir=tempdir,
start=options.start,
end=options.end,
duration=options.duration,
width=width,
height=height,
frame_rate=options.framerate,
video_profile_name = options.video_profile.lower(),
audio_profile_name = options.audio_profile.lower(),
ignore_alpha = options.ignore_alpha)
)
except:
print(traceback.format_exc())
shutil.rmtree(tempdir)
parser.error("error conforming media")
try:
basename = os.path.basename(args[1])
name,ext = os.path.splitext(basename)
if details['format']['format_name'] == 'image2':
name, padding = os.path.splitext(name)
create_aaf(aaf_file, media_streams, name, options.tape_name, options.start_timecode)
finally:
pass
shutil.rmtree(tempdir)<|fim▁end|> | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout,stderr = p.communicate() |
<|file_name|>oauth2_getAllUsers.py<|end_file_name|><|fim▁begin|>#import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def downloadUsers(domain, account, customerId):
superAdmin = 'is@' + domain
serviceAccount = account + '@developer.gserviceaccount.com'
p12File = domain + '.p12'
scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
#read then close the key file
keyFile = file(p12File, 'rb')
key = keyFile.read()
keyFile.close()
#build credentials
credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
#authenticate
http = httplib2.Http()
httplib2.debuglevel = False #change this to True if you want to see the output
http = credentials.authorize(http=http)
directoryService = build(serviceName='admin', version='directory_v1', http=http)
#create and/or open a file that we'll append to
outputFileName = domain + '_userList.csv'
outputFile = open(outputFileName, 'a')
outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
pageToken = None #this is the variable where we'll store the next page token
while True:
try:
page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
users = page['users']
for user in users: #parse the users from the page variable
primaryEmail = user['primaryEmail']
lastLoginTime = user['lastLoginTime']
name = user['name']['fullName']
isAdmin = user['isAdmin']
orgUnitPath = user['orgUnitPath']
#print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
#log to a file
outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
outputFile.write( '\n')
pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
except:
print 'We probably reached the end of ' + domain
break
outputFile.close()
#open and read the csv file that contains the list of domains, account numbers, and customer IDs
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
domain = row[0] #the first entry in this row is the domain
account = row[1]
customerId = row[2]
downloadUsers(domain, account, customerId)
'''
for user in page:<|fim▁hole|> primaryEmail = page.get(user['primaryEmail'])
lastLoginTime = page.get('lastLoginTime')
name = page.get('name')
isAdmin = page.get('isAdmin')
orgUnitPath = page.get('orgUnitPath')
newPage = page.get('nextPageToken')
print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': '[email protected]',
'name': { 'givenName': 'New', 'familyName': 'Test' },
'password': 'passwordfornewuser1',
'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='[email protected]', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = '[email protected]')
pprint.pprint(user.execute())
'''<|fim▁end|> | |
<|file_name|>main.ts<|end_file_name|><|fim▁begin|>import {bootstrap} from '@angular/platform-browser-dynamic';
import {ROUTER_PROVIDERS} from '@angular/router-deprecated';
import {HTTP_PROVIDERS} from '@angular/http';
import {AppComponent} from './app.component';
import {LoggerService} from './blocks/logger.service';
bootstrap(AppComponent, [
LoggerService, ROUTER_PROVIDERS, HTTP_PROVIDERS<|fim▁hole|><|fim▁end|> | ]); |
<|file_name|>windows.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15883
//! Windows file path handling
use self::PathPrefix::*;
use ascii::AsciiExt;
use char::CharExt;
use clone::Clone;
use cmp::{Ordering, Eq, Ord, PartialEq, PartialOrd};
use fmt;
use hash;
use io::Writer;
use iter::{AdditiveIterator, Extend};
use iter::{Iterator, IteratorExt, Map, repeat};
use mem;
use option::Option::{self, Some, None};
use ops::FullRange;
use slice::{SliceExt, SliceConcatExt};
use str::{SplitTerminator, FromStr, StrExt};
use string::{String, ToString};
use vec::Vec;
use super::{contains_nul, BytesContainer, GenericPath, GenericPathUnsafe};
/// Iterator that yields successive components of a Path as &str
///
/// Each component is yielded as Option<&str> for compatibility with PosixPath, but
/// every component in WindowsPath is guaranteed to be Some.
pub type StrComponents<'a> =
Map<&'a str, Option<&'a str>, SplitTerminator<'a, char>, fn(&'a str) -> Option<&'a str>>;
/// Iterator that yields successive components of a Path as &[u8]
pub type Components<'a> =
Map<Option<&'a str>, &'a [u8], StrComponents<'a>, fn(Option<&str>) -> &[u8]>;
/// Represents a Windows path
// Notes for Windows path impl:
// The MAX_PATH is 260, but 253 is the practical limit due to some API bugs
// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx for good information
// about windows paths.
// That same page puts a bunch of restrictions on allowed characters in a path.
// `\foo.txt` means "relative to current drive", but will not be considered to be absolute here
// as `∃P | P.join("\foo.txt") != "\foo.txt"`.
// `C:` is interesting, that means "the current directory on drive C".
// Long absolute paths need to have \\?\ prefix (or, for UNC, \\?\UNC\). I think that can be
// ignored for now, though, and only added in a hypothetical .to_pwstr() function.
// However, if a path is parsed that has \\?\, this needs to be preserved as it disables the
// processing of "." and ".." components and / as a separator.
// Experimentally, \\?\foo is not the same thing as \foo.
// Also, \\foo is not valid either (certainly not equivalent to \foo).
// Similarly, C:\\Users is not equivalent to C:\Users, although C:\Users\\foo is equivalent
// to C:\Users\foo. In fact the command prompt treats C:\\foo\bar as UNC path. But it might be
// best to just ignore that and normalize it to C:\foo\bar.
//
// Based on all this, I think the right approach is to do the following:
// * Require valid utf-8 paths. Windows API may use WCHARs, but we don't, and utf-8 is convertible
// to UTF-16 anyway (though does Windows use UTF-16 or UCS-2? Not sure).
// * Parse the prefixes \\?\UNC\, \\?\, and \\.\ explicitly.
// * If \\?\UNC\, treat following two path components as server\share. Don't error for missing
// server\share.
// * If \\?\, parse disk from following component, if present. Don't error for missing disk.
// * If \\.\, treat rest of path as just regular components. I don't know how . and .. are handled
// here, they probably aren't, but I'm not going to worry about that.
// * Else if starts with \\, treat following two components as server\share. Don't error for missing
// server\share.
// * Otherwise, attempt to parse drive from start of path.
//
// The only error condition imposed here is valid utf-8. All other invalid paths are simply
// preserved by the data structure; let the Windows API error out on them.
#[derive(Clone)]
pub struct Path {
    repr: String, // assumed to never be empty
    prefix: Option<PathPrefix>, // parsed path prefix (drive, UNC, verbatim, ...), if any
    sepidx: Option<uint> // index of the final separator in the non-prefix portion of repr
}
impl fmt::Show for Path {
    /// Formats the path via its `display()` adaptor, so the debug output
    /// matches the user-facing rendering of the path.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let disp = self.display();
        fmt::Show::fmt(&disp, f)
    }
}
impl PartialEq for Path {
    /// Two paths compare equal exactly when their normalized string
    /// representations are identical.
    #[inline]
    fn eq(&self, other: &Path) -> bool {
        self.repr.eq(&other.repr)
    }
}
// Equality is total because `repr` equality is total.
impl Eq for Path {}
impl PartialOrd for Path {
    // Delegates to the total order below.
    fn partial_cmp(&self, other: &Path) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Path {
    // Lexicographic order on the normalized string representation.
    fn cmp(&self, other: &Path) -> Ordering {
        self.repr.cmp(&other.repr)
    }
}
impl FromStr for Path {
    // Returns None when the string contains a NUL byte (see `new_opt`).
    fn from_str(s: &str) -> Option<Path> {
        Path::new_opt(s)
    }
}
impl<S: hash::Writer + hash::Hasher> hash::Hash<S> for Path {
    // Hash the normalized representation, consistent with `PartialEq`.
    #[cfg(not(test))]
    #[inline]
    fn hash(&self, state: &mut S) {
        self.repr.hash(state)
    }
    #[cfg(test)]
    #[inline]
    fn hash(&self, _: &mut S) {
        // No-op because the `hash` implementation will be wrong.
    }
}
impl BytesContainer for Path {
    // Byte view of the (always valid UTF-8) repr.
    #[inline]
    fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
        self.as_vec()
    }
    // Windows paths are required to be UTF-8, so this is always Some.
    #[inline]
    fn container_as_str<'a>(&'a self) -> Option<&'a str> {
        self.as_str()
    }
    #[inline]
    fn is_str(_: Option<&Path>) -> bool { true }
}
impl GenericPathUnsafe for Path {
    /// See `GenericPathUnsafe::from_vec_unchecked`.
    ///
    /// # Panics
    ///
    /// Panics if not valid UTF-8.
    #[inline]
    unsafe fn new_unchecked<T: BytesContainer>(path: T) -> Path {
        // Normalize up front so `repr` is always stored in canonical form.
        let (prefix, path) = Path::normalize_(path.container_as_str().unwrap());
        assert!(!path.is_empty());
        let mut ret = Path{ repr: path, prefix: prefix, sepidx: None };
        ret.update_sepidx();
        ret
    }
    /// See `GenericPathUnsafe::set_filename_unchecked`.
    ///
    /// # Panics
    ///
    /// Panics if not valid UTF-8.
    unsafe fn set_filename_unchecked<T: BytesContainer>(&mut self, filename: T) {
        let filename = filename.container_as_str().unwrap();
        match self.sepidx_or_prefix_len() {
            // Path is exactly ".."; keep it and append the filename after a SEP.
            None if ".." == self.repr => {
                let mut s = String::with_capacity(3 + filename.len());
                s.push_str("..");
                s.push(SEP);
                s.push_str(filename);
                self.update_normalized(&s[]);
            }
            // No separator and no prefix: the whole repr is the filename.
            None => {
                self.update_normalized(filename);
            }
            // Final component is ".."; append after it rather than replacing it.
            Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
                let mut s = String::with_capacity(end + 1 + filename.len());
                s.push_str(&self.repr[..end]);
                s.push(SEP);
                s.push_str(filename);
                self.update_normalized(&s[]);
            }
            // Cwd-relative disk path like "C:foo": replace everything after "C:"
            // without inserting a separator.
            Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
                let mut s = String::with_capacity(idxb + filename.len());
                s.push_str(&self.repr[..idxb]);
                s.push_str(filename);
                self.update_normalized(&s[]);
            }
            // General case: keep the dirname, swap in the new filename.
            Some((idxb,_,_)) => {
                let mut s = String::with_capacity(idxb + 1 + filename.len());
                s.push_str(&self.repr[..idxb]);
                s.push(SEP);
                s.push_str(filename);
                self.update_normalized(&s[]);
            }
        }
    }
    /// See `GenericPathUnsafe::push_unchecked`.
    ///
    /// Concatenating two Windows Paths is rather complicated.
    /// For the most part, it will behave as expected, except in the case of
    /// pushing a volume-relative path, e.g. `C:foo.txt`. Because we have no
    /// concept of per-volume cwds like Windows does, we can't behave exactly
    /// like Windows will. Instead, if the receiver is an absolute path on
    /// the same volume as the new path, it will be treated as the cwd that
    /// the new path is relative to. Otherwise, the new path will be treated
    /// as if it were absolute and will replace the receiver outright.
    unsafe fn push_unchecked<T: BytesContainer>(&mut self, path: T) {
        let path = path.container_as_str().unwrap();
        // Is `path` absolute within its volume, i.e. does a separator follow
        // the "C:" prefix?
        fn is_vol_abs(path: &str, prefix: Option<PathPrefix>) -> bool {
            // assume prefix is Some(DiskPrefix)
            let rest = &path[prefix_len(prefix)..];
            !rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char)
        }
        // Does `path` name the same drive letter as `me`? `me.repr` has an
        // uppercased drive letter after normalization, hence the uppercasing.
        fn shares_volume(me: &Path, path: &str) -> bool {
            // path is assumed to have a prefix of Some(DiskPrefix)
            let repr = &me.repr[];
            match me.prefix {
                Some(DiskPrefix) => {
                    repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase()
                }
                Some(VerbatimDiskPrefix) => {
                    // drive letter sits at byte 4, after the "\\?\" prefix
                    repr.as_bytes()[4] == path.as_bytes()[0].to_ascii_uppercase()
                }
                _ => false
            }
        }
        // Separator test honoring verbatim semantics ('/' is not a separator
        // inside verbatim paths).
        fn is_sep_(prefix: Option<PathPrefix>, u: u8) -> bool {
            if prefix_is_verbatim(prefix) { is_sep_verbatim(u as char) }
            else { is_sep(u as char) }
        }
        // Discard the receiver's contents and adopt `path` wholesale.
        fn replace_path(me: &mut Path, path: &str, prefix: Option<PathPrefix>) {
            let newpath = Path::normalize__(path, prefix);
            me.repr = match newpath {
                Some(p) => p,
                None => String::from_str(path)
            };
            me.prefix = prefix;
            me.update_sepidx();
        }
        fn append_path(me: &mut Path, path: &str) {
            // appends a path that has no prefix
            // if me is verbatim, we need to pre-normalize the new path
            let path_ = if is_verbatim(me) { Path::normalize__(path, None) }
                        else { None };
            let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
            let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
            s.push_str(&me.repr[]);
            let plen = me.prefix_len();
            // if me is "C:" we don't want to add a path separator
            match me.prefix {
                Some(DiskPrefix) if me.repr.len() == plen => (),
                _ if !(me.repr.len() > plen && me.repr.as_bytes()[me.repr.len()-1] == SEP_BYTE) => {
                    s.push(SEP);
                }
                _ => ()
            }
            match path_ {
                None => s.push_str(path),
                Some(p) => s.push_str(&p[]),
            };
            me.update_normalized(&s[])
        }
        if !path.is_empty() {
            let prefix = parse_prefix(path);
            match prefix {
                Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => {
                    // cwd-relative path, self is on the same volume
                    append_path(self, &path[prefix_len(prefix)..]);
                }
                Some(_) => {
                    // absolute path, or cwd-relative and self is not same volume
                    replace_path(self, path, prefix);
                }
                None if !path.is_empty() && is_sep_(self.prefix, path.as_bytes()[0]) => {
                    // volume-relative path
                    if self.prefix.is_some() {
                        // truncate self down to the prefix, then append
                        let n = self.prefix_len();
                        self.repr.truncate(n);
                        append_path(self, path);
                    } else {
                        // we have no prefix, so nothing to be relative to
                        replace_path(self, path, prefix);
                    }
                }
                None => {
                    // relative path
                    append_path(self, path);
                }
            }
        }
    }
}
impl GenericPath for Path {
    // Fallible constructor: rejects strings containing NUL, otherwise
    // delegates to the normalizing unchecked constructor.
    #[inline]
    fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
        match path.container_as_str() {
            None => None,
            Some(ref s) => {
                if contains_nul(s) {
                    None
                } else {
                    Some(unsafe { GenericPathUnsafe::new_unchecked(*s) })
                }
            }
        }
    }
    /// See `GenericPath::as_str` for info.
    /// Always returns a `Some` value.
    #[inline]
    fn as_str<'a>(&'a self) -> Option<&'a str> {
        Some(&self.repr[])
    }
    #[inline]
    fn as_vec<'a>(&'a self) -> &'a [u8] {
        self.repr.as_bytes()
    }
    #[inline]
    fn into_vec(self) -> Vec<u8> {
        self.repr.into_bytes()
    }
    #[inline]
    fn dirname<'a>(&'a self) -> &'a [u8] {
        // dirname_str always returns Some for Windows paths
        self.dirname_str().unwrap().as_bytes()
    }
    /// See `GenericPath::dirname_str` for info.
    /// Always returns a `Some` value.
    fn dirname_str<'a>(&'a self) -> Option<&'a str> {
        Some(match self.sepidx_or_prefix_len() {
            // ".." is its own dirname; any other separator-less path has "."
            None if ".." == self.repr => &self.repr[],
            None => ".",
            // a trailing ".." component or a bare root keeps the whole repr
            Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
                &self.repr[]
            }
            Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => {
                &self.repr[]
            }
            Some((0,idxa,_)) => &self.repr[..idxa],
            Some((idxb,idxa,_)) => {
                match self.prefix {
                    // keep the separator right after a disk prefix ("C:\")
                    Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => {
                        &self.repr[..idxa]
                    }
                    _ => &self.repr[..idxb]
                }
            }
        })
    }
    #[inline]
    fn filename<'a>(&'a self) -> Option<&'a [u8]> {
        self.filename_str().map(|x| x.as_bytes())
    }
    /// See `GenericPath::filename_str` for info.
    /// Always returns a `Some` value if `filename` returns a `Some` value.
    fn filename_str<'a>(&'a self) -> Option<&'a str> {
        let repr = &self.repr[];
        match self.sepidx_or_prefix_len() {
            // "." and ".." have no filename
            None if "." == repr || ".." == repr => None,
            None => Some(repr),
            Some((_,idxa,end)) if &repr[idxa..end] == ".." => None,
            // empty final component (e.g. bare root) has no filename
            Some((_,idxa,end)) if idxa == end => None,
            Some((_,idxa,end)) => Some(&repr[idxa..end])
        }
    }
    /// See `GenericPath::filestem_str` for info.
    /// Always returns a `Some` value if `filestem` returns a `Some` value.
    #[inline]
    fn filestem_str<'a>(&'a self) -> Option<&'a str> {
        // filestem() returns a byte vector that's guaranteed valid UTF-8
        self.filestem().map(|t| unsafe { mem::transmute(t) })
    }
    #[inline]
    fn extension_str<'a>(&'a self) -> Option<&'a str> {
        // extension() returns a byte vector that's guaranteed valid UTF-8
        self.extension().map(|t| unsafe { mem::transmute(t) })
    }
    fn dir_path(&self) -> Path {
        // dirname_str is already normalized, so the unchecked ctor is safe here
        unsafe { GenericPathUnsafe::new_unchecked(self.dirname_str().unwrap()) }
    }
    // Removes the final component; returns false if there was nothing to pop
    // (repr is "." or a bare root/prefix).
    #[inline]
    fn pop(&mut self) -> bool {
        match self.sepidx_or_prefix_len() {
            None if "." == self.repr => false,
            None => {
                self.repr = String::from_str(".");
                self.sepidx = None;
                true
            }
            // prefix-only paths cannot be popped further
            Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false,
            Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => false,
            Some((idxb,idxa,_)) => {
                let trunc = match self.prefix {
                    Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => {
                        // keep the root separator right after a disk prefix
                        let plen = self.prefix_len();
                        if idxb == plen { idxa } else { idxb }
                    }
                    _ => idxb
                };
                self.repr.truncate(trunc);
                self.update_sepidx();
                true
            }
        }
    }
    // Returns the root portion of the path (prefix plus, where relevant, the
    // separator after it), or None for plain relative paths.
    fn root_path(&self) -> Option<Path> {
        if self.prefix.is_some() {
            Some(Path::new(match self.prefix {
                Some(DiskPrefix) if self.is_absolute() => {
                    &self.repr[..(self.prefix_len()+1)]
                }
                Some(VerbatimDiskPrefix) => {
                    &self.repr[..(self.prefix_len()+1)]
                }
                _ => &self.repr[..self.prefix_len()]
            }))
        } else if is_vol_relative(self) {
            // "\foo" -> root is "\"
            Some(Path::new(&self.repr[..1]))
        } else {
            None
        }
    }
    /// See `GenericPath::is_absolute` for info.
    ///
    /// A Windows Path is considered absolute only if it has a non-volume prefix,
    /// or if it has a volume prefix and the path starts with '\'.
    /// A path of `\foo` is not considered absolute because it's actually
    /// relative to the "current volume". A separate method `Path::is_vol_relative`
    /// is provided to indicate this case. Similarly a path of `C:foo` is not
    /// considered absolute because it's relative to the cwd on volume C:. A
    /// separate method `Path::is_cwd_relative` is provided to indicate this case.
    #[inline]
    fn is_absolute(&self) -> bool {
        match self.prefix {
            Some(DiskPrefix) => {
                // "C:" alone or "C:foo" is cwd-relative; "C:\..." is absolute
                let rest = &self.repr[self.prefix_len()..];
                rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE
            }
            Some(_) => true,
            None => false
        }
    }
    #[inline]
    fn is_relative(&self) -> bool {
        self.prefix.is_none() && !is_vol_relative(self)
    }
    fn is_ancestor_of(&self, other: &Path) -> bool {
        if !self.equiv_prefix(other) {
            false
        } else if self.is_absolute() != other.is_absolute() ||
                  is_vol_relative(self) != is_vol_relative(other) {
            false
        } else {
            let mut ita = self.str_components().map(|x|x.unwrap());
            let mut itb = other.str_components().map(|x|x.unwrap());
            if "." == self.repr {
                // "." is an ancestor of anything except paths that escape
                // upward ("..")
                return itb.next() != Some("..");
            }
            loop {
                match (ita.next(), itb.next()) {
                    (None, _) => break,
                    (Some(a), Some(b)) if a == b => { continue },
                    (Some(a), _) if a == ".." => {
                        // if ita contains only .. components, it's an ancestor
                        return ita.all(|x| x == "..");
                    }
                    _ => return false
                }
            }
            true
        }
    }
    // Computes a path that, joined onto `base`, yields `self`. Returns None
    // when no such relative path can be expressed (differing roots, or a
    // ".." in `base` whose target is unknown).
    fn path_relative_from(&self, base: &Path) -> Option<Path> {
        // Components that would be re-interpreted by normalization ("." /
        // ".." / containing '/') cannot appear in a non-verbatim result.
        fn comp_requires_verbatim(s: &str) -> bool {
            s == "." || s == ".." || s.contains_char(SEP2)
        }
        if !self.equiv_prefix(base) {
            // prefixes differ
            if self.is_absolute() {
                Some(self.clone())
            } else if self.prefix == Some(DiskPrefix) && base.prefix == Some(DiskPrefix) {
                // both drives, drive letters must differ or they'd be equiv
                Some(self.clone())
            } else {
                None
            }
        } else if self.is_absolute() != base.is_absolute() {
            if self.is_absolute() {
                Some(self.clone())
            } else {
                None
            }
        } else if is_vol_relative(self) != is_vol_relative(base) {
            if is_vol_relative(self) {
                Some(self.clone())
            } else {
                None
            }
        } else {
            let mut ita = self.str_components().map(|x|x.unwrap());
            let mut itb = base.str_components().map(|x|x.unwrap());
            let mut comps = vec![];
            let a_verb = is_verbatim(self);
            let b_verb = is_verbatim(base);
            loop {
                match (ita.next(), itb.next()) {
                    (None, None) => break,
                    (Some(a), None) if a_verb && comp_requires_verbatim(a) => {
                        // result would need a verbatim prefix; bail out with self
                        return Some(self.clone())
                    }
                    (Some(a), None) => {
                        comps.push(a);
                        if !a_verb {
                            comps.extend(ita.by_ref());
                            break;
                        }
                    }
                    // base is longer: climb up one level per extra component
                    (None, _) => comps.push(".."),
                    (Some(a), Some(b)) if comps.is_empty() && a == b => (),
                    (Some(a), Some(b)) if !b_verb && b == "." => {
                        if a_verb && comp_requires_verbatim(a) {
                            return Some(self.clone())
                        } else { comps.push(a) }
                    }
                    // a ".." in base makes the relationship unresolvable
                    (Some(_), Some(b)) if !b_verb && b == ".." => return None,
                    (Some(a), Some(_)) if a_verb && comp_requires_verbatim(a) => {
                        return Some(self.clone())
                    }
                    (Some(a), Some(_)) => {
                        comps.push("..");
                        for _ in itb {
                            comps.push("..");
                        }
                        comps.push(a);
                        if !a_verb {
                            comps.extend(ita.by_ref());
                            break;
                        }
                    }
                }
            }
            Some(Path::new(comps.connect("\\")))
        }
    }
fn ends_with_path(&self, child: &Path) -> bool {
if !child.is_relative() { return false; }
let mut selfit = self.str_components().rev();
let mut childit = child.str_components().rev();
loop {
match (selfit.next(), childit.next()) {
(Some(a), Some(b)) => if a != b { return false; },
(Some(_), None) => break,
(None, Some(_)) => return false,
(None, None) => break
}
}<|fim▁hole|>
impl Path {
    /// Returns a new `Path` from a `BytesContainer`.
    ///
    /// # Panics
    ///
    /// Panics if the vector contains a `NUL`, or if it contains invalid UTF-8.
    ///
    /// # Example
    ///
    /// ```
    /// println!("{}", Path::new(r"C:\some\path").display());
    /// ```
    #[inline]
    pub fn new<T: BytesContainer>(path: T) -> Path {
        GenericPath::new(path)
    }
    /// Returns a new `Some(Path)` from a `BytesContainer`.
    ///
    /// Returns `None` if the vector contains a `NUL`, or if it contains invalid UTF-8.
    ///
    /// # Example
    ///
    /// ```
    /// let path = Path::new_opt(r"C:\some\path");
    ///
    /// match path {
    ///     Some(path) => println!("{}", path.display()),
    ///     None => println!("There was a problem with your path."),
    /// }
    /// ```
    #[inline]
    pub fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
        GenericPath::new_opt(path)
    }
    /// Returns an iterator that yields each component of the path in turn as a Option<&str>.
    /// Every component is guaranteed to be Some.
    /// Does not yield the path prefix (including server/share components in UNC paths).
    /// Does not distinguish between volume-relative and relative paths, e.g.
    /// \a\b\c and a\b\c.
    /// Does not distinguish between absolute and cwd-relative paths, e.g.
    /// C:\foo and C:foo.
    pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
        let repr = &self.repr[];
        // Strip the prefix (and the separator right after it, if any) so only
        // the component portion is split.
        let s = match self.prefix {
            Some(_) => {
                let plen = self.prefix_len();
                if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
                    &repr[(plen+1)..]
                } else { &repr[plen..] }
            }
            None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..],
            None => repr
        };
        let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr
        let ret = s.split_terminator(SEP).map(some);
        ret
    }
    /// Returns an iterator that yields each component of the path in turn as a &[u8].
    /// See str_components() for details.
    pub fn components<'a>(&'a self) -> Components<'a> {
        fn convert<'a>(x: Option<&'a str>) -> &'a [u8] {
            #![inline]
            x.unwrap().as_bytes()
        }
        let convert: for<'b> fn(Option<&'b str>) -> &'b [u8] = convert; // coerce to fn ptr
        self.str_components().map(convert)
    }
    // Returns whether the two paths have equivalent prefixes: identical
    // prefixes, or a verbatim/non-verbatim pair naming the same drive or
    // the same UNC server\share.
    fn equiv_prefix(&self, other: &Path) -> bool {
        let s_repr = &self.repr[];
        let o_repr = &other.repr[];
        match (self.prefix, other.prefix) {
            (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
                // drive letter is at byte 0 for "C:", byte 4 for "\\?\C:"
                self.is_absolute() &&
                    s_repr.as_bytes()[0].to_ascii_lowercase() ==
                    o_repr.as_bytes()[4].to_ascii_lowercase()
            }
            (Some(VerbatimDiskPrefix), Some(DiskPrefix)) => {
                other.is_absolute() &&
                    s_repr.as_bytes()[4].to_ascii_lowercase() ==
                    o_repr.as_bytes()[0].to_ascii_lowercase()
            }
            (Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => {
                s_repr.as_bytes()[4].to_ascii_lowercase() ==
                    o_repr.as_bytes()[4].to_ascii_lowercase()
            }
            (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
                // "\\server\share" vs "\\?\UNC\server\share"
                &s_repr[2..self.prefix_len()] == &o_repr[8..other.prefix_len()]
            }
            (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => {
                &s_repr[8..self.prefix_len()] == &o_repr[2..other.prefix_len()]
            }
            (None, None) => true,
            (a, b) if a == b => {
                &s_repr[..self.prefix_len()] == &o_repr[..other.prefix_len()]
            }
            _ => false
        }
    }
    // Parses the prefix and normalizes `s`, always yielding an owned string.
    fn normalize_(s: &str) -> (Option<PathPrefix>, String) {
        // make borrowck happy
        let (prefix, val) = {
            let prefix = parse_prefix(s);
            let path = Path::normalize__(s, prefix);
            (prefix, path)
        };
        (prefix, match val {
            None => s.to_string(),
            Some(val) => val
        })
    }
    // Normalizes `s` given its parsed prefix. Returns None when `s` is
    // already in normal form (so the caller can avoid an allocation).
    fn normalize__(s: &str, prefix: Option<PathPrefix>) -> Option<String> {
        if prefix_is_verbatim(prefix) {
            // don't do any normalization
            match prefix {
                Some(VerbatimUNCPrefix(x, 0)) if s.len() == 8 + x => {
                    // the server component has no trailing '\'
                    let mut s = String::from_str(s);
                    s.push(SEP);
                    Some(s)
                }
                _ => None
            }
        } else {
            let (is_abs, comps) = normalize_helper(s, prefix);
            let mut comps = comps;
            // Even when the components needed no change, a lowercase drive
            // letter must still be uppercased, so force a rebuild.
            match (comps.is_some(),prefix) {
                (false, Some(DiskPrefix)) => {
                    if s.as_bytes()[0] >= b'a' && s.as_bytes()[0] <= b'z' {
                        comps = Some(vec![]);
                    }
                }
                (false, Some(VerbatimDiskPrefix)) => {
                    // FIX: the upper-bound check previously tested byte 0 (the
                    // leading '\', always <= b'z') instead of byte 4, which is
                    // where the drive letter lives in a "\\?\C:" prefix.
                    if s.as_bytes()[4] >= b'a' && s.as_bytes()[4] <= b'z' {
                        comps = Some(vec![]);
                    }
                }
                _ => ()
            }
            match comps {
                None => None,
                Some(comps) => {
                    if prefix.is_some() && comps.is_empty() {
                        match prefix.unwrap() {
                            DiskPrefix => {
                                let len = prefix_len(prefix) + is_abs as uint;
                                let mut s = String::from_str(&s[..len]);
                                unsafe {
                                    let v = s.as_mut_vec();
                                    v[0] = (*v)[0].to_ascii_uppercase();
                                }
                                if is_abs {
                                    // normalize C:/ to C:\
                                    unsafe {
                                        s.as_mut_vec()[2] = SEP_BYTE;
                                    }
                                }
                                Some(s)
                            }
                            VerbatimDiskPrefix => {
                                let len = prefix_len(prefix) + is_abs as uint;
                                let mut s = String::from_str(&s[..len]);
                                unsafe {
                                    let v = s.as_mut_vec();
                                    v[4] = (*v)[4].to_ascii_uppercase();
                                }
                                Some(s)
                            }
                            _ => {
                                let plen = prefix_len(prefix);
                                if s.len() > plen {
                                    Some(String::from_str(&s[..plen]))
                                } else { None }
                            }
                        }
                    } else if is_abs && comps.is_empty() {
                        // bare root: "\"
                        Some(repeat(SEP).take(1).collect())
                    } else {
                        // Rebuild: normalized prefix, then components joined
                        // with the primary separator.
                        let prefix_ = &s[..prefix_len(prefix)];
                        let n = prefix_.len() +
                                if is_abs { comps.len() } else { comps.len() - 1} +
                                comps.iter().map(|v| v.len()).sum();
                        let mut s = String::with_capacity(n);
                        match prefix {
                            Some(DiskPrefix) => {
                                s.push(prefix_.as_bytes()[0].to_ascii_uppercase() as char);
                                s.push(':');
                            }
                            Some(VerbatimDiskPrefix) => {
                                s.push_str(&prefix_[..4]);
                                s.push(prefix_.as_bytes()[4].to_ascii_uppercase() as char);
                                s.push_str(&prefix_[5..]);
                            }
                            Some(UNCPrefix(a,b)) => {
                                s.push_str("\\\\");
                                s.push_str(&prefix_[2..(a+2)]);
                                s.push(SEP);
                                s.push_str(&prefix_[(3+a)..(3+a+b)]);
                            }
                            Some(_) => s.push_str(prefix_),
                            None => ()
                        }
                        let mut it = comps.into_iter();
                        if !is_abs {
                            match it.next() {
                                None => (),
                                Some(comp) => s.push_str(comp)
                            }
                        }
                        for comp in it {
                            s.push(SEP);
                            s.push_str(comp);
                        }
                        Some(s)
                    }
                }
            }
        }
    }
    // Recomputes `sepidx` after `repr` changes; ignores a non-semantic
    // trailing slash and any separator inside the prefix.
    fn update_sepidx(&mut self) {
        let s = if self.has_nonsemantic_trailing_slash() {
                    &self.repr[..(self.repr.len()-1)]
                } else { &self.repr[] };
        let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
            is_sep
        } else {
            is_sep_verbatim
        };
        let idx = s.rfind(sep_test);
        let prefixlen = self.prefix_len();
        self.sepidx = idx.and_then(|x| if x < prefixlen { None } else { Some(x) });
    }
    fn prefix_len(&self) -> uint {
        prefix_len(self.prefix)
    }
    // Returns a tuple (before, after, end) where before is the index of the separator
    // and after is the index just after the separator.
    // end is the length of the string, normally, or the index of the final character if it is
    // a non-semantic trailing separator in a verbatim string.
    // If the prefix is considered the separator, before and after are the same.
    fn sepidx_or_prefix_len(&self) -> Option<(uint,uint,uint)> {
        match self.sepidx {
            None => match self.prefix_len() { 0 => None, x => Some((x,x,self.repr.len())) },
            Some(x) => {
                if self.has_nonsemantic_trailing_slash() {
                    Some((x,x+1,self.repr.len()-1))
                } else { Some((x,x+1,self.repr.len())) }
            }
        }
    }
    // A trailing '\' in a verbatim path carries no meaning but must be
    // preserved (verbatim paths are not normalized).
    fn has_nonsemantic_trailing_slash(&self) -> bool {
        is_verbatim(self) && self.repr.len() > self.prefix_len()+1 &&
            self.repr.as_bytes()[self.repr.len()-1] == SEP_BYTE
    }
    // Replaces the entire contents with the normalized form of `s`.
    fn update_normalized(&mut self, s: &str) {
        let (prefix, path) = Path::normalize_(s);
        self.repr = path;
        self.prefix = prefix;
        self.update_sepidx();
    }
}
/// Returns whether the path is considered "volume-relative", which means a path
/// that looks like "\foo". Paths of this form are relative to the current volume,
/// but absolute within that volume.
#[inline]
pub fn is_vol_relative(path: &Path) -> bool {
    // Only a prefix-less path can be volume-relative; `repr` is never empty,
    // so indexing byte 0 is safe.
    match path.prefix {
        None => is_sep_byte(&path.repr.as_bytes()[0]),
        Some(_) => false
    }
}
/// Returns whether the path is considered "cwd-relative", which means a path
/// with a volume prefix that is not absolute. This look like "C:foo.txt". Paths
/// of this form are relative to the cwd on the given volume.
#[inline]
pub fn is_cwd_relative(path: &Path) -> bool {
path.prefix == Some(DiskPrefix) && !path.is_absolute()
}
/// Returns the PathPrefix for this Path
// Simple accessor; `PathPrefix` is Copy so this returns by value.
#[inline]
pub fn prefix(path: &Path) -> Option<PathPrefix> {
    path.prefix
}
/// Returns whether the Path's prefix is a verbatim prefix, i.e. `\\?\`
// Verbatim paths skip normalization and treat only '\' as a separator.
#[inline]
pub fn is_verbatim(path: &Path) -> bool {
    prefix_is_verbatim(path.prefix)
}
/// Returns the non-verbatim equivalent of the input path, if possible.
/// If the input path is a device namespace path, None is returned.
/// If the input path is not verbatim, it is returned as-is.
/// If the input path is verbatim, but the same path can be expressed as
/// non-verbatim, the non-verbatim version is returned.
/// Otherwise, None is returned.
pub fn make_non_verbatim(path: &Path) -> Option<Path> {
    let repr = &path.repr[];
    let new_path = match path.prefix {
        // device-namespace and generic verbatim paths have no non-verbatim form
        Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
        // already non-verbatim: nothing to do
        Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),
        Some(VerbatimDiskPrefix) => {
            // \\?\D:\ -> drop the "\\?\"
            Path::new(&repr[4..])
        }
        Some(VerbatimUNCPrefix(_,_)) => {
            // \\?\UNC\server\share -> "\\server\share"
            Path::new(format!(r"\{}", &repr[7..]))
        }
    };
    if new_path.prefix.is_none() {
        // \\?\UNC\server is a VerbatimUNCPrefix
        // but \\server is nothing
        return None;
    }
    // now ensure normalization didn't change anything
    if &repr[path.prefix_len()..] == &new_path.repr[new_path.prefix_len()..] {
        Some(new_path)
    } else {
        None
    }
}
/// The standard path separator character
pub const SEP: char = '\\';
/// The standard path separator byte
pub const SEP_BYTE: u8 = SEP as u8;
/// The alternative path separator character
pub const SEP2: char = '/';
/// The alternative path separator character
pub const SEP2_BYTE: u8 = SEP2 as u8;
/// Returns whether the given char is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
#[inline]
pub fn is_sep(c: char) -> bool {
c == SEP || c == SEP2
}
/// Returns whether the given char is a path separator.
/// Only allows the primary separator '\'; use is_sep to allow '/'.
// Verbatim paths (`\\?\...`) do not treat '/' as a separator.
#[inline]
pub fn is_sep_verbatim(c: char) -> bool {
    c == SEP
}
/// Returns whether the given byte is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
#[inline]
pub fn is_sep_byte(u: &u8) -> bool {
*u == SEP_BYTE || *u == SEP2_BYTE
}
/// Returns whether the given byte is a path separator.
/// Only allows the primary separator '\'; use is_sep_byte to allow '/'.
// Byte-level counterpart of `is_sep_verbatim`.
#[inline]
pub fn is_sep_byte_verbatim(u: &u8) -> bool {
    *u == SEP_BYTE
}
/// Prefix types for Path
// Stored lengths are in bytes and exclude the literal prefix characters
// themselves; see `prefix_len` for how total prefix length is computed.
#[derive(Copy, PartialEq, Clone, Show)]
pub enum PathPrefix {
    /// Prefix `\\?\`, uint is the length of the following component
    VerbatimPrefix(uint),
    /// Prefix `\\?\UNC\`, uints are the lengths of the UNC components
    VerbatimUNCPrefix(uint, uint),
    /// Prefix `\\?\C:\` (for any alphabetic character)
    VerbatimDiskPrefix,
    /// Prefix `\\.\`, uint is the length of the following component
    DeviceNSPrefix(uint),
    /// UNC prefix `\\server\share`, uints are the lengths of the server/share
    UNCPrefix(uint, uint),
    /// Prefix `C:` for any alphabetic character
    DiskPrefix
}
// Parses the leading prefix of a Windows path string, if any.
// Recognizes, in order: \\?\UNC\..., \\?\..., \\.\..., \\server\share,
// and C: drive prefixes. Returns None for anything else.
fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
    if path.starts_with("\\\\") {
        // \\
        path = &path[2..];
        if path.starts_with("?\\") {
            // \\?\
            path = &path[2..];
            if path.starts_with("UNC\\") {
                // \\?\UNC\server\share
                path = &path[4..];
                // verbatim: only '\' separates; missing share is tolerated
                let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) {
                    Some(x) => x,
                    None => (path.len(), 0)
                };
                return Some(VerbatimUNCPrefix(idx_a, idx_b));
            } else {
                // \\?\path
                let idx = path.find('\\');
                if idx == Some(2) && path.as_bytes()[1] == b':' {
                    let c = path.as_bytes()[0];
                    if c.is_ascii() && (c as char).is_alphabetic() {
                        // \\?\C:\ path
                        return Some(VerbatimDiskPrefix);
                    }
                }
                let idx = idx.unwrap_or(path.len());
                return Some(VerbatimPrefix(idx));
            }
        } else if path.starts_with(".\\") {
            // \\.\path
            path = &path[2..];
            let idx = path.find('\\').unwrap_or(path.len());
            return Some(DeviceNSPrefix(idx));
        }
        match parse_two_comps(path, is_sep) {
            Some((idx_a, idx_b)) if idx_a > 0 && idx_b > 0 => {
                // \\server\share
                return Some(UNCPrefix(idx_a, idx_b));
            }
            _ => ()
        }
    } else if path.len() > 1 && path.as_bytes()[1] == b':' {
        // C:
        let c = path.as_bytes()[0];
        if c.is_ascii() && (c as char).is_alphabetic() {
            return Some(DiskPrefix);
        }
    }
    return None;
    // Splits off the first two components separated by `f`, returning their
    // byte lengths. The second length runs to the next separator or the end.
    fn parse_two_comps(mut path: &str, f: fn(char) -> bool) -> Option<(uint, uint)> {
        let idx_a = match path.find(f) {
            None => return None,
            Some(x) => x
        };
        path = &path[(idx_a+1)..];
        let idx_b = path.find(f).unwrap_or(path.len());
        Some((idx_a, idx_b))
    }
}
// None result means the string didn't need normalizing
// Splits the non-prefix portion into components, collapsing empty and "."
// components and resolving ".." where possible. Returns (is_absolute,
// normalized components or None if already normal).
fn normalize_helper<'a>(s: &'a str, prefix: Option<PathPrefix>) -> (bool, Option<Vec<&'a str>>) {
    // verbatim prefixes only recognize '\' as a separator
    let f: fn(char) -> bool = if !prefix_is_verbatim(prefix) {
        is_sep
    } else {
        is_sep_verbatim
    };
    let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
    let s_ = &s[prefix_len(prefix)..];
    let s_ = if is_abs { &s_[1..] } else { s_ };
    if is_abs && s_.is_empty() {
        return (is_abs, match prefix {
            // "\"-rooted with '\' is already normal; "/"-rooted needs rewriting
            Some(DiskPrefix) | None => (if is_sep_verbatim(s.char_at(prefix_len(prefix))) { None }
                                        else { Some(vec![]) }),
            Some(_) => Some(vec![]), // need to trim the trailing separator
        });
    }
    let mut comps: Vec<&'a str> = vec![];
    let mut n_up = 0u; // count of leading ".." components retained so far
    let mut changed = false;
    for comp in s_.split(f) {
        if comp.is_empty() { changed = true }
        else if comp == "." { changed = true }
        else if comp == ".." {
            // ".." at an (effectively) absolute root is dropped; a ".." that
            // cannot cancel a prior component is kept.
            let has_abs_prefix = match prefix {
                Some(DiskPrefix) => false,
                Some(_) => true,
                None => false
            };
            if (is_abs || has_abs_prefix) && comps.is_empty() { changed = true }
            else if comps.len() == n_up { comps.push(".."); n_up += 1 }
            else { comps.pop().unwrap(); changed = true }
        } else { comps.push(comp) }
    }
    if !changed && !prefix_is_verbatim(prefix) {
        // any '/' must be rewritten to '\' even when components are untouched
        changed = s.find(is_sep).is_some();
    }
    if changed {
        if comps.is_empty() && !is_abs && prefix.is_none() {
            if s == "." {
                return (is_abs, None);
            }
            comps.push(".");
        }
        (is_abs, Some(comps))
    } else {
        (is_abs, None)
    }
}
// Returns true for prefixes that disable normalization ('\\?\' family).
// DeviceNSPrefix is grouped with them, matching the original behavior
// (the original author noted uncertainty about this case).
fn prefix_is_verbatim(p: Option<PathPrefix>) -> bool {
    match p {
        Some(VerbatimPrefix(..)) |
        Some(VerbatimUNCPrefix(..)) |
        Some(VerbatimDiskPrefix) |
        Some(DeviceNSPrefix(..)) => true,
        _ => false
    }
}
// Total byte length of the prefix as it appears in the repr, including the
// literal prefix characters plus any stored component lengths.
fn prefix_len(p: Option<PathPrefix>) -> uint {
    match p {
        None => 0,
        Some(DiskPrefix) => 2,                     // "C:"
        Some(VerbatimDiskPrefix) => 6,             // "\\?\C:"
        Some(VerbatimPrefix(x)) => 4 + x,          // "\\?\" + component
        Some(DeviceNSPrefix(x)) => 4 + x,          // "\\.\" + component
        Some(UNCPrefix(x, y)) => 2 + x + 1 + y,    // "\\" server "\" share
        Some(VerbatimUNCPrefix(x, y)) => 8 + x + 1 + y // "\\?\UNC\" server "\" share
    }
}
#[cfg(test)]
mod tests {
use super::PathPrefix::*;
use super::parse_prefix;
use super::*;
use clone::Clone;
use iter::IteratorExt;
use option::Option::{self, Some, None};
use path::GenericPath;
use slice::{AsSlice, SliceExt};
use str::Str;
use string::ToString;
use vec::Vec;
macro_rules! t {
(s: $path:expr, $exp:expr) => (
{
let path = $path;
assert_eq!(path.as_str(), Some($exp));
}
);
(v: $path:expr, $exp:expr) => (
{
let path = $path;
assert_eq!(path.as_vec(), $exp);
}
)
}
#[test]
fn test_parse_prefix() {
macro_rules! t {
($path:expr, $exp:expr) => (
{
let path = $path;
let exp = $exp;
let res = parse_prefix(path);
assert_eq!(res, exp);
}
)
}
t!("\\\\SERVER\\share\\foo", Some(UNCPrefix(6,5)));
t!("\\\\", None);
t!("\\\\SERVER", None);
t!("\\\\SERVER\\", None);
t!("\\\\SERVER\\\\", None);
t!("\\\\SERVER\\\\foo", None);
t!("\\\\SERVER\\share", Some(UNCPrefix(6,5)));
t!("\\\\SERVER/share/foo", Some(UNCPrefix(6,5)));
t!("\\\\SERVER\\share/foo", Some(UNCPrefix(6,5)));
t!("//SERVER/share/foo", None);
t!("\\\\\\a\\b\\c", None);
t!("\\\\?\\a\\b\\c", Some(VerbatimPrefix(1)));
t!("\\\\?\\a/b/c", Some(VerbatimPrefix(5)));
t!("//?/a/b/c", None);
t!("\\\\.\\a\\b", Some(DeviceNSPrefix(1)));
t!("\\\\.\\a/b", Some(DeviceNSPrefix(3)));
t!("//./a/b", None);
t!("\\\\?\\UNC\\server\\share\\foo", Some(VerbatimUNCPrefix(6,5)));
t!("\\\\?\\UNC\\\\share\\foo", Some(VerbatimUNCPrefix(0,5)));
t!("\\\\?\\UNC\\", Some(VerbatimUNCPrefix(0,0)));
t!("\\\\?\\UNC\\server/share/foo", Some(VerbatimUNCPrefix(16,0)));
t!("\\\\?\\UNC\\server", Some(VerbatimUNCPrefix(6,0)));
t!("\\\\?\\UNC\\server\\", Some(VerbatimUNCPrefix(6,0)));
t!("\\\\?\\UNC/server/share", Some(VerbatimPrefix(16)));
t!("\\\\?\\UNC", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:\\a\\b.txt", Some(VerbatimDiskPrefix));
t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
t!("\\\\?\\C:a.txt", Some(VerbatimPrefix(7)));
t!("\\\\?\\C:a\\b.txt", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
t!("C:\\foo", Some(DiskPrefix));
t!("z:/foo", Some(DiskPrefix));
t!("d:", Some(DiskPrefix));
t!("ab:", None);
t!("ü:\\foo", None);
t!("3:\\foo", None);
t!(" :\\foo", None);
t!("::\\foo", None);
t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
t!("\\\\?\\ab:\\", Some(VerbatimPrefix(3)));
t!("\\\\?\\C:\\a", Some(VerbatimDiskPrefix));
t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
t!("\\\\?\\C:\\a/b", Some(VerbatimDiskPrefix));
}
#[test]
// Path::new normalization: forward slashes become backslashes, "." and ".."
// components collapse, trailing separators drop, drive letters are
// uppercased, and verbatim (\\?\) / device (\\.\) paths are kept untouched.
// Uses the file-level t! macro (defined earlier in this file): the v: arm
// compares the byte representation, the s: arm the string representation.
fn test_paths() {
    let empty: &[u8] = &[];
    t!(v: Path::new(empty), b".");
    t!(v: Path::new(b"\\"), b"\\");
    t!(v: Path::new(b"a\\b\\c"), b"a\\b\\c");
    t!(s: Path::new(""), ".");
    t!(s: Path::new("\\"), "\\");
    t!(s: Path::new("hi"), "hi");
    t!(s: Path::new("hi\\"), "hi");
    t!(s: Path::new("\\lib"), "\\lib");
    t!(s: Path::new("\\lib\\"), "\\lib");
    t!(s: Path::new("hi\\there"), "hi\\there");
    t!(s: Path::new("hi\\there.txt"), "hi\\there.txt");
    // forward slashes are normalized to backslashes
    t!(s: Path::new("/"), "\\");
    t!(s: Path::new("hi/"), "hi");
    t!(s: Path::new("/lib"), "\\lib");
    t!(s: Path::new("/lib/"), "\\lib");
    t!(s: Path::new("hi/there"), "hi\\there");
    // "." and ".." components are collapsed during construction
    t!(s: Path::new("hi\\there\\"), "hi\\there");
    t!(s: Path::new("hi\\..\\there"), "there");
    t!(s: Path::new("hi/../there"), "there");
    t!(s: Path::new("..\\hi\\there"), "..\\hi\\there");
    t!(s: Path::new("\\..\\hi\\there"), "\\hi\\there");
    t!(s: Path::new("/../hi/there"), "\\hi\\there");
    t!(s: Path::new("foo\\.."), ".");
    t!(s: Path::new("\\foo\\.."), "\\");
    t!(s: Path::new("\\foo\\..\\.."), "\\");
    t!(s: Path::new("\\foo\\..\\..\\bar"), "\\bar");
    t!(s: Path::new("\\.\\hi\\.\\there\\."), "\\hi\\there");
    t!(s: Path::new("\\.\\hi\\.\\there\\.\\.."), "\\hi");
    t!(s: Path::new("foo\\..\\.."), "..");
    t!(s: Path::new("foo\\..\\..\\.."), "..\\..");
    t!(s: Path::new("foo\\..\\..\\bar"), "..\\bar");
    assert_eq!(Path::new(b"foo\\bar").into_vec(), b"foo\\bar");
    assert_eq!(Path::new(b"\\foo\\..\\..\\bar").into_vec(), b"\\bar");
    // UNC forms: a degenerate \\a collapses to \a, but a full
    // \\server\share prefix is preserved
    t!(s: Path::new("\\\\a"), "\\a");
    t!(s: Path::new("\\\\a\\"), "\\a");
    t!(s: Path::new("\\\\a\\b"), "\\\\a\\b");
    t!(s: Path::new("\\\\a\\b\\"), "\\\\a\\b");
    t!(s: Path::new("\\\\a\\b/"), "\\\\a\\b");
    t!(s: Path::new("\\\\\\b"), "\\b");
    t!(s: Path::new("\\\\a\\\\b"), "\\a\\b");
    t!(s: Path::new("\\\\a\\b\\c"), "\\\\a\\b\\c");
    t!(s: Path::new("\\\\server\\share/path"), "\\\\server\\share\\path");
    t!(s: Path::new("\\\\server/share/path"), "\\\\server\\share\\path");
    // drive-letter paths: the drive letter is normalized to uppercase
    t!(s: Path::new("C:a\\b.txt"), "C:a\\b.txt");
    t!(s: Path::new("C:a/b.txt"), "C:a\\b.txt");
    t!(s: Path::new("z:\\a\\b.txt"), "Z:\\a\\b.txt");
    t!(s: Path::new("z:/a/b.txt"), "Z:\\a\\b.txt");
    // "ab:" is not a valid drive prefix (two letters), so it stays a plain
    // component (slashes still normalized)
    t!(s: Path::new("ab:/a/b.txt"), "ab:\\a\\b.txt");
    t!(s: Path::new("C:\\"), "C:\\");
    t!(s: Path::new("C:"), "C:");
    t!(s: Path::new("q:"), "Q:");
    t!(s: Path::new("C:/"), "C:\\");
    t!(s: Path::new("C:\\foo\\.."), "C:\\");
    t!(s: Path::new("C:foo\\.."), "C:");
    t!(s: Path::new("C:\\a\\"), "C:\\a");
    t!(s: Path::new("C:\\a/"), "C:\\a");
    t!(s: Path::new("C:\\a\\b\\"), "C:\\a\\b");
    t!(s: Path::new("C:\\a\\b/"), "C:\\a\\b");
    t!(s: Path::new("C:a\\"), "C:a");
    t!(s: Path::new("C:a/"), "C:a");
    t!(s: Path::new("C:a\\b\\"), "C:a\\b");
    t!(s: Path::new("C:a\\b/"), "C:a\\b");
    // verbatim (\\?\) paths are preserved exactly -- no normalization at all
    t!(s: Path::new("\\\\?\\z:\\a\\b.txt"), "\\\\?\\z:\\a\\b.txt");
    t!(s: Path::new("\\\\?\\C:/a/b.txt"), "\\\\?\\C:/a/b.txt");
    t!(s: Path::new("\\\\?\\C:\\a/b.txt"), "\\\\?\\C:\\a/b.txt");
    t!(s: Path::new("\\\\?\\test\\a\\b.txt"), "\\\\?\\test\\a\\b.txt");
    t!(s: Path::new("\\\\?\\foo\\bar\\"), "\\\\?\\foo\\bar\\");
    // device-namespace (\\.\) paths
    t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
    t!(s: Path::new("\\\\.\\"), "\\\\.\\");
    t!(s: Path::new("\\\\?\\UNC\\server\\share\\foo"), "\\\\?\\UNC\\server\\share\\foo");
    t!(s: Path::new("\\\\?\\UNC\\server/share"), "\\\\?\\UNC\\server/share\\");
    t!(s: Path::new("\\\\?\\UNC\\server"), "\\\\?\\UNC\\server\\");
    t!(s: Path::new("\\\\?\\UNC\\"), "\\\\?\\UNC\\\\");
    t!(s: Path::new("\\\\?\\UNC"), "\\\\?\\UNC");
    // I'm not sure whether \\.\foo/bar should normalize to \\.\foo\bar
    // as information is sparse and this isn't really googleable.
    // I'm going to err on the side of not normalizing it, as this skips the filesystem
    t!(s: Path::new("\\\\.\\foo/bar"), "\\\\.\\foo/bar");
    t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
}
#[test]
// Path::new_opt is the non-panicking constructor: it returns None for input
// containing an interior NUL, or for the 0x80 byte (which is never valid
// UTF-8 -- presumably the reason it is rejected; confirm against new_opt).
fn test_opt_paths() {
    assert!(Path::new_opt(b"foo\\bar\0") == None);
    assert!(Path::new_opt(b"foo\\bar\x80") == None);
    t!(v: Path::new_opt(b"foo\\bar").unwrap(), b"foo\\bar");
    assert!(Path::new_opt("foo\\bar\0") == None);
    t!(s: Path::new_opt("foo\\bar").unwrap(), "foo\\bar");
}
#[test]
fn test_null_byte() {
    use thread::Thread;
    // Interior NUL bytes must be rejected with a panic. Run each offending
    // operation on a child thread so the panic surfaces as a join error
    // instead of aborting the test harness.
    let outcome = Thread::scoped(move|| {
        Path::new(b"foo/bar\0")
    }).join();
    assert!(outcome.is_err());

    let outcome = Thread::scoped(move|| {
        Path::new("test").set_filename(b"f\0o")
    }).join();
    assert!(outcome.is_err());

    let outcome = Thread::scoped(move || {
        Path::new("test").push(b"f\0o");
    }).join();
    assert!(outcome.is_err());
}
#[test]
#[should_fail]
// 0x80 is never a valid UTF-8 byte, so the panicking constructor is
// expected to fail here (hence #[should_fail]).
fn test_not_utf8_panics() {
    Path::new(b"hello\x80.txt");
}
#[test]
fn test_display_str() {
    // display() formats the whole path; filename_display() formats just the
    // filename, which is empty for the root path.
    let path = Path::new("foo");
    assert_eq!(path.display().to_string(), "foo");
    let root = Path::new(b"\\");
    assert_eq!(root.filename_display().to_string(), "");

    // as_cow exposes the same text without going through Display formatting.
    let path = Path::new("foo");
    let cow = path.display().as_cow();
    assert_eq!(cow.as_slice(), "foo");
    let root = Path::new(b"\\");
    let cow = root.filename_display().as_cow();
    assert_eq!(cow.as_slice(), "");
}
#[test]
fn test_display() {
    // Check full-path and filename-only Display formatting side by side:
    // $full is the expected display() text, $file the filename_display() text.
    macro_rules! t {
        ($input:expr, $full:expr, $file:expr) => (
            {
                let p = Path::new($input);
                assert_eq!(format!("{}", p.display()), $full);
                assert_eq!(format!("{}", p.filename_display()), $file);
            }
        )
    }
    t!("foo", "foo", "foo");
    t!("foo\\bar", "foo\\bar", "bar");
    t!("\\", "\\", "");
}
#[test]
// Component accessors: filename, dirname, filestem, extension -- for plain,
// drive-letter, UNC, verbatim and device-namespace paths. The s: macro arm
// tests the _str accessor (plain form unwraps Some, `opt` form compares the
// Option directly); the v: arm tests the byte-slice accessor.
fn test_components() {
    macro_rules! t {
        (s: $path:expr, $op:ident, $exp:expr) => (
            {
                let path = $path;
                let path = Path::new(path);
                assert_eq!(path.$op(), Some($exp));
            }
        );
        (s: $path:expr, $op:ident, $exp:expr, opt) => (
            {
                let path = $path;
                let path = Path::new(path);
                let left = path.$op();
                assert_eq!(left, $exp);
            }
        );
        (v: $path:expr, $op:ident, $exp:expr) => (
            {
                let path = $path;
                let path = Path::new(path);
                assert_eq!(path.$op(), $exp);
            }
        )
    }
    // filename: None for root/prefix-only/".."-only paths
    t!(v: b"a\\b\\c", filename, Some(b"c"));
    t!(s: "a\\b\\c", filename_str, "c");
    t!(s: "\\a\\b\\c", filename_str, "c");
    t!(s: "a", filename_str, "a");
    t!(s: "\\a", filename_str, "a");
    t!(s: ".", filename_str, None, opt);
    t!(s: "\\", filename_str, None, opt);
    t!(s: "..", filename_str, None, opt);
    t!(s: "..\\..", filename_str, None, opt);
    t!(s: "c:\\foo.txt", filename_str, "foo.txt");
    t!(s: "C:\\", filename_str, None, opt);
    t!(s: "C:", filename_str, None, opt);
    t!(s: "\\\\server\\share\\foo.txt", filename_str, "foo.txt");
    t!(s: "\\\\server\\share", filename_str, None, opt);
    t!(s: "\\\\server", filename_str, "server");
    t!(s: "\\\\?\\bar\\foo.txt", filename_str, "foo.txt");
    t!(s: "\\\\?\\bar", filename_str, None, opt);
    t!(s: "\\\\?\\", filename_str, None, opt);
    t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", filename_str, "foo.txt");
    t!(s: "\\\\?\\UNC\\server", filename_str, None, opt);
    t!(s: "\\\\?\\UNC\\", filename_str, None, opt);
    t!(s: "\\\\?\\C:\\foo.txt", filename_str, "foo.txt");
    t!(s: "\\\\?\\C:\\", filename_str, None, opt);
    t!(s: "\\\\?\\C:", filename_str, None, opt);
    t!(s: "\\\\?\\foo/bar", filename_str, None, opt);
    t!(s: "\\\\?\\C:/foo", filename_str, None, opt);
    t!(s: "\\\\.\\foo\\bar", filename_str, "bar");
    t!(s: "\\\\.\\foo", filename_str, None, opt);
    t!(s: "\\\\.\\foo/bar", filename_str, None, opt);
    // forward slash inside a device-namespace component is not a separator
    t!(s: "\\\\.\\foo\\bar/baz", filename_str, "bar/baz");
    t!(s: "\\\\.\\", filename_str, None, opt);
    t!(s: "\\\\?\\a\\b\\", filename_str, "b");
    // dirname: everything up to (but excluding) the filename
    t!(v: b"a\\b\\c", dirname, b"a\\b");
    t!(s: "a\\b\\c", dirname_str, "a\\b");
    t!(s: "\\a\\b\\c", dirname_str, "\\a\\b");
    t!(s: "a", dirname_str, ".");
    t!(s: "\\a", dirname_str, "\\");
    t!(s: ".", dirname_str, ".");
    t!(s: "\\", dirname_str, "\\");
    t!(s: "..", dirname_str, "..");
    t!(s: "..\\..", dirname_str, "..\\..");
    t!(s: "c:\\foo.txt", dirname_str, "C:\\");
    t!(s: "C:\\", dirname_str, "C:\\");
    t!(s: "C:", dirname_str, "C:");
    t!(s: "C:foo.txt", dirname_str, "C:");
    t!(s: "\\\\server\\share\\foo.txt", dirname_str, "\\\\server\\share");
    t!(s: "\\\\server\\share", dirname_str, "\\\\server\\share");
    t!(s: "\\\\server", dirname_str, "\\");
    t!(s: "\\\\?\\bar\\foo.txt", dirname_str, "\\\\?\\bar");
    t!(s: "\\\\?\\bar", dirname_str, "\\\\?\\bar");
    t!(s: "\\\\?\\", dirname_str, "\\\\?\\");
    t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", dirname_str, "\\\\?\\UNC\\server\\share");
    t!(s: "\\\\?\\UNC\\server", dirname_str, "\\\\?\\UNC\\server\\");
    t!(s: "\\\\?\\UNC\\", dirname_str, "\\\\?\\UNC\\\\");
    t!(s: "\\\\?\\C:\\foo.txt", dirname_str, "\\\\?\\C:\\");
    t!(s: "\\\\?\\C:\\", dirname_str, "\\\\?\\C:\\");
    t!(s: "\\\\?\\C:", dirname_str, "\\\\?\\C:");
    t!(s: "\\\\?\\C:/foo/bar", dirname_str, "\\\\?\\C:/foo/bar");
    t!(s: "\\\\?\\foo/bar", dirname_str, "\\\\?\\foo/bar");
    t!(s: "\\\\.\\foo\\bar", dirname_str, "\\\\.\\foo");
    t!(s: "\\\\.\\foo", dirname_str, "\\\\.\\foo");
    t!(s: "\\\\?\\a\\b\\", dirname_str, "\\\\?\\a");
    // filestem: filename with its extension (text after the last dot) removed
    t!(v: b"hi\\there.txt", filestem, Some(b"there"));
    t!(s: "hi\\there.txt", filestem_str, "there");
    t!(s: "hi\\there", filestem_str, "there");
    t!(s: "there.txt", filestem_str, "there");
    t!(s: "there", filestem_str, "there");
    t!(s: ".", filestem_str, None, opt);
    t!(s: "\\", filestem_str, None, opt);
    t!(s: "foo\\.bar", filestem_str, ".bar");
    t!(s: ".bar", filestem_str, ".bar");
    t!(s: "..bar", filestem_str, ".");
    t!(s: "hi\\there..txt", filestem_str, "there.");
    t!(s: "..", filestem_str, None, opt);
    t!(s: "..\\..", filestem_str, None, opt);
    // filestem is based on filename, so we don't need the full set of prefix tests
    // extension: None when there is no dot past the first character
    t!(v: b"hi\\there.txt", extension, Some(b"txt"));
    t!(v: b"hi\\there", extension, None);
    t!(s: "hi\\there.txt", extension_str, Some("txt"), opt);
    t!(s: "hi\\there", extension_str, None, opt);
    t!(s: "there.txt", extension_str, Some("txt"), opt);
    t!(s: "there", extension_str, None, opt);
    t!(s: ".", extension_str, None, opt);
    t!(s: "\\", extension_str, None, opt);
    t!(s: "foo\\.bar", extension_str, None, opt);
    t!(s: ".bar", extension_str, None, opt);
    t!(s: "..bar", extension_str, Some("bar"), opt);
    t!(s: "hi\\there..txt", extension_str, Some("txt"), opt);
    t!(s: "..", extension_str, None, opt);
    t!(s: "..\\..", extension_str, None, opt);
    // extension is based on filename, so we don't need the full set of prefix tests
}
#[test]
// push(component): verifies that in-place push agrees with the allocating
// join() for the same arguments, then checks that pushing can upgrade a
// prefix (e.g. "\\?\C:" + "foo" re-parses as a verbatim disk prefix).
fn test_push() {
    macro_rules! t {
        (s: $path:expr, $join:expr) => (
            {
                let path = $path;
                let join = $join;
                let mut p1 = Path::new(path);
                let p2 = p1.clone();
                p1.push(join);
                // push must produce exactly what join produces
                assert_eq!(p1, p2.join(join));
            }
        )
    }
    t!(s: "a\\b\\c", "..");
    t!(s: "\\a\\b\\c", "d");
    t!(s: "a\\b", "c\\d");
    t!(s: "a\\b", "\\c\\d");
    // this is just a sanity-check test. push and join share an implementation,
    // so there's no need for the full set of prefix tests
    // we do want to check one odd case though to ensure the prefix is re-parsed
    let mut p = Path::new("\\\\?\\C:");
    assert_eq!(prefix(&p), Some(VerbatimPrefix(2)));
    p.push("foo");
    assert_eq!(prefix(&p), Some(VerbatimDiskPrefix));
    assert_eq!(p.as_str(), Some("\\\\?\\C:\\foo"));
    // and another with verbatim non-normalized paths
    let mut p = Path::new("\\\\?\\C:\\a\\");
    p.push("foo");
    assert_eq!(p.as_str(), Some("\\\\?\\C:\\a\\foo"));
}
#[test]
// push(&Path): a relative argument is appended; an absolute or
// differently-rooted argument (leading \, drive-letter, UNC) REPLACES the
// receiver instead of being appended.
fn test_push_path() {
    macro_rules! t {
        (s: $path:expr, $push:expr, $exp:expr) => (
            {
                let mut p = Path::new($path);
                let push = Path::new($push);
                p.push(&push);
                assert_eq!(p.as_str(), Some($exp));
            }
        )
    }
    t!(s: "a\\b\\c", "d", "a\\b\\c\\d");
    t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
    t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
    t!(s: "a\\b", "\\c\\d", "\\c\\d");
    t!(s: "a\\b", ".", "a\\b");
    t!(s: "a\\b", "..\\c", "a\\c");
    // a cwd-relative drive path ("C:a.txt") also replaces the receiver
    t!(s: "a\\b", "C:a.txt", "C:a.txt");
    t!(s: "a\\b", "..\\..\\..\\c", "..\\c");
    t!(s: "a\\b", "C:\\a.txt", "C:\\a.txt");
    t!(s: "C:\\a", "C:\\b.txt", "C:\\b.txt");
    // pushing "C:d" onto a path already on drive C appends just "d"
    t!(s: "C:\\a\\b\\c", "C:d", "C:\\a\\b\\c\\d");
    t!(s: "C:a\\b\\c", "C:d", "C:a\\b\\c\\d");
    t!(s: "C:a\\b", "..\\..\\..\\c", "C:..\\c");
    t!(s: "C:\\a\\b", "..\\..\\..\\c", "C:\\c");
    t!(s: "C:", r"a\b\c", r"C:a\b\c");
    t!(s: "C:", r"..\a", r"C:..\a");
    // UNC receivers
    t!(s: "\\\\server\\share\\foo", "bar", "\\\\server\\share\\foo\\bar");
    t!(s: "\\\\server\\share\\foo", "..\\..\\bar", "\\\\server\\share\\bar");
    t!(s: "\\\\server\\share\\foo", "C:baz", "C:baz");
    // verbatim receivers: ".." is NOT collapsed (verbatim paths are literal)
    t!(s: "\\\\?\\C:\\a\\b", "C:c\\d", "\\\\?\\C:\\a\\b\\c\\d");
    t!(s: "\\\\?\\C:a\\b", "C:c\\d", "C:c\\d");
    t!(s: "\\\\?\\C:\\a\\b", "C:\\c\\d", "C:\\c\\d");
    t!(s: "\\\\?\\foo\\bar", "baz", "\\\\?\\foo\\bar\\baz");
    t!(s: "\\\\?\\C:\\a\\b", "..\\..\\..\\c", "\\\\?\\C:\\a\\b\\..\\..\\..\\c");
    t!(s: "\\\\?\\foo\\bar", "..\\..\\c", "\\\\?\\foo\\bar\\..\\..\\c");
    t!(s: "\\\\?\\", "foo", "\\\\?\\\\foo");
    t!(s: "\\\\?\\UNC\\server\\share\\foo", "bar", "\\\\?\\UNC\\server\\share\\foo\\bar");
    t!(s: "\\\\?\\UNC\\server\\share", "C:\\a", "C:\\a");
    t!(s: "\\\\?\\UNC\\server\\share", "C:a", "C:a");
    t!(s: "\\\\?\\UNC\\server", "foo", "\\\\?\\UNC\\server\\\\foo");
    t!(s: "C:\\a", "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share");
    t!(s: "\\\\.\\foo\\bar", "baz", "\\\\.\\foo\\bar\\baz");
    t!(s: "\\\\.\\foo\\bar", "C:a", "C:a");
    // again, not sure about the following, but I'm assuming \\.\ should be verbatim
    t!(s: "\\\\.\\foo", "..\\bar", "\\\\.\\foo\\..\\bar");
    t!(s: "\\\\?\\C:", "foo", "\\\\?\\C:\\foo"); // this is a weird one
}
#[test]
// push_many: pushes each element of a slice in order; accepts &str, String,
// byte-slice and Vec<u8> elements. An absolute element resets the path,
// exactly as a single push of that element would.
fn test_push_many() {
    macro_rules! t {
        (s: $path:expr, $push:expr, $exp:expr) => (
            {
                let mut p = Path::new($path);
                p.push_many(&$push);
                assert_eq!(p.as_str(), Some($exp));
            }
        );
        (v: $path:expr, $push:expr, $exp:expr) => (
            {
                let mut p = Path::new($path);
                p.push_many(&$push);
                assert_eq!(p.as_vec(), $exp);
            }
        )
    }
    t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
    // an absolute element ("\e") discards everything pushed before it
    t!(s: "a\\b\\c", ["d", "\\e"], "\\e");
    t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
    t!(s: "a\\b\\c", ["d".to_string(), "e".to_string()], "a\\b\\c\\d\\e");
    t!(v: b"a\\b\\c", [b"d", b"e"], b"a\\b\\c\\d\\e");
    t!(v: b"a\\b\\c", [b"d", b"\\e", b"f"], b"\\e\\f");
    t!(v: b"a\\b\\c", [b"d".to_vec(), b"e".to_vec()],
       b"a\\b\\c\\d\\e");
}
#[test]
fn test_pop() {
macro_rules! t {
(s: $path:expr, $left:expr, $right:expr) => (
{
let pstr = $path;
let mut p = Path::new(pstr);
let result = p.pop();
let left = $left;
assert_eq!(p.as_str(), Some(left));
assert_eq!(result, $right);
}
);
(b: $path:expr, $left:expr, $right:expr) => (
{
let mut p = Path::new($path);
let result = p.pop();
assert_eq!(p.as_vec(), $left);
assert_eq!(result, $right);
}
)
}
t!(s: "a\\b\\c", "a\\b", true);
t!(s: "a", ".", true);
t!(s: ".", ".", false);
t!(s: "\\a", "\\", true);
t!(s: "\\", "\\", false);
t!(b: b"a\\b\\c", b"a\\b", true);
t!(b: b"a", b".", true);
t!(b: b".", b".", false);
t!(b: b"\\a", b"\\", true);
t!(b: b"\\", b"\\", false);
t!(s: "C:\\a\\b", "C:\\a", true);
t!(s: "C:\\a", "C:\\", true);
t!(s: "C:\\", "C:\\", false);
t!(s: "C:a\\b", "C:a", true);
t!(s: "C:a", "C:", true);
t!(s: "C:", "C:", false);
t!(s: "\\\\server\\share\\a\\b", "\\\\server\\share\\a", true);
t!(s: "\\\\server\\share\\a", "\\\\server\\share", true);
t!(s: "\\\\server\\share", "\\\\server\\share", false);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", true);
t!(s: "\\\\?\\a\\b", "\\\\?\\a", true);
t!(s: "\\\\?\\a", "\\\\?\\a", false);
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", true);
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\", true);
t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\", false);
t!(s: "\\\\?\\UNC\\server\\share\\a\\b", "\\\\?\\UNC\\server\\share\\a", true);
t!(s: "\\\\?\\UNC\\server\\share\\a", "\\\\?\\UNC\\server\\share", true);
t!(s: "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share", false);
t!(s: "\\\\.\\a\\b\\c", "\\\\.\\a\\b", true);
t!(s: "\\\\.\\a\\b", "\\\\.\\a", true);
t!(s: "\\\\.\\a", "\\\\.\\a", false);
t!(s: "\\\\?\\a\\b\\", "\\\\?\\a", true);
}
#[test]
fn test_root_path() {
    // root_path is None for purely relative paths; otherwise it is the
    // prefix (if any) plus the root separator.
    macro_rules! t {
        ($input:expr, $expected:expr) => (
            assert_eq!(Path::new($input).root_path(), $expected)
        )
    }
    t!("a\\b\\c", None);
    t!("\\a\\b\\c", Some(Path::new("\\")));
    t!("C:a", Some(Path::new("C:")));
    t!("C:\\a", Some(Path::new("C:\\")));
    t!("\\\\a\\b\\c", Some(Path::new("\\\\a\\b")));
    t!("\\\\?\\a\\b", Some(Path::new("\\\\?\\a")));
    t!("\\\\?\\C:\\a", Some(Path::new("\\\\?\\C:\\")));
    t!("\\\\?\\UNC\\a\\b\\c", Some(Path::new("\\\\?\\UNC\\a\\b")));
    t!("\\\\.\\a\\b", Some(Path::new("\\\\.\\a")));
}
#[test]
// join: sanity checks for the allocating counterpart of push, using the
// file-level t! macro (s: string repr, v: byte repr).
fn test_join() {
    t!(s: Path::new("a\\b\\c").join(".."), "a\\b");
    t!(s: Path::new("\\a\\b\\c").join("d"), "\\a\\b\\c\\d");
    t!(s: Path::new("a\\b").join("c\\d"), "a\\b\\c\\d");
    t!(s: Path::new("a\\b").join("\\c\\d"), "\\c\\d");
    t!(s: Path::new(".").join("a\\b"), "a\\b");
    t!(s: Path::new("\\").join("a\\b"), "\\a\\b");
    t!(v: Path::new(b"a\\b\\c").join(b".."), b"a\\b");
    t!(v: Path::new(b"\\a\\b\\c").join(b"d"), b"\\a\\b\\c\\d");
    // full join testing is covered under test_push_path, so no need for
    // the full set of prefix tests
}
#[test]
fn test_join_path() {
    // join with a &Path argument; mirrors test_push_path for the
    // allocating case.
    macro_rules! t {
        (s: $base:expr, $suffix:expr, $expected:expr) => (
            {
                let base = Path::new($base);
                let suffix = Path::new($suffix);
                let joined = base.join(&suffix);
                assert_eq!(joined.as_str(), Some($expected));
            }
        )
    }
    t!(s: "a\\b\\c", "..", "a\\b");
    t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
    t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
    t!(s: "a\\b", "\\c\\d", "\\c\\d");
    t!(s: ".", "a\\b", "a\\b");
    t!(s: "\\", "a\\b", "\\a\\b");
    // join is implemented using push, so there's no need for
    // the full set of prefix tests
}
#[test]
// join_many: allocating counterpart of push_many; accepts &str, String and
// byte-slice elements, and an absolute element resets the result.
fn test_join_many() {
    macro_rules! t {
        (s: $path:expr, $join:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let res = path.join_many(&$join);
                assert_eq!(res.as_str(), Some($exp));
            }
        );
        (v: $path:expr, $join:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let res = path.join_many(&$join);
                assert_eq!(res.as_vec(), $exp);
            }
        )
    }
    t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
    t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d");
    // an absolute element ("\e") discards everything joined before it
    t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
    t!(s: "a\\b\\c", ["d".to_string(), "e".to_string()], "a\\b\\c\\d\\e");
    t!(v: b"a\\b\\c", [b"d", b"e"], b"a\\b\\c\\d\\e");
    t!(v: b"a\\b\\c", [b"d".to_vec(), b"e".to_vec()],
       b"a\\b\\c\\d\\e");
}
#[test]
fn test_with_helpers() {
macro_rules! t {
(s: $path:expr, $op:ident, $arg:expr, $res:expr) => (
{
let pstr = $path;
let path = Path::new(pstr);
let arg = $arg;
let res = path.$op(arg);
let exp = Path::new($res);
assert_eq!(res, exp);
}
)
}
t!(s: "a\\b\\c", with_filename, "d", "a\\b\\d");
t!(s: ".", with_filename, "foo", "foo");
t!(s: "\\a\\b\\c", with_filename, "d", "\\a\\b\\d");
t!(s: "\\", with_filename, "foo", "\\foo");
t!(s: "\\a", with_filename, "foo", "\\foo");
t!(s: "foo", with_filename, "bar", "bar");
t!(s: "\\", with_filename, "foo\\", "\\foo");
t!(s: "\\a", with_filename, "foo\\", "\\foo");
t!(s: "a\\b\\c", with_filename, "", "a\\b");
t!(s: "a\\b\\c", with_filename, ".", "a\\b");
t!(s: "a\\b\\c", with_filename, "..", "a");
t!(s: "\\a", with_filename, "", "\\");
t!(s: "foo", with_filename, "", ".");
t!(s: "a\\b\\c", with_filename, "d\\e", "a\\b\\d\\e");
t!(s: "a\\b\\c", with_filename, "\\d", "a\\b\\d");
t!(s: "..", with_filename, "foo", "..\\foo");
t!(s: "..\\..", with_filename, "foo", "..\\..\\foo");
t!(s: "..", with_filename, "", "..");
t!(s: "..\\..", with_filename, "", "..\\..");
t!(s: "C:\\foo\\bar", with_filename, "baz", "C:\\foo\\baz");
t!(s: "C:\\foo", with_filename, "bar", "C:\\bar");
t!(s: "C:\\", with_filename, "foo", "C:\\foo");
t!(s: "C:foo\\bar", with_filename, "baz", "C:foo\\baz");
t!(s: "C:foo", with_filename, "bar", "C:bar");
t!(s: "C:", with_filename, "foo", "C:foo");
t!(s: "C:\\foo", with_filename, "", "C:\\");
t!(s: "C:foo", with_filename, "", "C:");
t!(s: "C:\\foo\\bar", with_filename, "..", "C:\\");
t!(s: "C:\\foo", with_filename, "..", "C:\\");
t!(s: "C:\\", with_filename, "..", "C:\\");
t!(s: "C:foo\\bar", with_filename, "..", "C:");
t!(s: "C:foo", with_filename, "..", "C:..");
t!(s: "C:", with_filename, "..", "C:..");
t!(s: "\\\\server\\share\\foo", with_filename, "bar", "\\\\server\\share\\bar");
t!(s: "\\\\server\\share", with_filename, "foo", "\\\\server\\share\\foo");
t!(s: "\\\\server\\share\\foo", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share\\foo", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\?\\C:\\foo\\bar", with_filename, "baz", "\\\\?\\C:\\foo\\baz");
t!(s: "\\\\?\\C:\\foo", with_filename, "bar", "\\\\?\\C:\\bar");
t!(s: "\\\\?\\C:\\", with_filename, "foo", "\\\\?\\C:\\foo");
t!(s: "\\\\?\\C:\\foo", with_filename, "..", "\\\\?\\C:\\..");
t!(s: "\\\\?\\foo\\bar", with_filename, "baz", "\\\\?\\foo\\baz");
t!(s: "\\\\?\\foo", with_filename, "bar", "\\\\?\\foo\\bar");
t!(s: "\\\\?\\", with_filename, "foo", "\\\\?\\\\foo");
t!(s: "\\\\?\\foo\\bar", with_filename, "..", "\\\\?\\foo\\..");
t!(s: "\\\\.\\foo\\bar", with_filename, "baz", "\\\\.\\foo\\baz");
t!(s: "\\\\.\\foo", with_filename, "bar", "\\\\.\\foo\\bar");
t!(s: "\\\\.\\foo\\bar", with_filename, "..", "\\\\.\\foo\\..");
t!(s: "hi\\there.txt", with_extension, "exe", "hi\\there.exe");
t!(s: "hi\\there.txt", with_extension, "", "hi\\there");
t!(s: "hi\\there.txt", with_extension, ".", "hi\\there..");
t!(s: "hi\\there.txt", with_extension, "..", "hi\\there...");
t!(s: "hi\\there", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\there", with_extension, ".", "hi\\there..");
t!(s: "hi\\there", with_extension, "..", "hi\\there...");
t!(s: "hi\\there.", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\.foo", with_extension, "txt", "hi\\.foo.txt");
t!(s: "hi\\there.txt", with_extension, ".foo", "hi\\there..foo");
t!(s: "\\", with_extension, "txt", "\\");
t!(s: "\\", with_extension, ".", "\\");
t!(s: "\\", with_extension, "..", "\\");
t!(s: ".", with_extension, "txt", ".");
// extension setter calls filename setter internally, no need for extended tests
}
#[test]
// set_filename / set_extension: in-place setters must agree with the
// corresponding with_filename / with_extension on a fresh copy.
// NOTE(review): the s: and v: macro arms are token-for-token identical;
// presumably kept separate to mirror the s:/v: convention of sibling tests.
fn test_setters() {
    macro_rules! t {
        (s: $path:expr, $set:ident, $with:ident, $arg:expr) => (
            {
                let path = $path;
                let arg = $arg;
                let mut p1 = Path::new(path);
                p1.$set(arg);
                let p2 = Path::new(path);
                assert_eq!(p1, p2.$with(arg));
            }
        );
        (v: $path:expr, $set:ident, $with:ident, $arg:expr) => (
            {
                let path = $path;
                let arg = $arg;
                let mut p1 = Path::new(path);
                p1.$set(arg);
                let p2 = Path::new(path);
                assert_eq!(p1, p2.$with(arg));
            }
        )
    }
    t!(v: b"a\\b\\c", set_filename, with_filename, b"d");
    t!(v: b"\\", set_filename, with_filename, b"foo");
    t!(s: "a\\b\\c", set_filename, with_filename, "d");
    t!(s: "\\", set_filename, with_filename, "foo");
    t!(s: ".", set_filename, with_filename, "foo");
    t!(s: "a\\b", set_filename, with_filename, "");
    t!(s: "a", set_filename, with_filename, "");
    t!(v: b"hi\\there.txt", set_extension, with_extension, b"exe");
    t!(s: "hi\\there.txt", set_extension, with_extension, "exe");
    t!(s: "hi\\there.", set_extension, with_extension, "txt");
    t!(s: "hi\\there", set_extension, with_extension, "txt");
    t!(s: "hi\\there.txt", set_extension, with_extension, "");
    t!(s: "hi\\there", set_extension, with_extension, "");
    t!(s: ".", set_extension, with_extension, "txt");
    // with_ helpers use the setter internally, so the tests for the with_ helpers
    // will suffice. No need for the full set of prefix tests.
}
#[test]
// Cross-checks all four component getters (filename, dirname, filestem,
// extension) against one path at a time; s: uses the _str accessors,
// v: the byte-slice accessors.
fn test_getters() {
    macro_rules! t {
        (s: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
            {
                let path = $path;
                assert_eq!(path.filename_str(), $filename);
                assert_eq!(path.dirname_str(), $dirname);
                assert_eq!(path.filestem_str(), $filestem);
                assert_eq!(path.extension_str(), $ext);
            }
        );
        (v: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
            {
                let path = $path;
                assert_eq!(path.filename(), $filename);
                assert_eq!(path.dirname(), $dirname);
                assert_eq!(path.filestem(), $filestem);
                assert_eq!(path.extension(), $ext);
            }
        )
    }
    t!(v: Path::new(b"a\\b\\c"), Some(b"c"), b"a\\b", Some(b"c"), None);
    t!(s: Path::new("a\\b\\c"), Some("c"), Some("a\\b"), Some("c"), None);
    t!(s: Path::new("."), None, Some("."), None, None);
    t!(s: Path::new("\\"), None, Some("\\"), None, None);
    t!(s: Path::new(".."), None, Some(".."), None, None);
    t!(s: Path::new("..\\.."), None, Some("..\\.."), None, None);
    t!(s: Path::new("hi\\there.txt"), Some("there.txt"), Some("hi"),
          Some("there"), Some("txt"));
    t!(s: Path::new("hi\\there"), Some("there"), Some("hi"), Some("there"), None);
    // a trailing dot yields an empty (but present) extension
    t!(s: Path::new("hi\\there."), Some("there."), Some("hi"),
          Some("there"), Some(""));
    t!(s: Path::new("hi\\.there"), Some(".there"), Some("hi"), Some(".there"), None);
    t!(s: Path::new("hi\\..there"), Some("..there"), Some("hi"),
          Some("."), Some("there"));
    // these are already tested in test_components, so no need for extended tests
}
#[test]
// dir_path: the dirname returned as an owned Path (file-level t! macro).
fn test_dir_path() {
    t!(s: Path::new("hi\\there").dir_path(), "hi");
    t!(s: Path::new("hi").dir_path(), ".");
    t!(s: Path::new("\\hi").dir_path(), "\\");
    t!(s: Path::new("\\").dir_path(), "\\");
    t!(s: Path::new("..").dir_path(), "..");
    t!(s: Path::new("..\\..").dir_path(), "..\\..");
    // dir_path is just dirname interpreted as a path.
    // No need for extended tests
}
#[test]
// Classifies each path four ways: absolute, volume-relative ("\foo" --
// relative to the current drive), cwd-relative ("C:foo" -- relative to the
// cwd on that drive), and plain relative. The four flags are mutually
// exclusive in every case below.
fn test_is_absolute() {
    macro_rules! t {
        ($path:expr, $abs:expr, $vol:expr, $cwd:expr, $rel:expr) => (
            {
                let path = Path::new($path);
                let (abs, vol, cwd, rel) = ($abs, $vol, $cwd, $rel);
                assert_eq!(path.is_absolute(), abs);
                assert_eq!(is_vol_relative(&path), vol);
                assert_eq!(is_cwd_relative(&path), cwd);
                assert_eq!(path.is_relative(), rel);
            }
        )
    }
    t!("a\\b\\c", false, false, false, true);
    t!("\\a\\b\\c", false, true, false, false);
    t!("a", false, false, false, true);
    t!("\\a", false, true, false, false);
    t!(".", false, false, false, true);
    t!("\\", false, true, false, false);
    t!("..", false, false, false, true);
    t!("..\\..", false, false, false, true);
    t!("C:a\\b.txt", false, false, true, false);
    t!("C:\\a\\b.txt", true, false, false, false);
    t!("\\\\server\\share\\a\\b.txt", true, false, false, false);
    // every verbatim/device path counts as absolute
    t!("\\\\?\\a\\b\\c.txt", true, false, false, false);
    t!("\\\\?\\C:\\a\\b.txt", true, false, false, false);
    t!("\\\\?\\C:a\\b.txt", true, false, false, false); // NB: not equivalent to C:a\b.txt
    t!("\\\\?\\UNC\\server\\share\\a\\b.txt", true, false, false, false);
    t!("\\\\.\\a\\b", true, false, false, false);
}
#[test]
// is_ancestor_of: true when `dest` lives at or under `path`. A path is its
// own ancestor; paths with different roots/prefixes are never related,
// except that drive letters compare case-insensitively and verbatim
// (\\?\C:\, \\?\UNC\) forms match their non-verbatim equivalents.
fn test_is_ancestor_of() {
    macro_rules! t {
        (s: $path:expr, $dest:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let dest = Path::new($dest);
                let exp = $exp;
                let res = path.is_ancestor_of(&dest);
                assert_eq!(res, exp);
            }
        )
    }
    t!(s: "a\\b\\c", "a\\b\\c\\d", true);
    t!(s: "a\\b\\c", "a\\b\\c", true);
    t!(s: "a\\b\\c", "a\\b", false);
    t!(s: "\\a\\b\\c", "\\a\\b\\c", true);
    t!(s: "\\a\\b", "\\a\\b\\c", true);
    t!(s: "\\a\\b\\c\\d", "\\a\\b\\c", false);
    t!(s: "\\a\\b", "a\\b\\c", false);
    t!(s: "a\\b", "\\a\\b\\c", false);
    t!(s: "a\\b\\c", "a\\b\\d", false);
    t!(s: "..\\a\\b\\c", "a\\b\\c", false);
    t!(s: "a\\b\\c", "..\\a\\b\\c", false);
    // component boundaries matter: "c" is not an ancestor of "cd"
    t!(s: "a\\b\\c", "a\\b\\cd", false);
    t!(s: "a\\b\\cd", "a\\b\\c", false);
    t!(s: "..\\a\\b", "..\\a\\b\\c", true);
    t!(s: ".", "a\\b", true);
    t!(s: ".", ".", true);
    t!(s: "\\", "\\", true);
    t!(s: "\\", "\\a\\b", true);
    t!(s: "..", "a\\b", true);
    t!(s: "..\\..", "a\\b", true);
    t!(s: "foo\\bar", "foobar", false);
    t!(s: "foobar", "foo\\bar", false);
    t!(s: "foo", "C:foo", false);
    t!(s: "C:foo", "foo", false);
    t!(s: "C:foo", "C:foo\\bar", true);
    t!(s: "C:foo\\bar", "C:foo", false);
    t!(s: "C:\\foo", "C:\\foo\\bar", true);
    t!(s: "C:", "C:", true);
    t!(s: "C:", "C:\\", false);
    t!(s: "C:\\", "C:", false);
    t!(s: "C:\\", "C:\\", true);
    t!(s: "C:\\foo\\bar", "C:\\foo", false);
    t!(s: "C:foo\\bar", "C:foo", false);
    t!(s: "C:\\foo", "\\foo", false);
    t!(s: "\\foo", "C:\\foo", false);
    t!(s: "\\\\server\\share\\foo", "\\\\server\\share\\foo\\bar", true);
    t!(s: "\\\\server\\share", "\\\\server\\share\\foo", true);
    t!(s: "\\\\server\\share\\foo", "\\\\server\\share", false);
    t!(s: "C:\\foo", "\\\\server\\share\\foo", false);
    t!(s: "\\\\server\\share\\foo", "C:\\foo", false);
    t!(s: "\\\\?\\foo\\bar", "\\\\?\\foo\\bar\\baz", true);
    t!(s: "\\\\?\\foo\\bar\\baz", "\\\\?\\foo\\bar", false);
    t!(s: "\\\\?\\foo\\bar", "\\foo\\bar\\baz", false);
    t!(s: "\\foo\\bar", "\\\\?\\foo\\bar\\baz", false);
    t!(s: "\\\\?\\C:\\foo\\bar", "\\\\?\\C:\\foo\\bar\\baz", true);
    t!(s: "\\\\?\\C:\\foo\\bar\\baz", "\\\\?\\C:\\foo\\bar", false);
    t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\foo", true);
    t!(s: "\\\\?\\C:", "\\\\?\\C:\\", false); // this is a weird one
    t!(s: "\\\\?\\C:\\", "\\\\?\\C:", false);
    // verbatim drive letters compare case-insensitively
    t!(s: "\\\\?\\C:\\a", "\\\\?\\c:\\a\\b", true);
    t!(s: "\\\\?\\c:\\a", "\\\\?\\C:\\a\\b", true);
    t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a\\b", false);
    t!(s: "\\\\?\\foo", "\\\\?\\foobar", false);
    t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", true);
    t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\", true);
    t!(s: "\\\\?\\a\\b\\", "\\\\?\\a\\b", true);
    t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", false);
    t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b\\", false);
    t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c\\d", true);
    t!(s: "\\\\?\\UNC\\a\\b\\c\\d", "\\\\?\\UNC\\a\\b\\c", false);
    t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", true);
    t!(s: "\\\\.\\foo\\bar", "\\\\.\\foo\\bar\\baz", true);
    t!(s: "\\\\.\\foo\\bar\\baz", "\\\\.\\foo\\bar", false);
    t!(s: "\\\\.\\foo", "\\\\.\\foo\\bar", true);
    t!(s: "\\\\.\\foo", "\\\\.\\foobar", false);
    t!(s: "\\a\\b", "\\\\?\\a\\b", false);
    t!(s: "\\\\?\\a\\b", "\\a\\b", false);
    t!(s: "\\a\\b", "\\\\?\\C:\\a\\b", false);
    t!(s: "\\\\?\\C:\\a\\b", "\\a\\b", false);
    // verbatim disk / verbatim UNC paths match their non-verbatim forms
    t!(s: "Z:\\a\\b", "\\\\?\\z:\\a\\b", true);
    t!(s: "C:\\a\\b", "\\\\?\\D:\\a\\b", false);
    t!(s: "a\\b", "\\\\?\\a\\b", false);
    t!(s: "\\\\?\\a\\b", "a\\b", false);
    t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b", true);
    t!(s: "\\\\?\\C:\\a\\b", "C:\\a\\b", true);
    t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", false);
    t!(s: "C:a\\b", "\\\\?\\C:a\\b", false);
    t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", false);
    t!(s: "\\\\?\\C:a\\b", "C:a\\b", false);
    t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b\\", true);
    t!(s: "\\\\?\\C:\\a\\b\\", "C:\\a\\b", true);
    t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c", true);
    t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b\\c", true);
}
#[test]
// ends_with_path: true when `child` is a relative path matching the last
// components of `path` on component boundaries. An absolute or
// prefix-bearing child never matches.
fn test_ends_with_path() {
    macro_rules! t {
        (s: $path:expr, $child:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let child = Path::new($child);
                assert_eq!(path.ends_with_path(&child), $exp);
            }
        );
    }
    t!(s: "a\\b\\c", "c", true);
    t!(s: "a\\b\\c", "d", false);
    t!(s: "foo\\bar\\quux", "bar", false);
    t!(s: "foo\\bar\\quux", "barquux", false);
    t!(s: "a\\b\\c", "b\\c", true);
    t!(s: "a\\b\\c", "a\\b\\c", true);
    t!(s: "a\\b\\c", "foo\\a\\b\\c", false);
    t!(s: "\\a\\b\\c", "a\\b\\c", true);
    t!(s: "\\a\\b\\c", "\\a\\b\\c", false); // child must be relative
    t!(s: "\\a\\b\\c", "foo\\a\\b\\c", false);
    t!(s: "a\\b\\c", "", false);
    t!(s: "", "", true);
    t!(s: "\\a\\b\\c", "d\\e\\f", false);
    t!(s: "a\\b\\c", "a\\b", false);
    t!(s: "a\\b\\c", "b", false);
    // a drive-letter child ("C:b") is not relative, so it never matches
    t!(s: "C:\\a\\b", "b", true);
    t!(s: "C:\\a\\b", "C:b", false);
    t!(s: "C:\\a\\b", "C:a\\b", false);
}
#[test]
fn test_path_relative_from() {
macro_rules! t {
(s: $path:expr, $other:expr, $exp:expr) => (
{
assert_eq!(Path::new($path).path_relative_from(&Path::new($other))
.as_ref().and_then(|x| x.as_str()), $exp);
}
)
}
t!(s: "a\\b\\c", "a\\b", Some("c"));
t!(s: "a\\b\\c", "a\\b\\d", Some("..\\c"));
t!(s: "a\\b\\c", "a\\b\\c\\d", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "a\\b\\c", "a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "a\\b\\c", "a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "a\\b\\c", "d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "a\\b\\c", "\\a\\b\\c", None);
t!(s: "\\a\\b\\c", "a\\b\\c", Some("\\a\\b\\c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d", Some(".."));
t!(s: "\\a\\b\\c", "\\a\\b", Some("c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "\\a\\b\\c", "\\a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "\\a\\b\\c", "\\d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "hi\\there.txt", "hi\\there", Some("..\\there.txt"));
t!(s: ".", "a", Some(".."));
t!(s: ".", "a\\b", Some("..\\.."));
t!(s: ".", ".", Some("."));
t!(s: "a", ".", Some("a"));
t!(s: "a\\b", ".", Some("a\\b"));
t!(s: "..", ".", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "\\a\\b\\c", "\\a\\b\\c", Some("."));
t!(s: "\\", "\\", Some("."));
t!(s: "\\", ".", Some("\\"));
t!(s: "..\\..\\a", "b", Some("..\\..\\..\\a"));
t!(s: "a", "..\\..\\b", None);
t!(s: "..\\..\\a", "..\\..\\b", Some("..\\a"));
t!(s: "..\\..\\a", "..\\..\\a\\b", Some(".."));
t!(s: "..\\..\\a\\b", "..\\..\\a", Some("b"));
t!(s: "C:a\\b\\c", "C:a\\b", Some("c"));
t!(s: "C:a\\b", "C:a\\b\\c", Some(".."));
t!(s: "C:" ,"C:a\\b", Some("..\\.."));
t!(s: "C:a\\b", "C:c\\d", Some("..\\..\\a\\b"));
t!(s: "C:a\\b", "D:c\\d", Some("C:a\\b"));
t!(s: "C:a\\b", "C:..\\c", None);
t!(s: "C:..\\a", "C:b\\c", Some("..\\..\\..\\a"));
t!(s: "C:\\a\\b\\c", "C:\\a\\b", Some("c"));
t!(s: "C:\\a\\b", "C:\\a\\b\\c", Some(".."));
t!(s: "C:\\", "C:\\a\\b", Some("..\\.."));
t!(s: "C:\\a\\b", "C:\\c\\d", Some("..\\..\\a\\b"));
t!(s: "C:\\a\\b", "C:a\\b", Some("C:\\a\\b"));
t!(s: "C:a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:a\\b", None);
t!(s: "a\\b", "C:\\a\\b", None);
t!(s: "a\\b", "C:a\\b", None);
t!(s: "\\\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\a\\b\\c\\e", "\\\\a\\b\\c\\d", Some("..\\e"));
t!(s: "\\\\a\\c\\d", "\\\\a\\b\\d", Some("\\\\a\\c\\d"));
t!(s: "\\\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\b\\c\\d"));
t!(s: "\\\\a\\b\\c", "\\d\\e", Some("\\\\a\\b\\c"));
t!(s: "\\d\\e", "\\\\a\\b\\c", None);
t!(s: "d\\e", "\\\\a\\b\\c", None);
t!(s: "C:\\a\\b\\c", "\\\\a\\b\\c", Some("C:\\a\\b\\c"));
t!(s: "C:\\c", "\\\\a\\b\\c", Some("C:\\c"));
t!(s: "\\\\?\\a\\b", "\\a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", Some("c"));
t!(s: "\\\\?\\a\\b", "\\\\?\\c\\d", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a", "\\\\?\\b", Some("\\\\?\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\b", Some("..\\a"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\c:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "C:\\a\\b", Some(".."));
t!(s: "C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\c:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", Some("\\\\?\\C:\\a\\b"));
t!(s: "\\\\?\\C:\\a\\.\\b", "C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", None);
t!(s: "\\\\?\\C:\\a\\.\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "\\\\?\\C:\\a\\b\\", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\.\\b", "\\\\?\\C:\\.", Some("b"));
t!(s: "C:\\b", "\\\\?\\C:\\.", Some("..\\b"));
t!(s: "\\\\?\\a\\.\\b\\c", "\\\\?\\a\\.\\b", Some("c"));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\.\\d", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\a\\..\\b", "\\\\?\\a\\..", Some("b"));
t!(s: "\\\\?\\a\\b\\..", "\\\\?\\a\\b", Some("\\\\?\\a\\b\\.."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\..\\b", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\C:\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\a\\b\\c"));
}
#[test]
fn test_str_components() {
    // t!(s: path, exp): checks str_components() yields exp forward, and the
    // reverse of exp when iterated backwards (DoubleEndedIterator contract).
    macro_rules! t {
        (s: $path:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let comps = path.str_components().map(|x|x.unwrap())
                    .collect::<Vec<&str>>();
                let exp: &[&str] = &$exp;
                assert_eq!(comps, exp);
                let comps = path.str_components().rev().map(|x|x.unwrap())
                    .collect::<Vec<&str>>();
                let exp = exp.iter().rev().map(|&x|x).collect::<Vec<&str>>();
                assert_eq!(comps, exp);
            }
        );
    }
    // Components exclude the root and any drive/UNC/verbatim prefix.
    t!(s: b"a\\b\\c", ["a", "b", "c"]);
    t!(s: "a\\b\\c", ["a", "b", "c"]);
    t!(s: "a\\b\\d", ["a", "b", "d"]);
    t!(s: "a\\b\\cd", ["a", "b", "cd"]);
    t!(s: "\\a\\b\\c", ["a", "b", "c"]);
    t!(s: "a", ["a"]);
    t!(s: "\\a", ["a"]);
    t!(s: "\\", []);
    t!(s: ".", ["."]);
    t!(s: "..", [".."]);
    t!(s: "..\\..", ["..", ".."]);
    t!(s: "..\\..\\foo", ["..", "..", "foo"]);
    t!(s: "C:foo\\bar", ["foo", "bar"]);
    t!(s: "C:foo", ["foo"]);
    t!(s: "C:", []);
    t!(s: "C:\\foo\\bar", ["foo", "bar"]);
    t!(s: "C:\\foo", ["foo"]);
    t!(s: "C:\\", []);
    t!(s: "\\\\server\\share\\foo\\bar", ["foo", "bar"]);
    t!(s: "\\\\server\\share\\foo", ["foo"]);
    t!(s: "\\\\server\\share", []);
    // Verbatim paths keep empty components (no normalization).
    t!(s: "\\\\?\\foo\\bar\\baz", ["bar", "baz"]);
    t!(s: "\\\\?\\foo\\bar", ["bar"]);
    t!(s: "\\\\?\\foo", []);
    t!(s: "\\\\?\\", []);
    t!(s: "\\\\?\\a\\b", ["b"]);
    t!(s: "\\\\?\\a\\b\\", ["b"]);
    t!(s: "\\\\?\\foo\\bar\\\\baz", ["bar", "", "baz"]);
    t!(s: "\\\\?\\C:\\foo\\bar", ["foo", "bar"]);
    t!(s: "\\\\?\\C:\\foo", ["foo"]);
    t!(s: "\\\\?\\C:\\", []);
    t!(s: "\\\\?\\C:\\foo\\", ["foo"]);
    t!(s: "\\\\?\\UNC\\server\\share\\foo\\bar", ["foo", "bar"]);
    t!(s: "\\\\?\\UNC\\server\\share\\foo", ["foo"]);
    t!(s: "\\\\?\\UNC\\server\\share", []);
    t!(s: "\\\\.\\foo\\bar\\baz", ["bar", "baz"]);
    t!(s: "\\\\.\\foo\\bar", ["bar"]);
    t!(s: "\\\\.\\foo", []);
}
#[test]
fn test_components_iter() {
    // t!(s: path, exp): checks the byte-slice components() iterator forward
    // and in reverse.
    macro_rules! t {
        (s: $path:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let comps = path.components().collect::<Vec<&[u8]>>();
                let exp: &[&[u8]] = &$exp;
                assert_eq!(comps, exp);
                let comps = path.components().rev().collect::<Vec<&[u8]>>();
                let exp = exp.iter().rev().map(|&x|x).collect::<Vec<&[u8]>>();
                assert_eq!(comps, exp);
            }
        )
    }
    t!(s: "a\\b\\c", [b"a", b"b", b"c"]);
    t!(s: ".", [b"."]);
    // since this is really a wrapper around str_components, those tests suffice
}
#[test]
fn test_make_non_verbatim() {
    // t!(path, exp): make_non_verbatim(path) must equal exp; None means the
    // verbatim path cannot be safely rewritten without a prefix.
    macro_rules! t {
        ($path:expr, $exp:expr) => (
            {
                let path = Path::new($path);
                let exp: Option<&str> = $exp;
                let exp = exp.map(|s| Path::new(s));
                assert_eq!(make_non_verbatim(&path), exp);
            }
        )
    }
    // Non-verbatim paths pass through unchanged.
    t!(r"\a\b\c", Some(r"\a\b\c"));
    t!(r"a\b\c", Some(r"a\b\c"));
    t!(r"C:\a\b\c", Some(r"C:\a\b\c"));
    t!(r"C:a\b\c", Some(r"C:a\b\c"));
    t!(r"\\server\share\foo", Some(r"\\server\share\foo"));
    // Device and bare verbatim prefixes have no non-verbatim equivalent.
    t!(r"\\.\foo", None);
    t!(r"\\?\foo", None);
    t!(r"\\?\C:", None);
    t!(r"\\?\C:foo", None);
    t!(r"\\?\C:\", Some(r"C:\"));
    t!(r"\\?\C:\foo", Some(r"C:\foo"));
    t!(r"\\?\C:\foo\bar\baz", Some(r"C:\foo\bar\baz"));
    // '.' and '..' are literal in verbatim paths, so stripping the prefix
    // would change meaning: must return None.
    t!(r"\\?\C:\foo\.\bar\baz", None);
    t!(r"\\?\C:\foo\bar\..\baz", None);
    t!(r"\\?\C:\foo\bar\..", None);
    t!(r"\\?\UNC\server\share\foo", Some(r"\\server\share\foo"));
    t!(r"\\?\UNC\server\share", Some(r"\\server\share"));
    t!(r"\\?\UNC\server", None);
    t!(r"\\?\UNC\server\", None);
}
}<|fim▁end|> | true
}
} |
<|file_name|>flash_jlink.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=logging-fstring-interpolation
"""Flashes firmware using Segger J-Link.
This script requires Segger hardware attached via JTAG/SWD.
See
https://chromium.googlesource.com/chromiumos/platform/ec/+/HEAD/docs/fingerprint/fingerprint-debugging.md#flash
for instructions.
"""
import argparse
import logging
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
DEFAULT_SEGGER_REMOTE_PORT = 19020
# Commands are documented here: https://wiki.segger.com/J-Link_Commander
JLINK_COMMANDS = """
exitonerror 1
r
loadfile {FIRMWARE} {FLASH_ADDRESS}
r<|fim▁hole|>"""
class BoardConfig:
    """Board configuration.

    Holds the J-Link parameters required to flash one board type.
    """

    def __init__(self, interface, device, flash_address):
        # Debug interface passed to JLink's -if flag (e.g. 'SWD').
        self.interface = interface
        # Chip identifier passed to JLink's -device flag (e.g. 'STM32F412CG').
        self.device = device
        # Flash base address (hex string) the firmware image is loaded at.
        self.flash_address = flash_address
# J-Link debug interface shared by all supported boards.
SWD_INTERFACE = 'SWD'
# Default flash base address for the STM32 parts used below.
STM32_DEFAULT_FLASH_ADDRESS = '0x8000000'
DRAGONCLAW_CONFIG = BoardConfig(interface=SWD_INTERFACE, device='STM32F412CG',
                                flash_address=STM32_DEFAULT_FLASH_ADDRESS)
ICETOWER_CONFIG = BoardConfig(interface=SWD_INTERFACE, device='STM32H743ZI',
                              flash_address=STM32_DEFAULT_FLASH_ADDRESS)

# Maps each supported --board name to its flashing configuration; several
# board names share the same underlying chip config.
BOARD_CONFIGS = {
    'dragonclaw': DRAGONCLAW_CONFIG,
    'bloonchipper': DRAGONCLAW_CONFIG,
    'nucleo-f412zg': DRAGONCLAW_CONFIG,
    'dartmonkey': ICETOWER_CONFIG,
    'icetower': ICETOWER_CONFIG,
    'nucleo-dartmonkey': ICETOWER_CONFIG,
    'nucleo-h743zi': ICETOWER_CONFIG,
}
def is_tcp_port_open(host: str, tcp_port: int) -> bool:
    """Checks if the TCP host port is open."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.settimeout(2)  # 2 second timeout
    try:
        probe.connect((host, tcp_port))
        probe.shutdown(socket.SHUT_RDWR)
    except (ConnectionRefusedError, socket.timeout):
        return False
    finally:
        probe.close()
    # Any other error deliberately propagates as an odd exception.
    # We shut down and closed the connection, but the server may need a
    # moment before it listens again. If the following error is seen, this
    # timeout should be increased. 300ms seems to be the minimum.
    #
    # Connecting to J-Link via IP...FAILED: Can not connect to J-Link via \
    # TCP/IP (127.0.0.1, port 19020)
    time.sleep(0.5)
    return True
def create_jlink_command_file(firmware_file, config):
    """Renders the J-Link command script for the given firmware and board.

    Returns an open NamedTemporaryFile; the caller closes it (which also
    removes it) once flashing is complete.
    """
    script = JLINK_COMMANDS.format(FIRMWARE=firmware_file,
                                   FLASH_ADDRESS=config.flash_address)
    cmd_file = tempfile.NamedTemporaryFile()
    cmd_file.write(script.encode('utf-8'))
    cmd_file.flush()
    return cmd_file
def flash(jlink_exe, remote, device, interface, cmd_file):
    """Invokes JLinkExe to flash the firmware described by cmd_file.

    Args:
        jlink_exe: Path to the JLinkExe binary.
        remote: Optional 'host' or 'host:port' of a J-Link/JLinkRemoteServer;
            falsy means connect over USB.
        device: JLink device name (passed to -device).
        interface: Debug interface name (passed to -if, e.g. 'SWD').
        cmd_file: Path to the J-Link command script to execute.

    Returns:
        JLinkExe's exit code, or 1 if the remote is malformed/unreachable.
    """
    cmd = [
        jlink_exe,
    ]
    if remote:
        logging.debug(f'Connecting to J-Link over TCP/IP {remote}.')
        remote_components = remote.split(':')
        if len(remote_components) not in [1, 2]:
            # Fatal condition, so log at error level like the other failure
            # paths below (was logging.debug, which hid the message at
            # non-debug log levels).
            logging.error(f'Given remote "{remote}" is malformed.')
            return 1
        host = remote_components[0]
        try:
            ip = socket.gethostbyname(host)
        except socket.gaierror as e:
            logging.error(f'Failed to resolve host "{host}": {e}.')
            return 1
        logging.debug(f'Resolved {host} as {ip}.')
        port = DEFAULT_SEGGER_REMOTE_PORT
        if len(remote_components) == 2:
            try:
                port = int(remote_components[1])
            except ValueError:
                logging.error(
                    f'Given remote port "{remote_components[1]}" is malformed.')
                return 1
        remote = f'{ip}:{port}'
        logging.debug(f'Checking connection to {remote}.')
        # Fail fast with a clear message if nothing is listening, instead of
        # letting JLinkExe time out.
        if not is_tcp_port_open(ip, port):
            logging.error(
                f"JLink server doesn't seem to be listening on {remote}.")
            logging.error('Ensure that JLinkRemoteServerCLExe is running.')
            return 1
        cmd.extend(['-ip', remote])
    cmd.extend([
        '-device', device,
        '-if', interface,
        '-speed', 'auto',
        '-autoconnect', '1',
        '-CommandFile', cmd_file,
    ])
    logging.debug('Running command: "%s"', ' '.join(cmd))
    # The command file uses 'exitonerror', so the return code reflects
    # flashing success; no need for subprocess's check= behavior.
    completed_process = subprocess.run(cmd)  # pylint: disable=subprocess-run-check
    logging.debug('JLink return code: %d', completed_process.returncode)
    return completed_process.returncode
def main(argv: list):
    """Parses command-line arguments and flashes the selected board.

    Args:
        argv: Argument list, excluding the program name.

    Returns:
        The JLinkExe exit code (0 on success). Exits with status 1 when the
        requested board has no known configuration.
    """
    parser = argparse.ArgumentParser()

    default_jlink = './JLink_Linux_V684a_x86_64/JLinkExe'
    # Fall back to a JLinkExe on PATH when the bundled copy is absent.
    if shutil.which(default_jlink) is None:
        default_jlink = 'JLinkExe'
    parser.add_argument(
        '--jlink', '-j',
        help='JLinkExe path (default: ' + default_jlink + ')',
        default=default_jlink)

    parser.add_argument(
        '--remote', '-n',
        help='Use TCP/IP host[:port] to connect to a J-Link or '
        'JLinkRemoteServerCLExe. If unspecified, connect over USB.')

    default_board = 'bloonchipper'
    parser.add_argument(
        '--board', '-b',
        help='Board (default: ' + default_board + ')',
        default=default_board)

    default_firmware = os.path.join('./build', default_board, 'ec.bin')
    parser.add_argument(
        '--image', '-i',
        help='Firmware binary (default: ' + default_firmware + ')',
        default=default_firmware)

    log_level_choices = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
    parser.add_argument(
        '--log_level', '-l',
        choices=log_level_choices,
        default='DEBUG'
    )

    args = parser.parse_args(argv)
    logging.basicConfig(level=args.log_level)

    if args.board not in BOARD_CONFIGS:
        logging.error('Unable to find a config for board: "%s"', args.board)
        sys.exit(1)

    config = BOARD_CONFIGS[args.board]

    # Resolve the image path so JLink receives an absolute file name even
    # when invoked from a different working directory.
    args.image = os.path.realpath(args.image)
    # (Removed dead no-op: `args.jlink = args.jlink`.)

    cmd_file = create_jlink_command_file(args.image, config)
    ret_code = flash(args.jlink, args.remote, config.device, config.interface,
                     cmd_file.name)
    # Closing the NamedTemporaryFile also deletes the command script.
    cmd_file.close()
    return ret_code
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))<|fim▁end|> | go
exit |
<|file_name|>test_auth.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose.tools import * # noqa; PEP8 asserts
from webtest_plus import TestApp
import mock
import httplib as http
from flask import Flask
from modularodm import Q
from werkzeug.wrappers import BaseResponse
from framework import auth
from framework.auth import cas
from framework.sessions import Session
from framework.exceptions import HTTPError
from tests.base import OsfTestCase, assert_is_redirect
from tests.factories import (
UserFactory, UnregUserFactory, AuthFactory,
ProjectFactory, NodeFactory, AuthUserFactory, PrivateLinkFactory
)
from framework.auth import User, Auth
from framework.auth.decorators import must_be_logged_in
from website import mails
from website import settings
from website.project.decorators import (
must_have_permission, must_be_contributor,
must_have_addon, must_be_addon_authorizer,
)
class TestAuthUtils(OsfTestCase):
    """Tests for framework.auth helpers: registration, email confirmation,
    and user lookup."""

    def test_unreg_user_can_register(self):
        # Promoting an unregistered user issues a confirmation token for
        # their email address.
        user = UnregUserFactory()
        auth.register_unconfirmed(
            username=user.username,
            password='gattaca',
            fullname='Rosie',
        )
        assert_true(user.get_confirmation_token(user.username))

    @mock.patch('framework.auth.views.mails.send_mail')
    def test_confirm_email(self, mock_mail):
        user = UnregUserFactory()
        auth.register_unconfirmed(
            username=user.username,
            password='gattaca',
            fullname='Rosie',
        )
        token = user.get_confirmation_token(user.username)
        # First visit while anonymous: redirected to the CAS login page.
        res = self.app.get('/confirm/{}/{}'.format(user._id, token), allow_redirects=False)
        res = res.follow()
        assert_equal(res.status_code, 302)
        assert_in('login?service=', res.location)
        user.reload()
        # Exactly one welcome email is sent, with the expected payload.
        assert_equal(len(mock_mail.call_args_list), 1)
        empty, kwargs = mock_mail.call_args
        kwargs['user'].reload()
        assert_equal(empty, ())
        assert_equal(kwargs, {
            'user': user,
            'mimetype': 'html',
            'mail': mails.WELCOME,
            'to_addr': user.username,
        })
        # Second visit while authenticated: redirected to the dashboard,
        # and no additional email is sent.
        self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie())
        res = self.app.get('/confirm/{}/{}'.format(user._id, token))
        res = res.follow()
        assert_equal(res.status_code, 302)
        assert_in('dashboard', res.location)
        assert_equal(len(mock_mail.call_args_list), 1)
        # The most recent session for the user carries one status message.
        session = Session.find(
            Q('data.auth_user_id', 'eq', user._id)
        ).sort(
            '-date_modified'
        ).limit(1)[0]
        assert_equal(len(session.data['status']), 1)

    def test_get_user_by_id(self):
        user = UserFactory()
        assert_equal(User.load(user._id), user)

    def test_get_user_by_email(self):
        user = UserFactory()
        assert_equal(auth.get_user(email=user.username), user)

    def test_get_user_with_wrong_password_returns_false(self):
        # Bad credentials yield a falsy value rather than an exception.
        user = UserFactory.build()
        user.set_password('killerqueen')
        assert_false(
            auth.get_user(email=user.username, password='wrong')
        )
class TestAuthObject(OsfTestCase):
    """Tests for the Auth helper object."""

    def test_repr(self):
        auth = AuthFactory()
        rep = repr(auth)
        assert_in(str(auth.user), rep)

    def test_factory(self):
        auth_obj = AuthFactory()
        assert_true(isinstance(auth_obj.user, auth.User))

    def test_from_kwargs(self):
        # Auth.from_kwargs pulls the user from view kwargs and the
        # view-only key from the request arguments.
        user = UserFactory()
        request_args = {'view_only': 'mykey'}
        kwargs = {'user': user}
        auth_obj = Auth.from_kwargs(request_args, kwargs)
        assert_equal(auth_obj.user, user)
        assert_equal(auth_obj.private_key, request_args['view_only'])

    def test_logged_in(self):
        # logged_in reflects whether a user is attached.
        user = UserFactory()
        auth_obj = Auth(user=user)
        assert_true(auth_obj.logged_in)
        auth2 = Auth(user=None)
        assert_false(auth2.logged_in)
class TestPrivateLink(OsfTestCase):
def setUp(self):
super(TestPrivateLink, self).setUp()
self.flaskapp = Flask('testing_private_links')
@self.flaskapp.route('/project/<pid>/')
@must_be_contributor
def project_get(**kwargs):
return 'success', 200
self.app = TestApp(self.flaskapp)
<|fim▁hole|> self.link.nodes.append(self.project)
self.link.save()
@mock.patch('website.project.decorators.Auth.from_kwargs')
def test_has_private_link_key(self, mock_from_kwargs):
mock_from_kwargs.return_value = Auth(user=None)
res = self.app.get('/project/{0}'.format(self.project._primary_key),
{'view_only': self.link.key})
res = res.follow()
assert_equal(res.status_code, 200)
assert_equal(res.body, 'success')
@mock.patch('website.project.decorators.Auth.from_kwargs')
def test_does_not_have_key(self, mock_from_kwargs):
mock_from_kwargs.return_value = Auth(user=None)
res = self.app.get('/project/{0}'.format(self.project._primary_key),
{'key': None})
assert_is_redirect(res)
# Flask app for testing view decorators
decoratorapp = Flask('decorators')


@must_be_contributor
def view_that_needs_contributor(**kwargs):
    # Returns the resolved node so tests can assert which node the
    # decorator passed through.
    return kwargs.get('node') or kwargs.get('parent')
class AuthAppTestCase(OsfTestCase):
    """Base case that pushes a Flask request context so decorators that
    inspect the request can run outside a real HTTP request."""

    def setUp(self):
        self.ctx = decoratorapp.test_request_context()
        self.ctx.push()

    def tearDown(self):
        self.ctx.pop()
class TestMustBeContributorDecorator(AuthAppTestCase):
    """Tests for the must_be_contributor view decorator."""

    def setUp(self):
        super(TestMustBeContributorDecorator, self).setUp()
        self.contrib = AuthUserFactory()
        self.project = ProjectFactory()
        self.project.add_contributor(self.contrib, auth=Auth(self.project.creator))
        self.project.save()

    def test_must_be_contributor_when_user_is_contributor(self):
        result = view_that_needs_contributor(
            pid=self.project._primary_key,
            user=self.contrib)
        assert_equal(result, self.project)

    def test_must_be_contributor_when_user_is_not_contributor_raises_error(self):
        non_contributor = AuthUserFactory()
        with assert_raises(HTTPError):
            view_that_needs_contributor(
                pid=self.project._primary_key,
                user=non_contributor
            )

    def test_must_be_contributor_no_user(self):
        # Anonymous users are redirected to CAS login, not given an error.
        res = view_that_needs_contributor(
            pid=self.project._primary_key,
            user=None,
        )
        assert_is_redirect(res)
        # redirects to login url
        redirect_url = res.headers['Location']
        login_url = cas.get_login_url(service_url='http://localhost/')
        assert_equal(redirect_url, login_url)

    def test_must_be_contributor_parent_admin(self):
        # An admin on the parent project may access a child node even when
        # not a direct contributor on it.
        user = UserFactory()
        node = NodeFactory(parent=self.project, creator=user)
        res = view_that_needs_contributor(
            pid=self.project._id,
            nid=node._id,
            user=self.project.creator,
        )
        assert_equal(res, node)

    def test_must_be_contributor_parent_write(self):
        # Write-only (non-admin) permission on the parent is not enough: 403.
        user = UserFactory()
        node = NodeFactory(parent=self.project, creator=user)
        self.project.set_permissions(self.project.creator, ['read', 'write'])
        self.project.save()
        with assert_raises(HTTPError) as exc_info:
            view_that_needs_contributor(
                pid=self.project._id,
                nid=node._id,
                user=self.project.creator,
            )
        assert_equal(exc_info.exception.code, 403)
@must_be_logged_in
def protected(**kwargs):
    # Dummy view guarded by login, used by the decorator tests below.
    return 'open sesame'


@must_have_permission('dance')
def thriller(**kwargs):
    # Dummy view guarded by the 'dance' permission.
    return 'chiller'
class TestPermissionDecorators(AuthAppTestCase):
    """Tests for must_be_logged_in and must_have_permission."""

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_be_logged_in_decorator_with_user(self, mock_from_kwargs):
        user = UserFactory()
        mock_from_kwargs.return_value = Auth(user=user)
        protected()

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_be_logged_in_decorator_with_no_user(self, mock_from_kwargs):
        # Without a user, the decorator responds with a redirect to CAS login.
        mock_from_kwargs.return_value = Auth()
        resp = protected()
        assert_true(isinstance(resp, BaseResponse))
        login_url = cas.get_login_url(service_url='http://localhost/')
        assert_in(login_url, resp.headers.get('location'))

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_true(self, mock_from_kwargs, mock_to_nodes):
        project = ProjectFactory()
        project.add_permission(project.creator, 'dance')
        mock_from_kwargs.return_value = Auth(user=project.creator)
        mock_to_nodes.return_value = (None, project)
        thriller(node=project)

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_false(self, mock_from_kwargs, mock_to_nodes):
        # Logged-in user lacking the permission gets 403 Forbidden.
        project = ProjectFactory()
        mock_from_kwargs.return_value = Auth(user=project.creator)
        mock_to_nodes.return_value = (None, project)
        with assert_raises(HTTPError) as ctx:
            thriller(node=project)
        assert_equal(ctx.exception.code, http.FORBIDDEN)

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_not_logged_in(self, mock_from_kwargs, mock_to_nodes):
        # Anonymous users get 401 Unauthorized instead of 403.
        project = ProjectFactory()
        mock_from_kwargs.return_value = Auth()
        mock_to_nodes.return_value = (None, project)
        with assert_raises(HTTPError) as ctx:
            thriller(node=project)
        assert_equal(ctx.exception.code, http.UNAUTHORIZED)
def needs_addon_view(**kwargs):
    """Trivial view used as a target for the addon decorators in tests."""
    del kwargs  # accepted for decorator compatibility; unused
    return 'openaddon'
class TestMustHaveAddonDecorator(AuthAppTestCase):
    """Tests for must_have_addon with both 'node' and 'user' addon owners."""

    def setUp(self):
        super(TestMustHaveAddonDecorator, self).setUp()
        self.project = ProjectFactory()

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    def test_must_have_addon_node_true(self, mock_kwargs_to_nodes):
        mock_kwargs_to_nodes.return_value = (None, self.project)
        self.project.add_addon('github', auth=None)
        decorated = must_have_addon('github', 'node')(needs_addon_view)
        res = decorated()
        assert_equal(res, 'openaddon')

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    def test_must_have_addon_node_false(self, mock_kwargs_to_nodes):
        # Node without the addon: the wrapped view raises instead of running.
        mock_kwargs_to_nodes.return_value = (None, self.project)
        self.project.delete_addon('github', auth=None)
        decorated = must_have_addon('github', 'node')(needs_addon_view)
        with assert_raises(HTTPError):
            decorated()

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_addon_user_true(self, mock_current_user):
        mock_current_user.return_value = Auth(self.project.creator)
        self.project.creator.add_addon('github')
        decorated = must_have_addon('github', 'user')(needs_addon_view)
        res = decorated()
        assert_equal(res, 'openaddon')

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_addon_user_false(self, mock_current_user):
        # User without the addon: the wrapped view raises instead of running.
        mock_current_user.return_value = Auth(self.project.creator)
        self.project.creator.delete_addon('github')
        decorated = must_have_addon('github', 'user')(needs_addon_view)
        with assert_raises(HTTPError):
            decorated()
class TestMustBeAddonAuthorizerDecorator(AuthAppTestCase):
    """Tests for the must_be_addon_authorizer decorator."""

    def setUp(self):
        super(TestMustBeAddonAuthorizerDecorator, self).setUp()
        self.project = ProjectFactory()
        self.decorated = must_be_addon_authorizer('github')(needs_addon_view)

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_be_authorizer_true(self, mock_get_current_user, mock_kwargs_to_nodes):
        # Mock
        mock_get_current_user.return_value = Auth(self.project.creator)
        mock_kwargs_to_nodes.return_value = (None, self.project)
        # Setup: link the node addon to the current user's addon settings.
        self.project.add_addon('github', auth=None)
        node_settings = self.project.get_addon('github')
        self.project.creator.add_addon('github')
        user_settings = self.project.creator.get_addon('github')
        node_settings.user_settings = user_settings
        # Test
        res = self.decorated()
        assert_equal(res, 'openaddon')

    def test_must_be_authorizer_false(self):
        # Setup: the node addon is authorized by a different user.
        self.project.add_addon('github', auth=None)
        node_settings = self.project.get_addon('github')
        user2 = UserFactory()
        user2.add_addon('github')
        user_settings = user2.get_addon('github')
        node_settings.user_settings = user_settings
        # Test
        with assert_raises(HTTPError):
            self.decorated()

    def test_must_be_authorizer_no_user_settings(self):
        # Node addon exists but was never authorized by any user.
        self.project.add_addon('github', auth=None)
        with assert_raises(HTTPError):
            self.decorated()

    def test_must_be_authorizer_no_node_settings(self):
        # Node never had the addon at all.
        with assert_raises(HTTPError):
            self.decorated()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=False)
self.link = PrivateLinkFactory() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.