// interaction.spec.js
/* eslint-env jest */
describe('Interaction', () => {
describe('Interaction - security level loose', () => {
it('Graph: should handle a click on a node with a bound function', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-Function-2')
.click();
cy.get('.created-by-click').should('have.text', 'Clicked By Flow');
});
it('Graph: should handle a click on a node with a bound function with args', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-FunctionArg-18')
.click();
cy.get('.created-by-click-2').should('have.text', 'Clicked By Flow: ARGUMENT');
});
it('Flowchart: should handle a click on a node with a bound function where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-FunctionArg-22"]')
.click();
cy.get('.created-by-click-2').should('have.text', 'Clicked By Flow: ARGUMENT');
});
it('Graph: should handle a click on a node with a bound url', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('#flowchart-URL-3')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('Graph: should handle a click on a node with a bound url where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-2URL-7"]')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('Flowchart-v2: should handle a click on a node with a bound function', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-Function-10')
.click();
cy.get('.created-by-click').should('have.text', 'Clicked By Flow');
});
it('Flowchart-v2: should handle a click on a node with a bound function where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-1Function-14"]')
.click();
cy.get('.created-by-click').should('have.text', 'Clicked By Flow');
});
it('Flowchart-v2: should handle a click on a node with a bound url', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('#flowchart-URL-11')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('Flowchart-v2: should handle a click on a node with a bound url where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-2URL-15"]')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound URL clicking on the rect', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl1')
.click({ force: true });
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound URL clicking on the text', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl1-text')
.click({ force: true });
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound function without args', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl2')
.click({ force: true });
cy.get('.created-by-gant-click').should('have.text', 'Clicked By Gant cl2');
});
it('should handle a click on a task with a bound function with args', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl3')
.click({ force: true });
cy.get('.created-by-gant-click').should('have.text', 'Clicked By Gant test1 test2 test3');
});
it('should handle a click on a task with a bound function without args', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl2-text')
.click({ force: true });
cy.get('.created-by-gant-click').should('have.text', 'Clicked By Gant cl2');
});
it('should handle a click on a task with a bound function with args ', () => {
const url = 'http://localhost:9000/click_security_loose.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl3-text')
.click({ force: true });
cy.get('.created-by-gant-click').should('have.text', 'Clicked By Gant test1 test2 test3');
});
});
describe('Interaction - security level tight', () => {
it('should handle a click on a node without a bound function', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-Function-2')
.click();
cy.get('.created-by-click').should('not.exist');
// cy.get('.created-by-click').should('not.have.text', 'Clicked By Flow');
});
it('should handle a click on a node with a bound function where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-1Function-6"]')
.click();
// cy.get('.created-by-click').should('not.have.text', 'Clicked By Flow');
cy.get('.created-by-click').should('not.exist');
});
it('should handle a click on a node with a bound url', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-URL-3')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a node with a bound url where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-2URL-7"]')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound URL clicking on the rect', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl1')
.click({ force: true });
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound URL clicking on the text', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl1-text')
.click({ force: true });
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound function', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl2')
.click({ force: true });
// cy.get('.created-by-gant-click').should('not.have.text', 'Clicked By Gant cl2');
cy.get('.created-by-gant-click').should('not.exist');
});
it('should handle a click on a task with a bound function', () => {
const url = 'http://localhost:9000/click_security_strict.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl2-text')
.click({ force: true });
// cy.get('.created-by-gant-click').should('not.have.text', 'Clicked By Gant cl2');
cy.get('.created-by-gant-click').should('not.exist');
});
});
describe('Interaction - security level other, misspelling', () => {
it('should handle a click on a node with a bound function', () => {
const url = 'http://localhost:9000/click_security_other.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-Function-2')
.click();
// cy.get('.created-by-click').should('not.have.text', 'Clicked By Flow');
cy.get('.created-by-click').should('not.exist');
});
it('should handle a click on a node with a bound function where the node starts with a number', () => {
const url = 'http://localhost:9000/click_security_other.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g[id="flowchart-1Function-6"]')
.click();
cy.get('.created-by-click').should('not.exist');
});
it('should handle a click on a node with a bound url', () => {
const url = 'http://localhost:9000/click_security_other.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('g#flowchart-URL-3')
.click();
cy.location().should(location => {
expect(location.href).to.eq('http://localhost:9000/webpackUsage.html');
});
});
it('should handle a click on a task with a bound function', () => {
const url = 'http://localhost:9000/click_security_other.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('rect#cl2')
.click({ force: true });
cy.get('.created-by-gant-click').should('not.exist');
});
it('should handle a click on a task with a bound function', () => {
const url = 'http://localhost:9000/click_security_other.html';
cy.viewport(1440, 1024);
cy.visit(url);
cy.get('body')
.find('text#cl2-text')
.click({ force: true });
cy.get('.created-by-gant-click').should('not.exist');
});
});
});
// lib.rs
//! # The Redox OS Kernel, version 2
//!
//! The Redox OS Kernel is a hybrid kernel that supports X86_64 systems and
//! provides Unix-like syscalls for primarily Rust applications
#![deny(warnings)]
#![feature(alloc)]
#![feature(asm)]
#![feature(collections)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(drop_types_in_const)]
#![feature(heap_api)]
#![feature(integer_atomics)]
#![feature(never_type)]
#![feature(thread_local)]
#![no_std]
use arch::interrupt;
/// Architecture specific items (test)
#[cfg(test)]
#[macro_use]
extern crate arch_test as arch;
/// Architecture specific items (ARM)
#[cfg(all(not(test), target_arch = "arm"))]
#[macro_use]
extern crate arch_arm as arch;
/// Architecture specific items (x86_64)
#[cfg(all(not(test), target_arch = "x86_64"))]
#[macro_use]
extern crate arch_x86_64 as arch;
extern crate alloc;
#[macro_use]
extern crate collections;
#[macro_use]
extern crate bitflags;
extern crate goblin;
extern crate spin;
use core::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
use scheme::FileHandle;
#[macro_use]
/// Shared data structures
pub mod common;
/// Context management
pub mod context;
/// ELF file parsing
pub mod elf;
/// Schemes, filesystem handlers
pub mod scheme;
/// Synchronization primitives
pub mod sync;
/// Syscall handlers
pub mod syscall;
/// Tests
#[cfg(test)]
pub mod tests;
/// A unique number that identifies the current CPU - used for scheduling
#[thread_local]
static CPU_ID: AtomicUsize = ATOMIC_USIZE_INIT;
/// Get the current CPU's scheduling ID
#[inline(always)]
pub fn cpu_id() -> usize {
CPU_ID.load(Ordering::Relaxed)
}
/// The count of all CPUs that can have work scheduled
static CPU_COUNT : AtomicUsize = ATOMIC_USIZE_INIT;
/// Get the number of CPUs currently active
#[inline(always)]
pub fn cpu_count() -> usize {
CPU_COUNT.load(Ordering::Relaxed)
}
/// Initialize userspace by running the initfs:bin/init process
/// This function will also set the CWD to initfs:bin and open debug: as stdio
pub extern fn userspace_init() {
assert_eq!(syscall::chdir(b"initfs:"), Ok(0));
assert_eq!(syscall::open(b"debug:", syscall::flag::O_RDONLY).map(FileHandle::into), Ok(0));
assert_eq!(syscall::open(b"debug:", syscall::flag::O_WRONLY).map(FileHandle::into), Ok(1));
assert_eq!(syscall::open(b"debug:", syscall::flag::O_WRONLY).map(FileHandle::into), Ok(2));
syscall::exec(b"/bin/init", &[]).expect("failed to execute init");
panic!("init returned");
}
/// Allow exception handlers to send a signal to the arch-independent kernel
#[no_mangle]
pub extern fn ksignal(signal: usize) {
println!("SIGNAL {}, CPU {}, PID {:?}", signal, cpu_id(), context::context_id());
{
let contexts = context::contexts();
if let Some(context_lock) = contexts.current() {
let context = context_lock.read();
println!("NAME {}", unsafe { ::core::str::from_utf8_unchecked(&context.name.lock()) });
}
}
syscall::exit(signal & 0x7F);
}
/// This is the kernel entry point for the primary CPU. The arch crate is responsible for calling this
#[no_mangle]
pub extern fn kmain(cpus: usize) {
CPU_ID.store(0, Ordering::SeqCst);
CPU_COUNT.store(cpus, Ordering::SeqCst);
context::init();
let pid = syscall::getpid();
println!("BSP: {:?} {}", pid, cpus);
match context::contexts_mut().spawn(userspace_init) {
Ok(context_lock) => {
let mut context = context_lock.write();
context.status = context::Status::Runnable;
},
Err(err) => {
panic!("failed to spawn userspace_init: {:?}", err);
}
}
loop {
unsafe {
interrupt::disable();
if context::switch() {
interrupt::enable_and_nop();
} else {
// Enable interrupts, then halt CPU (to save power) until the next interrupt is actually fired.
interrupt::enable_and_halt();
}
}
}
}
/// This is the main kernel entry point for secondary CPUs
#[no_mangle]
pub extern fn kmain_ap(_id: usize) {
// Disable APs for now
loop {
unsafe { interrupt::enable_and_halt(); }
}
/*
CPU_ID.store(id, Ordering::SeqCst);
context::init();
let pid = syscall::getpid();
println!("AP {}: {:?}", id, pid);
loop {
unsafe {
interrupt::disable();
if context::switch() {
interrupt::enable_and_nop();
} else {
// Enable interrupts, then halt CPU (to save power) until the next interrupt is actually fired.
interrupt::enable_and_halt();
}
}
}
*/
}
# curve.py
from org.jfree.data.xy import XYSeries, XYSeriesCollection
from org.jfree.chart.plot import PlotOrientation
from org.jfree.chart import ChartFactory
from geoscript.plot.chart import Chart
from org.jfree.chart.renderer.xy import XYSplineRenderer, XYLine3DRenderer
def curve(data, name="", smooth=True, trid=True):
"""
Creates a curve based on a list of (x,y) tuples.
Setting *smooth* to ``True`` results in a spline renderer being used.
Setting *trid* to ``True`` results in a 3D plot. In this case the ``smooth``
argument is ignored.
"""
dataset = XYSeriesCollection()
xy = XYSeries(name);
for d in data:
xy.add(d[0], d[1])
dataset.addSeries(xy);
chart = ChartFactory.createXYLineChart(None, None, None, dataset,
    PlotOrientation.VERTICAL, True, True, False)
if smooth:
    chart.getXYPlot().setRenderer(XYSplineRenderer())
if trid:
    chart.getXYPlot().setRenderer(XYLine3DRenderer())
return Chart(chart)
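# A minimal usage sketch (assumes a Jython/geoscript environment with
# JFreeChart on the classpath; the module path and sample points are
# illustrative, not taken from this file):
#
#   from geoscript.plot.curve import curve
#   chart = curve([(0, 0), (1, 2), (2, 1), (3, 3)], name="demo", smooth=True, trid=False)
#   chart.show()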
// interval-bar.ts
//
//
import {BaseBar} from './basebar';
import {IntervalledViewModel} from '../data/intervalmodel';
import {IIntervalledSigleItem} from 'infodata';
import {customElement} from 'aurelia-framework';
//
@customElement('interval-bar')
export class IntervalBar extends BaseBar<IntervalledViewModel<IIntervalledSigleItem>> {
//
constructor() {
super();
}
public get isEditable(): boolean {
return (this.parent !== null) ? this.parent.isEditable : false;
}
public get isReadOnly(): boolean {
return (this.parent !== null) ? this.parent.isReadOnly : true;
}
public get minDate(): string {
return (this.parent !== null) ? this.parent.minDate : null;
}
public get maxDate(): string {
return (this.parent !== null) ? this.parent.maxDate : null;
}
public get startDate(): string {
return (this.parent !== null) ? this.parent.startDate : null;
}
public set startDate(s: string) {
if (this.parent !== null) {
this.parent.startDate = s;
}
}
public get endDate(): string {
return (this.parent !== null) ? this.parent.endDate : null;
}
public set endDate(s: string) {
if (this.parent !== null) {
this.parent.endDate = s;
}
}
}// IntervalBar
// utils-drag.js
'use strict';
// Callback contract for the utils-drag attribute directive:
//   event          - the originating jQuery UI drag event
//   eventType      - 'dragStart', 'drag', or 'dragStop'
//   pos            - absolute cursor position ({x, y} in page coordinates)
//   returnCallback - restores the dragged element to its start position
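// A minimal usage sketch (hypothetical markup and controller, not part of
// this file); the bound expression receives the locals listed above:
//
//   <div utils-drag="onDrag(event, eventType, pos, returnCallback)"></div>
//
//   $scope.onDrag = function (event, eventType, pos, returnCallback) {
//     if (eventType === 'dragStop' && pos.x < 0) {
//       returnCallback(); // snap the element back to its start position
//     }
//   };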
var app = angular.module('utilsDrag', []);
app.directive('utilsDrag', function ( ) {
return {
restrict: 'A',
scope:
{
dragCallback: '&utilsDrag'
},
replace: true,
controller: [ '$scope', '$rootScope', function ( $scope )
{
window.ud = $scope;
$scope.dragElement = U;
$scope.dragStartPosition = U;
$scope.returnCallback = function ()
{
$scope.dragElement.css( $scope.dragStartPosition );
}
}],
link: function ( scope, element )
{
    var cursorTop = $(element).height() / 2;
    var cursorLeft = $(element).width() / 2;

    function getCursorPosition ( topLeft, event )
{
// console.log( 'event: ', event );
// var eventPos = getEventPos( event );
var x = event.pageX;
var y = event.pageY;
return { x: x, y: y };
}
$(element).draggable(
{
cursorAt:
{
top: cursorTop,
left: cursorLeft
},
start: function ( event, ui )
{
scope.dragElement = $(this);
scope.dragElement.css('pointer-events', 'none');
var startDragging = scope.dragCallback(
{
event: event,
eventType: 'dragStart',
pos: getCursorPosition( ui.position, event ),
returnCallback: scope.returnCallback
});
scope.dragStartPosition = ui.position;
},
drag: function( event, ui )
{
var drag = scope.dragCallback(
{
event: event,
eventType: 'drag',
pos: getCursorPosition( ui.position, event ),
returnCallback: scope.returnCallback
});
if ( drag )
return false;
},
stop: function ( event, ui )
{
var stopDragging = scope.dragCallback(
{
event: event,
eventType: 'dragStop',
pos: getCursorPosition( ui.position, event ),
returnCallback: scope.returnCallback
});
scope.dragElement.css('pointer-events', 'all');
}
});
}
};
});
// RestTestHelper.java
package org.regkas.service.rest;
import javax.enterprise.context.Dependent;
import javax.enterprise.inject.Specializes;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import java.net.URL;
import java.util.function.Function;
@Specializes
@Dependent
public class RestTestHelper extends org.boatpos.common.test.rest.RestTestHelper {
public Invocation.Builder createRestCallWithHeaderCredentialsForTestUser(URL url, Function<WebTarget, WebTarget> addPath) throws Exception {
    return createRestCallWithHeaderCredentialsForTestUser(url, addPath, MediaType.APPLICATION_JSON_TYPE);
}
public Invocation.Builder createRestCallWithHeaderCredentialsForTestUser(URL url, Function<WebTarget, WebTarget> addPath, MediaType mediaType) throws Exception {
return super.createRestCall(url, addPath, mediaType)
.header("username", "Maria Musterfrau")
.header("password", "abc123")
.header("cashbox", "RegKas1");
}
    }
}
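// A minimal usage sketch (hypothetical test method; the deployment URL and
// the "bill" resource path are illustrative, not from this file):
//
//   @Test
//   public void callBillResourceAsTestUser() throws Exception {
//       Invocation.Builder builder = restTestHelper
//           .createRestCallWithHeaderCredentialsForTestUser(deploymentUrl, w -> w.path("bill"));
//       assertEquals(200, builder.get().getStatus());
//   }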
// springyui.js
/**
Copyright (c) 2010 Dennis Hotson
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
jQuery.fn.springy = function(params) {
var graph = this.graph = params.graph || new Springy.Graph();
var nodeFont = "16px Verdana, sans-serif";
var edgeFont = "8px Verdana, sans-serif";
var stiffness = params.stiffness || 400.0;
var repulsion = params.repulsion || 400.0;
var damping = params.damping || 0.5;
var minEnergyThreshold = params.minEnergyThreshold || 0.00001;
var nodeSelected = params.nodeSelected || null;
var nodeImages = {};
var edgeLabelsUpright = true;
var canvas = this[0];
var ctx = canvas.getContext("2d");
var layout = this.layout = new Springy.Layout.ForceDirected(graph, stiffness, repulsion, damping, minEnergyThreshold);
// calculate bounding box of graph layout.. with ease-in
var currentBB = layout.getBoundingBox();
var targetBB = {bottomleft: new Springy.Vector(-2, -2), topright: new Springy.Vector(2, 2)};
// auto adjusting bounding box
Springy.requestAnimationFrame(function adjust() {
targetBB = layout.getBoundingBox();
// current gets 10% closer to target every iteration
currentBB = {
bottomleft: currentBB.bottomleft.add( targetBB.bottomleft.subtract(currentBB.bottomleft)
.divide(10)),
topright: currentBB.topright.add( targetBB.topright.subtract(currentBB.topright)
.divide(10))
};
Springy.requestAnimationFrame(adjust);
});
// convert to/from screen coordinates
var toScreen = function(p) {
var size = currentBB.topright.subtract(currentBB.bottomleft);
var sx = p.subtract(currentBB.bottomleft).divide(size.x).x * canvas.width;
var sy = p.subtract(currentBB.bottomleft).divide(size.y).y * canvas.height;
return new Springy.Vector(sx, sy);
};
var fromScreen = function(s) {
var size = currentBB.topright.subtract(currentBB.bottomleft);
var px = (s.x / canvas.width) * size.x + currentBB.bottomleft.x;
var py = (s.y / canvas.height) * size.y + currentBB.bottomleft.y;
return new Springy.Vector(px, py);
};
// half-assed drag and drop
var selected = null;
var nearest = null;
var dragged = null;
// jQuery(canvas).click(function(e) {
// var pos = jQuery(this).offset();
// var p = fromScreen({x: e.pageX - pos.left, y: e.pageY - pos.top});
// selected = layout.nearest(p);
// });
jQuery(canvas).mousedown(function(e) {
var pos = jQuery(this).offset();
var p = fromScreen({x: e.pageX - pos.left, y: e.pageY - pos.top});
nearest = dragged = layout.nearest(p);
if (dragged.node) {
dragged.point.m = 10000.0;
}
renderer.start();
});
// Basic double click handler
jQuery(canvas).dblclick(function(e) {
var pos = jQuery(this).offset();
var p = fromScreen({x: e.pageX - pos.left, y: e.pageY - pos.top});
var selected = layout.nearest(p);
var node = selected.node;
if (node && nodeSelected) {
nodeSelected(node);
}
if (node && node.data && node.data.ondoubleclick) {
node.data.ondoubleclick();
}
renderer.start();
});
jQuery(canvas).mousemove(function(e) {
var pos = jQuery(this).offset();
var p = fromScreen({x: e.pageX - pos.left, y: e.pageY - pos.top});
nearest = layout.nearest(p);
if (dragged !== null && dragged.node !== null) {
dragged.point.p.x = p.x;
dragged.point.p.y = p.y;
}
renderer.start();
});
jQuery(window).bind('mouseup',function(e) {
dragged = null;
});
var getTextWidth = function(node) {
var text = (node.data.label !== undefined) ? node.data.label : node.id;
if (node._width && node._width[text])
return node._width[text];
ctx.save();
ctx.font = (node.data.font !== undefined) ? node.data.font : nodeFont;
var width = ctx.measureText(text).width;
ctx.restore();
node._width || (node._width = {});
node._width[text] = width;
return width;
};
var getTextHeight = function(node) {
return 16;
// In a more modular world, this would actually read the font size, but I think leaving it a constant is sufficient for now.
// If you change the font size, I'd adjust this too.
};
var getImageWidth = function(node) {
var width = (node.data.image.width !== undefined) ? node.data.image.width : nodeImages[node.data.image.src].object.width;
return width;
}
var getImageHeight = function(node) {
var height = (node.data.image.height !== undefined) ? node.data.image.height : nodeImages[node.data.image.src].object.height;
return height;
}
Springy.Node.prototype.getHeight = function() {
var height;
if (this.data.image == undefined) {
height = getTextHeight(this);
} else {
if (this.data.image.src in nodeImages && nodeImages[this.data.image.src].loaded) {
height = getImageHeight(this);
} else {height = 10;}
}
return height;
}
Springy.Node.prototype.getWidth = function() {
var width;
if (this.data.image == undefined) {
width = getTextWidth(this);
} else {
if (this.data.image.src in nodeImages && nodeImages[this.data.image.src].loaded) {
width = getImageWidth(this);
} else {width = 10;}
}
return width;
}
var renderer = this.renderer = new Springy.Renderer(layout,
function clear() {
ctx.clearRect(0,0,canvas.width,canvas.height);
},
function drawEdge(edge, p1, p2) {
var x1 = toScreen(p1).x;
var y1 = toScreen(p1).y;
var x2 = toScreen(p2).x;
var y2 = toScreen(p2).y;
var direction = new Springy.Vector(x2-x1, y2-y1);
var normal = direction.normal().normalise();
var from = graph.getEdges(edge.source, edge.target);
var to = graph.getEdges(edge.target, edge.source);
var total = from.length + to.length;
// Figure out edge's position in relation to other edges between the same nodes
var n = 0;
for (var i=0; i<from.length; i++) {
if (from[i].id === edge.id) {
n = i;
}
}
// default spacing raised from 10.0 to 12.0 so label text fits between edges
var spacing = 12.0;
// Figure out how far off center the line should be drawn
var offset = normal.multiply(-((total - 1) * spacing)/2.0 + (n * spacing));
var paddingX = 6;
var paddingY = 6;
var s1 = toScreen(p1).add(offset);
var s2 = toScreen(p2).add(offset);
var boxWidth = edge.target.getWidth() + paddingX;
var boxHeight = edge.target.getHeight() + paddingY;
var intersection = intersect_line_box(s1, s2, {x: x2-boxWidth/2.0, y: y2-boxHeight/2.0}, boxWidth, boxHeight);
if (!intersection) {
intersection = s2;
}
var stroke = (edge.data.color !== undefined) ? edge.data.color : '#000000';
var arrowWidth;
var arrowLength;
var weight = (edge.data.weight !== undefined) ? edge.data.weight : 1.0;
ctx.lineWidth = Math.max(weight * 2, 0.1);
arrowWidth = 1 + ctx.lineWidth;
arrowLength = 8;
var directional = (edge.data.directional !== undefined) ? edge.data.directional : true;
// line
var lineEnd;
if (directional) {
lineEnd = intersection.subtract(direction.normalise().multiply(arrowLength * 0.5));
} else {
lineEnd = s2;
}
ctx.strokeStyle = stroke;
ctx.beginPath();
ctx.moveTo(s1.x, s1.y);
ctx.lineTo(lineEnd.x, lineEnd.y);
ctx.stroke();
// arrow
if (directional) {
ctx.save();
ctx.fillStyle = stroke;
ctx.translate(intersection.x, intersection.y);
ctx.rotate(Math.atan2(y2 - y1, x2 - x1));
ctx.beginPath();
ctx.moveTo(-arrowLength, arrowWidth);
ctx.lineTo(0, 0);
ctx.lineTo(-arrowLength, -arrowWidth);
ctx.lineTo(-arrowLength * 0.8, -0);
ctx.closePath();
ctx.fill();
ctx.restore();
}
// label
if (edge.data.label !== undefined) {
var text = edge.data.label;
ctx.save();
ctx.textAlign = "center";
ctx.textBaseline = "top";
ctx.font = (edge.data.font !== undefined) ? edge.data.font : edgeFont;
ctx.fillStyle = stroke;
var angle = Math.atan2(s2.y - s1.y, s2.x - s1.x);
var displacement = -8;
if (edgeLabelsUpright && (angle > Math.PI/2 || angle < -Math.PI/2)) {
displacement = 8;
angle += Math.PI;
}
var textPos = s1.add(s2).divide(2).add(normal.multiply(displacement));
ctx.translate(textPos.x, textPos.y);
ctx.rotate(angle);
ctx.fillText(text, 0,-2);
ctx.restore();
}
},
function drawNode(node, p) {
var s = toScreen(p);
ctx.save();
// Pulled out the padding aspect so that the size functions could be used in multiple places
// These should probably be settable by the user (and scoped higher) but this suffices for now
var paddingX = 6;
var paddingY = 6;
var contentWidth = node.getWidth();
var contentHeight = node.getHeight();
var boxWidth = contentWidth + paddingX;
var boxHeight = contentHeight + paddingY;
// clear background
ctx.clearRect(s.x - boxWidth/2, s.y - boxHeight/2, boxWidth, boxHeight);
// fill background
if (selected !== null && selected.node !== null && selected.node.id === node.id) {
ctx.fillStyle = node.data.selectedBackgroundColor || params.selectedBackgroundColor || "#FFFFE0";
} else {
ctx.fillStyle = node.data.backgroundColor || params.backgroundColor || "#FFFFFF";
}
ctx.fillRect(s.x - boxWidth/2, s.y - boxHeight/2, boxWidth, boxHeight);
if (node.data.image == undefined) {
ctx.textAlign = "left";
ctx.textBaseline = "top";
ctx.font = (node.data.font !== undefined) ? node.data.font : nodeFont;
ctx.fillStyle = (node.data.color !== undefined) ? node.data.color : "#000000";
var text = (node.data.label !== undefined) ? node.data.label : node.id;
ctx.fillText(text, s.x - contentWidth/2, s.y - contentHeight/2);
} else {
// Currently we just ignore any labels if the image object is set. One might want to extend this logic to allow for both, or other composite nodes.
var src = node.data.image.src; // There should probably be a sanity check here too, but un-src-ed images aren't exactly a disaster.
if (src in nodeImages) {
if (nodeImages[src].loaded) {
// Our image is loaded, so it's safe to draw
ctx.drawImage(nodeImages[src].object, s.x - contentWidth/2, s.y - contentHeight/2, contentWidth, contentHeight);
}
}else{
// First time seeing an image with this src address, so add it to our set of image objects
// Note: we index images by their src to avoid making too many duplicates
nodeImages[src] = {};
var img = new Image();
nodeImages[src].object = img;
img.addEventListener("load", function () {
// HTMLImageElement objects are very finicky about being used before they are loaded, so we set a flag when it is done
nodeImages[src].loaded = true;
});
img.src = src;
}
}
ctx.restore();
}
);
renderer.start();
// helpers for figuring out where to draw arrows
function intersect_line_line(p1, p2, p3, p4) {
var denom = ((p4.y - p3.y)*(p2.x - p1.x) - (p4.x - p3.x)*(p2.y - p1.y));
// lines are parallel
if (denom === 0) {
return false;
}
var ua = ((p4.x - p3.x)*(p1.y - p3.y) - (p4.y - p3.y)*(p1.x - p3.x)) / denom;
var ub = ((p2.x - p1.x)*(p1.y - p3.y) - (p2.y - p1.y)*(p1.x - p3.x)) / denom;
if (ua < 0 || ua > 1 || ub < 0 || ub > 1) {
return false;
}
return new Springy.Vector(p1.x + ua * (p2.x - p1.x), p1.y + ua * (p2.y - p1.y));
}
function intersect_line_box(p1, p2, p3, w, h) {
var tl = {x: p3.x, y: p3.y};
var tr = {x: p3.x + w, y: p3.y};
var bl = {x: p3.x, y: p3.y + h};
var br = {x: p3.x + w, y: p3.y + h};
var result;
if (result = intersect_line_line(p1, p2, tl, tr)) { return result; } // top
if (result = intersect_line_line(p1, p2, tr, br)) { return result; } // right
if (result = intersect_line_line(p1, p2, br, bl)) { return result; } // bottom
if (result = intersect_line_line(p1, p2, bl, tl)) { return result; } // left
return false;
}
return this;
}
})();
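// A minimal usage sketch (follows the documented Springy API; the node
// labels and canvas id are illustrative):
//
//   var graph = new Springy.Graph();
//   var spruce = graph.newNode({ label: 'Norway Spruce' });
//   var fir = graph.newNode({ label: 'Sicilian Fir' });
//   graph.newEdge(spruce, fir, { color: '#00A0B0' });
//
//   jQuery(function () {
//     jQuery('#my-canvas').springy({ graph: graph });
//   });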
# switch_file.py
import sublime, sublime_plugin
import os.path
import platform
def compare_file_names(x, y):
if platform.system() == 'Windows' or platform.system() == 'Darwin':
return x.lower() == y.lower()
else:
return x == y
class SwitchFileCommand(sublime_plugin.WindowCommand):
def run(self, extensions=[]):
if not self.window.active_view():
return
fname = self.window.active_view().file_name()
if not fname:
return
path = os.path.dirname(fname)
base, ext = os.path.splitext(fname)
start = 0
count = len(extensions)
if ext != "":
    ext = ext[1:]
for i in range(0, len(extensions)):
if compare_file_names(extensions[i], ext):
start = i + 1
count -= 1
break
for i in range(0, count):
idx = (start + i) % len(extensions)
new_path = base + '.' + extensions[idx]
if os.path.exists(new_path):
self.window.open_file(new_path)
break
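# A minimal usage sketch: bind the command in a Sublime Text keymap (this
# mirrors the stock C/C++ binding; adjust the extension cycle to taste):
#
#   { "keys": ["alt+o"], "command": "switch_file",
#     "args": {"extensions": ["cpp", "cxx", "cc", "c", "hpp", "hxx", "hh", "h"]} }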
// caroussel.js
$(document).ready(function(){
createCaroussel($('div[data-caroussel=caroussel]'));
setVisibleInCaroussel($('div[data-caroussel=caroussel]'), 0);
setAutoChange($('div[data-caroussel=caroussel]'));
});
function createCaroussel(carousselElement){
carousselElement.append('<div class="caroussel"></div>');
carousselElement.append('<div class="caroussel-pin-wrapper"></div>');
var pins = carousselElement.find('.caroussel-pin-wrapper');
var data = carousselElement.find('[data-caroussel=data]');
data.hide();
// ADD EACH IMAGE FROM DATA
data.children('span[data-url]').each(function(){
$(this).closest('div[data-caroussel=caroussel]').find('.caroussel').append('<div class="caroussel-img-wrapper"><img src="'+$(this).attr('data-url')+'"/></div>');
if ($(this).parent().attr('data-caroussel-pin') != 'false')
$(this).closest('div[data-caroussel=caroussel]').find('.caroussel-pin-wrapper').append('<div class="caroussel-pin"></div>');
});
// COUNT THE NUMBER OF IMAGES AND MEMORIZE DELAY AND COUNT
carousselElement.each(function(){
$(this).attr('data-nbr-images', $(this).find('.caroussel-img-wrapper').length);
var delay = parseInt($(this).find('[data-caroussel=data]').attr('data-caroussel-delay'));
if (delay){
$(this).attr('data-delay', delay);
// ADD A PROGRESS INDICATOR ON THE IMAGE
if ($(this).find('[data-caroussel=data]').attr('data-caroussel-progress-bar') == 'true')
$(this).find('.caroussel').append('<div class="caroussel-progress-bar"></div>');
$(window).resize(function(e){
adjustProgressBar($('div[data-caroussel=caroussel]'));
});
}
});
// ADD EVENT HANDLER ON PINS
pins.find('.caroussel-pin').click(function(e){
setVisibleInCaroussel($(this).closest('div[data-caroussel=caroussel]'), $(this).index());
setAutoChange($(this).closest('div[data-caroussel=caroussel]'));
});
// ADD CLICK EVENT ON PHOTOS
carousselElement.find('.caroussel-img-wrapper img').click(function(e){
// click on right of the photo
if (e.pageX < ($(this).offset().left + ($(this).width() / 4))){
var caroussel = $(this).closest('div[data-caroussel=caroussel]');
decreaseVisibleInCaroussel(caroussel);
setAutoChange(caroussel);
}
else if (e.pageX > ($(this).offset().left + (3 * ($(this).width() / 4)))){
var caroussel = $(this).closest('div[data-caroussel=caroussel]');
increaseVisibleInCaroussel(caroussel);
setAutoChange(caroussel);
}
});
}
function setAutoChange(carousselElement){
// SET AUTOMATIC FUNCTION
carousselElement.each(function(){
var caroussel = $(this);
if (parseInt(caroussel.attr('data-delay'))){
// IF A LOOP FUNCTION IS ALREADY ATTACHED, WE CLOSE IT
if (parseInt(caroussel.attr('data-interval-function'))) clearInterval(parseInt(caroussel.attr('data-interval-function')));
if (parseInt(caroussel.attr('data-interval-function-progress-bar'))) clearInterval(parseInt(caroussel.attr('data-interval-function-progress-bar')));
// WE LAUNCH A LOOP FUNCTION TO CHANGE THE IMAGE
caroussel.attr('data-interval-function', setInterval(function(){
increaseVisibleInCaroussel(caroussel);
}, parseInt(caroussel.attr('data-delay'))));
// WE LAUNCH A LOOP FUNCTION TO CHANGE THE PROGRESS BAR
if (caroussel.find('[data-caroussel=data]').attr('data-caroussel-progress-bar') == 'true'){
var nbrOfRefreshRequired = parseInt(caroussel.attr('data-delay')) / 40;
caroussel.attr('data-interval-function-progress-bar', setInterval(function(){
var progressBar = caroussel.find('.caroussel-progress-bar');
progressBar.css('width', Math.min(progressBar.width() + parseInt(progressBar.attr('data-width'))/nbrOfRefreshRequired, parseInt(progressBar.attr('data-width'))));
}, 39));
}
    }
  });
}
function increaseVisibleInCaroussel(carousselElement){
setVisibleInCaroussel(carousselElement, (parseInt(carousselElement.attr('data-current-index'))+1) % carousselElement.attr('data-nbr-images'));
}
function decreaseVisibleInCaroussel(carousselElement){
var index = parseInt(carousselElement.attr('data-current-index')) - 1;
if (index < 0) index = parseInt(carousselElement.attr('data-nbr-images')) + index;
setVisibleInCaroussel(carousselElement, index);
}
function setVisibleInCaroussel(carousselElement, index){
// MEMORIZE THE INDEX
carousselElement.attr('data-current-index', index);
// SHOW THE IMAGE
carousselElement.find('.caroussel').find('.caroussel-img-wrapper').hide();
carousselElement.find('.caroussel').find('.caroussel-img-wrapper:eq('+index+')').show();
// ACTIVE THE PIN
carousselElement.find('.caroussel-pin-wrapper').find('.caroussel-pin').removeClass('active');
carousselElement.find('.caroussel-pin-wrapper').find('.caroussel-pin:eq('+index+')').addClass('active');
// INITIALIZE THE PROGRESS BAR
if (carousselElement.find('[data-caroussel=data]').attr('data-caroussel-progress-bar') == 'true'){
adjustProgressBar(carousselElement);
carousselElement.find('.caroussel').each(function(){
$(this).find('.caroussel-progress-bar').css('width', 0);
});
}
}
function adjustProgressBar(carousselElement){
carousselElement.find('.caroussel').each(function(){
var progressBar = $(this).find('.caroussel-progress-bar');
var visibleImgWrapper = $(this).find('.caroussel-img-wrapper:visible');
progressBar.css('top', visibleImgWrapper.offset().top + (visibleImgWrapper.height()*(9/10)));
progressBar.css('left', visibleImgWrapper.offset().left);
progressBar.css('height', visibleImgWrapper.height()/10);
progressBar.attr('data-width', visibleImgWrapper.width());
});
}
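// A minimal usage sketch (markup inferred from the selectors above; the
// image URLs and delay are illustrative):
//
//   <div data-caroussel="caroussel">
//     <div data-caroussel="data" data-caroussel-delay="5000"
//          data-caroussel-progress-bar="true">
//       <span data-url="img/one.jpg"></span>
//       <span data-url="img/two.jpg"></span>
//     </div>
//   </div>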
// ty.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
pub use self::terr_vstore_kind::*;
pub use self::type_err::*;
pub use self::BuiltinBound::*;
pub use self::InferTy::*;
pub use self::InferRegion::*;
pub use self::ImplOrTraitItemId::*;
pub use self::ClosureKind::*;
pub use self::Variance::*;
pub use self::AutoAdjustment::*;
pub use self::Representability::*;
pub use self::AutoRef::*;
pub use self::ExprKind::*;
pub use self::DtorKind::*;
pub use self::ExplicitSelfCategory::*;
pub use self::FnOutput::*;
pub use self::Region::*;
pub use self::ImplOrTraitItemContainer::*;
pub use self::BorrowKind::*;
pub use self::ImplOrTraitItem::*;
pub use self::BoundRegion::*;
pub use self::sty::*;
pub use self::IntVarValue::*;
pub use self::MethodOrigin::*;
pub use self::CopyImplementationError::*;
use back::svh::Svh;
use session::Session;
use lint;
use metadata::csearch;
use middle;
use middle::cast;
use middle::check_const;
use middle::const_eval;
use middle::def::{self, DefMap, ExportMap};
use middle::dependency_format;
use middle::fast_reject;
use middle::free_region::FreeRegionMap;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::mem_categorization as mc;
use middle::region;
use middle::resolve_lifetime;
use middle::infer;
use middle::pat_util;
use middle::region::RegionMaps;
use middle::stability;
use middle::subst::{self, ParamSpace, Subst, Substs, VecPerParamSpace};
use middle::traits;
use middle::ty;
use middle::ty_fold::{self, TypeFoldable, TypeFolder};
use middle::ty_walk::{self, TypeWalker};
use util::ppaux::{note_and_explain_region, bound_region_ptr_to_string};
use util::ppaux::ty_to_string;
use util::ppaux::{Repr, UserString};
use util::common::{memoized, ErrorReported};
use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
use util::nodemap::FnvHashMap;
use util::num::ToPrimitive;
use arena::TypedArena;
use std::borrow::{Borrow, Cow};
use std::cell::{Cell, RefCell, Ref};
use std::cmp;
use std::fmt;
use std::hash::{Hash, SipHasher, Hasher};
use std::mem;
use std::ops;
use std::rc::Rc;
use std::vec::IntoIter;
use collections::enum_set::{EnumSet, CLike};
use std::collections::{HashMap, HashSet};
use syntax::abi;
use syntax::ast::{CrateNum, DefId, ItemImpl, ItemTrait, LOCAL_CRATE};
use syntax::ast::{MutImmutable, MutMutable, Name, NamedField, NodeId};
use syntax::ast::{StmtExpr, StmtSemi, StructField, UnnamedField, Visibility};
use syntax::ast_util::{self, is_local, lit_is_str, local_def};
use syntax::attr::{self, AttrMetaMethods, SignedInt, UnsignedInt};
use syntax::codemap::Span;
use syntax::parse::token::{self, InternedString, special_idents};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::ast;
use syntax::ast_map::{self, LinkedPath};
pub type Disr = u64;
pub const INITIAL_DISCRIMINANT_VALUE: Disr = 0;
// Data types
/// The complete set of all analyses described in this module. This is
/// produced by the driver and fed to trans and later passes.
pub struct CrateAnalysis<'tcx> {
pub export_map: ExportMap,
pub exported_items: middle::privacy::ExportedItems,
pub public_items: middle::privacy::PublicItems,
pub ty_cx: ty::ctxt<'tcx>,
pub reachable: NodeSet,
pub name: String,
pub glob_map: Option<GlobMap>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct field<'tcx> {
pub name: ast::Name,
pub mt: mt<'tcx>
}
#[derive(Clone, Copy, Debug)]
pub enum ImplOrTraitItemContainer {
TraitContainer(ast::DefId),
ImplContainer(ast::DefId),
}
impl ImplOrTraitItemContainer {
pub fn id(&self) -> ast::DefId {
match *self {
TraitContainer(id) => id,
ImplContainer(id) => id,
}
}
}
#[derive(Clone, Debug)]
pub enum ImplOrTraitItem<'tcx> {
ConstTraitItem(Rc<AssociatedConst<'tcx>>),
MethodTraitItem(Rc<Method<'tcx>>),
TypeTraitItem(Rc<AssociatedType<'tcx>>),
}
impl<'tcx> ImplOrTraitItem<'tcx> {
fn id(&self) -> ImplOrTraitItemId {
match *self {
ConstTraitItem(ref associated_const) => {
ConstTraitItemId(associated_const.def_id)
}
MethodTraitItem(ref method) => MethodTraitItemId(method.def_id),
TypeTraitItem(ref associated_type) => {
TypeTraitItemId(associated_type.def_id)
}
}
}
pub fn def_id(&self) -> ast::DefId {
match *self {
ConstTraitItem(ref associated_const) => associated_const.def_id,
MethodTraitItem(ref method) => method.def_id,
TypeTraitItem(ref associated_type) => associated_type.def_id,
}
}
pub fn name(&self) -> ast::Name {
match *self {
ConstTraitItem(ref associated_const) => associated_const.name,
MethodTraitItem(ref method) => method.name,
TypeTraitItem(ref associated_type) => associated_type.name,
}
}
pub fn vis(&self) -> ast::Visibility {
match *self {
ConstTraitItem(ref associated_const) => associated_const.vis,
MethodTraitItem(ref method) => method.vis,
TypeTraitItem(ref associated_type) => associated_type.vis,
}
}
pub fn container(&self) -> ImplOrTraitItemContainer {
match *self {
ConstTraitItem(ref associated_const) => associated_const.container,
MethodTraitItem(ref method) => method.container,
TypeTraitItem(ref associated_type) => associated_type.container,
}
}
pub fn as_opt_method(&self) -> Option<Rc<Method<'tcx>>> {
match *self {
MethodTraitItem(ref m) => Some((*m).clone()),
_ => None,
}
}
}
#[derive(Clone, Copy, Debug)]
pub enum ImplOrTraitItemId {
ConstTraitItemId(ast::DefId),
MethodTraitItemId(ast::DefId),
TypeTraitItemId(ast::DefId),
}
impl ImplOrTraitItemId {
pub fn def_id(&self) -> ast::DefId {
match *self {
ConstTraitItemId(def_id) => def_id,
MethodTraitItemId(def_id) => def_id,
TypeTraitItemId(def_id) => def_id,
}
}
}
#[derive(Clone, Debug)]
pub struct Method<'tcx> {
pub name: ast::Name,
pub generics: Generics<'tcx>,
pub predicates: GenericPredicates<'tcx>,
pub fty: BareFnTy<'tcx>,
pub explicit_self: ExplicitSelfCategory,
pub vis: ast::Visibility,
pub def_id: ast::DefId,
pub container: ImplOrTraitItemContainer,
// If this method is provided, we need to know where it came from
pub provided_source: Option<ast::DefId>
}
impl<'tcx> Method<'tcx> {
pub fn new(name: ast::Name,
generics: ty::Generics<'tcx>,
predicates: GenericPredicates<'tcx>,
fty: BareFnTy<'tcx>,
explicit_self: ExplicitSelfCategory,
vis: ast::Visibility,
def_id: ast::DefId,
container: ImplOrTraitItemContainer,
provided_source: Option<ast::DefId>)
-> Method<'tcx> {
Method {
name: name,
generics: generics,
predicates: predicates,
fty: fty,
explicit_self: explicit_self,
vis: vis,
def_id: def_id,
container: container,
provided_source: provided_source
}
}
pub fn container_id(&self) -> ast::DefId {
match self.container {
TraitContainer(id) => id,
ImplContainer(id) => id,
}
}
}
#[derive(Clone, Copy, Debug)]
pub struct AssociatedConst<'tcx> {
pub name: ast::Name,
pub ty: Ty<'tcx>,
pub vis: ast::Visibility,
pub def_id: ast::DefId,
pub container: ImplOrTraitItemContainer,
pub default: Option<ast::DefId>,
}
#[derive(Clone, Copy, Debug)]
pub struct AssociatedType<'tcx> {
pub name: ast::Name,
pub ty: Option<Ty<'tcx>>,
pub vis: ast::Visibility,
pub def_id: ast::DefId,
pub container: ImplOrTraitItemContainer,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct mt<'tcx> {
pub ty: Ty<'tcx>,
pub mutbl: ast::Mutability,
}
#[derive(Clone, Copy, Debug)]
pub struct field_ty {
pub name: Name,
pub id: DefId,
pub vis: ast::Visibility,
pub origin: ast::DefId, // The DefId of the struct in which the field is declared.
}
#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable)]
pub struct ItemVariances {
pub types: VecPerParamSpace<Variance>,
pub regions: VecPerParamSpace<Variance>,
}
#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Debug, Copy)]
pub enum Variance {
Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type
Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell
Contravariant, // T<A> <: T<B> iff B <: A -- e.g., function param type
Bivariant, // T<A> <: T<B> -- e.g., unused type parameter
}
#[derive(Copy, Clone, Debug)]
pub enum AutoAdjustment<'tcx> {
AdjustReifyFnPointer, // go from a fn-item type to a fn-pointer type
AdjustUnsafeFnPointer, // go from a safe fn pointer to an unsafe fn pointer
AdjustDerefRef(AutoDerefRef<'tcx>),
}
/// Represents coercing a pointer to a different kind of pointer - where 'kind'
/// here means either or both of raw vs borrowed vs unique and fat vs thin.
///
/// We transform pointers by following the following steps in order:
/// 1. Deref the pointer `self.autoderefs` times (may be 0).
/// 2. If `autoref` is `Some(_)`, then take the address and produce either a
/// `&` or `*` pointer.
/// 3. If `unsize` is `Some(_)`, then apply the unsize transformation,
/// which will do things like convert thin pointers to fat
/// pointers, or convert structs containing thin pointers to
/// structs containing fat pointers, or convert between fat
/// pointers. We don't store the details of how the transform is
/// done (in fact, we don't know that, because it might depend on
/// the precise type parameters). We just store the target
/// type. Trans figures out what has to be done at monomorphization
/// time based on the precise source/target type at hand.
///
/// To make that more concrete, here are some common scenarios:
///
/// 1. The simplest cases are where the pointer is not adjusted fat vs thin.
/// Here the pointer will be dereferenced N times (where a dereference can
/// happen to raw or borrowed pointers or any smart pointer which implements
/// Deref, including Box<_>). The number of dereferences is given by
/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
/// None.
///
/// 2. A thin-to-fat coercion involves unsizing the underlying data. We start
/// with a thin pointer, deref a number of times, unsize the underlying data,
/// then autoref. The 'unsize' phase may change a fixed length array to a
/// dynamically sized one, a concrete object to a trait object, or statically
/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
/// represented by:
///
/// ```
/// AutoDerefRef {
/// autoderefs: 1, // &[i32; 4] -> [i32; 4]
/// autoref: Some(AutoPtr), // [i32] -> &[i32]
/// unsize: Some([i32]), // [i32; 4] -> [i32]
/// }
/// ```
///
/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
/// The autoderef and -ref are the same as in the above example, but the type
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
/// the underlying conversions from `[i32; 4]` to `[i32]`.
///
/// 3. Coercing a `Box<T>` to `Box<Trait>` is an interesting special case. In
/// that case, we have the pointer we need coming in, so there are no
/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is represented by:
///
/// ```
/// AutoDerefRef {
/// autoderefs: 0,
/// autoref: None,
/// unsize: Some(Box<[i32]>),
/// }
/// ```
#[derive(Copy, Clone, Debug)]
pub struct AutoDerefRef<'tcx> {
/// Step 1. Apply a number of dereferences, producing an lvalue.
pub autoderefs: usize,
/// Step 2. Optionally produce a pointer/reference from the value.
pub autoref: Option<AutoRef<'tcx>>,
/// Step 3. Unsize a pointer/reference value, e.g. `&[T; n]` to
/// `&[T]`. The stored type is the target pointer type. Note that
/// the source could be a thin or fat pointer.
pub unsize: Option<Ty<'tcx>>,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AutoRef<'tcx> {
/// Convert from T to &T.
AutoPtr(&'tcx Region, ast::Mutability),
/// Convert from T to *T.
/// Value to thin pointer.
AutoUnsafe(ast::Mutability),
}
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
pub enum CustomCoerceUnsized {
/// Records the index of the field being coerced.
Struct(usize)
}
#[derive(Clone, Debug)]
pub enum MethodOrigin<'tcx> {
// fully statically resolved method
MethodStatic(ast::DefId),
// fully statically resolved closure invocation
MethodStaticClosure(ast::DefId),
// method invoked on a type parameter with a bounded trait
MethodTypeParam(MethodParam<'tcx>),
// method invoked on a trait instance
MethodTraitObject(MethodObject<'tcx>),
}
// details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait.
#[derive(Clone, Debug)]
pub struct MethodParam<'tcx> {
// the precise trait reference that occurs as a bound -- this may
// be a supertrait of what the user actually typed. Note that it
// never contains bound regions; those regions should have been
// instantiated with fresh variables at this point.
pub trait_ref: ty::TraitRef<'tcx>,
// index of the method in the list of trait items. Note that this is NOT
// the index into the vtable, because the list of trait items
// includes associated types.
pub method_num: usize,
/// The impl for the trait from which the method comes. This
/// should only be used for certain linting/heuristic purposes
/// since there is no guarantee that this is Some in every
/// situation that it could/should be.
pub impl_def_id: Option<ast::DefId>,
}
// details for a method invoked with a receiver whose type is an object
#[derive(Clone, Debug)]
pub struct MethodObject<'tcx> {
// the (super)trait containing the method to be invoked
pub trait_ref: TraitRef<'tcx>,
// the actual base trait id of the object
pub object_trait_id: ast::DefId,
// index of the method to be invoked amongst the trait's items
pub method_num: usize,
// index into the actual runtime vtable.
// the vtable is formed by concatenating together the method lists of
// the base object trait and all supertraits; this is the index into
// that vtable
pub vtable_index: usize,
}
#[derive(Clone, Debug)]
pub struct MethodCallee<'tcx> {
pub origin: MethodOrigin<'tcx>,
pub ty: Ty<'tcx>,
pub substs: subst::Substs<'tcx>
}
/// With method calls, we store some extra information in
/// side tables (i.e method_map). We use
/// MethodCall as a key to index into these tables instead of
/// just directly using the expression's NodeId. The reason
/// for this being that we may apply adjustments (coercions)
/// with the resulting expression also needing to use the
/// side tables. The problem with this is that we don't
/// assign a separate NodeId to this new expression
/// and so it would clash with the base expression if both
/// needed to add to the side tables. Thus to disambiguate
/// we also keep track of whether there's an adjustment in
/// our key.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct MethodCall {
pub expr_id: ast::NodeId,
pub autoderef: u32
}
impl MethodCall {
pub fn expr(id: ast::NodeId) -> MethodCall {
MethodCall {
expr_id: id,
autoderef: 0
}
}
pub fn autoderef(expr_id: ast::NodeId, autoderef: u32) -> MethodCall {
MethodCall {
expr_id: expr_id,
autoderef: 1 + autoderef
}
}
}
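// Illustrative example (not from the original source): a call that needed
// one compiler-inserted autoderef is keyed as
// `MethodCall::autoderef(expr_id, 0)` (stored autoderef = 1), while the
// unadjusted call on the same expression is keyed as
// `MethodCall::expr(expr_id)` (stored autoderef = 0), so both can live in
// the same side table without clashing.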
// maps from an expression id that corresponds to a method call to the details
// of the method to be invoked
pub type MethodMap<'tcx> = RefCell<FnvHashMap<MethodCall, MethodCallee<'tcx>>>;
// Contains information needed to resolve types and (in the future) look up
// the types of AST nodes.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct creader_cache_key {
pub cnum: CrateNum,
pub pos: usize,
pub len: usize
}
/// A restriction that certain types must be the same size. The use of
/// `transmute` gives rise to these restrictions. These generally
/// cannot be checked until trans; therefore, each call to `transmute`
/// will push one or more such restriction into the
/// `transmute_restrictions` vector during `intrinsicck`. They are
/// then checked during `trans` by the fn `check_intrinsics`.
#[derive(Copy, Clone)]
pub struct TransmuteRestriction<'tcx> {
/// The span whence the restriction comes.
pub span: Span,
/// The type being transmuted from.
pub original_from: Ty<'tcx>,
/// The type being transmuted to.
pub original_to: Ty<'tcx>,
/// The type being transmuted from, with all type parameters
/// substituted for an arbitrary representative. Not to be shown
/// to the end user.
pub substituted_from: Ty<'tcx>,
/// The type being transmuted to, with all type parameters
/// substituted for an arbitrary representative. Not to be shown
/// to the end user.
pub substituted_to: Ty<'tcx>,
/// NodeId of the transmute intrinsic.
pub id: ast::NodeId,
}
/// Internal storage
pub struct CtxtArenas<'tcx> {
// internings
type_: TypedArena<TyS<'tcx>>,
substs: TypedArena<Substs<'tcx>>,
bare_fn: TypedArena<BareFnTy<'tcx>>,
region: TypedArena<Region>,
stability: TypedArena<attr::Stability>,
// references
trait_defs: TypedArena<TraitDef<'tcx>>,
}
impl<'tcx> CtxtArenas<'tcx> {
pub fn new() -> CtxtArenas<'tcx> {
CtxtArenas {
type_: TypedArena::new(),
substs: TypedArena::new(),
bare_fn: TypedArena::new(),
region: TypedArena::new(),
stability: TypedArena::new(),
trait_defs: TypedArena::new()
}
}
}
pub struct CommonTypes<'tcx> {
pub bool: Ty<'tcx>,
pub char: Ty<'tcx>,
pub isize: Ty<'tcx>,
pub i8: Ty<'tcx>,
pub i16: Ty<'tcx>,
pub i32: Ty<'tcx>,
pub i64: Ty<'tcx>,
pub usize: Ty<'tcx>,
pub u8: Ty<'tcx>,
pub u16: Ty<'tcx>,
pub u32: Ty<'tcx>,
pub u64: Ty<'tcx>,
pub f32: Ty<'tcx>,
pub f64: Ty<'tcx>,
pub err: Ty<'tcx>,
}
/// The data structure to keep track of all the information that the typechecker
/// generates so that it can be reused and doesn't have to be redone
/// later on.
pub struct ctxt<'tcx> {
/// The arenas that types etc are allocated from.
arenas: &'tcx CtxtArenas<'tcx>,
/// Specifically use a speedy hash algorithm for this hash map, it's used
/// quite often.
// FIXME(eddyb) use a FnvHashSet<InternedTy<'tcx>> when equivalent keys can
// be queried from a HashSet.
interner: RefCell<FnvHashMap<InternedTy<'tcx>, Ty<'tcx>>>,
// FIXME as above, use a hashset if equivalent elements can be queried.
substs_interner: RefCell<FnvHashMap<&'tcx Substs<'tcx>, &'tcx Substs<'tcx>>>,
bare_fn_interner: RefCell<FnvHashMap<&'tcx BareFnTy<'tcx>, &'tcx BareFnTy<'tcx>>>,
region_interner: RefCell<FnvHashMap<&'tcx Region, &'tcx Region>>,
stability_interner: RefCell<FnvHashMap<&'tcx attr::Stability, &'tcx attr::Stability>>,
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
pub sess: Session,
pub def_map: DefMap,
pub named_region_map: resolve_lifetime::NamedRegionMap,
pub region_maps: RegionMaps,
// For each fn declared in the local crate, type check stores the
// free-region relationships that were deduced from its where
// clauses and parameter types. These are then read-again by
// borrowck. (They are not used during trans, and hence are not
// serialized or needed for cross-crate fns.)
free_region_maps: RefCell<NodeMap<FreeRegionMap>>,
/// Stores the types for various nodes in the AST. Note that this table
/// is not guaranteed to be populated until after typeck. See
/// typeck::check::fn_ctxt for details.
node_types: RefCell<NodeMap<Ty<'tcx>>>,
/// Stores the type parameters which were substituted to obtain the type
/// of this node. This only applies to nodes that refer to entities
/// parameterized by type parameters, such as generic fns, types, or
/// other items.
pub item_substs: RefCell<NodeMap<ItemSubsts<'tcx>>>,
/// Maps from a trait item to the trait item "descriptor"
pub impl_or_trait_items: RefCell<DefIdMap<ImplOrTraitItem<'tcx>>>,
/// Maps from a trait def-id to a list of the def-ids of its trait items
pub trait_item_def_ids: RefCell<DefIdMap<Rc<Vec<ImplOrTraitItemId>>>>,
/// A cache for the trait_items() routine
pub trait_items_cache: RefCell<DefIdMap<Rc<Vec<ImplOrTraitItem<'tcx>>>>>,
pub impl_trait_refs: RefCell<DefIdMap<Option<TraitRef<'tcx>>>>,
pub trait_defs: RefCell<DefIdMap<&'tcx TraitDef<'tcx>>>,
/// Maps from the def-id of an item (trait/struct/enum/fn) to its
/// associated predicates.
pub predicates: RefCell<DefIdMap<GenericPredicates<'tcx>>>,
/// Maps from the def-id of a trait to the list of
/// super-predicates. This is a subset of the full list of
/// predicates. We store these in a separate map because we must
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
pub super_predicates: RefCell<DefIdMap<GenericPredicates<'tcx>>>,
pub map: ast_map::Map<'tcx>,
pub freevars: RefCell<FreevarMap>,
pub tcache: RefCell<DefIdMap<TypeScheme<'tcx>>>,
pub rcache: RefCell<FnvHashMap<creader_cache_key, Ty<'tcx>>>,
pub tc_cache: RefCell<FnvHashMap<Ty<'tcx>, TypeContents>>,
pub ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
pub enum_var_cache: RefCell<DefIdMap<Rc<Vec<Rc<VariantInfo<'tcx>>>>>>,
pub ty_param_defs: RefCell<NodeMap<TypeParameterDef<'tcx>>>,
pub adjustments: RefCell<NodeMap<AutoAdjustment<'tcx>>>,
pub normalized_cache: RefCell<FnvHashMap<Ty<'tcx>, Ty<'tcx>>>,
pub lang_items: middle::lang_items::LanguageItems,
/// A mapping of fake provided method def_ids to the default implementation
pub provided_method_sources: RefCell<DefIdMap<ast::DefId>>,
pub struct_fields: RefCell<DefIdMap<Rc<Vec<field_ty>>>>,
/// Maps from def-id of a type or region parameter to its
/// (inferred) variance.
pub item_variance_map: RefCell<DefIdMap<Rc<ItemVariances>>>,
/// True if the variance has been computed yet; false otherwise.
pub variance_computed: Cell<bool>,
/// A mapping from the def ID of an enum or struct type to the def ID
/// of the method that implements its destructor. If the type is not
/// present in this map, it does not have a destructor. This map is
/// populated during the coherence phase of typechecking.
pub destructor_for_type: RefCell<DefIdMap<ast::DefId>>,
/// A method will be in this list if and only if it is a destructor.
pub destructors: RefCell<DefIdSet>,
/// Maps a DefId of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
pub inherent_impls: RefCell<DefIdMap<Rc<Vec<ast::DefId>>>>,
/// Maps a DefId of an impl to a list of its items.
/// Note that this contains all of the impls that we know about,
/// including ones in other crates. It's not clear that this is the best
/// way to do it.
pub impl_items: RefCell<DefIdMap<Vec<ImplOrTraitItemId>>>,
/// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
/// present in this set can be warned about.
pub used_unsafe: RefCell<NodeSet>,
/// Set of nodes which mark locals as mutable and which end up getting used at
/// some point. Local variable definitions not in this set can be warned
/// about.
pub used_mut_nodes: RefCell<NodeSet>,
/// The set of external nominal types whose implementations have been read.
/// This is used for lazy resolution of methods.
pub populated_external_types: RefCell<DefIdSet>,
/// The set of external primitive types whose implementations have been read.
/// FIXME(arielb1): why is this separate from populated_external_types?
pub populated_external_primitive_impls: RefCell<DefIdSet>,
/// Borrows
pub upvar_capture_map: RefCell<UpvarCaptureMap>,
/// These caches are used by const_eval when decoding external constants.
pub extern_const_statics: RefCell<DefIdMap<ast::NodeId>>,
pub extern_const_variants: RefCell<DefIdMap<ast::NodeId>>,
pub extern_const_fns: RefCell<DefIdMap<ast::NodeId>>,
pub method_map: MethodMap<'tcx>,
pub dependency_formats: RefCell<dependency_format::Dependencies>,
/// Records the kind of each closure. The def ID is the ID of the
/// expression defining the closure.
pub closure_kinds: RefCell<DefIdMap<ClosureKind>>,
/// Records the type of each closure. The def ID is the ID of the
/// expression defining the closure.
pub closure_tys: RefCell<DefIdMap<ClosureTy<'tcx>>>,
pub node_lint_levels: RefCell<FnvHashMap<(ast::NodeId, lint::LintId),
lint::LevelSource>>,
/// The types that must be asserted to be the same size for `transmute`
/// to be valid. We gather up these restrictions in the intrinsicck pass
/// and check them in trans.
pub transmute_restrictions: RefCell<Vec<TransmuteRestriction<'tcx>>>,
/// Maps any item's def-id to its stability index.
pub stability: RefCell<stability::Index<'tcx>>,
/// Caches the results of trait selection. This cache is used
/// for things that do not have to do with the parameters in scope.
pub selection_cache: traits::SelectionCache<'tcx>,
/// Caches the representation hints for struct definitions.
pub repr_hint_cache: RefCell<DefIdMap<Rc<Vec<attr::ReprAttr>>>>,
/// Maps Expr NodeId's to their constant qualification.
pub const_qualif_map: RefCell<NodeMap<check_const::ConstQualif>>,
/// Caches CoerceUnsized kinds for impls on custom types.
pub custom_coerce_unsized_kinds: RefCell<DefIdMap<CustomCoerceUnsized>>,
/// Maps a cast expression to its kind. This is keyed on the
/// *from* expression of the cast, not the cast itself.
pub cast_kinds: RefCell<NodeMap<cast::CastKind>>,
}
impl<'tcx> ctxt<'tcx> {
pub fn node_types(&self) -> Ref<NodeMap<Ty<'tcx>>> { self.node_types.borrow() }
pub fn node_type_insert(&self, id: NodeId, ty: Ty<'tcx>) {
self.node_types.borrow_mut().insert(id, ty);
}
pub fn intern_trait_def(&self, def: TraitDef<'tcx>) -> &'tcx TraitDef<'tcx> {
let did = def.trait_ref.def_id;
let interned = self.arenas.trait_defs.alloc(def);
self.trait_defs.borrow_mut().insert(did, interned);
interned
}
pub fn intern_stability(&self, stab: attr::Stability) -> &'tcx attr::Stability {
if let Some(st) = self.stability_interner.borrow().get(&stab) {
return st;
}
let interned = self.arenas.stability.alloc(stab);
self.stability_interner.borrow_mut().insert(interned, interned);
interned
}
pub fn store_free_region_map(&self, id: NodeId, map: FreeRegionMap) {
self.free_region_maps.borrow_mut()
.insert(id, map);
}
pub fn free_region_map(&self, id: NodeId) -> FreeRegionMap {
self.free_region_maps.borrow()[&id].clone()
}
}
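// The interning pattern above (probe the interner map first, otherwise
// arena-allocate and record the new reference) is what guarantees one
// canonical reference per interned value. A sketch, given some
// `stab: attr::Stability`:
//
//     let a = tcx.intern_stability(stab.clone());
//     let b = tcx.intern_stability(stab);
//     assert!(a as *const _ == b as *const _); // same arena allocation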
// Flags that we track on types. These flags are propagated upwards
// through the type during type construction, so that we can quickly
// check whether the type has various kinds of types in it without
// recursing over the type itself.
bitflags! {
flags TypeFlags: u32 {
const HAS_PARAMS = 1 << 0,
const HAS_SELF = 1 << 1,
const HAS_TY_INFER = 1 << 2,
const HAS_RE_INFER = 1 << 3,
const HAS_RE_LATE_BOUND = 1 << 4,
const HAS_REGIONS = 1 << 5,
const HAS_TY_ERR = 1 << 6,
const HAS_PROJECTION = 1 << 7,
const HAS_TY_CLOSURE = 1 << 8,
const NEEDS_SUBST = TypeFlags::HAS_PARAMS.bits |
TypeFlags::HAS_SELF.bits |
TypeFlags::HAS_REGIONS.bits,
// Flags representing the nominal content of a type,
// computed by FlagsComputation
const NOMINAL_FLAGS = TypeFlags::HAS_PARAMS.bits |
TypeFlags::HAS_SELF.bits |
TypeFlags::HAS_TY_INFER.bits |
TypeFlags::HAS_RE_INFER.bits |
TypeFlags::HAS_RE_LATE_BOUND.bits |
TypeFlags::HAS_REGIONS.bits |
TypeFlags::HAS_TY_ERR.bits |
TypeFlags::HAS_PROJECTION.bits,
// Caches for type_is_sized, type_moves_by_default
const SIZEDNESS_CACHED = 1 << 16,
const IS_SIZED = 1 << 17,
const MOVENESS_CACHED = 1 << 18,
const MOVES_BY_DEFAULT = 1 << 19,
}
}
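// Illustrative check against these flags (a sketch; callers normally go
// through the `type_has_*` helpers defined later in this module):
//
//     if ty.flags.get().intersects(TypeFlags::NEEDS_SUBST) {
//         // the type mentions Self, a type parameter, or a region, so it
//         // must be substituted before it can be used concretely
//     }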
macro_rules! sty_debug_print {
($ctxt: expr, $($variant: ident),*) => {{
// curious inner module to allow variant names to be used as
// variable names.
mod inner {
use middle::ty;
#[derive(Copy, Clone)]
struct DebugStat {
total: usize,
region_infer: usize,
ty_infer: usize,
both_infer: usize,
}
pub fn go(tcx: &ty::ctxt) {
let mut total = DebugStat {
total: 0,
region_infer: 0, ty_infer: 0, both_infer: 0,
};
$(let mut $variant = total;)*
for (_, t) in &*tcx.interner.borrow() {
let variant = match t.sty {
ty::ty_bool | ty::ty_char | ty::ty_int(..) | ty::ty_uint(..) |
ty::ty_float(..) | ty::ty_str => continue,
ty::ty_err => /* unimportant */ continue,
$(ty::$variant(..) => &mut $variant,)*
};
let region = t.flags.get().intersects(ty::TypeFlags::HAS_RE_INFER);
let ty = t.flags.get().intersects(ty::TypeFlags::HAS_TY_INFER);
variant.total += 1;
total.total += 1;
if region { total.region_infer += 1; variant.region_infer += 1 }
if ty { total.ty_infer += 1; variant.ty_infer += 1 }
if region && ty { total.both_infer += 1; variant.both_infer += 1 }
}
println!("Ty interner total ty region both");
$(println!(" {:18}: {uses:6} {usespc:4.1}%, \
{ty:4.1}% {region:5.1}% {both:4.1}%",
stringify!($variant),
uses = $variant.total,
usespc = $variant.total as f64 * 100.0 / total.total as f64,
ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
region = $variant.region_infer as f64 * 100.0 / total.total as f64,
both = $variant.both_infer as f64 * 100.0 / total.total as f64);
)*
println!(" total {uses:6} \
{ty:4.1}% {region:5.1}% {both:4.1}%",
uses = total.total,
ty = total.ty_infer as f64 * 100.0 / total.total as f64,
region = total.region_infer as f64 * 100.0 / total.total as f64,
both = total.both_infer as f64 * 100.0 / total.total as f64)
}
}
inner::go($ctxt)
}}
}
impl<'tcx> ctxt<'tcx> {
pub fn print_debug_stats(&self) {
sty_debug_print!(
self,
ty_enum, ty_uniq, ty_vec, ty_ptr, ty_rptr, ty_bare_fn, ty_trait,
ty_struct, ty_closure, ty_tup, ty_param, ty_infer, ty_projection);
println!("Substs interner: #{}", self.substs_interner.borrow().len());
println!("BareFnTy interner: #{}", self.bare_fn_interner.borrow().len());
println!("Region interner: #{}", self.region_interner.borrow().len());
println!("Stability interner: #{}", self.stability_interner.borrow().len());
}
}
#[derive(Debug)]
pub struct TyS<'tcx> {
pub sty: sty<'tcx>,
pub flags: Cell<TypeFlags>,
// the maximal depth of any bound regions appearing in this type.
region_depth: u32,
}
impl fmt::Debug for TypeFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.bits)
}
}
impl<'tcx> PartialEq for TyS<'tcx> {
#[inline]
fn eq(&self, other: &TyS<'tcx>) -> bool {
// (self as *const _) == (other as *const _)
(self as *const TyS<'tcx>) == (other as *const TyS<'tcx>)
}
}
impl<'tcx> Eq for TyS<'tcx> {}
impl<'tcx> Hash for TyS<'tcx> {
fn hash<H: Hasher>(&self, s: &mut H) {
(self as *const TyS).hash(s)
}
}
pub type Ty<'tcx> = &'tcx TyS<'tcx>;
/// An entry in the type interner.
pub struct InternedTy<'tcx> {
ty: Ty<'tcx>
}
// NB: An InternedTy compares and hashes as a sty.
impl<'tcx> PartialEq for InternedTy<'tcx> {
fn eq(&self, other: &InternedTy<'tcx>) -> bool {
self.ty.sty == other.ty.sty
}
}
impl<'tcx> Eq for InternedTy<'tcx> {}
impl<'tcx> Hash for InternedTy<'tcx> {
fn hash<H: Hasher>(&self, s: &mut H) {
self.ty.sty.hash(s)
}
}
impl<'tcx> Borrow<sty<'tcx>> for InternedTy<'tcx> {
fn borrow<'a>(&'a self) -> &'a sty<'tcx> {
&self.ty.sty
}
}
pub fn type_has_params(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_PARAMS)
}
pub fn type_has_self(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_SELF)
}
pub fn type_has_ty_infer(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_TY_INFER)
}
pub fn type_needs_infer(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_TY_INFER | TypeFlags::HAS_RE_INFER)
}
pub fn type_has_projection(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_PROJECTION)
}
pub fn type_has_ty_closure(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_TY_CLOSURE)
}
pub fn type_has_late_bound_regions(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_RE_LATE_BOUND)
}
/// An "escaping region" is a bound region whose binder is not part of `t`.
///
/// So, for example, consider a type like the following, which has two binders:
///
/// for<'a> fn(x: for<'b> fn(&'a isize, &'b isize))
/// ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ outer scope
/// ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ inner scope
///
/// This type has *bound regions* (`'a`, `'b`), but it does not have escaping regions, because the
/// binders of both `'a` and `'b` are part of the type itself. However, if we consider the *inner
/// fn type*, that type has an escaping region: `'a`.
///
/// Note that what I'm calling an "escaping region" is often just called a "free region". However,
/// we already use the term "free region". It refers to the regions that we use to represent bound
/// regions on a fn definition while we are typechecking its body.
///
/// To clarify, conceptually there is no particular difference between an "escaping" region and a
/// "free" region. However, there is a big difference in practice. Basically, when "entering" a
/// binding level, one is generally required to do some sort of processing to a bound region, such
/// as replacing it with a fresh/skolemized region, or making an entry in the environment to
/// represent the scope to which it is attached, etc. An escaping region represents a bound region
/// for which this processing has not yet been done.
pub fn type_has_escaping_regions(ty: Ty) -> bool {
type_escapes_depth(ty, 0)
}
pub fn type_escapes_depth(ty: Ty, depth: u32) -> bool {
ty.region_depth > depth
}
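// For the doc example above: the outer fn type has no escaping regions
// (both binders are part of it), while the inner fn type considered on its
// own does, because `'a` is bound one level further out. A sketch, where
// `outer_fn_ty`/`inner_fn_ty` are illustrative names for those two types:
//
//     assert!(!type_has_escaping_regions(outer_fn_ty));
//     assert!(type_escapes_depth(inner_fn_ty, 0));  // `'a` escapes...
//     assert!(!type_escapes_depth(inner_fn_ty, 1)); // ...but only one level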
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct BareFnTy<'tcx> {
pub unsafety: ast::Unsafety,
pub abi: abi::Abi,
pub sig: PolyFnSig<'tcx>,
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ClosureTy<'tcx> {
pub unsafety: ast::Unsafety,
pub abi: abi::Abi,
pub sig: PolyFnSig<'tcx>,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum FnOutput<'tcx> {
FnConverging(Ty<'tcx>),
FnDiverging
}
impl<'tcx> FnOutput<'tcx> {
pub fn diverges(&self) -> bool {
*self == FnDiverging
}
pub fn unwrap(self) -> Ty<'tcx> {
match self {
ty::FnConverging(t) => t,
ty::FnDiverging => unreachable!()
}
}
pub fn unwrap_or(self, def: Ty<'tcx>) -> Ty<'tcx> {
match self {
ty::FnConverging(t) => t,
ty::FnDiverging => def
}
}
}
pub type PolyFnOutput<'tcx> = Binder<FnOutput<'tcx>>;
impl<'tcx> PolyFnOutput<'tcx> {
pub fn diverges(&self) -> bool {
self.0.diverges()
}
}
/// Signature of a function type, which I have arbitrarily
/// decided to use to refer to the input/output types.
///
/// - `inputs` is the list of arguments and their modes.
/// - `output` is the return type.
/// - `variadic` indicates whether this is a variadic function. (only true for foreign fns)
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct FnSig<'tcx> {
pub inputs: Vec<Ty<'tcx>>,
pub output: FnOutput<'tcx>,
pub variadic: bool
}
pub type PolyFnSig<'tcx> = Binder<FnSig<'tcx>>;
impl<'tcx> PolyFnSig<'tcx> {
pub fn inputs(&self) -> ty::Binder<Vec<Ty<'tcx>>> {
self.map_bound_ref(|fn_sig| fn_sig.inputs.clone())
}
pub fn input(&self, index: usize) -> ty::Binder<Ty<'tcx>> {
self.map_bound_ref(|fn_sig| fn_sig.inputs[index])
}
pub fn output(&self) -> ty::Binder<FnOutput<'tcx>> {
self.map_bound_ref(|fn_sig| fn_sig.output.clone())
}
pub fn variadic(&self) -> bool {
self.skip_binder().variadic
}
}
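// Sketch of consuming a signature through its binder, given some
// `sig: PolyFnSig`:
//
//     let ret = sig.output();       // Binder<FnOutput>, binder preserved
//     if !sig.variadic() {
//         let first = sig.input(0); // Binder<Ty>, still undischarged
//     }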
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct ParamTy {
pub space: subst::ParamSpace,
pub idx: u32,
pub name: ast::Name,
}
/// A [De Bruijn index][dbi] is a standard means of representing
/// regions (and perhaps later types) in a higher-ranked setting. In
/// particular, imagine a type like this:
///
/// for<'a> fn(for<'b> fn(&'b isize, &'a isize), &'a char)
/// ^ ^ | | |
/// | | | | |
/// | +------------+ 1 | |
/// | | |
/// +--------------------------------+ 2 |
/// | |
/// +------------------------------------------+ 1
///
/// In this type, there are two binders (the outer fn and the inner
/// fn). We need to be able to determine, for any given region, which
/// fn type it is bound by, the inner or the outer one. There are
/// various ways you can do this, but a De Bruijn index is one of the
/// more convenient and has some nice properties. The basic idea is to
/// count the number of binders, inside out. Some examples should help
/// clarify what I mean.
///
/// Let's start with the reference type `&'b isize` that is the first
/// argument to the inner function. This region `'b` is assigned a De
/// Bruijn index of 1, meaning "the innermost binder" (in this case, a
/// fn). The region `'a` that appears in the second argument type (`&'a
/// isize`) would then be assigned a De Bruijn index of 2, meaning "the
/// second-innermost binder". (These indices are written on the arrays
/// in the diagram).
///
/// What is interesting is that the De Bruijn index attached to a particular
/// variable will vary depending on where it appears. For example,
/// the final type `&'a char` also refers to the region `'a` declared on
/// the outermost fn. But this time, this reference is not nested within
/// any other binders (i.e., it is not an argument to the inner fn, but
/// rather the outer one). Therefore, in this case, it is assigned a
/// De Bruijn index of 1, because the innermost binder in that location
/// is the outer fn.
///
/// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct DebruijnIndex {
// We maintain the invariant that this is never 0. So 1 indicates
// the innermost binder. To ensure this, create with `DebruijnIndex::new`.
pub depth: u32,
}
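// Tying this to the diagram above (a sketch): inside the inner fn, `'b`
// has depth 1 and `'a` has depth 2; in the outer fn's `&'a char` argument,
// `'a` has depth 1 again.
//
//     let r = Region::ReLateBound(DebruijnIndex::new(1), BoundRegion::BrAnon(0));
//     assert!(r.escapes_depth(0));  // bound outside the current (zero) binders
//     assert!(!r.escapes_depth(1)); // ...but captured once one binder is entered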
/// Representation of regions:
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum Region {
// Region bound in a type or fn declaration which will be
// substituted 'early' -- that is, at the same time when type
// parameters are substituted.
ReEarlyBound(EarlyBoundRegion),
// Region bound in a function scope, which will be substituted when the
// function is called.
ReLateBound(DebruijnIndex, BoundRegion),
/// When checking a function body, the types of all arguments and so forth
/// that refer to bound region parameters are modified to refer to free
/// region parameters.
ReFree(FreeRegion),
/// A concrete region naming some statically determined extent
/// (e.g. an expression or sequence of statements) within the
/// current function.
ReScope(region::CodeExtent),
/// Static data that has an "infinite" lifetime. Top in the region lattice.
ReStatic,
/// A region variable. Should not exist after typeck.
ReInfer(InferRegion),
/// Empty lifetime is for data that is never accessed.
/// Bottom in the region lattice. We treat ReEmpty somewhat
/// specially; at least right now, we do not generate instances of
/// it during the GLB computations, but rather
/// generate an error instead. This is to improve error messages.
/// The only way to get an instance of ReEmpty is to have a region
/// variable with no constraints.
ReEmpty,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct EarlyBoundRegion {
pub param_id: ast::NodeId,
pub space: subst::ParamSpace,
pub index: u32,
pub name: ast::Name,
}
/// Upvars do not get their own node-id. Instead, we use the pair of
/// the original var id (that is, the root variable that is referenced
/// by the upvar) and the id of the closure expression.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct UpvarId {
pub var_id: ast::NodeId,
pub closure_expr_id: ast::NodeId,
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy)]
pub enum BorrowKind {
/// Data must be immutable and is aliasable.
ImmBorrow,
/// Data must be immutable but not aliasable. This kind of borrow
/// cannot currently be expressed by the user and is used only in
/// implicit closure bindings. It is needed when the closure
/// is borrowing or mutating a mutable referent, e.g.:
///
/// let x: &mut isize = ...;
/// let y = || *x += 5;
///
/// If we were to try to translate this closure into a more explicit
/// form, we'd encounter an error with the code as written:
///
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
///
/// This is then illegal because you cannot mutate a `&mut` found
/// in an aliasable location. To solve, you'd have to translate with
/// an `&mut` borrow:
///
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
///
/// Now the assignment to `**env.x` is legal, but creating a
/// mutable pointer to `x` is not because `x` is not mutable. We
/// could fix this by declaring `x` as `let mut x`. This is ok in
/// user code, if awkward, but extra weird for closures, since the
/// borrow is hidden.
///
/// So we introduce a "unique imm" borrow -- the referent is
/// immutable, but not aliasable. This solves the problem. For
/// simplicity, we don't give users the way to express this
/// borrow, it's just used when translating closures.
UniqueImmBorrow,
/// Data is mutable and not aliasable.
MutBorrow
}
/// Information describing the capture of an upvar. This is computed
/// during `typeck`, specifically by `regionck`.
#[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum UpvarCapture {
/// Upvar is captured by value. This is always true when the
/// closure is labeled `move`, but can also be true in other cases
/// depending on inference.
ByValue,
/// Upvar is captured by reference.
ByRef(UpvarBorrow),
}
#[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub struct UpvarBorrow {
/// The kind of borrow: by-ref upvars have access to shared
/// immutable borrows, which are not part of the normal language
/// syntax.
pub kind: BorrowKind,
/// Region of the resulting reference.
pub region: ty::Region,
}
pub type UpvarCaptureMap = FnvHashMap<UpvarId, UpvarCapture>;
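// Sketch: for the closure in the `UniqueImmBorrow` example above
// (`let y = || *x += 5;`), regionck would record a capture roughly like
//
//     UpvarCapture::ByRef(UpvarBorrow {
//         kind: BorrowKind::UniqueImmBorrow,
//         region: /* inferred region for the borrow */,
//     })
//
// keyed by the `UpvarId` pairing `x`'s var id with the closure expr id.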
impl Region {
pub fn is_bound(&self) -> bool {
match *self {
ty::ReEarlyBound(..) => true,
ty::ReLateBound(..) => true,
_ => false
}
}
pub fn escapes_depth(&self, depth: u32) -> bool {
match *self {
ty::ReLateBound(debruijn, _) => debruijn.depth > depth,
_ => false,
}
}
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Debug, Copy)]
/// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`".
pub struct FreeRegion {
pub scope: region::DestructionScopeData,
pub bound_region: BoundRegion
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum BoundRegion {
/// An anonymous region parameter for a given fn (&T)
BrAnon(u32),
/// Named region parameters for functions ('a in &'a T)
///
/// The def-id is needed to distinguish free regions in
/// the event of shadowing.
BrNamed(ast::DefId, ast::Name),
/// Fresh bound identifiers created during GLB computations.
BrFresh(u32),
// Anonymous region for the implicit env pointer parameter
// to a closure
BrEnv
}
// NB: If you change this, you'll probably want to change the corresponding
// AST structure in libsyntax/ast.rs as well.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum sty<'tcx> {
ty_bool,
ty_char,
ty_int(ast::IntTy),
ty_uint(ast::UintTy),
ty_float(ast::FloatTy),
/// Substs here, possibly against intuition, *may* contain `ty_param`s.
/// That is, even after substitution it is possible that there are type
/// variables. This happens when the `ty_enum` corresponds to an enum
/// definition and not a concrete use of it. To get the correct `ty_enum`
/// from the tcx, use the `NodeId` from the `ast::Ty` and look it up in
/// the `ast_ty_to_ty_cache`. This is probably true for `ty_struct` as
/// well.
ty_enum(DefId, &'tcx Substs<'tcx>),
ty_uniq(Ty<'tcx>),
ty_str,
ty_vec(Ty<'tcx>, Option<usize>), // Second field is length.
ty_ptr(mt<'tcx>),
ty_rptr(&'tcx Region, mt<'tcx>),
// If the def-id is Some(_), then this is the type of a specific
// fn item. Otherwise, if None, it is a fn pointer type.
ty_bare_fn(Option<DefId>, &'tcx BareFnTy<'tcx>),
ty_trait(Box<TyTrait<'tcx>>),
ty_struct(DefId, &'tcx Substs<'tcx>),
ty_closure(DefId, &'tcx Substs<'tcx>),
ty_tup(Vec<Ty<'tcx>>),
ty_projection(ProjectionTy<'tcx>),
ty_param(ParamTy), // type parameter
ty_infer(InferTy), // something used only during inference/typeck
ty_err, // Also only used during inference/typeck, to represent
// the type of an erroneous expression (helps cut down
// on non-useful type error messages)
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TyTrait<'tcx> {
pub principal: ty::PolyTraitRef<'tcx>,
pub bounds: ExistentialBounds<'tcx>,
}
impl<'tcx> TyTrait<'tcx> {
pub fn principal_def_id(&self) -> ast::DefId {
self.principal.0.def_id
}
/// Object types don't have a self-type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self-type. A common choice is `mk_err()`
/// or some skolemized type.
pub fn principal_trait_ref_with_self_ty(&self,
tcx: &ctxt<'tcx>,
self_ty: Ty<'tcx>)
-> ty::PolyTraitRef<'tcx>
{
// otherwise the escaping regions would be captured by the binder
assert!(!self_ty.has_escaping_regions());
ty::Binder(TraitRef {
def_id: self.principal.0.def_id,
substs: tcx.mk_substs(self.principal.0.substs.with_self_ty(self_ty)),
})
}
pub fn projection_bounds_with_self_ty(&self,
tcx: &ctxt<'tcx>,
self_ty: Ty<'tcx>)
-> Vec<ty::PolyProjectionPredicate<'tcx>>
{
// otherwise the escaping regions would be captured by the binders
assert!(!self_ty.has_escaping_regions());
self.bounds.projection_bounds.iter()
.map(|in_poly_projection_predicate| {
let in_projection_ty = &in_poly_projection_predicate.0.projection_ty;
let substs = tcx.mk_substs(in_projection_ty.trait_ref.substs.with_self_ty(self_ty));
let trait_ref = ty::TraitRef::new(in_projection_ty.trait_ref.def_id,
substs);
let projection_ty = ty::ProjectionTy {
trait_ref: trait_ref,
item_name: in_projection_ty.item_name
};
ty::Binder(ty::ProjectionPredicate {
projection_ty: projection_ty,
ty: in_poly_projection_predicate.0.ty
})
})
.collect()
}
}
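// Sketch: recovering a usable trait-ref from an object type's principal
// requires supplying *some* self type; per the docs above, the error type
// is a common placeholder choice:
//
//     let self_ty = tcx.types.err;
//     let tref = ty_trait.principal_trait_ref_with_self_ty(tcx, self_ty);
//     // `tref` is now a PolyTraitRef whose SelfSpace has been filled in.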
/// A complete reference to a trait. These take numerous guises in syntax,
/// but perhaps the most recognizable form is in a where clause:
///
/// T : Foo<U>
///
/// This would be represented by a trait-reference where the def-id is the
/// def-id for the trait `Foo` and the substs defines `T` as parameter 0 in the
/// `SelfSpace` and `U` as parameter 0 in the `TypeSpace`.
///
/// Trait references also appear in object types like `Foo<U>`, but in
/// that case the `Self` parameter is absent from the substitutions.
///
/// Note that a `TraitRef` introduces a level of region binding, to
/// account for higher-ranked trait bounds like `T : for<'a> Foo<&'a
/// U>` or higher-ranked object types.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct TraitRef<'tcx> {
pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
}
pub type PolyTraitRef<'tcx> = Binder<TraitRef<'tcx>>;
impl<'tcx> PolyTraitRef<'tcx> {
pub fn self_ty(&self) -> Ty<'tcx> {
self.0.self_ty()
}
pub fn def_id(&self) -> ast::DefId {
self.0.def_id
}
pub fn substs(&self) -> &'tcx Substs<'tcx> {
// FIXME(#20664) every use of this fn is probably a bug, it should yield Binder<>
self.0.substs
}
pub fn input_types(&self) -> &[Ty<'tcx>] {
// FIXME(#20664) every use of this fn is probably a bug, it should yield Binder<>
self.0.input_types()
}
pub fn to_poly_trait_predicate(&self) -> PolyTraitPredicate<'tcx> {
// Note that we preserve binding levels
Binder(TraitPredicate { trait_ref: self.0.clone() })
}
}
/// Binder is a binder for higher-ranked lifetimes. It is part of the
/// compiler's representation for things like `for<'a> Fn(&'a isize)`
/// (which would be represented by the type `PolyTraitRef ==
/// Binder<TraitRef>`). Note that when we skolemize, instantiate,
/// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`).
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Binder<T>(pub T);
impl<T> Binder<T> {
/// Skips the binder and returns the "bound" value. This is a
/// risky thing to do because it's easy to get confused about
/// debruijn indices and the like. It is usually better to
/// discharge the binder using `no_late_bound_regions` or
/// `replace_late_bound_regions` or something like
/// that. `skip_binder` is only valid when you are either
/// extracting data that has nothing to do with bound regions, you
/// are doing some sort of test that does not involve bound
/// regions, or you are being very careful about your depth
/// accounting.
///
/// Some examples where `skip_binder` is reasonable:
/// - extracting the def-id from a PolyTraitRef;
/// - comparing the self type of a PolyTraitRef to see if it is equal to
/// a type parameter `X`, since the type `X` does not reference any regions
pub fn skip_binder(&self) -> &T {
&self.0
}
pub fn as_ref(&self) -> Binder<&T> {
ty::Binder(&self.0)
}
pub fn map_bound_ref<F,U>(&self, f: F) -> Binder<U>
where F: FnOnce(&T) -> U
{
self.as_ref().map_bound(f)
}
pub fn map_bound<F,U>(self, f: F) -> Binder<U>
where F: FnOnce(T) -> U
{
ty::Binder(f(self.0))
}
}
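// Sketch of the binder discipline: prefer `map_bound`/`map_bound_ref`,
// which visibly preserve the binding level, and reserve `skip_binder` for
// the narrow cases listed above. Given some `poly_ref: PolyTraitRef`:
//
//     let def_id = poly_ref.skip_binder().def_id;            // no regions involved
//     let self_ty = poly_ref.map_bound_ref(|r| r.self_ty()); // stays a Binder<Ty>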
#[derive(Clone, Copy, PartialEq)]
pub enum IntVarValue {
IntType(ast::IntTy),
UintType(ast::UintTy),
}
#[derive(Clone, Copy, Debug)]
pub enum terr_vstore_kind {
terr_vec,
terr_str,
terr_fn,
terr_trait
}
#[derive(Clone, Copy, Debug)]
pub struct expected_found<T> {
pub expected: T,
pub found: T
}
// Data structures used in type unification
#[derive(Clone, Copy, Debug)]
pub enum type_err<'tcx> {
terr_mismatch,
terr_unsafety_mismatch(expected_found<ast::Unsafety>),
terr_abi_mismatch(expected_found<abi::Abi>),
terr_mutability,
terr_box_mutability,
terr_ptr_mutability,
terr_ref_mutability,
terr_vec_mutability,
terr_tuple_size(expected_found<usize>),
terr_fixed_array_size(expected_found<usize>),
terr_ty_param_size(expected_found<usize>),
terr_arg_count,
terr_regions_does_not_outlive(Region, Region),
terr_regions_not_same(Region, Region),
terr_regions_no_overlap(Region, Region),
terr_regions_insufficiently_polymorphic(BoundRegion, Region),
terr_regions_overly_polymorphic(BoundRegion, Region),
terr_sorts(expected_found<Ty<'tcx>>),
terr_integer_as_char,
terr_int_mismatch(expected_found<IntVarValue>),
terr_float_mismatch(expected_found<ast::FloatTy>),
terr_traits(expected_found<ast::DefId>),
terr_builtin_bounds(expected_found<BuiltinBounds>),
terr_variadic_mismatch(expected_found<bool>),
terr_cyclic_ty,
terr_convergence_mismatch(expected_found<bool>),
terr_projection_name_mismatched(expected_found<ast::Name>),
terr_projection_bounds_length(expected_found<usize>),
}
/// Bounds suitable for a named type parameter like `A` in `fn foo<A>`
/// as well as the existential type parameter in an object type.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct ParamBounds<'tcx> {
pub region_bounds: Vec<ty::Region>,
pub builtin_bounds: BuiltinBounds,
pub trait_bounds: Vec<PolyTraitRef<'tcx>>,
pub projection_bounds: Vec<PolyProjectionPredicate<'tcx>>,
}
/// Bounds suitable for an existentially quantified type parameter
/// such as those that appear in object types or closure types. The
/// major difference between this case and `ParamBounds` is that
/// general purpose trait bounds are omitted and there must be
/// *exactly one* region.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct ExistentialBounds<'tcx> {
pub region_bound: ty::Region,
pub builtin_bounds: BuiltinBounds,
pub projection_bounds: Vec<PolyProjectionPredicate<'tcx>>,
}
pub type BuiltinBounds = EnumSet<BuiltinBound>;
#[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash,
Debug, Copy)]
#[repr(usize)]
pub enum BuiltinBound {
BoundSend,
BoundSized,
BoundCopy,
BoundSync,
}
pub fn empty_builtin_bounds() -> BuiltinBounds {
EnumSet::new()
}
pub fn all_builtin_bounds() -> BuiltinBounds {
let mut set = EnumSet::new();
set.insert(BoundSend);
set.insert(BoundSized);
set.insert(BoundSync);
set
}
/// An existential bound that does not implement any traits.
pub fn region_existential_bound<'tcx>(r: ty::Region) -> ExistentialBounds<'tcx> {
ty::ExistentialBounds { region_bound: r,
builtin_bounds: empty_builtin_bounds(),
projection_bounds: Vec::new() }
}
impl CLike for BuiltinBound {
fn to_usize(&self) -> usize {
*self as usize
}
fn from_usize(v: usize) -> BuiltinBound {
unsafe { mem::transmute(v) }
}
}
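// Since each `BuiltinBound` maps to a single bit through the `CLike` impl
// above, bound sets are cheap to copy and combine. A small sketch:
//
//     let mut bounds = empty_builtin_bounds();
//     bounds.insert(BoundSend);
//     bounds.insert(BoundSized);
//     // `bounds` now represents { Send, Sized } in two bits.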
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct TyVid {
pub index: u32
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct IntVid {
pub index: u32
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct FloatVid {
pub index: u32
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct RegionVid {
pub index: u32
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum InferTy {
TyVar(TyVid),
IntVar(IntVid),
FloatVar(FloatVid),
/// A `FreshTy` is one that is generated as a replacement for an
/// unbound type variable. This is convenient for caching etc. See
/// `middle::infer::freshen` for more details.
FreshTy(u32),
FreshIntTy(u32),
FreshFloatTy(u32)
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum UnconstrainedNumeric {
UnconstrainedFloat,
UnconstrainedInt,
Neither,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Eq, Hash, Debug, Copy)]
pub enum InferRegion {
ReVar(RegionVid),
ReSkolemized(u32, BoundRegion)
}
impl cmp::PartialEq for InferRegion {
fn eq(&self, other: &InferRegion) -> bool {
match (*self, *other) {
(ReVar(rva), ReVar(rvb)) => {
rva == rvb
}
(ReSkolemized(rva, _), ReSkolemized(rvb, _)) => {
rva == rvb
}
_ => false
}
}
fn ne(&self, other: &InferRegion) -> bool {
!((*self) == (*other))
}
}
impl fmt::Debug for TyVid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result{
write!(f, "_#{}t", self.index)
}
}
impl fmt::Debug for IntVid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "_#{}i", self.index)
}
}
impl fmt::Debug for FloatVid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "_#{}f", self.index)
}
}
impl fmt::Debug for RegionVid {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "'_#{}r", self.index)
}
}
impl<'tcx> fmt::Debug for FnSig<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({:?}; variadic: {})->{:?}", self.inputs, self.variadic, self.output)
}
}
impl fmt::Debug for InferTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
TyVar(ref v) => v.fmt(f),
IntVar(ref v) => v.fmt(f),
FloatVar(ref v) => v.fmt(f),
FreshTy(v) => write!(f, "FreshTy({:?})", v),
FreshIntTy(v) => write!(f, "FreshIntTy({:?})", v),
FreshFloatTy(v) => write!(f, "FreshFloatTy({:?})", v)
}
}
}
impl fmt::Debug for IntVarValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
IntType(ref v) => v.fmt(f),
UintType(ref v) => v.fmt(f),
}
}
}
/// Default region to use for the bound of objects that are
/// supplied as the value for a type parameter. This is derived
/// from `T:'a` annotations appearing in the type definition. If the
/// `object_lifetime_default` field of `TypeParameterDef` is `None`,
/// then the default is inherited from the surrounding context. See
/// RFC #599 for details.
#[derive(Copy, Clone, Debug)]
pub enum ObjectLifetimeDefault {
/// Require an explicit annotation. Occurs when multiple
/// `T:'a` constraints are found.
Ambiguous,
/// Use the given region as the default.
Specific(Region),
}
#[derive(Clone, Debug)]
pub struct TypeParameterDef<'tcx> {
pub name: ast::Name,
pub def_id: ast::DefId,
pub space: subst::ParamSpace,
pub index: u32,
pub default: Option<Ty<'tcx>>,
pub object_lifetime_default: Option<ObjectLifetimeDefault>,
}
#[derive(RustcEncodable, RustcDecodable, Clone, Debug)]
pub struct RegionParameterDef {
pub name: ast::Name,
pub def_id: ast::DefId,
pub space: subst::ParamSpace,
pub index: u32,
pub bounds: Vec<ty::Region>,
}
impl RegionParameterDef {
pub fn to_early_bound_region(&self) -> ty::Region {
ty::ReEarlyBound(ty::EarlyBoundRegion {
param_id: self.def_id.node,
space: self.space,
index: self.index,
name: self.name,
})
}
pub fn to_bound_region(&self) -> ty::BoundRegion {
ty::BoundRegion::BrNamed(self.def_id, self.name)
}
}
/// Information about the formal type/lifetime parameters associated
/// with an item or method. Analogous to ast::Generics.
#[derive(Clone, Debug)]
pub struct Generics<'tcx> {
pub types: VecPerParamSpace<TypeParameterDef<'tcx>>,
pub regions: VecPerParamSpace<RegionParameterDef>,
}
impl<'tcx> Generics<'tcx> {
pub fn empty() -> Generics<'tcx> {
Generics {
types: VecPerParamSpace::empty(),
regions: VecPerParamSpace::empty(),
}
}
pub fn is_empty(&self) -> bool {
self.types.is_empty() && self.regions.is_empty()
}
pub fn has_type_params(&self, space: subst::ParamSpace) -> bool {
!self.types.is_empty_in(space)
}
pub fn has_region_params(&self, space: subst::ParamSpace) -> bool {
!self.regions.is_empty_in(space)
}
}
/// Bounds on generics.
#[derive(Clone, Debug)]
pub struct GenericPredicates<'tcx> {
pub predicates: VecPerParamSpace<Predicate<'tcx>>,
}
impl<'tcx> GenericPredicates<'tcx> {
pub fn empty() -> GenericPredicates<'tcx> {
GenericPredicates {
predicates: VecPerParamSpace::empty(),
}
}
pub fn instantiate(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>)
-> InstantiatedPredicates<'tcx> {
InstantiatedPredicates {
predicates: self.predicates.subst(tcx, substs),
}
}
pub fn instantiate_supertrait(&self,
tcx: &ty::ctxt<'tcx>,
poly_trait_ref: &ty::PolyTraitRef<'tcx>)
-> InstantiatedPredicates<'tcx>
{
InstantiatedPredicates {
predicates: self.predicates.map(|pred| pred.subst_supertrait(tcx, poly_trait_ref))
}
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum Predicate<'tcx> {
/// Corresponds to `where Foo : Bar<A,B,C>`. `Foo` here would be
/// the `Self` type of the trait reference and `A`, `B`, and `C`
/// would be the parameters in the `TypeSpace`.
Trait(PolyTraitPredicate<'tcx>),
/// where `T1 == T2`.
Equate(PolyEquatePredicate<'tcx>),
/// where 'a : 'b
RegionOutlives(PolyRegionOutlivesPredicate),
/// where T : 'a
TypeOutlives(PolyTypeOutlivesPredicate<'tcx>),
/// where <T as TraitRef>::Name == X, approximately.
/// See `ProjectionPredicate` struct for details.
Projection(PolyProjectionPredicate<'tcx>),
}
impl<'tcx> Predicate<'tcx> {
/// Performs a substitution suitable for going from a
/// poly-trait-ref to supertraits that must hold if that
/// poly-trait-ref holds. This is slightly different from a normal
/// substitution in terms of what happens with bound regions. See
/// lengthy comment below for details.
pub fn subst_supertrait(&self,
tcx: &ty::ctxt<'tcx>,
trait_ref: &ty::PolyTraitRef<'tcx>)
-> ty::Predicate<'tcx>
{
// The interaction between HRTB and supertraits is not entirely
// obvious. Let me walk you (and myself) through an example.
//
// Let's start with an easy case. Consider two traits:
//
// trait Foo<'a> : Bar<'a,'a> { }
// trait Bar<'b,'c> { }
//
// Now, if we have a trait reference `for<'x> T : Foo<'x>`, then
// we can deduce that `for<'x> T : Bar<'x,'x>`. Basically, if we
// knew that `Foo<'x>` (for any 'x) then we also know that
// `Bar<'x,'x>` (for any 'x). This more-or-less falls out from
// normal substitution.
//
// In terms of why this is sound, the idea is that whenever there
// is an impl of `T:Foo<'a>`, it must show that `T:Bar<'a,'a>`
// holds. So if there is an impl of `T:Foo<'a>` that applies to
// all `'a`, then we must know that `T:Bar<'a,'a>` holds for all
// `'a`.
//
// Another example to be careful of is this:
//
// trait Foo1<'a> : for<'b> Bar1<'a,'b> { }
// trait Bar1<'b,'c> { }
//
// Here, if we have `for<'x> T : Foo1<'x>`, then what do we know?
// The answer is that we know `for<'x,'b> T : Bar1<'x,'b>`. The
// reason is similar to the previous example: any impl of
// `T:Foo1<'x>` must show that `for<'b> T : Bar1<'x, 'b>`. So
// basically we would want to collapse the bound lifetimes from
// the input (`trait_ref`) and the supertraits.
//
// To achieve this in practice is fairly straightforward. Let's
// consider the more complicated scenario:
//
// - We start out with `for<'x> T : Foo1<'x>`. In this case, `'x`
// has a De Bruijn index of 1. We want to produce `for<'x,'b> T : Bar1<'x,'b>`,
// where both `'x` and `'b` would have a DB index of 1.
// The substitution from the input trait-ref is therefore going to be
// `'a => 'x` (where `'x` has a DB index of 1).
// - The super-trait-ref is `for<'b> Bar1<'a,'b>`, where `'a` is an
// early-bound parameter and `'b` is a late-bound parameter with a
// DB index of 1.
// - If we replace `'a` with `'x` from the input, it too will have
// a DB index of 1, and thus we'll have `for<'x,'b> Bar1<'x,'b>`
// just as we wanted.
//
// There is only one catch. If we just apply the substitution `'a
// => 'x` to `for<'b> Bar1<'a,'b>`, the substitution code will
// adjust the DB index because we are substituting into a binder (it
// tries to be so smart...) resulting in `for<'x> for<'b>
// Bar1<'x,'b>` (we have no syntax for this, so use your
// imagination). Basically the 'x will have DB index of 2 and 'b
// will have DB index of 1. Not quite what we want. So we apply
// the substitution to the *contents* of the trait reference,
// rather than the trait reference itself (put another way, the
// substitution code expects equal binding levels in the values
// from the substitution and the value being substituted into, and
// this trick achieves that).
let substs = &trait_ref.0.substs;
match *self {
Predicate::Trait(ty::Binder(ref data)) =>
Predicate::Trait(ty::Binder(data.subst(tcx, substs))),
Predicate::Equate(ty::Binder(ref data)) =>
Predicate::Equate(ty::Binder(data.subst(tcx, substs))),
Predicate::RegionOutlives(ty::Binder(ref data)) =>
Predicate::RegionOutlives(ty::Binder(data.subst(tcx, substs))),
Predicate::TypeOutlives(ty::Binder(ref data)) =>
Predicate::TypeOutlives(ty::Binder(data.subst(tcx, substs))),
Predicate::Projection(ty::Binder(ref data)) =>
Predicate::Projection(ty::Binder(data.subst(tcx, substs))),
}
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TraitPredicate<'tcx> {
pub trait_ref: TraitRef<'tcx>
}
pub type PolyTraitPredicate<'tcx> = ty::Binder<TraitPredicate<'tcx>>;
impl<'tcx> TraitPredicate<'tcx> {
pub fn def_id(&self) -> ast::DefId {
self.trait_ref.def_id
}
pub fn input_types(&self) -> &[Ty<'tcx>] {
self.trait_ref.substs.types.as_slice()
}
pub fn self_ty(&self) -> Ty<'tcx> {
self.trait_ref.self_ty()
}
}
impl<'tcx> PolyTraitPredicate<'tcx> {
pub fn def_id(&self) -> ast::DefId {
self.0.def_id()
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct EquatePredicate<'tcx>(pub Ty<'tcx>, pub Ty<'tcx>); // `0 == 1`
pub type PolyEquatePredicate<'tcx> = ty::Binder<EquatePredicate<'tcx>>;
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct OutlivesPredicate<A,B>(pub A, pub B); // `A : B`
pub type PolyOutlivesPredicate<A,B> = ty::Binder<OutlivesPredicate<A,B>>;
pub type PolyRegionOutlivesPredicate = PolyOutlivesPredicate<ty::Region, ty::Region>;
pub type PolyTypeOutlivesPredicate<'tcx> = PolyOutlivesPredicate<Ty<'tcx>, ty::Region>;
/// This kind of predicate has no *direct* correspondent in the
/// syntax, but it roughly corresponds to the syntactic forms:
///
/// 1. `T : TraitRef<..., Item=Type>`
/// 2. `<T as TraitRef<...>>::Item == Type` (NYI)
///
/// In particular, form #1 is "desugared" to the combination of a
/// normal trait predicate (`T : TraitRef<...>`) and one of these
/// predicates. Form #2 is a broader form in that it also permits
/// equality between arbitrary types. Processing an instance of Form
/// #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ProjectionPredicate<'tcx> {
pub projection_ty: ProjectionTy<'tcx>,
pub ty: Ty<'tcx>,
}
pub type PolyProjectionPredicate<'tcx> = Binder<ProjectionPredicate<'tcx>>;
impl<'tcx> PolyProjectionPredicate<'tcx> {
pub fn item_name(&self) -> ast::Name {
self.0.projection_ty.item_name // safe to skip the binder to access a name
}
pub fn sort_key(&self) -> (ast::DefId, ast::Name) {
self.0.projection_ty.sort_key()
}
}
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ProjectionTy<'tcx> {
/// The trait reference `T as Trait<..>`.
pub trait_ref: ty::TraitRef<'tcx>,
/// The name `N` of the associated type.
pub item_name: ast::Name,
}
impl<'tcx> ProjectionTy<'tcx> {
pub fn sort_key(&self) -> (ast::DefId, ast::Name) {
(self.trait_ref.def_id, self.item_name)
}
}
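// Sketch relating the pieces: a bound written `T : Iterator<Item=u32>`
// desugars (form #1 in the `ProjectionPredicate` docs above) into a normal
// trait predicate `T : Iterator` plus a projection predicate shaped
// roughly like
//
//     ProjectionPredicate {
//         projection_ty: ProjectionTy {
//             trait_ref: /* `T as Iterator` */,
//             item_name: /* "Item" */,
//         },
//         ty: /* u32 */,
//     }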
pub trait ToPolyTraitRef<'tcx> {
fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>;
}
impl<'tcx> ToPolyTraitRef<'tcx> for TraitRef<'tcx> {
fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> {
assert!(!self.has_escaping_regions());
ty::Binder(self.clone())
}
}
impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> {
fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> {
self.map_bound_ref(|trait_pred| trait_pred.trait_ref.clone())
}
}
impl<'tcx> ToPolyTraitRef<'tcx> for PolyProjectionPredicate<'tcx> {
fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> {
// Note: unlike with TraitRef::to_poly_trait_ref(),
// self.0.trait_ref is permitted to have escaping regions.
// This is because here `self` has a `Binder` and so does our
// return value, so we are preserving the number of binding
// levels.
ty::Binder(self.0.projection_ty.trait_ref.clone())
}
}
pub trait AsPredicate<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx>;
}
impl<'tcx> AsPredicate<'tcx> for TraitRef<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx> {
// we're about to add a binder, so let's check that we don't
// accidentally capture anything, or else there might be some
// weird debruijn accounting.
assert!(!self.has_escaping_regions());
ty::Predicate::Trait(ty::Binder(ty::TraitPredicate {
trait_ref: self.clone()
}))
}
}
impl<'tcx> AsPredicate<'tcx> for PolyTraitRef<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx> {
ty::Predicate::Trait(self.to_poly_trait_predicate())
}
}
impl<'tcx> AsPredicate<'tcx> for PolyEquatePredicate<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx> {
Predicate::Equate(self.clone())
}
}
impl<'tcx> AsPredicate<'tcx> for PolyRegionOutlivesPredicate {
fn as_predicate(&self) -> Predicate<'tcx> {
Predicate::RegionOutlives(self.clone())
}
}
impl<'tcx> AsPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx> {
Predicate::TypeOutlives(self.clone())
}
}
impl<'tcx> AsPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
fn as_predicate(&self) -> Predicate<'tcx> {
Predicate::Projection(self.clone())
}
}
impl<'tcx> Predicate<'tcx> {
/// Iterates over the types in this predicate. Note that in all
/// cases this is skipping over a binder, so late-bound regions
/// with depth 0 are bound by the predicate.
pub fn walk_tys(&self) -> IntoIter<Ty<'tcx>> {
let vec: Vec<_> = match *self {
ty::Predicate::Trait(ref data) => {
data.0.trait_ref.substs.types.as_slice().to_vec()
}
ty::Predicate::Equate(ty::Binder(ref data)) => {
vec![data.0, data.1]
}
ty::Predicate::TypeOutlives(ty::Binder(ref data)) => {
vec![data.0]
}
ty::Predicate::RegionOutlives(..) => {
vec![]
}
ty::Predicate::Projection(ref data) => {
let trait_inputs = data.0.projection_ty.trait_ref.substs.types.as_slice();
trait_inputs.iter()
.cloned()
.chain(Some(data.0.ty).into_iter())
.collect()
}
};
// The only reason to collect into a vector here is that I was
// too lazy to make the full (somewhat complicated) iterator
// type that would be needed here. But I wanted this fn to
// return an iterator conceptually, rather than a `Vec`, so as
// to be closer to `Ty::walk`.
vec.into_iter()
}
pub fn has_escaping_regions(&self) -> bool {
match *self {
Predicate::Trait(ref trait_ref) => trait_ref.has_escaping_regions(),
Predicate::Equate(ref p) => p.has_escaping_regions(),
Predicate::RegionOutlives(ref p) => p.has_escaping_regions(),
Predicate::TypeOutlives(ref p) => p.has_escaping_regions(),
Predicate::Projection(ref p) => p.has_escaping_regions(),
}
}
pub fn to_opt_poly_trait_ref(&self) -> Option<PolyTraitRef<'tcx>> {
match *self {
Predicate::Trait(ref t) => {
Some(t.to_poly_trait_ref())
}
Predicate::Projection(..) |
Predicate::Equate(..) |
Predicate::RegionOutlives(..) |
Predicate::TypeOutlives(..) => {
None
}
}
}
}
/// Represents the bounds declared on a particular set of type
/// parameters. Should eventually be generalized into a flat list of
/// where clauses. You can obtain a `InstantiatedPredicates` list from a
/// `GenericPredicates` by using the `instantiate` method. Note that this method
/// reflects an important semantic invariant of `InstantiatedPredicates`: while
/// the `GenericPredicates` are expressed in terms of the bound type
/// parameters of the impl/trait/whatever, an `InstantiatedPredicates` instance
/// represents a set of bounds for some particular instantiation,
/// meaning that the generic parameters have been substituted with
/// their values.
///
/// Example:
///
/// struct Foo<T,U:Bar<T>> { ... }
///
/// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like
/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
/// like `Foo<isize,usize>`, then the `InstantiatedPredicates` would be `[[],
/// [usize:Bar<isize>]]`.
#[derive(Clone, Debug)]
pub struct InstantiatedPredicates<'tcx> {
pub predicates: VecPerParamSpace<Predicate<'tcx>>,
}
impl<'tcx> InstantiatedPredicates<'tcx> {
pub fn empty() -> InstantiatedPredicates<'tcx> {
InstantiatedPredicates { predicates: VecPerParamSpace::empty() }
}
pub fn has_escaping_regions(&self) -> bool {
self.predicates.any(|p| p.has_escaping_regions())
}
pub fn is_empty(&self) -> bool {
self.predicates.is_empty()
}
}
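// Continuing the `Foo<T, U:Bar<T>>` example above: with substs mapping
// T => isize and U => usize (a sketch),
//
//     let inst = generic_predicates.instantiate(tcx, &substs);
//
// `inst.predicates` then contains `usize : Bar<isize>` in place of the
// generic `U : Bar<T>`.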
impl<'tcx> TraitRef<'tcx> {
pub fn new(def_id: ast::DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> {
TraitRef { def_id: def_id, substs: substs }
}
pub fn self_ty(&self) -> Ty<'tcx> {
self.substs.self_ty().unwrap()
}
pub fn input_types(&self) -> &[Ty<'tcx>] {
// Select only the "input types" from a trait-reference. For
// now this is all the types that appear in the
// trait-reference, but it should eventually exclude
// associated types.
self.substs.types.as_slice()
}
}
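// Sketch of building and inspecting a trait reference, given interned
// `substs` whose SelfSpace is populated:
//
//     let tr = TraitRef::new(trait_def_id, substs);
//     let self_ty = tr.self_ty();     // parameter 0 of the SelfSpace
//     let inputs  = tr.input_types(); // currently all types in the substs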
/// When type checking, we use the `ParameterEnvironment` to track
/// details about the type/lifetime parameters that are in scope.
/// It primarily stores the bounds information.
///
/// Note: This information might seem to be redundant with the data in
/// `tcx.ty_param_defs`, but it is not. That table contains the
/// parameter definitions from an "outside" perspective, but this
/// struct will contain the bounds for a parameter as seen from inside
/// the function body. Currently the only real distinction is that
/// bound lifetime parameters are replaced with free ones, but in the
/// future I hope to refine the representation of types so as to make
/// more distinctions clearer.
#[derive(Clone)]
pub struct ParameterEnvironment<'a, 'tcx:'a> {
pub tcx: &'a ctxt<'tcx>,
/// See `construct_free_substs` for details.
pub free_substs: Substs<'tcx>,
/// Each type parameter has an implicit region bound that
/// indicates it must outlive at least the function body (the user
/// may specify stronger requirements). This field indicates the
/// region of the callee.
pub implicit_region_bound: ty::Region,
/// Obligations that the caller must satisfy. This is basically
/// the set of bounds on the in-scope type parameters, translated
/// into Obligations.
pub caller_bounds: Vec<ty::Predicate<'tcx>>,
/// Caches the results of trait selection. This cache is used
/// for things that have to do with the parameters in scope.
pub selection_cache: traits::SelectionCache<'tcx>,
}
impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> {
pub fn with_caller_bounds(&self,
caller_bounds: Vec<ty::Predicate<'tcx>>)
-> ParameterEnvironment<'a,'tcx>
{
ParameterEnvironment {
tcx: self.tcx,
free_substs: self.free_substs.clone(),
implicit_region_bound: self.implicit_region_bound,
caller_bounds: caller_bounds,
selection_cache: traits::SelectionCache::new(),
}
}
pub fn for_item(cx: &'a ctxt<'tcx>, id: NodeId) -> ParameterEnvironment<'a, 'tcx> {
match cx.map.find(id) {
Some(ast_map::NodeImplItem(ref impl_item)) => {
match impl_item.node {
ast::ConstImplItem(_, _) => {
let def_id = ast_util::local_def(id);
let scheme = lookup_item_type(cx, def_id);
let predicates = lookup_predicates(cx, def_id);
construct_parameter_environment(cx,
impl_item.span,
&scheme.generics,
&predicates,
id)
}
ast::MethodImplItem(_, ref body) => {
let method_def_id = ast_util::local_def(id);
match ty::impl_or_trait_item(cx, method_def_id) {
MethodTraitItem(ref method_ty) => {
let method_generics = &method_ty.generics;
let method_bounds = &method_ty.predicates;
construct_parameter_environment(
cx,
impl_item.span,
method_generics,
method_bounds,
body.id)
}
_ => {
cx.sess
.bug("ParameterEnvironment::for_item(): \
got non-method item from impl method?!")
}
}
}
ast::TypeImplItem(_) => {
cx.sess.bug("ParameterEnvironment::for_item(): \
can't create a parameter environment \
for type impl items")
}
ast::MacImplItem(_) => cx.sess.bug("unexpanded macro")
}
}
Some(ast_map::NodeTraitItem(trait_item)) => {
match trait_item.node {
ast::ConstTraitItem(_, ref default) => {
match *default {
Some(_) => {
let def_id = ast_util::local_def(id);
let scheme = lookup_item_type(cx, def_id);
let predicates = lookup_predicates(cx, def_id);
construct_parameter_environment(cx,
trait_item.span,
&scheme.generics,
&predicates,
id)
}
None => {
cx.sess.bug("ParameterEnvironment::from_item(): \
can't create a parameter environment \
for const trait items without defaults")
}
}
}
ast::MethodTraitItem(_, None) => {
cx.sess.span_bug(trait_item.span,
"ParameterEnvironment::for_item():
can't create a parameter \
environment for required trait \
methods")
}
ast::MethodTraitItem(_, Some(ref body)) => {
let method_def_id = ast_util::local_def(id);
match ty::impl_or_trait_item(cx, method_def_id) {
MethodTraitItem(ref method_ty) => {
let method_generics = &method_ty.generics;
let method_bounds = &method_ty.predicates;
construct_parameter_environment(
cx,
trait_item.span,
method_generics,
method_bounds,
body.id)
}
_ => {
cx.sess
.bug("ParameterEnvironment::for_item(): \
got non-method item from provided \
method?!")
}
}
}
ast::TypeTraitItem(..) => {
cx.sess.bug("ParameterEnvironment::from_item(): \
can't create a parameter environment \
for type trait items")
}
}
}
Some(ast_map::NodeItem(item)) => {
match item.node {
ast::ItemFn(_, _, _, _, _, ref body) => {
// We assume this is a function.
let fn_def_id = ast_util::local_def(id);
let fn_scheme = lookup_item_type(cx, fn_def_id);
let fn_predicates = lookup_predicates(cx, fn_def_id);
construct_parameter_environment(cx,
item.span,
&fn_scheme.generics,
&fn_predicates,
body.id)
}
ast::ItemEnum(..) |
ast::ItemStruct(..) |
ast::ItemImpl(..) |
ast::ItemConst(..) |
ast::ItemStatic(..) => {
let def_id = ast_util::local_def(id);
let scheme = lookup_item_type(cx, def_id);
let predicates = lookup_predicates(cx, def_id);
construct_parameter_environment(cx,
item.span,
&scheme.generics,
&predicates,
id)
}
_ => {
cx.sess.span_bug(item.span,
"ParameterEnvironment::from_item():
can't create a parameter \
environment for this kind of item")
}
}
}
Some(ast_map::NodeExpr(..)) => {
// This is a convenience to allow closures to work.
ParameterEnvironment::for_item(cx, cx.map.get_parent(id))
}
_ => {
cx.sess.bug(&format!("ParameterEnvironment::from_item(): \
`{}` is not an item",
cx.map.node_to_string(id)))
}
}
}
}
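// Sketch of typical use: the checker for an item builds its environment
// once and threads it through trait selection (`item_id` is illustrative):
//
//     let param_env = ParameterEnvironment::for_item(tcx, item_id);
//     // param_env.caller_bounds holds the where-clauses in scope;
//     // param_env.selection_cache memoizes selection against them.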
/// A "type scheme", in ML terminology, is a type combined with some
/// set of generic types that the type is, well, generic over. In Rust
/// terms, it is the "type" of a fn item or struct -- this type will
/// include various generic parameters that must be substituted when
/// the item/struct is referenced. That is called converting the type
/// scheme to a monotype.
///
/// - `generics`: the set of type parameters and their bounds
/// - `ty`: the base types, which may reference the parameters defined
/// in `generics`
///
/// Note that TypeSchemes are also sometimes called "polytypes" (and
/// in fact this struct used to carry that name, so you may find some
/// stray references in a comment or something). We try to reserve the
/// "poly" prefix to refer to higher-ranked things, as in
/// `PolyTraitRef`.
///
/// Note that each item also comes with predicates, see
/// `lookup_predicates`.
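///
/// For example (an illustrative sketch, not checked by the compiler):
///
/// ```notrust
/// fn id<T>(t: T) -> T { t }
/// // type scheme: generics = <T>, ty = fn(T) -> T
/// // monotype after substituting T = isize: fn(isize) -> isize
/// ```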
#[derive(Clone, Debug)]
pub struct TypeScheme<'tcx> {
pub generics: Generics<'tcx>,
pub ty: Ty<'tcx>,
}
bitflags! {
flags TraitFlags: u32 {
const NO_TRAIT_FLAGS = 0,
const HAS_DEFAULT_IMPL = 1 << 0,
const IS_OBJECT_SAFE = 1 << 1,
const OBJECT_SAFETY_VALID = 1 << 2,
const IMPLS_VALID = 1 << 3,
}
}
/// As `TypeScheme` but for a trait ref.
pub struct TraitDef<'tcx> {
pub unsafety: ast::Unsafety,
/// If `true`, then this trait had the `#[rustc_paren_sugar]`
/// attribute, indicating that it should be used with `Foo()`
    /// sugar. This is a temporary thing -- eventually any trait will
/// be usable with the sugar (or without it).
pub paren_sugar: bool,
/// Generic type definitions. Note that `Self` is listed in here
/// as having a single bound, the trait itself (e.g., in the trait
/// `Eq`, there is a single bound `Self : Eq`). This is so that
    /// default methods get to assume that the `Self` parameter
    /// implements the trait.
pub generics: Generics<'tcx>,
pub trait_ref: TraitRef<'tcx>,
/// A list of the associated types defined in this trait. Useful
/// for resolving `X::Foo` type markers.
pub associated_type_names: Vec<ast::Name>,
    /// Impls of this trait. To allow for quicker lookup, the impls
    /// are indexed by a simplified version of their `Self` type:
    /// impls with a simplifiable `Self` are stored in
    /// `nonblanket_impls` keyed by it, while all other impls are
    /// stored in `blanket_impls` below.
pub nonblanket_impls: RefCell<
FnvHashMap<fast_reject::SimplifiedType, Vec<DefId>>
>,
/// Blanket impls associated with the trait.
pub blanket_impls: RefCell<Vec<DefId>>,
/// Various flags
pub flags: Cell<TraitFlags>
}
impl<'tcx> TraitDef<'tcx> {
    /// Returns `None` if the object safety of this trait has not yet
    /// been calculated.
pub fn object_safety(&self) -> Option<bool> {
if self.flags.get().intersects(TraitFlags::OBJECT_SAFETY_VALID) {
Some(self.flags.get().intersects(TraitFlags::IS_OBJECT_SAFE))
} else {
None
}
}
pub fn set_object_safety(&self, is_safe: bool) {
assert!(self.object_safety().map(|cs| cs == is_safe).unwrap_or(true));
self.flags.set(
self.flags.get() | if is_safe {
TraitFlags::OBJECT_SAFETY_VALID | TraitFlags::IS_OBJECT_SAFE
} else {
TraitFlags::OBJECT_SAFETY_VALID
}
);
}
/// Records a trait-to-implementation mapping.
pub fn record_impl(&self,
tcx: &ctxt<'tcx>,
impl_def_id: DefId,
impl_trait_ref: TraitRef<'tcx>) {
debug!("TraitDef::record_impl for {}, from {}",
self.repr(tcx), impl_trait_ref.repr(tcx));
// We don't want to borrow_mut after we already populated all impls,
// so check if an impl is present with an immutable borrow first.
if let Some(sty) = fast_reject::simplify_type(tcx,
impl_trait_ref.self_ty(), false) {
if let Some(is) = self.nonblanket_impls.borrow().get(&sty) {
if is.contains(&impl_def_id) {
return // duplicate - skip
}
}
self.nonblanket_impls.borrow_mut().entry(sty).or_insert(vec![]).push(impl_def_id)
} else {
if self.blanket_impls.borrow().contains(&impl_def_id) {
return // duplicate - skip
}
self.blanket_impls.borrow_mut().push(impl_def_id)
}
}
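    // Illustrative example (hypothetical impls): `impl Trait for Vec<T>`
    // has a simplifiable `Self` type, so it is recorded in
    // `nonblanket_impls` keyed by the simplified `Vec` type, whereas
    // `impl<T> Trait for T` has no simplifiable `Self` and is recorded
    // in `blanket_impls`.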
pub fn for_each_impl<F: FnMut(DefId)>(&self, tcx: &ctxt<'tcx>, mut f: F) {
ty::populate_implementations_for_trait_if_necessary(tcx, self.trait_ref.def_id);
for &impl_def_id in self.blanket_impls.borrow().iter() {
f(impl_def_id);
}
for v in self.nonblanket_impls.borrow().values() {
for &impl_def_id in v {
f(impl_def_id);
}
}
}
pub fn for_each_relevant_impl<F: FnMut(DefId)>(&self,
tcx: &ctxt<'tcx>,
self_ty: Ty<'tcx>,
mut f: F)
{
ty::populate_implementations_for_trait_if_necessary(tcx, self.trait_ref.def_id);
for &impl_def_id in self.blanket_impls.borrow().iter() {
f(impl_def_id);
}
if let Some(simp) = fast_reject::simplify_type(tcx, self_ty, false) {
if let Some(impls) = self.nonblanket_impls.borrow().get(&simp) {
for &impl_def_id in impls {
f(impl_def_id);
}
return; // we don't need to process the other non-blanket impls
}
}
for v in self.nonblanket_impls.borrow().values() {
for &impl_def_id in v {
f(impl_def_id);
}
}
}
}
/// Records the substitutions used to translate the polytype for an
/// item into the monotype of an item reference.
#[derive(Clone)]
pub struct ItemSubsts<'tcx> {
pub substs: Substs<'tcx>,
}
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Debug, RustcEncodable, RustcDecodable)]
pub enum ClosureKind {
    // Warning: Ordering is significant here! The ordering is chosen
    // because `Fn` is a subtrait of `FnMut`, which is in turn a
    // subtrait of `FnOnce`; hence we order the variants so that
    // Fn < FnMut < FnOnce.
FnClosureKind,
FnMutClosureKind,
FnOnceClosureKind,
}
impl ClosureKind {
pub fn trait_did(&self, cx: &ctxt) -> ast::DefId {
let result = match *self {
FnClosureKind => cx.lang_items.require(FnTraitLangItem),
FnMutClosureKind => {
cx.lang_items.require(FnMutTraitLangItem)
}
FnOnceClosureKind => {
cx.lang_items.require(FnOnceTraitLangItem)
}
};
match result {
Ok(trait_did) => trait_did,
Err(err) => cx.sess.fatal(&err[..]),
}
}
    /// Returns `true` if a type that implements this closure kind
    /// must also implement `other`. For example,
    /// `FnClosureKind.extends(FnOnceClosureKind)` is `true`, since any
    /// `Fn` closure can be used where an `FnOnce` closure is expected.
pub fn extends(self, other: ty::ClosureKind) -> bool {
match (self, other) {
(FnClosureKind, FnClosureKind) => true,
(FnClosureKind, FnMutClosureKind) => true,
(FnClosureKind, FnOnceClosureKind) => true,
(FnMutClosureKind, FnMutClosureKind) => true,
(FnMutClosureKind, FnOnceClosureKind) => true,
(FnOnceClosureKind, FnOnceClosureKind) => true,
_ => false,
}
}
}
pub trait ClosureTyper<'tcx> {
fn tcx(&self) -> &ty::ctxt<'tcx> {
self.param_env().tcx
}
fn param_env<'a>(&'a self) -> &'a ty::ParameterEnvironment<'a, 'tcx>;
/// Is this a `Fn`, `FnMut` or `FnOnce` closure? During typeck,
/// returns `None` if the kind of this closure has not yet been
/// inferred.
fn closure_kind(&self,
def_id: ast::DefId)
-> Option<ty::ClosureKind>;
/// Returns the argument/return types of this closure.
fn closure_type(&self,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>)
-> ty::ClosureTy<'tcx>;
/// Returns the set of all upvars and their transformed
/// types. During typeck, maybe return `None` if the upvar types
/// have not yet been inferred.
fn closure_upvars(&self,
def_id: ast::DefId,
substs: &Substs<'tcx>)
-> Option<Vec<ClosureUpvar<'tcx>>>;
}
impl<'tcx> CommonTypes<'tcx> {
fn new(arena: &'tcx TypedArena<TyS<'tcx>>,
interner: &mut FnvHashMap<InternedTy<'tcx>, Ty<'tcx>>)
-> CommonTypes<'tcx>
{
CommonTypes {
bool: intern_ty(arena, interner, ty_bool),
char: intern_ty(arena, interner, ty_char),
err: intern_ty(arena, interner, ty_err),
isize: intern_ty(arena, interner, ty_int(ast::TyIs)),
i8: intern_ty(arena, interner, ty_int(ast::TyI8)),
i16: intern_ty(arena, interner, ty_int(ast::TyI16)),
i32: intern_ty(arena, interner, ty_int(ast::TyI32)),
i64: intern_ty(arena, interner, ty_int(ast::TyI64)),
usize: intern_ty(arena, interner, ty_uint(ast::TyUs)),
u8: intern_ty(arena, interner, ty_uint(ast::TyU8)),
u16: intern_ty(arena, interner, ty_uint(ast::TyU16)),
u32: intern_ty(arena, interner, ty_uint(ast::TyU32)),
u64: intern_ty(arena, interner, ty_uint(ast::TyU64)),
f32: intern_ty(arena, interner, ty_float(ast::TyF32)),
f64: intern_ty(arena, interner, ty_float(ast::TyF64)),
}
}
}
pub fn mk_ctxt<'tcx>(s: Session,
arenas: &'tcx CtxtArenas<'tcx>,
def_map: DefMap,
named_region_map: resolve_lifetime::NamedRegionMap,
map: ast_map::Map<'tcx>,
freevars: RefCell<FreevarMap>,
region_maps: RegionMaps,
lang_items: middle::lang_items::LanguageItems,
stability: stability::Index<'tcx>) -> ctxt<'tcx>
{
let mut interner = FnvHashMap();
let common_types = CommonTypes::new(&arenas.type_, &mut interner);
ctxt {
arenas: arenas,
interner: RefCell::new(interner),
substs_interner: RefCell::new(FnvHashMap()),
bare_fn_interner: RefCell::new(FnvHashMap()),
region_interner: RefCell::new(FnvHashMap()),
stability_interner: RefCell::new(FnvHashMap()),
types: common_types,
named_region_map: named_region_map,
region_maps: region_maps,
free_region_maps: RefCell::new(FnvHashMap()),
item_variance_map: RefCell::new(DefIdMap()),
variance_computed: Cell::new(false),
sess: s,
def_map: def_map,
node_types: RefCell::new(FnvHashMap()),
item_substs: RefCell::new(NodeMap()),
impl_trait_refs: RefCell::new(DefIdMap()),
trait_defs: RefCell::new(DefIdMap()),
predicates: RefCell::new(DefIdMap()),
super_predicates: RefCell::new(DefIdMap()),
map: map,
freevars: freevars,
tcache: RefCell::new(DefIdMap()),
rcache: RefCell::new(FnvHashMap()),
tc_cache: RefCell::new(FnvHashMap()),
ast_ty_to_ty_cache: RefCell::new(NodeMap()),
enum_var_cache: RefCell::new(DefIdMap()),
impl_or_trait_items: RefCell::new(DefIdMap()),
trait_item_def_ids: RefCell::new(DefIdMap()),
trait_items_cache: RefCell::new(DefIdMap()),
ty_param_defs: RefCell::new(NodeMap()),
adjustments: RefCell::new(NodeMap()),
normalized_cache: RefCell::new(FnvHashMap()),
lang_items: lang_items,
provided_method_sources: RefCell::new(DefIdMap()),
struct_fields: RefCell::new(DefIdMap()),
destructor_for_type: RefCell::new(DefIdMap()),
destructors: RefCell::new(DefIdSet()),
inherent_impls: RefCell::new(DefIdMap()),
impl_items: RefCell::new(DefIdMap()),
used_unsafe: RefCell::new(NodeSet()),
used_mut_nodes: RefCell::new(NodeSet()),
populated_external_types: RefCell::new(DefIdSet()),
populated_external_primitive_impls: RefCell::new(DefIdSet()),
upvar_capture_map: RefCell::new(FnvHashMap()),
extern_const_statics: RefCell::new(DefIdMap()),
extern_const_variants: RefCell::new(DefIdMap()),
extern_const_fns: RefCell::new(DefIdMap()),
method_map: RefCell::new(FnvHashMap()),
dependency_formats: RefCell::new(FnvHashMap()),
closure_kinds: RefCell::new(DefIdMap()),
closure_tys: RefCell::new(DefIdMap()),
node_lint_levels: RefCell::new(FnvHashMap()),
transmute_restrictions: RefCell::new(Vec::new()),
stability: RefCell::new(stability),
selection_cache: traits::SelectionCache::new(),
repr_hint_cache: RefCell::new(DefIdMap()),
const_qualif_map: RefCell::new(NodeMap()),
custom_coerce_unsized_kinds: RefCell::new(DefIdMap()),
cast_kinds: RefCell::new(NodeMap()),
}
}
// Type constructors
impl<'tcx> ctxt<'tcx> {
pub fn mk_substs(&self, substs: Substs<'tcx>) -> &'tcx Substs<'tcx> {
if let Some(substs) = self.substs_interner.borrow().get(&substs) {
return *substs;
}
let substs = self.arenas.substs.alloc(substs);
self.substs_interner.borrow_mut().insert(substs, substs);
substs
}
/// Create an unsafe fn ty based on a safe fn ty.
pub fn safe_to_unsafe_fn_ty(&self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> {
assert_eq!(bare_fn.unsafety, ast::Unsafety::Normal);
let unsafe_fn_ty_a = self.mk_bare_fn(ty::BareFnTy {
unsafety: ast::Unsafety::Unsafe,
abi: bare_fn.abi,
sig: bare_fn.sig.clone()
});
ty::mk_bare_fn(self, None, unsafe_fn_ty_a)
}
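    // For example (illustrative): given the safe type `fn(isize) -> isize`,
    // the method above yields `unsafe fn(isize) -> isize` with the same
    // ABI and signature.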
pub fn mk_bare_fn(&self, bare_fn: BareFnTy<'tcx>) -> &'tcx BareFnTy<'tcx> {
if let Some(bare_fn) = self.bare_fn_interner.borrow().get(&bare_fn) {
return *bare_fn;
}
let bare_fn = self.arenas.bare_fn.alloc(bare_fn);
self.bare_fn_interner.borrow_mut().insert(bare_fn, bare_fn);
bare_fn
}
pub fn mk_region(&self, region: Region) -> &'tcx Region {
        if let Some(region) = self.region_interner.borrow().get(&region) {
return *region;
}
let region = self.arenas.region.alloc(region);
self.region_interner.borrow_mut().insert(region, region);
region
}
pub fn closure_kind(&self, def_id: ast::DefId) -> ty::ClosureKind {
*self.closure_kinds.borrow().get(&def_id).unwrap()
}
pub fn closure_type(&self,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>)
-> ty::ClosureTy<'tcx>
{
self.closure_tys.borrow().get(&def_id).unwrap().subst(self, substs)
}
pub fn type_parameter_def(&self,
node_id: ast::NodeId)
-> TypeParameterDef<'tcx>
{
self.ty_param_defs.borrow().get(&node_id).unwrap().clone()
}
pub fn pat_contains_ref_binding(&self, pat: &ast::Pat) -> bool {
pat_util::pat_contains_ref_binding(&self.def_map, pat)
}
pub fn arm_contains_ref_binding(&self, arm: &ast::Arm) -> bool {
pat_util::arm_contains_ref_binding(&self.def_map, arm)
}
}
// Interns a structural type, storing the result in cx.interner and
// returning a reference to the arena-allocated `TyS` (see comments for
// Ty above).
pub fn mk_t<'tcx>(cx: &ctxt<'tcx>, st: sty<'tcx>) -> Ty<'tcx> {
let mut interner = cx.interner.borrow_mut();
intern_ty(&cx.arenas.type_, &mut *interner, st)
}
fn intern_ty<'tcx>(type_arena: &'tcx TypedArena<TyS<'tcx>>,
interner: &mut FnvHashMap<InternedTy<'tcx>, Ty<'tcx>>,
st: sty<'tcx>)
-> Ty<'tcx>
{
match interner.get(&st) {
Some(ty) => return *ty,
_ => ()
}
let flags = FlagComputation::for_sty(&st);
    let ty = type_arena.alloc(TyS { sty: st,
                                    flags: Cell::new(flags.flags),
                                    region_depth: flags.depth });
debug!("Interned type: {:?} Pointer: {:?}",
ty, ty as *const TyS);
interner.insert(InternedTy { ty: ty }, ty);
ty
}
struct FlagComputation {
flags: TypeFlags,
// maximum depth of any bound region that we have seen thus far
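    // (e.g., in the hypothetical type `for<'a> fn(&'a isize)`, the region
    // `'a` bound by the binder appears at depth 1)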
depth: u32,
}
impl FlagComputation {
fn new() -> FlagComputation {
FlagComputation { flags: TypeFlags::empty(), depth: 0 }
}
fn for_sty(st: &sty) -> FlagComputation {
let mut result = FlagComputation::new();
result.add_sty(st);
result
}
fn add_flags(&mut self, flags: TypeFlags) {
self.flags = self.flags | (flags & TypeFlags::NOMINAL_FLAGS);
}
fn add_depth(&mut self, depth: u32) {
if depth > self.depth {
self.depth = depth;
}
}
/// Adds the flags/depth from a set of types that appear within the current type, but within a
/// region binder.
fn add_bound_computation(&mut self, computation: &FlagComputation) {
self.add_flags(computation.flags);
// The types that contributed to `computation` occurred within
// a region binder, so subtract one from the region depth
// within when adding the depth to `self`.
let depth = computation.depth;
if depth > 0 {
self.add_depth(depth - 1);
}
}
fn add_sty(&mut self, st: &sty) {
match st {
&ty_bool |
&ty_char |
&ty_int(_) |
&ty_float(_) |
&ty_uint(_) |
&ty_str => {
}
// You might think that we could just return ty_err for
// any type containing ty_err as a component, and get
// rid of the TypeFlags::HAS_TY_ERR flag -- likewise for ty_bot (with
// the exception of function types that return bot).
// But doing so caused sporadic memory corruption, and
// neither I (tjc) nor nmatsakis could figure out why,
// so we're doing it this way.
&ty_err => {
self.add_flags(TypeFlags::HAS_TY_ERR)
}
&ty_param(ref p) => {
if p.space == subst::SelfSpace {
self.add_flags(TypeFlags::HAS_SELF);
} else {
self.add_flags(TypeFlags::HAS_PARAMS);
}
}
&ty_closure(_, substs) => {
self.add_flags(TypeFlags::HAS_TY_CLOSURE);
self.add_substs(substs);
}
&ty_infer(_) => {
self.add_flags(TypeFlags::HAS_TY_INFER)
}
&ty_enum(_, substs) | &ty_struct(_, substs) => {
self.add_substs(substs);
}
&ty_projection(ref data) => {
self.add_flags(TypeFlags::HAS_PROJECTION);
self.add_projection_ty(data);
}
&ty_trait(box TyTrait { ref principal, ref bounds }) => {
let mut computation = FlagComputation::new();
computation.add_substs(principal.0.substs);
for projection_bound in &bounds.projection_bounds {
let mut proj_computation = FlagComputation::new();
proj_computation.add_projection_predicate(&projection_bound.0);
computation.add_bound_computation(&proj_computation);
}
self.add_bound_computation(&computation);
self.add_bounds(bounds);
}
&ty_uniq(tt) | &ty_vec(tt, _) => {
self.add_ty(tt)
}
&ty_ptr(ref m) => {
self.add_ty(m.ty);
}
&ty_rptr(r, ref m) => {
self.add_region(*r);
self.add_ty(m.ty);
}
&ty_tup(ref ts) => {
self.add_tys(&ts[..]);
}
&ty_bare_fn(_, ref f) => {
self.add_fn_sig(&f.sig);
}
}
}
fn add_ty(&mut self, ty: Ty) {
self.add_flags(ty.flags.get());
self.add_depth(ty.region_depth);
}
fn add_tys(&mut self, tys: &[Ty]) {
for &ty in tys {
self.add_ty(ty);
}
}
fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) {
let mut computation = FlagComputation::new();
computation.add_tys(&fn_sig.0.inputs);
if let ty::FnConverging(output) = fn_sig.0.output {
computation.add_ty(output);
}
self.add_bound_computation(&computation);
}
fn add_region(&mut self, r: Region) {
self.add_flags(TypeFlags::HAS_REGIONS);
match r {
ty::ReInfer(_) => { self.add_flags(TypeFlags::HAS_RE_INFER); }
ty::ReLateBound(debruijn, _) => {
self.add_flags(TypeFlags::HAS_RE_LATE_BOUND);
self.add_depth(debruijn.depth);
}
_ => { }
}
}
fn add_projection_predicate(&mut self, projection_predicate: &ProjectionPredicate) {
self.add_projection_ty(&projection_predicate.projection_ty);
self.add_ty(projection_predicate.ty);
}
fn add_projection_ty(&mut self, projection_ty: &ProjectionTy) {
self.add_substs(projection_ty.trait_ref.substs);
}
fn add_substs(&mut self, substs: &Substs) {
self.add_tys(substs.types.as_slice());
match substs.regions {
subst::ErasedRegions => {}
subst::NonerasedRegions(ref regions) => {
for &r in regions.iter() {
self.add_region(r);
}
}
}
}
fn add_bounds(&mut self, bounds: &ExistentialBounds) {
self.add_region(bounds.region_bound);
}
}
pub fn mk_mach_int<'tcx>(tcx: &ctxt<'tcx>, tm: ast::IntTy) -> Ty<'tcx> {
match tm {
ast::TyIs => tcx.types.isize,
ast::TyI8 => tcx.types.i8,
ast::TyI16 => tcx.types.i16,
ast::TyI32 => tcx.types.i32,
ast::TyI64 => tcx.types.i64,
}
}
pub fn mk_mach_uint<'tcx>(tcx: &ctxt<'tcx>, tm: ast::UintTy) -> Ty<'tcx> {
match tm {
ast::TyUs => tcx.types.usize,
ast::TyU8 => tcx.types.u8,
ast::TyU16 => tcx.types.u16,
ast::TyU32 => tcx.types.u32,
ast::TyU64 => tcx.types.u64,
}
}
pub fn mk_mach_float<'tcx>(tcx: &ctxt<'tcx>, tm: ast::FloatTy) -> Ty<'tcx> {
match tm {
ast::TyF32 => tcx.types.f32,
ast::TyF64 => tcx.types.f64,
}
}
pub fn mk_str<'tcx>(cx: &ctxt<'tcx>) -> Ty<'tcx> {
mk_t(cx, ty_str)
}
pub fn mk_str_slice<'tcx>(cx: &ctxt<'tcx>, r: &'tcx Region, m: ast::Mutability) -> Ty<'tcx> {
mk_rptr(cx, r,
mt {
ty: mk_t(cx, ty_str),
mutbl: m
})
}
pub fn mk_enum<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
    // `substs` is interned in the tcx and shared; no copy is taken here
mk_t(cx, ty_enum(did, substs))
}
pub fn mk_uniq<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { mk_t(cx, ty_uniq(ty)) }
pub fn mk_ptr<'tcx>(cx: &ctxt<'tcx>, tm: mt<'tcx>) -> Ty<'tcx> { mk_t(cx, ty_ptr(tm)) }
pub fn mk_rptr<'tcx>(cx: &ctxt<'tcx>, r: &'tcx Region, tm: mt<'tcx>) -> Ty<'tcx> {
mk_t(cx, ty_rptr(r, tm))
}
pub fn mk_mut_rptr<'tcx>(cx: &ctxt<'tcx>, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> {
mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutMutable})
}
pub fn mk_imm_rptr<'tcx>(cx: &ctxt<'tcx>, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> {
mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutImmutable})
}
pub fn mk_mut_ptr<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
mk_ptr(cx, mt {ty: ty, mutbl: ast::MutMutable})
}
pub fn mk_imm_ptr<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
mk_ptr(cx, mt {ty: ty, mutbl: ast::MutImmutable})
}
pub fn mk_nil_ptr<'tcx>(cx: &ctxt<'tcx>) -> Ty<'tcx> {
mk_ptr(cx, mt {ty: mk_nil(cx), mutbl: ast::MutImmutable})
}
pub fn mk_vec<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>, sz: Option<usize>) -> Ty<'tcx> {
mk_t(cx, ty_vec(ty, sz))
}
pub fn mk_slice<'tcx>(cx: &ctxt<'tcx>, r: &'tcx Region, tm: mt<'tcx>) -> Ty<'tcx> {
mk_rptr(cx, r,
mt {
ty: mk_vec(cx, tm.ty, None),
mutbl: tm.mutbl
})
}
pub fn mk_tup<'tcx>(cx: &ctxt<'tcx>, ts: Vec<Ty<'tcx>>) -> Ty<'tcx> {
mk_t(cx, ty_tup(ts))
}
pub fn mk_nil<'tcx>(cx: &ctxt<'tcx>) -> Ty<'tcx> {
mk_tup(cx, Vec::new())
}
pub fn mk_bool<'tcx>(cx: &ctxt<'tcx>) -> Ty<'tcx> {
mk_t(cx, ty_bool)
}
pub fn mk_bare_fn<'tcx>(cx: &ctxt<'tcx>,
opt_def_id: Option<ast::DefId>,
fty: &'tcx BareFnTy<'tcx>) -> Ty<'tcx> {
mk_t(cx, ty_bare_fn(opt_def_id, fty))
}
pub fn mk_ctor_fn<'tcx>(cx: &ctxt<'tcx>,
def_id: ast::DefId,
input_tys: &[Ty<'tcx>],
output: Ty<'tcx>) -> Ty<'tcx> {
let input_args = input_tys.iter().cloned().collect();
mk_bare_fn(cx,
Some(def_id),
cx.mk_bare_fn(BareFnTy {
unsafety: ast::Unsafety::Normal,
abi: abi::Rust,
sig: ty::Binder(FnSig {
inputs: input_args,
output: ty::FnConverging(output),
variadic: false
})
}))
}
pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>,
principal: ty::PolyTraitRef<'tcx>,
bounds: ExistentialBounds<'tcx>)
-> Ty<'tcx>
{
assert!(bound_list_is_sorted(&bounds.projection_bounds));
let inner = box TyTrait {
principal: principal,
bounds: bounds
};
mk_t(cx, ty_trait(inner))
}
fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool {
bounds.is_empty() ||
bounds[1..].iter().enumerate().all(
|(index, bound)| bounds[index].sort_key() <= bound.sort_key())
}
pub fn sort_bounds_list(bounds: &mut [ty::PolyProjectionPredicate]) {
bounds.sort_by(|a, b| a.sort_key().cmp(&b.sort_key()))
}
pub fn mk_projection<'tcx>(cx: &ctxt<'tcx>,
trait_ref: TraitRef<'tcx>,
item_name: ast::Name)
-> Ty<'tcx> {
let inner = ProjectionTy { trait_ref: trait_ref, item_name: item_name };
mk_t(cx, ty_projection(inner))
}
pub fn mk_struct<'tcx>(cx: &ctxt<'tcx>, struct_id: ast::DefId,
substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
    // `substs` is interned in the tcx and shared; no copy is taken here
mk_t(cx, ty_struct(struct_id, substs))
}
pub fn mk_closure<'tcx>(cx: &ctxt<'tcx>, closure_id: ast::DefId, substs: &'tcx Substs<'tcx>)
-> Ty<'tcx> {
mk_t(cx, ty_closure(closure_id, substs))
}
pub fn mk_var<'tcx>(cx: &ctxt<'tcx>, v: TyVid) -> Ty<'tcx> {
mk_infer(cx, TyVar(v))
}
pub fn mk_int_var<'tcx>(cx: &ctxt<'tcx>, v: IntVid) -> Ty<'tcx> {
mk_infer(cx, IntVar(v))
}
pub fn mk_float_var<'tcx>(cx: &ctxt<'tcx>, v: FloatVid) -> Ty<'tcx> {
mk_infer(cx, FloatVar(v))
}
pub fn mk_infer<'tcx>(cx: &ctxt<'tcx>, it: InferTy) -> Ty<'tcx> {
mk_t(cx, ty_infer(it))
}
pub fn mk_param<'tcx>(cx: &ctxt<'tcx>,
space: subst::ParamSpace,
index: u32,
name: ast::Name) -> Ty<'tcx> {
mk_t(cx, ty_param(ParamTy { space: space, idx: index, name: name }))
}
pub fn mk_self_type<'tcx>(cx: &ctxt<'tcx>) -> Ty<'tcx> {
mk_param(cx, subst::SelfSpace, 0, special_idents::type_self.name)
}
pub fn mk_param_from_def<'tcx>(cx: &ctxt<'tcx>, def: &TypeParameterDef) -> Ty<'tcx> {
mk_param(cx, def.space, def.index, def.name)
}
impl<'tcx> TyS<'tcx> {
/// Iterator that walks `self` and any types reachable from
    /// `self`, in depth-first order. Note that this just walks the types
    /// that appear in `self`; it does not descend into the fields of
/// structs or variants. For example:
///
/// ```notrust
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(&'tcx self) -> TypeWalker<'tcx> {
TypeWalker::new(self)
}
/// Iterator that walks the immediate children of `self`. Hence
/// `Foo<Bar<i32>, u32>` yields the sequence `[Bar<i32>, u32]`
    /// (but not `i32`, which `walk` would also yield).
pub fn walk_shallow(&'tcx self) -> IntoIter<Ty<'tcx>> {
ty_walk::walk_shallow(self)
}
pub fn as_opt_param_ty(&self) -> Option<ty::ParamTy> {
match self.sty {
ty::ty_param(ref d) => Some(d.clone()),
_ => None,
}
}
pub fn is_param(&self, space: ParamSpace, index: u32) -> bool {
match self.sty {
ty::ty_param(ref data) => data.space == space && data.idx == index,
_ => false,
}
}
}
pub fn walk_ty<'tcx, F>(ty_root: Ty<'tcx>, mut f: F)
where F: FnMut(Ty<'tcx>),
{
for ty in ty_root.walk() {
f(ty);
}
}
/// Walks `ty` and any types appearing within `ty`, invoking the
/// callback `f` on each type. If the callback returns false, then the
/// children of the current type are ignored.
///
/// Note: prefer `ty.walk()` where possible.
pub fn maybe_walk_ty<'tcx,F>(ty_root: Ty<'tcx>, mut f: F)
where F : FnMut(Ty<'tcx>) -> bool
{
let mut walker = ty_root.walk();
while let Some(ty) = walker.next() {
if !f(ty) {
walker.skip_current_subtree();
}
}
}
// Folds types from the bottom up.
pub fn fold_ty<'tcx, F>(cx: &ctxt<'tcx>, t0: Ty<'tcx>,
fldop: F)
-> Ty<'tcx> where
F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
{
let mut f = ty_fold::BottomUpFolder {tcx: cx, fldop: fldop};
f.fold_ty(t0)
}
impl ParamTy {
pub fn new(space: subst::ParamSpace,
index: u32,
name: ast::Name)
-> ParamTy {
ParamTy { space: space, idx: index, name: name }
}
pub fn for_self() -> ParamTy {
ParamTy::new(subst::SelfSpace, 0, special_idents::type_self.name)
}
pub fn for_def(def: &TypeParameterDef) -> ParamTy {
ParamTy::new(def.space, def.index, def.name)
}
pub fn to_ty<'tcx>(self, tcx: &ty::ctxt<'tcx>) -> Ty<'tcx> {
ty::mk_param(tcx, self.space, self.idx, self.name)
}
pub fn is_self(&self) -> bool {
self.space == subst::SelfSpace && self.idx == 0
}
}
impl<'tcx> ItemSubsts<'tcx> {
pub fn empty() -> ItemSubsts<'tcx> {
ItemSubsts { substs: Substs::empty() }
}
pub fn is_noop(&self) -> bool {
self.substs.is_noop()
}
}
impl<'tcx> ParamBounds<'tcx> {
pub fn empty() -> ParamBounds<'tcx> {
ParamBounds {
builtin_bounds: empty_builtin_bounds(),
trait_bounds: Vec::new(),
region_bounds: Vec::new(),
projection_bounds: Vec::new(),
}
}
}
// Type utilities
pub fn type_is_nil(ty: Ty) -> bool {
match ty.sty {
ty_tup(ref tys) => tys.is_empty(),
_ => false
}
}
pub fn type_is_error(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::HAS_TY_ERR)
}
pub fn type_needs_subst(ty: Ty) -> bool {
ty.flags.get().intersects(TypeFlags::NEEDS_SUBST)
}
pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool {
tref.substs.types.any(|&ty| type_is_error(ty))
}
pub fn type_is_ty_var(ty: Ty) -> bool {
match ty.sty {
ty_infer(TyVar(_)) => true,
_ => false
}
}
pub fn type_is_bool(ty: Ty) -> bool { ty.sty == ty_bool }
pub fn type_is_self(ty: Ty) -> bool {
match ty.sty {
ty_param(ref p) => p.space == subst::SelfSpace,
_ => false
}
}
fn type_is_slice(ty: Ty) -> bool {
match ty.sty {
ty_ptr(mt) | ty_rptr(_, mt) => match mt.ty.sty {
ty_vec(_, None) | ty_str => true,
_ => false,
},
_ => false
}
}
pub fn type_is_vec(ty: Ty) -> bool {
match ty.sty {
ty_vec(..) => true,
ty_ptr(mt{ty, ..}) | ty_rptr(_, mt{ty, ..}) |
ty_uniq(ty) => match ty.sty {
ty_vec(_, None) => true,
_ => false
},
_ => false
}
}
pub fn type_is_structural(ty: Ty) -> bool {
match ty.sty {
ty_struct(..) | ty_tup(_) | ty_enum(..) |
ty_vec(_, Some(_)) | ty_closure(..) => true,
        _ => type_is_slice(ty) || type_is_trait(ty)
}
}
pub fn type_is_simd(cx: &ctxt, ty: Ty) -> bool {
match ty.sty {
ty_struct(did, _) => lookup_simd(cx, did),
_ => false
}
}
pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty_vec(ty, _) => ty,
ty_str => mk_mach_uint(cx, ast::TyU8),
_ => cx.sess.bug(&format!("sequence_element_type called on non-sequence value: {}",
ty_to_string(cx, ty))),
}
}
pub fn simd_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty_struct(did, substs) => {
let fields = lookup_struct_fields(cx, did);
lookup_field_type(cx, did, fields[0].id, substs)
}
_ => panic!("simd_type called on invalid type")
}
}
pub fn simd_size(cx: &ctxt, ty: Ty) -> usize {
match ty.sty {
ty_struct(did, _) => {
let fields = lookup_struct_fields(cx, did);
fields.len()
}
_ => panic!("simd_size called on invalid type")
}
}
pub fn type_is_region_ptr(ty: Ty) -> bool {
match ty.sty {
ty_rptr(..) => true,
_ => false
}
}
pub fn type_is_unsafe_ptr(ty: Ty) -> bool {
    match ty.sty {
        ty_ptr(_) => true,
        _ => false
    }
}
pub fn type_is_unique(ty: Ty) -> bool {
match ty.sty {
ty_uniq(_) => true,
_ => false
}
}
/*
A scalar type is one that denotes an atomic datum, with no sub-components.
(A ty_ptr is scalar because it represents a non-managed pointer, so its
contents are abstract to rustc.)
*/
pub fn type_is_scalar(ty: Ty) -> bool {
match ty.sty {
ty_bool | ty_char | ty_int(_) | ty_float(_) | ty_uint(_) |
ty_infer(IntVar(_)) | ty_infer(FloatVar(_)) |
ty_bare_fn(..) | ty_ptr(_) => true,
_ => false
}
}
/// Returns true if this type is a floating point type and false otherwise.
pub fn type_is_floating_point(ty: Ty) -> bool {
match ty.sty {
ty_float(_) |
ty_infer(FloatVar(_)) =>
true,
_ =>
false,
}
}
/// Type contents is how the type checker reasons about kinds.
/// They track what kinds of things are found within a type. You can
/// think of them as kind of an "anti-kind". They track the kinds of values
/// and thinks that are contained in types. Having a larger contents for
/// a type tends to rule that type *out* from various kinds. For example,
/// a type that contains a reference is not sendable.
///
/// The reason we compute type contents and not kinds is that it is
/// easier for me (nmatsakis) to think about what is contained within
/// a type than to think about what is *not* contained within a type.
#[derive(Clone, Copy)]
pub struct TypeContents {
pub bits: u64
}
macro_rules! def_type_content_sets {
(mod $mname:ident { $($name:ident = $bits:expr),+ }) => {
#[allow(non_snake_case)]
mod $mname {
use middle::ty::TypeContents;
$(
#[allow(non_upper_case_globals)]
pub const $name: TypeContents = TypeContents { bits: $bits };
)+
}
}
}
def_type_content_sets! {
mod TC {
None = 0b0000_0000__0000_0000__0000,
// Things that are interior to the value (first nibble):
InteriorUnsized = 0b0000_0000__0000_0000__0001,
InteriorUnsafe = 0b0000_0000__0000_0000__0010,
InteriorParam = 0b0000_0000__0000_0000__0100,
        // InteriorAll = 0b0000_0000__0000_0000__1111,
// Things that are owned by the value (second and third nibbles):
OwnsOwned = 0b0000_0000__0000_0001__0000,
OwnsDtor = 0b0000_0000__0000_0010__0000,
OwnsAll = 0b0000_0000__1111_1111__0000,
// Things that are reachable by the value in any way (fourth nibble):
ReachesBorrowed = 0b0000_0010__0000_0000__0000,
ReachesMutable = 0b0000_1000__0000_0000__0000,
ReachesFfiUnsafe = 0b0010_0000__0000_0000__0000,
ReachesAll = 0b0011_1111__0000_0000__0000,
// Things that mean drop glue is necessary
NeedsDrop = 0b0000_0000__0000_0111__0000,
        // Things that prevent values from being considered sized
        // (this shares a bit with InteriorUnsized above)
        Nonsized = 0b0000_0000__0000_0000__0001,
// All bits
All = 0b1111_1111__1111_1111__1111
}
}
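// Illustrative examples (hypothetical types): `Box<u8>` has TC::OwnsOwned,
// so `owns_owned()` holds for it; `Cell<u8>` picks up TC::InteriorUnsafe
// from the `UnsafeCell` lang item it contains, so `interior_unsafe()`
// holds for it.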
impl TypeContents {
pub fn when(&self, cond: bool) -> TypeContents {
if cond {*self} else {TC::None}
}
pub fn intersects(&self, tc: TypeContents) -> bool {
(self.bits & tc.bits) != 0
}
pub fn owns_owned(&self) -> bool {
self.intersects(TC::OwnsOwned)
}
pub fn is_sized(&self, _: &ctxt) -> bool {
!self.intersects(TC::Nonsized)
}
pub fn interior_param(&self) -> bool {
self.intersects(TC::InteriorParam)
}
pub fn interior_unsafe(&self) -> bool {
self.intersects(TC::InteriorUnsafe)
}
pub fn interior_unsized(&self) -> bool {
self.intersects(TC::InteriorUnsized)
}
pub fn needs_drop(&self, _: &ctxt) -> bool {
self.intersects(TC::NeedsDrop)
}
/// Includes only those bits that still apply when indirected through a `Box` pointer
pub fn owned_pointer(&self) -> TypeContents {
TC::OwnsOwned | (
*self & (TC::OwnsAll | TC::ReachesAll))
}
/// Includes only those bits that still apply when indirected through a reference (`&`)
pub fn reference(&self, bits: TypeContents) -> TypeContents {
bits | (
*self & TC::ReachesAll)
}
/// Includes only those bits that still apply when indirected through an unsafe pointer (`*`)
pub fn unsafe_pointer(&self) -> TypeContents {
*self & TC::ReachesAll
}
pub fn union<T, F>(v: &[T], mut f: F) -> TypeContents where
F: FnMut(&T) -> TypeContents,
{
v.iter().fold(TC::None, |tc, ty| tc | f(ty))
}
pub fn has_dtor(&self) -> bool {
self.intersects(TC::OwnsDtor)
}
}
impl ops::BitOr for TypeContents {
type Output = TypeContents;
fn bitor(self, other: TypeContents) -> TypeContents {
TypeContents {bits: self.bits | other.bits}
}
}
impl ops::BitAnd for TypeContents {
type Output = TypeContents;
fn bitand(self, other: TypeContents) -> TypeContents {
TypeContents {bits: self.bits & other.bits}
}
}
impl ops::Sub for TypeContents {
type Output = TypeContents;
fn sub(self, other: TypeContents) -> TypeContents {
TypeContents {bits: self.bits & !other.bits}
}
}
impl fmt::Debug for TypeContents {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TypeContents({:b})", self.bits)
}
}
pub fn type_interior_is_unsafe<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
type_contents(cx, ty).interior_unsafe()
}
pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
return memoized(&cx.tc_cache, ty, |ty| {
tc_ty(cx, ty, &mut FnvHashMap())
});
fn tc_ty<'tcx>(cx: &ctxt<'tcx>,
ty: Ty<'tcx>,
cache: &mut FnvHashMap<Ty<'tcx>, TypeContents>) -> TypeContents
{
// Subtle: Note that we are *not* using cx.tc_cache here but rather a
// private cache for this walk. This is needed in the case of cyclic
// types like:
//
// struct List { next: Box<Option<List>>, ... }
//
// When computing the type contents of such a type, we wind up deeply
// recursing as we go. So when we encounter the recursive reference
// to List, we temporarily use TC::None as its contents. Later we'll
// patch up the cache with the correct value, once we've computed it
// (this is basically a co-inductive process, if that helps). So in
// the end we'll compute TC::OwnsOwned, in this case.
//
        // The problem is, as we are doing the computation, we will also
        // compute an *intermediate* contents for, e.g., Option<List> of
        // TC::None. This is ok during the computation of List itself, but if
// we stored this intermediate value into cx.tc_cache, then later
// requests for the contents of Option<List> would also yield TC::None
// which is incorrect. This value was computed based on the crutch
// value for the type contents of list. The correct value is
// TC::OwnsOwned. This manifested as issue #4821.
match cache.get(&ty) {
Some(tc) => { return *tc; }
None => {}
}
match cx.tc_cache.borrow().get(&ty) { // Must check both caches!
Some(tc) => { return *tc; }
None => {}
}
cache.insert(ty, TC::None);
let result = match ty.sty {
// usize and isize are ffi-unsafe
ty_uint(ast::TyUs) | ty_int(ast::TyIs) => {
TC::ReachesFfiUnsafe
}
// Scalar and unique types are sendable, and durable
ty_infer(ty::FreshIntTy(_)) | ty_infer(ty::FreshFloatTy(_)) |
ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
ty_bare_fn(..) | ty::ty_char => {
TC::None
}
ty_uniq(typ) => {
TC::ReachesFfiUnsafe | match typ.sty {
ty_str => TC::OwnsOwned,
_ => tc_ty(cx, typ, cache).owned_pointer(),
}
}
ty_trait(box TyTrait { ref bounds, .. }) => {
object_contents(bounds) | TC::ReachesFfiUnsafe | TC::Nonsized
}
ty_ptr(ref mt) => {
tc_ty(cx, mt.ty, cache).unsafe_pointer()
}
ty_rptr(r, ref mt) => {
                // `ty_str` is special-cased: the contents of a `&str` are
                // always those of an immutably borrowed str. All other
                // pointees, including slices, take the generic path below.
                TC::ReachesFfiUnsafe | match mt.ty.sty {
                    ty_str => borrowed_contents(*r, ast::MutImmutable),
                    _ => tc_ty(cx, mt.ty, cache).reference(borrowed_contents(*r, mt.mutbl)),
                }
}
ty_vec(ty, Some(_)) => {
tc_ty(cx, ty, cache)
}
ty_vec(ty, None) => {
tc_ty(cx, ty, cache) | TC::Nonsized
}
ty_str => TC::Nonsized,
ty_struct(did, substs) => {
let flds = struct_fields(cx, did, substs);
let mut res =
TypeContents::union(&flds[..],
|f| tc_mt(cx, f.mt, cache));
if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
res = res | TC::ReachesFfiUnsafe;
}
if ty::has_dtor(cx, did) {
res = res | TC::OwnsDtor;
}
apply_lang_items(cx, did, res)
}
ty_closure(did, substs) => {
// FIXME(#14449): `borrowed_contents` below assumes `&mut` closure.
let param_env = ty::empty_parameter_environment(cx);
let upvars = closure_upvars(¶m_env, did, substs).unwrap();
TypeContents::union(&upvars, |f| tc_ty(cx, &f.ty, cache))
}
ty_tup(ref tys) => {
TypeContents::union(&tys[..],
|ty| tc_ty(cx, *ty, cache))
}
ty_enum(did, substs) => {
let variants = substd_enum_variants(cx, did, substs);
let mut res =
TypeContents::union(&variants[..], |variant| {
TypeContents::union(&variant.args,
|arg_ty| {
tc_ty(cx, *arg_ty, cache)
})
});
if ty::has_dtor(cx, did) {
res = res | TC::OwnsDtor;
}
if !variants.is_empty() {
let repr_hints = lookup_repr_hints(cx, did);
if repr_hints.len() > 1 {
// this is an error later on, but this type isn't safe
res = res | TC::ReachesFfiUnsafe;
}
match repr_hints.get(0) {
Some(h) => if !h.is_ffi_safe() {
res = res | TC::ReachesFfiUnsafe;
},
// ReprAny
None => {
res = res | TC::ReachesFfiUnsafe;
// We allow ReprAny enums if they are eligible for
// the nullable pointer optimization and the
// contained type is an `extern fn`
if variants.len() == 2 {
let mut data_idx = 0;
if variants[0].args.is_empty() {
data_idx = 1;
}
if variants[data_idx].args.len() == 1 {
match variants[data_idx].args[0].sty {
ty_bare_fn(..) => { res = res - TC::ReachesFfiUnsafe; }
_ => { }
}
}
}
}
}
}
apply_lang_items(cx, did, res)
}
ty_projection(..) |
ty_param(_) => {
TC::All
}
ty_infer(_) |
ty_err => {
cx.sess.bug("asked to compute contents of error type");
}
};
cache.insert(ty, result);
result
}
fn tc_mt<'tcx>(cx: &ctxt<'tcx>,
mt: mt<'tcx>,
cache: &mut FnvHashMap<Ty<'tcx>, TypeContents>) -> TypeContents
{
let mc = TC::ReachesMutable.when(mt.mutbl == MutMutable);
mc | tc_ty(cx, mt.ty, cache)
}
fn apply_lang_items(cx: &ctxt, did: ast::DefId, tc: TypeContents)
-> TypeContents {
if Some(did) == cx.lang_items.unsafe_cell_type() {
tc | TC::InteriorUnsafe
} else {
tc
}
}
    /// Type contents due to containing a reference with the region `region`
    /// and mutability `mutbl`
fn borrowed_contents(region: ty::Region,
mutbl: ast::Mutability)
-> TypeContents {
let b = match mutbl {
ast::MutMutable => TC::ReachesMutable,
ast::MutImmutable => TC::None,
};
b | (TC::ReachesBorrowed).when(region != ty::ReStatic)
}
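    // For example (illustrative): a `&'static T` contributes TC::None
    // here, while a `&'a mut T` with a non-static region contributes
    // TC::ReachesMutable | TC::ReachesBorrowed.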
fn object_contents(bounds: &ExistentialBounds) -> TypeContents {
// These are the type contents of the (opaque) interior. We
// make no assumptions (other than that it cannot have an
// in-scope type parameter within, which makes no sense).
let mut tc = TC::All - TC::InteriorParam;
for bound in &bounds.builtin_bounds {
tc = tc - match bound {
BoundSync | BoundSend | BoundCopy => TC::None,
BoundSized => TC::Nonsized,
};
}
return tc;
}
}
fn type_impls_bound<'a,'tcx>(param_env: Option<&ParameterEnvironment<'a,'tcx>>,
tcx: &ty::ctxt<'tcx>,
ty: Ty<'tcx>,
bound: ty::BuiltinBound,
span: Span)
-> bool
{
let pe;
let param_env = match param_env {
Some(e) => e,
None => {
pe = empty_parameter_environment(tcx);
&pe
}
};
let infcx = infer::new_infer_ctxt(tcx);
let is_impld = traits::type_known_to_meet_builtin_bound(&infcx, param_env, ty, bound, span);
debug!("type_impls_bound({}, {:?}) = {:?}",
ty.repr(tcx),
bound,
is_impld);
is_impld
}
pub fn type_moves_by_default<'a,'tcx>(param_env: &ParameterEnvironment<'a,'tcx>,
span: Span,
ty: Ty<'tcx>)
-> bool
{
if ty.flags.get().intersects(TypeFlags::MOVENESS_CACHED) {
return ty.flags.get().intersects(TypeFlags::MOVES_BY_DEFAULT);
}
assert!(!ty::type_needs_infer(ty));
// Fast-path for primitive types
let result = match ty.sty {
ty_bool | ty_char | ty_int(..) | ty_uint(..) | ty_float(..) |
ty_ptr(..) | ty_bare_fn(..) | ty_rptr(_, mt {
mutbl: ast::MutImmutable, ..
}) => Some(false),
ty_str | ty_uniq(..) | ty_rptr(_, mt {
mutbl: ast::MutMutable, ..
}) => Some(true),
ty_vec(..) | ty_trait(..) | ty_tup(..) |
ty_closure(..) | ty_enum(..) | ty_struct(..) |
ty_projection(..) | ty_param(..) | ty_infer(..) | ty_err => None
}.unwrap_or_else(|| !type_impls_bound(Some(param_env),
param_env.tcx,
ty,
ty::BoundCopy,
span));
if !type_has_params(ty) && !type_has_self(ty) {
ty.flags.set(ty.flags.get() | if result {
TypeFlags::MOVENESS_CACHED | TypeFlags::MOVES_BY_DEFAULT
} else {
TypeFlags::MOVENESS_CACHED
});
}
result
}
#[inline]
pub fn type_is_sized<'a,'tcx>(param_env: Option<&ParameterEnvironment<'a,'tcx>>,
tcx: &ty::ctxt<'tcx>,
span: Span,
ty: Ty<'tcx>)
-> bool
{
if ty.flags.get().intersects(TypeFlags::SIZEDNESS_CACHED) {
let result = ty.flags.get().intersects(TypeFlags::IS_SIZED);
return result;
}
type_is_sized_uncached(param_env, tcx, span, ty)
}
fn type_is_sized_uncached<'a,'tcx>(param_env: Option<&ParameterEnvironment<'a,'tcx>>,
tcx: &ty::ctxt<'tcx>,
span: Span,
ty: Ty<'tcx>) -> bool {
assert!(!ty::type_needs_infer(ty));
// Fast-path for primitive types
let result = match ty.sty {
ty_bool | ty_char | ty_int(..) | ty_uint(..) | ty_float(..) |
ty_uniq(..) | ty_ptr(..) | ty_rptr(..) | ty_bare_fn(..) |
ty_vec(_, Some(..)) | ty_tup(..) | ty_closure(..) => Some(true),
ty_str | ty_trait(..) | ty_vec(_, None) => Some(false),
ty_enum(..) | ty_struct(..) | ty_projection(..) | ty_param(..) |
ty_infer(..) | ty_err => None
}.unwrap_or_else(|| type_impls_bound(param_env, tcx, ty, ty::BoundSized, span));
if !type_has_params(ty) && !type_has_self(ty) {
ty.flags.set(ty.flags.get() | if result {
TypeFlags::SIZEDNESS_CACHED | TypeFlags::IS_SIZED
} else {
TypeFlags::SIZEDNESS_CACHED
});
}
result
}
pub fn is_ffi_safe<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
!type_contents(cx, ty).intersects(TC::ReachesFfiUnsafe)
}
// True if it is possible to instantiate a value of type `r_ty`, i.e. if
// building one does not require already having another instance of `r_ty`.
pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool {
fn type_requires<'tcx>(cx: &ctxt<'tcx>, seen: &mut Vec<DefId>,
r_ty: Ty<'tcx>, ty: Ty<'tcx>) -> bool {
debug!("type_requires({:?}, {:?})?",
::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_string(cx, ty));
let r = r_ty == ty || subtypes_require(cx, seen, r_ty, ty);
debug!("type_requires({:?}, {:?})? {:?}",
::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_string(cx, ty),
r);
return r;
}
fn subtypes_require<'tcx>(cx: &ctxt<'tcx>, seen: &mut Vec<DefId>,
r_ty: Ty<'tcx>, ty: Ty<'tcx>) -> bool {
debug!("subtypes_require({:?}, {:?})?",
::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_string(cx, ty));
let r = match ty.sty {
            // Fixed-length vectors need special treatment compared to
            // slices, since a fixed, non-zero length means the vector must
            // actually contain instances of its element type.
            ty_vec(_, Some(0)) => false, // a zero-length array needs no contents
ty_vec(ty, Some(_)) => type_requires(cx, seen, r_ty, ty),
ty_bool |
ty_char |
ty_int(_) |
ty_uint(_) |
ty_float(_) |
ty_str |
ty_bare_fn(..) |
ty_param(_) |
ty_projection(_) |
ty_vec(_, None) => {
false
}
ty_uniq(typ) => {
type_requires(cx, seen, r_ty, typ)
}
ty_rptr(_, ref mt) => {
type_requires(cx, seen, r_ty, mt.ty)
}
ty_ptr(..) => {
false // unsafe ptrs can always be NULL
}
ty_trait(..) => {
false
}
ty_struct(ref did, _) if seen.contains(did) => {
false
}
ty_struct(did, substs) => {
seen.push(did);
let fields = struct_fields(cx, did, substs);
let r = fields.iter().any(|f| type_requires(cx, seen, r_ty, f.mt.ty));
seen.pop().unwrap();
r
}
ty_err |
ty_infer(_) |
ty_closure(..) => {
// this check is run on type definitions, so we don't expect to see
// inference by-products or closure types
cx.sess.bug(&format!("requires check invoked on inapplicable type: {:?}", ty))
}
ty_tup(ref ts) => {
ts.iter().any(|ty| type_requires(cx, seen, r_ty, *ty))
}
ty_enum(ref did, _) if seen.contains(did) => {
false
}
ty_enum(did, substs) => {
seen.push(did);
let vs = enum_variants(cx, did);
let r = !vs.is_empty() && vs.iter().all(|variant| {
variant.args.iter().any(|aty| {
let sty = aty.subst(cx, substs);
type_requires(cx, seen, r_ty, sty)
})
});
seen.pop().unwrap();
r
}
};
debug!("subtypes_require({:?}, {:?})? {:?}",
::util::ppaux::ty_to_string(cx, r_ty),
::util::ppaux::ty_to_string(cx, ty),
r);
return r;
}
let mut seen = Vec::new();
!subtypes_require(cx, &mut seen, r_ty, r_ty)
}
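// For example (illustrative): `struct Foo { x: Foo }` is not instantiable,
// since building a `Foo` requires an already-existing `Foo`, whereas
// `struct Bar { x: Option<Box<Bar>> }` is instantiable via the `None`
// variant.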
/// Describes whether a type is representable. For types that are not
/// representable, 'SelfRecursive' and 'ContainsRecursive' are used to
/// distinguish between types that are recursive with themselves and types that
/// contain a different recursive type. These cases can therefore be treated
/// differently when reporting errors.
///
/// The ordering of the cases is significant. They are sorted so that cmp::max
/// will keep the "more erroneous" of two values.
#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)]
pub enum Representability {
Representable,
ContainsRecursive,
SelfRecursive,
}
/// Check whether a type is representable. This means it cannot contain unboxed
/// structural recursion. This check is needed for structs and enums.
pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>)
-> Representability {
// Iterate until something non-representable is found
fn find_nonrepresentable<'tcx, It: Iterator<Item=Ty<'tcx>>>(cx: &ctxt<'tcx>, sp: Span,
seen: &mut Vec<Ty<'tcx>>,
iter: It)
-> Representability {
iter.fold(Representable,
|r, ty| cmp::max(r, is_type_structurally_recursive(cx, sp, seen, ty)))
}
fn are_inner_types_recursive<'tcx>(cx: &ctxt<'tcx>, sp: Span,
seen: &mut Vec<Ty<'tcx>>, ty: Ty<'tcx>)
-> Representability {
match ty.sty {
ty_tup(ref ts) => {
find_nonrepresentable(cx, sp, seen, ts.iter().cloned())
}
// Fixed-length vectors.
// FIXME(#11924) Behavior undecided for zero-length vectors.
ty_vec(ty, Some(_)) => {
is_type_structurally_recursive(cx, sp, seen, ty)
}
ty_struct(did, substs) => {
let fields = struct_fields(cx, did, substs);
find_nonrepresentable(cx, sp, seen, fields.iter().map(|f| f.mt.ty))
}
ty_enum(did, substs) => {
let vs = enum_variants(cx, did);
let iter = vs.iter()
.flat_map(|variant| { variant.args.iter() })
.map(|aty| { aty.subst_spanned(cx, substs, Some(sp)) });
find_nonrepresentable(cx, sp, seen, iter)
}
ty_closure(..) => {
// this check is run on type definitions, so we don't expect
// to see closure types
cx.sess.bug(&format!("requires check invoked on inapplicable type: {:?}", ty))
}
_ => Representable,
}
}
fn same_struct_or_enum_def_id(ty: Ty, did: DefId) -> bool {
match ty.sty {
ty_struct(ty_did, _) | ty_enum(ty_did, _) => {
ty_did == did
}
_ => false
}
}
fn same_type<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
match (&a.sty, &b.sty) {
(&ty_struct(did_a, ref substs_a), &ty_struct(did_b, ref substs_b)) |
(&ty_enum(did_a, ref substs_a), &ty_enum(did_b, ref substs_b)) => {
if did_a != did_b {
return false;
}
let types_a = substs_a.types.get_slice(subst::TypeSpace);
let types_b = substs_b.types.get_slice(subst::TypeSpace);
let mut pairs = types_a.iter().zip(types_b.iter());
pairs.all(|(&a, &b)| same_type(a, b))
}
_ => {
a == b
}
}
}
// Does the type `ty` directly (without indirection through a pointer)
// contain any types on stack `seen`?
fn is_type_structurally_recursive<'tcx>(cx: &ctxt<'tcx>, sp: Span,
seen: &mut Vec<Ty<'tcx>>,
ty: Ty<'tcx>) -> Representability {
debug!("is_type_structurally_recursive: {:?}",
::util::ppaux::ty_to_string(cx, ty));
match ty.sty {
ty_struct(did, _) | ty_enum(did, _) => {
{
// Iterate through stack of previously seen types.
let mut iter = seen.iter();
// The first item in `seen` is the type we are actually curious about.
// We want to return SelfRecursive if this type contains itself.
// It is important that we DON'T take generic parameters into account
// for this check, so that Bar<T> in this example counts as SelfRecursive:
//
// struct Foo;
// struct Bar<T> { x: Bar<Foo> }
match iter.next() {
Some(&seen_type) => {
if same_struct_or_enum_def_id(seen_type, did) {
debug!("SelfRecursive: {:?} contains {:?}",
::util::ppaux::ty_to_string(cx, seen_type),
::util::ppaux::ty_to_string(cx, ty));
return SelfRecursive;
}
}
None => {}
}
// We also need to know whether the first item contains other types that
// are structurally recursive. If we don't catch this case, we will recurse
// infinitely for some inputs.
//
// It is important that we DO take generic parameters into account here,
// so that code like this is considered SelfRecursive, not ContainsRecursive:
//
            // struct Foo { x: Option<Option<Foo>> }
for &seen_type in iter {
if same_type(ty, seen_type) {
debug!("ContainsRecursive: {:?} contains {:?}",
::util::ppaux::ty_to_string(cx, seen_type),
::util::ppaux::ty_to_string(cx, ty));
return ContainsRecursive;
}
}
}
// For structs and enums, track all previously seen types by pushing them
// onto the 'seen' stack.
seen.push(ty);
let out = are_inner_types_recursive(cx, sp, seen, ty);
seen.pop();
out
}
_ => {
// No need to push in other cases.
are_inner_types_recursive(cx, sp, seen, ty)
}
}
}
debug!("is_type_representable: {:?}",
::util::ppaux::ty_to_string(cx, ty));
// To avoid a stack overflow when checking an enum variant or struct that
// contains a different, structurally recursive type, maintain a stack
// of seen types and check recursion for each of them (issues #3008, #3779).
let mut seen: Vec<Ty> = Vec::new();
let r = is_type_structurally_recursive(cx, sp, &mut seen, ty);
debug!("is_type_representable: {:?} is {:?}",
::util::ppaux::ty_to_string(cx, ty), r);
r
}
pub fn type_is_trait(ty: Ty) -> bool {
match ty.sty {
ty_trait(..) => true,
_ => false
}
}
pub fn type_is_integral(ty: Ty) -> bool {
match ty.sty {
ty_infer(IntVar(_)) | ty_int(_) | ty_uint(_) => true,
_ => false
}
}
pub fn type_is_fresh(ty: Ty) -> bool {
match ty.sty {
ty_infer(FreshTy(_)) => true,
ty_infer(FreshIntTy(_)) => true,
ty_infer(FreshFloatTy(_)) => true,
_ => false
}
}
pub fn type_is_uint(ty: Ty) -> bool {
match ty.sty {
ty_infer(IntVar(_)) | ty_uint(ast::TyUs) => true,
_ => false
}
}
pub fn type_is_char(ty: Ty) -> bool {
match ty.sty {
ty_char => true,
_ => false
}
}
pub fn type_is_bare_fn(ty: Ty) -> bool {
match ty.sty {
ty_bare_fn(..) => true,
_ => false
}
}
pub fn type_is_bare_fn_item(ty: Ty) -> bool {
match ty.sty {
ty_bare_fn(Some(_), _) => true,
_ => false
}
}
pub fn type_is_fp(ty: Ty) -> bool {
match ty.sty {
ty_infer(FloatVar(_)) | ty_float(_) => true,
_ => false
}
}
pub fn type_is_numeric(ty: Ty) -> bool {
return type_is_integral(ty) || type_is_fp(ty);
}
pub fn type_is_signed(ty: Ty) -> bool {
match ty.sty {
ty_int(_) => true,
_ => false
}
}
pub fn type_is_machine(ty: Ty) -> bool {
match ty.sty {
ty_int(ast::TyIs) | ty_uint(ast::TyUs) => false,
ty_int(..) | ty_uint(..) | ty_float(..) => true,
_ => false
}
}
// Whether a type is "enum-like", that is, an enum type with only nullary
// constructors
pub fn type_is_c_like_enum(cx: &ctxt, ty: Ty) -> bool {
match ty.sty {
ty_enum(did, _) => {
let variants = enum_variants(cx, did);
if variants.is_empty() {
false
} else {
variants.iter().all(|v| v.args.is_empty())
}
}
_ => false
}
}
// Returns the type and mutability of *ty.
//
// The parameter `explicit` indicates if this is an *explicit* dereference.
// Some types---notably unsafe ptrs---can only be dereferenced explicitly.
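// For example (illustrative): `deref` on `&mut T` yields `Some` with
// `MutMutable` regardless of `explicit`, while on `*const T` it yields
// `Some` only when `explicit` is true.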
pub fn deref<'tcx>(ty: Ty<'tcx>, explicit: bool) -> Option<mt<'tcx>> {
match ty.sty {
ty_uniq(ty) => {
Some(mt {
ty: ty,
mutbl: ast::MutImmutable,
})
},
ty_rptr(_, mt) => Some(mt),
ty_ptr(mt) if explicit => Some(mt),
_ => None
}
}
pub fn type_content<'tcx>(ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty_uniq(ty) => ty,
ty_rptr(_, mt) | ty_ptr(mt) => mt.ty,
_ => ty
}
}
// Returns the type of ty[i]
pub fn index<'tcx>(ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
match ty.sty {
ty_vec(ty, _) => Some(ty),
_ => None
}
}
// Returns the type of elements contained within an 'array-like' type.
// This is exactly the same as the above, except it supports strings,
// which can't actually be indexed.
pub fn array_element_ty<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
match ty.sty {
ty_vec(ty, _) => Some(ty),
ty_str => Some(tcx.types.u8),
_ => None
}
}
/// Returns the type of element at index `i` in tuple or tuple-like type `t`.
/// For an enum `t`, `variant` is None only if `t` is a univariant enum.
pub fn positional_element_ty<'tcx>(cx: &ctxt<'tcx>,
ty: Ty<'tcx>,
i: usize,
variant: Option<ast::DefId>) -> Option<Ty<'tcx>> {
match (&ty.sty, variant) {
(&ty_tup(ref v), None) => v.get(i).cloned(),
(&ty_struct(def_id, substs), None) => lookup_struct_fields(cx, def_id)
.get(i)
.map(|&t|lookup_item_type(cx, t.id).ty.subst(cx, substs)),
(&ty_enum(def_id, substs), Some(variant_def_id)) => {
let variant_info = enum_variant_with_id(cx, def_id, variant_def_id);
variant_info.args.get(i).map(|t|t.subst(cx, substs))
}
(&ty_enum(def_id, substs), None) => {
assert!(enum_is_univariant(cx, def_id));
let enum_variants = enum_variants(cx, def_id);
let variant_info = &(*enum_variants)[0];
variant_info.args.get(i).map(|t|t.subst(cx, substs))
}
_ => None
}
}
/// Returns the type of element at field `n` in struct or struct-like type `t`.
/// For an enum `t`, `variant` must be some def id.
pub fn named_element_ty<'tcx>(cx: &ctxt<'tcx>,
ty: Ty<'tcx>,
n: ast::Name,
variant: Option<ast::DefId>) -> Option<Ty<'tcx>> {
match (&ty.sty, variant) {
(&ty_struct(def_id, substs), None) => {
let r = lookup_struct_fields(cx, def_id);
r.iter().find(|f| f.name == n)
.map(|&f| lookup_field_type(cx, def_id, f.id, substs))
}
(&ty_enum(def_id, substs), Some(variant_def_id)) => {
let variant_info = enum_variant_with_id(cx, def_id, variant_def_id);
variant_info.arg_names.as_ref()
.expect("must have struct enum variant if accessing a named fields")
.iter().zip(variant_info.args.iter())
.find(|&(&name, _)| name == n)
.map(|(_name, arg_t)| arg_t.subst(cx, substs))
}
_ => None
}
}
pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> {
match node_id_to_type_opt(cx, id) {
Some(ty) => ty,
None => cx.sess.bug(
&format!("node_id_to_type: no type for node `{}`",
cx.map.node_to_string(id)))
}
}
pub fn node_id_to_type_opt<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Option<Ty<'tcx>> {
match cx.node_types.borrow().get(&id) {
Some(&ty) => Some(ty),
None => None
}
}
pub fn node_id_item_substs<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> ItemSubsts<'tcx> {
match cx.item_substs.borrow().get(&id) {
None => ItemSubsts::empty(),
Some(ts) => ts.clone(),
}
}
pub fn fn_is_variadic(fty: Ty) -> bool {
match fty.sty {
ty_bare_fn(_, ref f) => f.sig.0.variadic,
ref s => {
panic!("fn_is_variadic() called on non-fn type: {:?}", s)
}
}
}
pub fn ty_fn_sig<'tcx>(fty: Ty<'tcx>) -> &'tcx PolyFnSig<'tcx> {
match fty.sty {
ty_bare_fn(_, ref f) => &f.sig,
ref s => {
panic!("ty_fn_sig() called on non-fn type: {:?}", s)
}
}
}
/// Returns the ABI of the given function.
pub fn ty_fn_abi(fty: Ty) -> abi::Abi {
match fty.sty {
ty_bare_fn(_, ref f) => f.abi,
_ => panic!("ty_fn_abi() called on non-fn type"),
}
}
// Type accessors for substructures of types
pub fn ty_fn_args<'tcx>(fty: Ty<'tcx>) -> ty::Binder<Vec<Ty<'tcx>>> {
ty_fn_sig(fty).inputs()
}
pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> Binder<FnOutput<'tcx>> {
match fty.sty {
ty_bare_fn(_, ref f) => f.sig.output(),
ref s => {
panic!("ty_fn_ret() called on non-fn type: {:?}", s)
}
}
}
pub fn is_fn_ty(fty: Ty) -> bool {
match fty.sty {
ty_bare_fn(..) => true,
_ => false
}
}
pub fn ty_region(tcx: &ctxt,
span: Span,
ty: Ty) -> Region {
match ty.sty {
ty_rptr(r, _) => *r,
ref s => {
tcx.sess.span_bug(
span,
&format!("ty_region() invoked on an inappropriate ty: {:?}",
s));
}
}
}
pub fn free_region_from_def(outlives_extent: region::DestructionScopeData,
def: &RegionParameterDef)
-> ty::Region
{
let ret =
ty::ReFree(ty::FreeRegion { scope: outlives_extent,
bound_region: ty::BrNamed(def.def_id,
def.name) });
debug!("free_region_from_def returns {:?}", ret);
ret
}
// Returns the type of a pattern as a monotype. Like `expr_ty`, this function
// doesn't provide type parameter substitutions.
pub fn pat_ty<'tcx>(cx: &ctxt<'tcx>, pat: &ast::Pat) -> Ty<'tcx> {
return node_id_to_type(cx, pat.id);
}
pub fn pat_ty_opt<'tcx>(cx: &ctxt<'tcx>, pat: &ast::Pat) -> Option<Ty<'tcx>> {
return node_id_to_type_opt(cx, pat.id);
}
// Returns the type of an expression as a monotype.
//
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
// some cases, we insert `AutoAdjustment` annotations such as auto-deref or
// auto-ref. The type returned by this function does not consider such
// adjustments. See `expr_ty_adjusted()` instead.
//
// NB (2): This type doesn't provide type parameter substitutions; e.g. if you
// ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
// instead of "fn(ty) -> T with T = isize".
pub fn expr_ty<'tcx>(cx: &ctxt<'tcx>, expr: &ast::Expr) -> Ty<'tcx> {
return node_id_to_type(cx, expr.id);
}
pub fn expr_ty_opt<'tcx>(cx: &ctxt<'tcx>, expr: &ast::Expr) -> Option<Ty<'tcx>> {
return node_id_to_type_opt(cx, expr.id);
}
/// Returns the type of `expr`, considering any `AutoAdjustment`
/// entry recorded for that expression.
///
/// It would almost certainly be better to store the adjusted ty in with
/// the `AutoAdjustment`, but I opted not to do this because it would
/// require serializing and deserializing the type and, although that's not
/// hard to do, I just hate that code so much I didn't want to touch it
/// unless it was to fix it properly, which seemed a distraction from the
/// thread at hand! -nmatsakis
pub fn expr_ty_adjusted<'tcx>(cx: &ctxt<'tcx>, expr: &ast::Expr) -> Ty<'tcx> {
adjust_ty(cx, expr.span, expr.id, expr_ty(cx, expr),
cx.adjustments.borrow().get(&expr.id),
|method_call| cx.method_map.borrow().get(&method_call).map(|method| method.ty))
}
pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
match cx.map.find(id) {
Some(ast_map::NodeExpr(e)) => {
e.span
}
Some(f) => {
cx.sess.bug(&format!("Node id {} is not an expr: {:?}",
id,
f));
}
None => {
cx.sess.bug(&format!("Node id {} is not present \
in the node map", id));
}
}
}
pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
match cx.map.find(id) {
Some(ast_map::NodeLocal(pat)) => {
match pat.node {
ast::PatIdent(_, ref path1, _) => {
token::get_ident(path1.node)
}
_ => {
cx.sess.bug(
&format!("Variable id {} maps to {:?}, not local",
id,
pat));
}
}
}
r => {
cx.sess.bug(&format!("Variable id {} maps to {:?}, not local",
id,
r));
}
}
}
/// See `expr_ty_adjusted`
pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>,
span: Span,
expr_id: ast::NodeId,
unadjusted_ty: Ty<'tcx>,
adjustment: Option<&AutoAdjustment<'tcx>>,
mut method_type: F)
-> Ty<'tcx> where
F: FnMut(MethodCall) -> Option<Ty<'tcx>>,
{
if let ty_err = unadjusted_ty.sty {
return unadjusted_ty;
}
return match adjustment {
Some(adjustment) => {
match *adjustment {
AdjustReifyFnPointer => {
match unadjusted_ty.sty {
ty::ty_bare_fn(Some(_), b) => {
ty::mk_bare_fn(cx, None, b)
}
_ => {
cx.sess.bug(
&format!("AdjustReifyFnPointer adjustment on non-fn-item: \
{}", unadjusted_ty.repr(cx)));
}
}
}
AdjustUnsafeFnPointer => {
match unadjusted_ty.sty {
ty::ty_bare_fn(None, b) => cx.safe_to_unsafe_fn_ty(b),
ref b => {
cx.sess.bug(
&format!("AdjustReifyFnPointer adjustment on non-fn-item: \
{:?}",
b));
}
}
}
AdjustDerefRef(ref adj) => {
let mut adjusted_ty = unadjusted_ty;
if !ty::type_is_error(adjusted_ty) {
for i in 0..adj.autoderefs {
let method_call = MethodCall::autoderef(expr_id, i as u32);
match method_type(method_call) {
Some(method_ty) => {
// Overloaded deref operators have all late-bound
                                // regions fully instantiated and converge.
let fn_ret =
ty::no_late_bound_regions(cx,
&ty_fn_ret(method_ty)).unwrap();
adjusted_ty = fn_ret.unwrap();
}
None => {}
}
match deref(adjusted_ty, true) {
Some(mt) => { adjusted_ty = mt.ty; }
None => {
cx.sess.span_bug(
span,
&format!("the {}th autoderef failed: {}",
i,
ty_to_string(cx, adjusted_ty))
);
}
}
}
}
if let Some(target) = adj.unsize {
target
} else {
adjust_ty_for_autoref(cx, adjusted_ty, adj.autoref)
}
}
}
}
None => unadjusted_ty
};
}
pub fn adjust_ty_for_autoref<'tcx>(cx: &ctxt<'tcx>,
ty: Ty<'tcx>,
autoref: Option<AutoRef<'tcx>>)
-> Ty<'tcx> {
match autoref {
None => ty,
Some(AutoPtr(r, m)) => {
mk_rptr(cx, r, mt { ty: ty, mutbl: m })
}
Some(AutoUnsafe(m)) => {
mk_ptr(cx, mt { ty: ty, mutbl: m })
}
}
}
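// A schematic summary of `adjust_ty_for_autoref` (informal; `'r` and `m` come
// from the recorded autoref):
//
//     None                    ~> T
//     Some(AutoPtr(r, m))     ~> &'r m T
//     Some(AutoUnsafe(m))     ~> *m T   (i.e. *const T or *mut T)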
pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def {
match tcx.def_map.borrow().get(&expr.id) {
Some(def) => def.full_def(),
None => {
tcx.sess.span_bug(expr.span, &format!(
"no def-map entry for expr {}", expr.id));
}
}
}
pub fn expr_is_lval(tcx: &ctxt, e: &ast::Expr) -> bool {
match expr_kind(tcx, e) {
LvalueExpr => true,
RvalueDpsExpr | RvalueDatumExpr | RvalueStmtExpr => false
}
}
/// We categorize expressions into three kinds. The distinction between
/// lvalue/rvalue is fundamental to the language. The distinction between the
/// two kinds of rvalues is an artifact of trans which reflects how we will
/// generate code for that kind of expression. See trans/expr.rs for more
/// information.
#[derive(Copy, Clone)]
pub enum ExprKind {
LvalueExpr,
RvalueDpsExpr,
RvalueDatumExpr,
RvalueStmtExpr
}
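// Informal examples of the categorization (schematic, not exhaustive):
//
//     x, *p, s.f, v[i]                          ~> LvalueExpr
//     Foo { .. }, f(), if c { .. } else { .. }  ~> RvalueDpsExpr
//     1 + 2, &x, x as u8                        ~> RvalueDatumExpr
//     return x, a = b, while c { .. }           ~> RvalueStmtExpr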
pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
if tcx.method_map.borrow().contains_key(&MethodCall::expr(expr.id)) {
// Overloaded operations are generally calls, and hence they are
// generated via DPS, but there are a few exceptions:
return match expr.node {
// `a += b` has a unit result.
ast::ExprAssignOp(..) => RvalueStmtExpr,
// the deref method invoked for `*a` always yields an `&T`
ast::ExprUnary(ast::UnDeref, _) => LvalueExpr,
// the index method invoked for `a[i]` always yields an `&T`
ast::ExprIndex(..) => LvalueExpr,
// in the general case, result could be any type, use DPS
_ => RvalueDpsExpr
};
}
match expr.node {
ast::ExprPath(..) => {
match resolve_expr(tcx, expr) {
def::DefVariant(tid, vid, _) => {
let variant_info = enum_variant_with_id(tcx, tid, vid);
if !variant_info.args.is_empty() {
// N-ary variant.
RvalueDatumExpr
} else {
// Nullary variant.
RvalueDpsExpr
}
}
def::DefStruct(_) => {
match tcx.node_types.borrow().get(&expr.id) {
Some(ty) => match ty.sty {
ty_bare_fn(..) => RvalueDatumExpr,
_ => RvalueDpsExpr
},
// See ExprCast below for why types might be missing.
None => RvalueDatumExpr
}
}
            // Special case: a unit-like struct's constructor is used without `()` at
            // the end (just `UnitStruct`), which means this is an ExprPath to a DefFn.
            // But in the case of unit structs this should not be interpreted as a
            // function pointer but as a call to the constructor.
def::DefFn(_, true) => RvalueDpsExpr,
// Fn pointers are just scalar values.
def::DefFn(..) | def::DefMethod(..) => RvalueDatumExpr,
// Note: there is actually a good case to be made that
            // DefArg's, particularly those of immediate type, ought to
            // be considered rvalues.
def::DefStatic(..) |
def::DefUpvar(..) |
def::DefLocal(..) => LvalueExpr,
def::DefConst(..) |
def::DefAssociatedConst(..) => RvalueDatumExpr,
def => {
tcx.sess.span_bug(
expr.span,
&format!("uncategorized def for expr {}: {:?}",
expr.id,
def));
}
}
}
ast::ExprUnary(ast::UnDeref, _) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprIndex(..) => {
LvalueExpr
}
ast::ExprCall(..) |
ast::ExprMethodCall(..) |
ast::ExprStruct(..) |
ast::ExprRange(..) |
ast::ExprTup(..) |
ast::ExprIf(..) |
ast::ExprMatch(..) |
ast::ExprClosure(..) |
ast::ExprBlock(..) |
ast::ExprRepeat(..) |
ast::ExprVec(..) => {
RvalueDpsExpr
}
ast::ExprIfLet(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprIfLet");
}
ast::ExprWhileLet(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprWhileLet");
}
ast::ExprForLoop(..) => {
tcx.sess.span_bug(expr.span, "non-desugared ExprForLoop");
}
ast::ExprLit(ref lit) if lit_is_str(&**lit) => {
RvalueDpsExpr
}
ast::ExprBreak(..) |
ast::ExprAgain(..) |
ast::ExprRet(..) |
ast::ExprWhile(..) |
ast::ExprLoop(..) |
ast::ExprAssign(..) |
ast::ExprInlineAsm(..) |
ast::ExprAssignOp(..) => {
RvalueStmtExpr
}
ast::ExprLit(_) | // Note: LitStr is carved out above
ast::ExprUnary(..) |
ast::ExprBox(None, _) |
ast::ExprAddrOf(..) |
ast::ExprBinary(..) |
ast::ExprCast(..) => {
RvalueDatumExpr
}
ast::ExprBox(Some(ref place), _) => {
// Special case `Box<T>` for now:
let def_id = match tcx.def_map.borrow().get(&place.id) {
Some(def) => def.def_id(),
None => panic!("no def for place"),
};
if tcx.lang_items.exchange_heap() == Some(def_id) {
RvalueDatumExpr
} else {
RvalueDpsExpr
}
}
ast::ExprParen(ref e) => expr_kind(tcx, &**e),
ast::ExprMac(..) => {
tcx.sess.span_bug(
expr.span,
"macro expression remains after expansion");
}
}
}
pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId {
match s.node {
        ast::StmtDecl(_, id) | ast::StmtExpr(_, id) | ast::StmtSemi(_, id) => {
return id;
}
ast::StmtMac(..) => panic!("unexpanded macro in trans")
}
}
pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
-> usize {
    if let Some(i) = fields.iter().position(|f| f.name == name) {
        return i;
    }
tcx.sess.bug(&format!(
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
fields.iter()
.map(|f| token::get_name(f.name).to_string())
.collect::<Vec<String>>()));
}
pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem])
-> Option<usize> {
trait_items.iter().position(|m| m.name() == id)
}
pub fn ty_sort_string<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> String {
match ty.sty {
ty_bool | ty_char | ty_int(_) |
ty_uint(_) | ty_float(_) | ty_str => {
::util::ppaux::ty_to_string(cx, ty)
}
ty_tup(ref tys) if tys.is_empty() => ::util::ppaux::ty_to_string(cx, ty),
ty_enum(id, _) => format!("enum `{}`", item_path_str(cx, id)),
ty_uniq(_) => "box".to_string(),
ty_vec(_, Some(n)) => format!("array of {} elements", n),
ty_vec(_, None) => "slice".to_string(),
ty_ptr(_) => "*-ptr".to_string(),
ty_rptr(_, _) => "&-ptr".to_string(),
ty_bare_fn(Some(_), _) => format!("fn item"),
ty_bare_fn(None, _) => "fn pointer".to_string(),
ty_trait(ref inner) => {
format!("trait {}", item_path_str(cx, inner.principal_def_id()))
}
ty_struct(id, _) => {
format!("struct `{}`", item_path_str(cx, id))
}
ty_closure(..) => "closure".to_string(),
ty_tup(_) => "tuple".to_string(),
ty_infer(TyVar(_)) => "inferred type".to_string(),
ty_infer(IntVar(_)) => "integral variable".to_string(),
ty_infer(FloatVar(_)) => "floating-point variable".to_string(),
ty_infer(FreshTy(_)) => "skolemized type".to_string(),
ty_infer(FreshIntTy(_)) => "skolemized integral type".to_string(),
ty_infer(FreshFloatTy(_)) => "skolemized floating-point type".to_string(),
ty_projection(_) => "associated type".to_string(),
ty_param(ref p) => {
if p.space == subst::SelfSpace {
"Self".to_string()
} else {
"type parameter".to_string()
}
}
ty_err => "type error".to_string(),
}
}
impl<'tcx> Repr<'tcx> for ty::type_err<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
ty::type_err_to_str(tcx, self)
}
}
/// Explains the source of a type err in a short, human readable way. This is meant to be placed
/// in parentheses after some larger message. You should also invoke `note_and_explain_type_err()`
/// afterwards to present additional details, particularly when it comes to lifetime-related
/// errors.
pub fn type_err_to_str<'tcx>(cx: &ctxt<'tcx>, err: &type_err<'tcx>) -> String {
match *err {
terr_cyclic_ty => "cyclic type of infinite size".to_string(),
terr_mismatch => "types differ".to_string(),
terr_unsafety_mismatch(values) => {
format!("expected {} fn, found {} fn",
values.expected,
values.found)
}
terr_abi_mismatch(values) => {
format!("expected {} fn, found {} fn",
values.expected,
values.found)
}
terr_mutability => "values differ in mutability".to_string(),
terr_box_mutability => {
"boxed values differ in mutability".to_string()
}
terr_vec_mutability => "vectors differ in mutability".to_string(),
terr_ptr_mutability => "pointers differ in mutability".to_string(),
terr_ref_mutability => "references differ in mutability".to_string(),
terr_ty_param_size(values) => {
format!("expected a type with {} type params, \
found one with {} type params",
values.expected,
values.found)
}
terr_fixed_array_size(values) => {
format!("expected an array with a fixed size of {} elements, \
found one with {} elements",
values.expected,
values.found)
}
terr_tuple_size(values) => {
format!("expected a tuple with {} elements, \
found one with {} elements",
values.expected,
values.found)
}
terr_arg_count => {
"incorrect number of function parameters".to_string()
}
terr_regions_does_not_outlive(..) => {
"lifetime mismatch".to_string()
}
terr_regions_not_same(..) => {
"lifetimes are not the same".to_string()
}
terr_regions_no_overlap(..) => {
"lifetimes do not intersect".to_string()
}
terr_regions_insufficiently_polymorphic(br, _) => {
format!("expected bound lifetime parameter {}, \
found concrete lifetime",
bound_region_ptr_to_string(cx, br))
}
terr_regions_overly_polymorphic(br, _) => {
format!("expected concrete lifetime, \
found bound lifetime parameter {}",
bound_region_ptr_to_string(cx, br))
}
terr_sorts(values) => {
// A naive approach to making sure that we're not reporting silly errors such as:
// (expected closure, found closure).
let expected_str = ty_sort_string(cx, values.expected);
let found_str = ty_sort_string(cx, values.found);
if expected_str == found_str {
format!("expected {}, found a different {}", expected_str, found_str)
} else {
format!("expected {}, found {}", expected_str, found_str)
}
}
terr_traits(values) => {
format!("expected trait `{}`, found trait `{}`",
item_path_str(cx, values.expected),
item_path_str(cx, values.found))
}
terr_builtin_bounds(values) => {
if values.expected.is_empty() {
format!("expected no bounds, found `{}`",
values.found.user_string(cx))
} else if values.found.is_empty() {
format!("expected bounds `{}`, found no bounds",
values.expected.user_string(cx))
} else {
format!("expected bounds `{}`, found bounds `{}`",
values.expected.user_string(cx),
values.found.user_string(cx))
}
}
terr_integer_as_char => {
"expected an integral type, found `char`".to_string()
}
terr_int_mismatch(ref values) => {
format!("expected `{:?}`, found `{:?}`",
values.expected,
values.found)
}
terr_float_mismatch(ref values) => {
format!("expected `{:?}`, found `{:?}`",
values.expected,
values.found)
}
terr_variadic_mismatch(ref values) => {
format!("expected {} fn, found {} function",
if values.expected { "variadic" } else { "non-variadic" },
if values.found { "variadic" } else { "non-variadic" })
}
terr_convergence_mismatch(ref values) => {
format!("expected {} fn, found {} function",
if values.expected { "converging" } else { "diverging" },
if values.found { "converging" } else { "diverging" })
}
terr_projection_name_mismatched(ref values) => {
format!("expected {}, found {}",
token::get_name(values.expected),
token::get_name(values.found))
}
terr_projection_bounds_length(ref values) => {
format!("expected {} associated type bindings, found {}",
values.expected,
values.found)
}
}
}
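// For instance (informal): `terr_tuple_size` with expected 2 and found 3
// renders as "expected a tuple with 2 elements, found one with 3 elements",
// and the `terr_sorts` arm above renders two closures as "expected closure,
// found a different closure" rather than the unhelpful "expected closure,
// found closure".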
pub fn note_and_explain_type_err<'tcx>(cx: &ctxt<'tcx>, err: &type_err<'tcx>, sp: Span) {
match *err {
terr_regions_does_not_outlive(subregion, superregion) => {
note_and_explain_region(cx, "", subregion, "...");
note_and_explain_region(cx, "...does not necessarily outlive ",
superregion, "");
}
terr_regions_not_same(region1, region2) => {
note_and_explain_region(cx, "", region1, "...");
note_and_explain_region(cx, "...is not the same lifetime as ",
region2, "");
}
terr_regions_no_overlap(region1, region2) => {
note_and_explain_region(cx, "", region1, "...");
note_and_explain_region(cx, "...does not overlap ",
region2, "");
}
terr_regions_insufficiently_polymorphic(_, conc_region) => {
note_and_explain_region(cx,
"concrete lifetime that was found is ",
conc_region, "");
}
terr_regions_overly_polymorphic(_, ty::ReInfer(ty::ReVar(_))) => {
// don't bother to print out the message below for
// inference variables, it's not very illuminating.
}
terr_regions_overly_polymorphic(_, conc_region) => {
note_and_explain_region(cx,
"expected concrete lifetime is ",
conc_region, "");
}
terr_sorts(values) => {
let expected_str = ty_sort_string(cx, values.expected);
let found_str = ty_sort_string(cx, values.found);
if expected_str == found_str && expected_str == "closure" {
cx.sess.span_note(sp, &format!("no two closures, even if identical, have the same \
type"));
cx.sess.span_help(sp, &format!("consider boxing your closure and/or \
using it as a trait object"));
}
}
_ => {}
}
}
pub fn provided_source(cx: &ctxt, id: ast::DefId) -> Option<ast::DefId> {
cx.provided_method_sources.borrow().get(&id).cloned()
}
pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> Vec<Rc<Method<'tcx>>> {
if is_local(id) {
if let ItemTrait(_, _, _, ref ms) = cx.map.expect_item(id.node).node {
ms.iter().filter_map(|ti| {
if let ast::MethodTraitItem(_, Some(_)) = ti.node {
match impl_or_trait_item(cx, ast_util::local_def(ti.id)) {
MethodTraitItem(m) => Some(m),
_ => {
cx.sess.bug("provided_trait_methods(): \
non-method item found from \
looking up provided method?!")
}
}
} else {
None
}
}).collect()
} else {
cx.sess.bug(&format!("provided_trait_methods: `{:?}` is not a trait", id))
}
} else {
csearch::get_provided_trait_methods(cx, id)
}
}
pub fn associated_consts<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> Vec<Rc<AssociatedConst<'tcx>>> {
if is_local(id) {
match cx.map.expect_item(id.node).node {
ItemTrait(_, _, _, ref tis) => {
tis.iter().filter_map(|ti| {
if let ast::ConstTraitItem(_, _) = ti.node {
match impl_or_trait_item(cx, ast_util::local_def(ti.id)) {
ConstTraitItem(ac) => Some(ac),
_ => {
cx.sess.bug("associated_consts(): \
non-const item found from \
looking up a constant?!")
}
}
} else {
None
}
}).collect()
}
ItemImpl(_, _, _, _, _, ref iis) => {
iis.iter().filter_map(|ii| {
if let ast::ConstImplItem(_, _) = ii.node {
match impl_or_trait_item(cx, ast_util::local_def(ii.id)) {
ConstTraitItem(ac) => Some(ac),
_ => {
cx.sess.bug("associated_consts(): \
non-const item found from \
looking up a constant?!")
}
}
} else {
None
}
}).collect()
}
_ => {
cx.sess.bug(&format!("associated_consts: `{:?}` is not a trait \
or impl", id))
}
}
} else {
csearch::get_associated_consts(cx, id)
}
}
/// Helper for looking things up in the various maps that are populated during
/// typeck::collect (e.g., `cx.impl_or_trait_items`, `cx.tcache`, etc). All of
/// these share the pattern that if the id is local, it should have been loaded
/// into the map by the `typeck::collect` phase. If the def-id is external,
/// then we have to go consult the crate loading code (and cache the result for
/// the future).
fn lookup_locally_or_in_crate_store<V, F>(descr: &str,
def_id: ast::DefId,
map: &RefCell<DefIdMap<V>>,
load_external: F) -> V where
V: Clone,
F: FnOnce() -> V,
{
match map.borrow().get(&def_id).cloned() {
Some(v) => { return v; }
None => { }
}
if def_id.krate == ast::LOCAL_CRATE {
panic!("No def'n found for {:?} in tcx.{}", def_id, descr);
}
let v = load_external();
map.borrow_mut().insert(def_id, v.clone());
v
}
pub fn trait_item<'tcx>(cx: &ctxt<'tcx>, trait_did: ast::DefId, idx: usize)
-> ImplOrTraitItem<'tcx> {
let method_def_id = (*ty::trait_item_def_ids(cx, trait_did))[idx].def_id();
impl_or_trait_item(cx, method_def_id)
}
pub fn trait_items<'tcx>(cx: &ctxt<'tcx>, trait_did: ast::DefId)
-> Rc<Vec<ImplOrTraitItem<'tcx>>> {
let mut trait_items = cx.trait_items_cache.borrow_mut();
match trait_items.get(&trait_did).cloned() {
Some(trait_items) => trait_items,
None => {
let def_ids = ty::trait_item_def_ids(cx, trait_did);
let items: Rc<Vec<ImplOrTraitItem>> =
Rc::new(def_ids.iter()
.map(|d| impl_or_trait_item(cx, d.def_id()))
.collect());
trait_items.insert(trait_did, items.clone());
items
}
}
}
pub fn trait_impl_polarity<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> Option<ast::ImplPolarity> {
if id.krate == ast::LOCAL_CRATE {
match cx.map.find(id.node) {
Some(ast_map::NodeItem(item)) => {
match item.node {
ast::ItemImpl(_, polarity, _, _, _, _) => Some(polarity),
_ => None
}
}
_ => None
}
} else {
csearch::get_impl_polarity(cx, id)
}
}
pub fn custom_coerce_unsized_kind<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
-> CustomCoerceUnsized {
memoized(&cx.custom_coerce_unsized_kinds, did, |did: DefId| {
let (kind, src) = if did.krate != ast::LOCAL_CRATE {
(csearch::get_custom_coerce_unsized_kind(cx, did), "external")
} else {
(None, "local")
};
match kind {
Some(kind) => kind,
None => {
cx.sess.bug(&format!("custom_coerce_unsized_kind: \
{} impl `{}` is missing its kind",
src, item_path_str(cx, did)));
}
}
})
}
pub fn impl_or_trait_item<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> ImplOrTraitItem<'tcx> {
lookup_locally_or_in_crate_store(
"impl_or_trait_items", id, &cx.impl_or_trait_items,
|| csearch::get_impl_or_trait_item(cx, id))
}
/// Returns the parameter index that the given associated type corresponds to.
pub fn associated_type_parameter_index(cx: &ctxt,
trait_def: &TraitDef,
associated_type_id: ast::DefId)
-> usize {
for type_parameter_def in trait_def.generics.types.iter() {
if type_parameter_def.def_id == associated_type_id {
return type_parameter_def.index as usize
}
}
cx.sess.bug("couldn't find associated type parameter index")
}
pub fn trait_item_def_ids(cx: &ctxt, id: ast::DefId)
-> Rc<Vec<ImplOrTraitItemId>> {
lookup_locally_or_in_crate_store(
"trait_item_def_ids", id, &cx.trait_item_def_ids,
|| Rc::new(csearch::get_trait_item_def_ids(&cx.sess.cstore, id)))
}
/// Returns the trait-ref corresponding to a given impl, or None if it is
/// an inherent impl.
pub fn impl_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> Option<TraitRef<'tcx>>
{
lookup_locally_or_in_crate_store(
"impl_trait_refs", id, &cx.impl_trait_refs,
|| csearch::get_impl_trait(cx, id))
}
/// Returns whether this DefId refers to an impl
pub fn is_impl<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) -> bool {
if id.krate == ast::LOCAL_CRATE {
if let Some(ast_map::NodeItem(
&ast::Item { node: ast::ItemImpl(..), .. })) = cx.map.find(id.node) {
true
} else {
false
}
} else {
csearch::is_impl(&cx.sess.cstore, id)
}
}
pub fn trait_ref_to_def_id(tcx: &ctxt, tr: &ast::TraitRef) -> ast::DefId {
tcx.def_map.borrow().get(&tr.ref_id).expect("no def-map entry for trait").def_id()
}
pub fn try_add_builtin_trait(
tcx: &ctxt,
trait_def_id: ast::DefId,
builtin_bounds: &mut EnumSet<BuiltinBound>)
-> bool
{
    //! Checks whether `trait_def_id` refers to one of the builtin
    //! traits, like `Send`, and adds the corresponding
    //! bound to the set `builtin_bounds` if so. Returns true if
    //! `trait_def_id` is a builtin trait.
match tcx.lang_items.to_builtin_kind(trait_def_id) {
Some(bound) => { builtin_bounds.insert(bound); true }
None => false
}
}
pub fn ty_to_def_id(ty: Ty) -> Option<ast::DefId> {
match ty.sty {
ty_trait(ref tt) =>
Some(tt.principal_def_id()),
ty_struct(id, _) |
ty_enum(id, _) |
ty_closure(id, _) =>
Some(id),
_ =>
None
}
}
// Enum information
#[derive(Clone)]
pub struct VariantInfo<'tcx> {
pub args: Vec<Ty<'tcx>>,
pub arg_names: Option<Vec<ast::Name>>,
pub ctor_ty: Option<Ty<'tcx>>,
pub name: ast::Name,
pub id: ast::DefId,
pub disr_val: Disr,
pub vis: Visibility
}
impl<'tcx> VariantInfo<'tcx> {
/// Creates a new VariantInfo from the corresponding ast representation.
///
/// Does not do any caching of the value in the type context.
pub fn from_ast_variant(cx: &ctxt<'tcx>,
ast_variant: &ast::Variant,
discriminant: Disr) -> VariantInfo<'tcx> {
let ctor_ty = node_id_to_type(cx, ast_variant.node.id);
match ast_variant.node.kind {
ast::TupleVariantKind(ref args) => {
let arg_tys = if !args.is_empty() {
// the regions in the argument types come from the
// enum def'n, and hence will all be early bound
ty::no_late_bound_regions(cx, &ty_fn_args(ctor_ty)).unwrap()
} else {
Vec::new()
};
return VariantInfo {
args: arg_tys,
arg_names: None,
ctor_ty: Some(ctor_ty),
name: ast_variant.node.name.name,
id: ast_util::local_def(ast_variant.node.id),
disr_val: discriminant,
vis: ast_variant.node.vis
};
},
ast::StructVariantKind(ref struct_def) => {
let fields: &[StructField] = &struct_def.fields;
assert!(!fields.is_empty());
let arg_tys = struct_def.fields.iter()
.map(|field| node_id_to_type(cx, field.node.id)).collect();
let arg_names = fields.iter().map(|field| {
match field.node.kind {
NamedField(ident, _) => ident.name,
UnnamedField(..) => cx.sess.bug(
"enum_variants: all fields in struct must have a name")
}
}).collect();
return VariantInfo {
args: arg_tys,
arg_names: Some(arg_names),
ctor_ty: None,
name: ast_variant.node.name.name,
id: ast_util::local_def(ast_variant.node.id),
disr_val: discriminant,
vis: ast_variant.node.vis
};
}
}
}
}
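// Informal example: for `enum E { A(u8, u16), B { x: u8 } }`, the tuple
// variant `A` yields `args` of the two field types, `arg_names: None`, and a
// `ctor_ty`; the struct variant `B` yields `arg_names: Some([x])` and
// `ctor_ty: None`.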
pub fn substd_enum_variants<'tcx>(cx: &ctxt<'tcx>,
id: ast::DefId,
substs: &Substs<'tcx>)
-> Vec<Rc<VariantInfo<'tcx>>> {
enum_variants(cx, id).iter().map(|variant_info| {
let substd_args = variant_info.args.iter()
.map(|aty| aty.subst(cx, substs)).collect::<Vec<_>>();
let substd_ctor_ty = variant_info.ctor_ty.subst(cx, substs);
Rc::new(VariantInfo {
args: substd_args,
ctor_ty: substd_ctor_ty,
..(**variant_info).clone()
})
}).collect()
}
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> String {
with_path(cx, id, |path| ast_map::path_to_string(path)).to_string()
}
#[derive(Copy, Clone)]
pub enum DtorKind {
NoDtor,
TraitDtor(DefId, bool)
}
impl DtorKind {
pub fn is_present(&self) -> bool {
match *self {
TraitDtor(..) => true,
_ => false
}
}
pub fn has_drop_flag(&self) -> bool {
match self {
&NoDtor => false,
&TraitDtor(_, flag) => flag
}
}
}
/* Returns the kind of destructor, if any, for the struct named by `struct_id`. */
pub fn ty_dtor(cx: &ctxt, struct_id: DefId) -> DtorKind {
match cx.destructor_for_type.borrow().get(&struct_id) {
Some(&method_def_id) => {
let flag = !has_attr(cx, struct_id, "unsafe_no_drop_flag");
TraitDtor(method_def_id, flag)
}
None => NoDtor,
}
}
pub fn has_dtor(cx: &ctxt, struct_id: DefId) -> bool {
cx.destructor_for_type.borrow().contains_key(&struct_id)
}
pub fn with_path<T, F>(cx: &ctxt, id: ast::DefId, f: F) -> T where
F: FnOnce(ast_map::PathElems) -> T,
{
if id.krate == ast::LOCAL_CRATE {
cx.map.with_path(id.node, f)
} else {
f(csearch::get_item_path(cx, id).iter().cloned().chain(LinkedPath::empty()))
}
}
pub fn enum_is_univariant(cx: &ctxt, id: ast::DefId) -> bool {
enum_variants(cx, id).len() == 1
}
pub fn type_is_empty(cx: &ctxt, ty: Ty) -> bool {
match ty.sty {
ty_enum(did, _) => (*enum_variants(cx, did)).is_empty(),
_ => false
}
}
trait IntTypeExt {
fn to_ty<'tcx>(&self, cx: &ctxt<'tcx>) -> Ty<'tcx>;
fn i64_to_disr(&self, val: i64) -> Option<Disr>;
fn u64_to_disr(&self, val: u64) -> Option<Disr>;
fn disr_incr(&self, val: Disr) -> Option<Disr>;
fn disr_string(&self, val: Disr) -> String;
fn disr_wrap_incr(&self, val: Option<Disr>) -> Disr;
}
impl IntTypeExt for attr::IntType {
fn to_ty<'tcx>(&self, cx: &ctxt<'tcx>) -> Ty<'tcx> {
match *self {
SignedInt(ast::TyI8) => cx.types.i8,
SignedInt(ast::TyI16) => cx.types.i16,
SignedInt(ast::TyI32) => cx.types.i32,
SignedInt(ast::TyI64) => cx.types.i64,
SignedInt(ast::TyIs) => cx.types.isize,
UnsignedInt(ast::TyU8) => cx.types.u8,
UnsignedInt(ast::TyU16) => cx.types.u16,
UnsignedInt(ast::TyU32) => cx.types.u32,
UnsignedInt(ast::TyU64) => cx.types.u64,
UnsignedInt(ast::TyUs) => cx.types.usize,
}
}
fn i64_to_disr(&self, val: i64) -> Option<Disr> {
match *self {
SignedInt(ast::TyI8) => val.to_i8() .map(|v| v as Disr),
SignedInt(ast::TyI16) => val.to_i16() .map(|v| v as Disr),
SignedInt(ast::TyI32) => val.to_i32() .map(|v| v as Disr),
SignedInt(ast::TyI64) => val.to_i64() .map(|v| v as Disr),
UnsignedInt(ast::TyU8) => val.to_u8() .map(|v| v as Disr),
UnsignedInt(ast::TyU16) => val.to_u16() .map(|v| v as Disr),
UnsignedInt(ast::TyU32) => val.to_u32() .map(|v| v as Disr),
UnsignedInt(ast::TyU64) => val.to_u64() .map(|v| v as Disr),
UnsignedInt(ast::TyUs) |
SignedInt(ast::TyIs) => unreachable!(),
}
}
fn u64_to_disr(&self, val: u64) -> Option<Disr> {
match *self {
SignedInt(ast::TyI8) => val.to_i8() .map(|v| v as Disr),
SignedInt(ast::TyI16) => val.to_i16() .map(|v| v as Disr),
SignedInt(ast::TyI32) => val.to_i32() .map(|v| v as Disr),
SignedInt(ast::TyI64) => val.to_i64() .map(|v| v as Disr),
UnsignedInt(ast::TyU8) => val.to_u8() .map(|v| v as Disr),
UnsignedInt(ast::TyU16) => val.to_u16() .map(|v| v as Disr),
UnsignedInt(ast::TyU32) => val.to_u32() .map(|v| v as Disr),
UnsignedInt(ast::TyU64) => val.to_u64() .map(|v| v as Disr),
UnsignedInt(ast::TyUs) |
SignedInt(ast::TyIs) => unreachable!(),
}
}
fn disr_incr(&self, val: Disr) -> Option<Disr> {
macro_rules! add1 {
($e:expr) => { $e.and_then(|v|v.checked_add(1)).map(|v| v as Disr) }
}
match *self {
// SignedInt repr means we *want* to reinterpret the bits
// treating the highest bit of Disr as a sign-bit, so
// cast to i64 before range-checking.
SignedInt(ast::TyI8) => add1!((val as i64).to_i8()),
SignedInt(ast::TyI16) => add1!((val as i64).to_i16()),
SignedInt(ast::TyI32) => add1!((val as i64).to_i32()),
SignedInt(ast::TyI64) => add1!(Some(val as i64)),
UnsignedInt(ast::TyU8) => add1!(val.to_u8()),
UnsignedInt(ast::TyU16) => add1!(val.to_u16()),
UnsignedInt(ast::TyU32) => add1!(val.to_u32()),
UnsignedInt(ast::TyU64) => add1!(Some(val)),
UnsignedInt(ast::TyUs) |
SignedInt(ast::TyIs) => unreachable!(),
}
}
// This returns a String because (1.) it is only used for
// rendering an error message and (2.) a string can represent the
// full range from `i64::MIN` through `u64::MAX`.
fn disr_string(&self, val: Disr) -> String {
match *self {
SignedInt(ast::TyI8) => format!("{}", val as i8 ),
SignedInt(ast::TyI16) => format!("{}", val as i16),
SignedInt(ast::TyI32) => format!("{}", val as i32),
SignedInt(ast::TyI64) => format!("{}", val as i64),
UnsignedInt(ast::TyU8) => format!("{}", val as u8 ),
UnsignedInt(ast::TyU16) => format!("{}", val as u16),
UnsignedInt(ast::TyU32) => format!("{}", val as u32),
UnsignedInt(ast::TyU64) => format!("{}", val as u64),
UnsignedInt(ast::TyUs) |
SignedInt(ast::TyIs) => unreachable!(),
}
}
fn disr_wrap_incr(&self, val: Option<Disr>) -> Disr {
macro_rules! add1 {
($e:expr) => { ($e).wrapping_add(1) as Disr }
}
let val = val.unwrap_or(ty::INITIAL_DISCRIMINANT_VALUE);
match *self {
SignedInt(ast::TyI8) => add1!(val as i8 ),
SignedInt(ast::TyI16) => add1!(val as i16),
SignedInt(ast::TyI32) => add1!(val as i32),
SignedInt(ast::TyI64) => add1!(val as i64),
UnsignedInt(ast::TyU8) => add1!(val as u8 ),
UnsignedInt(ast::TyU16) => add1!(val as u16),
UnsignedInt(ast::TyU32) => add1!(val as u32),
UnsignedInt(ast::TyU64) => add1!(val as u64),
UnsignedInt(ast::TyUs) |
SignedInt(ast::TyIs) => unreachable!(),
}
}
}
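// A schematic illustration of the discriminant helpers (informal):
//
//     SignedInt(ast::TyI8).disr_incr(127)            ~> None (would overflow)
//     SignedInt(ast::TyI8).disr_wrap_incr(Some(127)) ~> wraps to -128 as i8,
//                                                       reinterpreted as Disr
//     UnsignedInt(ast::TyU8).u64_to_disr(300)        ~> None (out of range)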
/// Returns `(normalized_type, ty)`, where `normalized_type` is the
/// IntType representation of one of {i64,i32,i16,i8,u64,u32,u16,u8},
/// and `ty` is the original type (i.e. may include `isize` or
/// `usize`).
pub fn enum_repr_type<'tcx>(cx: &ctxt<'tcx>,
opt_hint: Option<&attr::ReprAttr>)
-> (attr::IntType, Ty<'tcx>)
{
let repr_type = match opt_hint {
// Feed in the given type
Some(&attr::ReprInt(_, int_t)) => int_t,
// ... but provide sensible default if none provided
//
        // NB. Historically `fn enum_variants` generated i64 here, while
// rustc_typeck::check would generate isize.
_ => SignedInt(ast::TyIs),
};
let repr_type_ty = repr_type.to_ty(cx);
let repr_type = match repr_type {
SignedInt(ast::TyIs) =>
SignedInt(cx.sess.target.int_type),
UnsignedInt(ast::TyUs) =>
UnsignedInt(cx.sess.target.uint_type),
other => other
};
(repr_type, repr_type_ty)
}
fn report_discrim_overflow(cx: &ctxt,
variant_span: Span,
variant_name: &str,
repr_type: attr::IntType,
prev_val: Disr) {
let computed_value = repr_type.disr_wrap_incr(Some(prev_val));
let computed_value = repr_type.disr_string(computed_value);
let prev_val = repr_type.disr_string(prev_val);
let repr_type = repr_type.to_ty(cx).user_string(cx);
span_err!(cx.sess, variant_span, E0370,
"enum discriminant overflowed on value after {}: {}; \
               set explicitly via {} = {} if that is the desired outcome",
prev_val, repr_type, variant_name, computed_value);
}
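// Informally, a source enum that reaches this error looks like (sketch):
//
//     #[repr(i8)]
//     enum E { A = 127, B }   // `B` would need 128, which overflows i8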
// This computes the discriminant values for the sequence of Variants
// attached to a particular enum, taking into account the #[repr] (if
// any) provided via the `opt_hint`.
fn compute_enum_variants<'tcx>(cx: &ctxt<'tcx>,
vs: &'tcx [P<ast::Variant>],
opt_hint: Option<&attr::ReprAttr>)
-> Vec<Rc<ty::VariantInfo<'tcx>>> {
let mut variants: Vec<Rc<ty::VariantInfo>> = Vec::new();
let mut prev_disr_val: Option<ty::Disr> = None;
let (repr_type, repr_type_ty) = ty::enum_repr_type(cx, opt_hint);
for v in vs {
// If the discriminant value is specified explicitly in the
// enum, check whether the initialization expression is valid,
// otherwise use the last value plus one.
let current_disr_val;
        // This closure is used when an error occurs during the
        // computation: it attempts to assign a (hopefully) fresh
        // value to avoid spurious error reports downstream.
let attempt_fresh_value = move || -> Disr {
repr_type.disr_wrap_incr(prev_disr_val)
};
match v.node.disr_expr {
Some(ref e) => {
debug!("disr expr, checking {}", pprust::expr_to_string(&**e));
// check_expr (from check_const pass) doesn't guarantee
// that the expression is in a form that eval_const_expr can
// handle, so we may still get an internal compiler error
//
// pnkfelix: The above comment was transcribed from
// the version of this code taken from rustc_typeck.
// Presumably the implication is that we need to deal
// with such ICE's as they arise.
//
// Since this can be called from `ty::enum_variants`
// anyway, best thing is to make `eval_const_expr`
// more robust (on case-by-case basis).
match const_eval::eval_const_expr_partial(cx, &**e, Some(repr_type_ty)) {
Ok(const_eval::const_int(val)) => current_disr_val = val as Disr,
Ok(const_eval::const_uint(val)) => current_disr_val = val as Disr,
Ok(_) => {
let sign_desc = if repr_type.is_signed() { "signed" } else { "unsigned" };
span_err!(cx.sess, e.span, E0079,
"expected {} integer constant",
sign_desc);
current_disr_val = attempt_fresh_value();
}
Err(ref err) => {
span_err!(cx.sess, err.span, E0080,
"constant evaluation error: {}",
err.description());
current_disr_val = attempt_fresh_value();
}
}
},
None => {
current_disr_val = match prev_disr_val {
Some(prev_disr_val) => {
if let Some(v) = repr_type.disr_incr(prev_disr_val) {
v
} else {
report_discrim_overflow(cx, v.span, v.node.name.as_str(),
repr_type, prev_disr_val);
attempt_fresh_value()
}
}
None => ty::INITIAL_DISCRIMINANT_VALUE
}
}
}
let variant_info = Rc::new(VariantInfo::from_ast_variant(cx, &**v, current_disr_val));
prev_disr_val = Some(current_disr_val);
variants.push(variant_info);
}
return variants;
}
pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
-> Rc<Vec<Rc<VariantInfo<'tcx>>>> {
memoized(&cx.enum_var_cache, id, |id: ast::DefId| {
if ast::LOCAL_CRATE != id.krate {
Rc::new(csearch::get_enum_variants(cx, id))
} else {
match cx.map.get(id.node) {
ast_map::NodeItem(ref item) => {
match item.node {
ast::ItemEnum(ref enum_definition, _) => {
Rc::new(compute_enum_variants(
cx,
&enum_definition.variants,
lookup_repr_hints(cx, id).get(0)))
}
_ => {
cx.sess.bug("enum_variants: id not bound to an enum")
}
}
}
_ => cx.sess.bug("enum_variants: id not bound to an enum")
}
}
})
}
// Returns information about the enum variant with the given ID:
pub fn enum_variant_with_id<'tcx>(cx: &ctxt<'tcx>,
enum_id: ast::DefId,
variant_id: ast::DefId)
-> Rc<VariantInfo<'tcx>> {
enum_variants(cx, enum_id).iter()
.find(|variant| variant.id == variant_id)
.expect("enum_variant_with_id(): no variant exists with that ID")
.clone()
}
// If the given item is in an external crate, looks up its type and adds it to
// the type cache. Returns the type parameters and type.
pub fn lookup_item_type<'tcx>(cx: &ctxt<'tcx>,
did: ast::DefId)
-> TypeScheme<'tcx> {
lookup_locally_or_in_crate_store(
"tcache", did, &cx.tcache,
|| csearch::get_type(cx, did))
}
/// Given the did of a trait, returns its canonical trait ref.
pub fn lookup_trait_def<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
-> &'tcx TraitDef<'tcx> {
lookup_locally_or_in_crate_store(
"trait_defs", did, &cx.trait_defs,
|| cx.arenas.trait_defs.alloc(csearch::get_trait_def(cx, did))
)
}
/// Given the did of an item, returns its full set of predicates.
pub fn lookup_predicates<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
-> GenericPredicates<'tcx>
{
lookup_locally_or_in_crate_store(
"predicates", did, &cx.predicates,
|| csearch::get_predicates(cx, did))
}
/// Given the did of a trait, returns its superpredicates.
pub fn lookup_super_predicates<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
-> GenericPredicates<'tcx>
{
lookup_locally_or_in_crate_store(
"super_predicates", did, &cx.super_predicates,
|| csearch::get_super_predicates(cx, did))
}
pub fn predicates<'tcx>(
tcx: &ctxt<'tcx>,
param_ty: Ty<'tcx>,
bounds: &ParamBounds<'tcx>)
-> Vec<Predicate<'tcx>>
{
let mut vec = Vec::new();
for builtin_bound in &bounds.builtin_bounds {
match traits::trait_ref_for_builtin_bound(tcx, builtin_bound, param_ty) {
Ok(trait_ref) => { vec.push(trait_ref.as_predicate()); }
Err(ErrorReported) => { }
}
}
    for &region_bound in &bounds.region_bounds {
// account for the binder being introduced below; no need to shift `param_ty`
// because, at present at least, it can only refer to early-bound regions
let region_bound = ty_fold::shift_region(region_bound, 1);
vec.push(ty::Binder(ty::OutlivesPredicate(param_ty, region_bound)).as_predicate());
}
for bound_trait_ref in &bounds.trait_bounds {
vec.push(bound_trait_ref.as_predicate());
}
for projection in &bounds.projection_bounds {
vec.push(projection.as_predicate());
}
vec
}
/// Get the attributes of a definition.
pub fn get_attrs<'tcx>(tcx: &'tcx ctxt, did: DefId)
-> Cow<'tcx, [ast::Attribute]> {
if is_local(did) {
Cow::Borrowed(tcx.map.attrs(did.node))
} else {
Cow::Owned(csearch::get_item_attrs(&tcx.sess.cstore, did))
}
}
/// Determine whether an item is annotated with an attribute
pub fn has_attr(tcx: &ctxt, did: DefId, attr: &str) -> bool {
get_attrs(tcx, did).iter().any(|item| item.check_name(attr))
}
/// Determine whether an item is annotated with `#[repr(packed)]`
pub fn lookup_packed(tcx: &ctxt, did: DefId) -> bool {
lookup_repr_hints(tcx, did).contains(&attr::ReprPacked)
}
/// Determine whether an item is annotated with `#[simd]`
pub fn lookup_simd(tcx: &ctxt, did: DefId) -> bool {
has_attr(tcx, did, "simd")
}
/// Obtain the representation annotation for a struct definition.
pub fn lookup_repr_hints(tcx: &ctxt, did: DefId) -> Rc<Vec<attr::ReprAttr>> {
memoized(&tcx.repr_hint_cache, did, |did: DefId| {
Rc::new(if did.krate == LOCAL_CRATE {
get_attrs(tcx, did).iter().flat_map(|meta| {
attr::find_repr_attrs(tcx.sess.diagnostic(), meta).into_iter()
}).collect()
} else {
csearch::get_repr_attrs(&tcx.sess.cstore, did)
})
})
}
// Look up a field ID, whether or not it's local
pub fn lookup_field_type_unsubstituted<'tcx>(tcx: &ctxt<'tcx>,
struct_id: DefId,
id: DefId)
-> Ty<'tcx> {
if id.krate == ast::LOCAL_CRATE {
node_id_to_type(tcx, id.node)
} else {
let mut tcache = tcx.tcache.borrow_mut();
tcache.entry(id).or_insert_with(|| csearch::get_field_type(tcx, struct_id, id)).ty
}
}
// Look up a field ID, whether or not it's local
// Takes a list of type substs in case the struct is generic
pub fn lookup_field_type<'tcx>(tcx: &ctxt<'tcx>,
struct_id: DefId,
id: DefId,
substs: &Substs<'tcx>)
-> Ty<'tcx> {
lookup_field_type_unsubstituted(tcx, struct_id, id).subst(tcx, substs)
}
// Look up the list of field names and IDs for a given struct.
// Panics if the id is not bound to a struct.
pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> {
if did.krate == ast::LOCAL_CRATE {
let struct_fields = cx.struct_fields.borrow();
match struct_fields.get(&did) {
Some(fields) => (**fields).clone(),
_ => {
cx.sess.bug(
&format!("ID not mapped to struct fields: {}",
cx.map.node_to_string(did.node)));
}
}
} else {
csearch::get_struct_fields(&cx.sess.cstore, did)
}
}
pub fn is_tuple_struct(cx: &ctxt, did: ast::DefId) -> bool {
let fields = lookup_struct_fields(cx, did);
!fields.is_empty() && fields.iter().all(|f| f.name == token::special_names::unnamed_field)
}
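// Informal examples: `struct P(u8, u8);` is a tuple struct (every field has
// the special unnamed-field name); `struct Q { a: u8 }` is not; a unit struct
// `struct U;` is not either, since its field list is empty.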
// Returns a list of fields corresponding to the struct's items. trans uses
// this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tcx>)
-> Vec<field<'tcx>> {
lookup_struct_fields(cx, did).iter().map(|f| {
field {
name: f.name,
mt: mt {
ty: lookup_field_type(cx, did, f.id, substs),
mutbl: MutImmutable
}
}
}).collect()
}
// Returns a list of fields corresponding to the tuple's items. trans uses
// this.
pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec<field<'tcx>> {
v.iter().enumerate().map(|(i, &f)| {
field {
name: token::intern(&i.to_string()),
mt: mt {
ty: f,
mutbl: MutImmutable
}
}
}).collect()
}
/// Returns the deeply last field of nested structures, or the same type,
/// if not a structure at all. Corresponds to the only possible unsized
/// field, and its type can be used to determine unsizing strategy.
pub fn struct_tail<'tcx>(cx: &ctxt<'tcx>, mut ty: Ty<'tcx>) -> Ty<'tcx> {
while let ty_struct(def_id, substs) = ty.sty {
match struct_fields(cx, def_id, substs).last() {
Some(f) => ty = f.mt.ty,
None => break
}
}
ty
}
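// A schematic example of `struct_tail` (informal):
//
//     struct Outer<T: ?Sized> { len: usize, inner: Inner<T> }
//     struct Inner<T: ?Sized> { t: T }
//
//     struct_tail(cx, Outer<[u8]>)  ~> [u8]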
/// Same as applying struct_tail on `source` and `target`, but only
/// keeps going as long as the two types are instances of the same
/// structure definitions.
/// For `(Foo<Foo<T>>, Foo<Trait>)`, the result will be `(Foo<T>, Trait)`,
/// whereas struct_tail produces `T`, and `Trait`, respectively.
pub fn struct_lockstep_tails<'tcx>(cx: &ctxt<'tcx>,
source: Ty<'tcx>,
target: Ty<'tcx>)
-> (Ty<'tcx>, Ty<'tcx>) {
let (mut a, mut b) = (source, target);
while let (&ty_struct(a_did, a_substs), &ty_struct(b_did, b_substs)) = (&a.sty, &b.sty) {
        if a_did != b_did {
            break;
        }
if let Some(a_f) = struct_fields(cx, a_did, a_substs).last() {
if let Some(b_f) = struct_fields(cx, b_did, b_substs).last() {
a = a_f.mt.ty;
b = b_f.mt.ty;
} else {
break;
}
} else {
break;
}
}
(a, b)
}
#[derive(Copy, Clone)]
pub struct ClosureUpvar<'tcx> {
pub def: def::Def,
pub span: Span,
pub ty: Ty<'tcx>,
}
// Returns a list of `ClosureUpvar`s for each upvar.
pub fn closure_upvars<'tcx>(typer: &mc::Typer<'tcx>,
closure_id: ast::DefId,
substs: &Substs<'tcx>)
-> Option<Vec<ClosureUpvar<'tcx>>>
{
// Presently an unboxed closure type cannot "escape" out of a
// function, so we will only encounter ones that originated in the
// local crate or were inlined into it along with some function.
// This may change if abstract return types of some sort are
// implemented.
assert!(closure_id.krate == ast::LOCAL_CRATE);
let tcx = typer.tcx();
match tcx.freevars.borrow().get(&closure_id.node) {
None => Some(vec![]),
Some(ref freevars) => {
freevars.iter()
.map(|freevar| {
let freevar_def_id = freevar.def.def_id();
let freevar_ty = match typer.node_ty(freevar_def_id.node) {
Ok(t) => { t }
Err(()) => { return None; }
};
let freevar_ty = freevar_ty.subst(tcx, substs);
let upvar_id = ty::UpvarId {
var_id: freevar_def_id.node,
closure_expr_id: closure_id.node
};
typer.upvar_capture(upvar_id).map(|capture| {
let freevar_ref_ty = match capture {
UpvarCapture::ByValue => {
freevar_ty
}
UpvarCapture::ByRef(borrow) => {
mk_rptr(tcx,
tcx.mk_region(borrow.region),
ty::mt {
ty: freevar_ty,
mutbl: borrow.kind.to_mutbl_lossy(),
})
}
};
ClosureUpvar {
def: freevar.def,
span: freevar.span,
ty: freevar_ref_ty,
}
})
})
.collect()
}
}
}
// Returns the repeat count for a repeating vector expression.
pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> usize {
match const_eval::eval_const_expr_partial(tcx, count_expr, Some(tcx.types.usize)) {
Ok(val) => {
let found = match val {
const_eval::const_uint(count) => return count as usize,
const_eval::const_int(count) if count >= 0 => return count as usize,
const_eval::const_int(_) => "negative integer",
const_eval::const_float(_) => "float",
const_eval::const_str(_) => "string",
const_eval::const_bool(_) => "boolean",
const_eval::const_binary(_) => "binary array",
const_eval::Struct(..) => "struct",
const_eval::Tuple(_) => "tuple"
};
span_err!(tcx.sess, count_expr.span, E0306,
"expected positive integer for repeat count, found {}",
found);
}
Err(err) => {
let err_description = err.description();
let found = match count_expr.node {
ast::ExprPath(None, ast::Path {
global: false,
ref segments,
..
}) if segments.len() == 1 =>
format!("{}", "found variable"),
_ =>
format!("but {}", err_description),
};
span_err!(tcx.sess, count_expr.span, E0307,
"expected constant integer for repeat count, {}",
found);
}
}
0
}
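// Informal examples: `[0u8; 4]` yields 4; a count that const-evaluates to a
// negative integer reports E0306 ("negative integer"); `[0u8; n]` where `n`
// is a plain variable path reports E0307 ("found variable").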
// Iterate over a type parameter's bounded traits and any supertraits
// of those traits, ignoring kinds.
// Here, the supertraits are the transitive closure of the supertrait
// relation on the supertraits from each bounded trait's constraint
// list.
pub fn each_bound_trait_and_supertraits<'tcx, F>(tcx: &ctxt<'tcx>,
bounds: &[PolyTraitRef<'tcx>],
mut f: F)
-> bool where
F: FnMut(PolyTraitRef<'tcx>) -> bool,
{
for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
if !f(bound_trait_ref) {
return false;
}
}
return true;
}
/// Given a set of predicates that apply to an object type, returns
/// the region bounds that the (erased) `Self` type must
/// outlive. Precisely *because* the `Self` type is erased, the
/// parameter `erased_self_ty` must be supplied to indicate what type
/// has been used to represent `Self` in the predicates
/// themselves. This should really be a unique type; `FreshTy(0)` is a
/// popular choice.
///
/// Requires that trait definitions have been processed so that we can
/// elaborate predicates and walk supertraits.
pub fn required_region_bounds<'tcx>(tcx: &ctxt<'tcx>,
erased_self_ty: Ty<'tcx>,
predicates: Vec<ty::Predicate<'tcx>>)
-> Vec<ty::Region>
{
debug!("required_region_bounds(erased_self_ty={:?}, predicates={:?})",
erased_self_ty.repr(tcx),
predicates.repr(tcx));
assert!(!erased_self_ty.has_escaping_regions());
traits::elaborate_predicates(tcx, predicates)
.filter_map(|predicate| {
match predicate {
ty::Predicate::Projection(..) |
ty::Predicate::Trait(..) |
ty::Predicate::Equate(..) |
ty::Predicate::RegionOutlives(..) => {
None
}
ty::Predicate::TypeOutlives(ty::Binder(ty::OutlivesPredicate(t, r))) => {
// Search for a bound of the form `erased_self_ty
// : 'a`, but be wary of something like `for<'a>
// erased_self_ty : 'a` (we interpret a
// higher-ranked bound like that as 'static,
// though at present the code in `fulfill.rs`
// considers such bounds to be unsatisfiable, so
// it's kind of a moot point since you could never
// construct such an object, but this seems
// correct even if that code changes).
                    if t == erased_self_ty && !r.has_escaping_regions() {
                        Some(r)
                    } else {
                        None
                    }
}
}
})
.collect()
}
pub fn item_variances(tcx: &ctxt, item_id: ast::DefId) -> Rc<ItemVariances> {
lookup_locally_or_in_crate_store(
"item_variance_map", item_id, &tcx.item_variance_map,
|| Rc::new(csearch::get_item_variances(&tcx.sess.cstore, item_id)))
}
pub fn trait_has_default_impl(tcx: &ctxt, trait_def_id: DefId) -> bool {
populate_implementations_for_trait_if_necessary(tcx, trait_def_id);
let def = lookup_trait_def(tcx, trait_def_id);
def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
}
/// Records a trait-to-implementation mapping.
pub fn record_trait_has_default_impl(tcx: &ctxt, trait_def_id: DefId) {
let def = lookup_trait_def(tcx, trait_def_id);
def.flags.set(def.flags.get() | TraitFlags::HAS_DEFAULT_IMPL)
}
/// Load primitive inherent implementations if necessary
pub fn populate_implementations_for_primitive_if_necessary(tcx: &ctxt,
primitive_def_id: ast::DefId) {
if primitive_def_id.krate == LOCAL_CRATE {
return
}
if tcx.populated_external_primitive_impls.borrow().contains(&primitive_def_id) {
return
}
debug!("populate_implementations_for_primitive_if_necessary: searching for {:?}",
primitive_def_id);
let impl_items = csearch::get_impl_items(&tcx.sess.cstore, primitive_def_id);
// Store the implementation info.
tcx.impl_items.borrow_mut().insert(primitive_def_id, impl_items);
tcx.populated_external_primitive_impls.borrow_mut().insert(primitive_def_id);
}
/// Populates the type context with all the inherent implementations for
/// the given type if necessary.
pub fn populate_inherent_implementations_for_type_if_necessary(tcx: &ctxt,
type_id: ast::DefId) {
if type_id.krate == LOCAL_CRATE {
return
}
if tcx.populated_external_types.borrow().contains(&type_id) {
return
}
debug!("populate_inherent_implementations_for_type_if_necessary: searching for {:?}", type_id);
let mut inherent_impls = Vec::new();
csearch::each_inherent_implementation_for_type(&tcx.sess.cstore, type_id, |impl_def_id| {
// Record the implementation.
inherent_impls.push(impl_def_id);
// Store the implementation info.
let impl_items = csearch::get_impl_items(&tcx.sess.cstore, impl_def_id);
tcx.impl_items.borrow_mut().insert(impl_def_id, impl_items);
});
tcx.inherent_impls.borrow_mut().insert(type_id, Rc::new(inherent_impls));
tcx.populated_external_types.borrow_mut().insert(type_id);
}
/// Populates the type context with all the implementations for the given
/// trait if necessary.
pub fn populate_implementations_for_trait_if_necessary(tcx: &ctxt, trait_id: ast::DefId) {
if trait_id.krate == LOCAL_CRATE {
return
}
let def = lookup_trait_def(tcx, trait_id);
if def.flags.get().intersects(TraitFlags::IMPLS_VALID) {
return;
}
debug!("populate_implementations_for_trait_if_necessary: searching for {}", def.repr(tcx));
if csearch::is_defaulted_trait(&tcx.sess.cstore, trait_id) {
record_trait_has_default_impl(tcx, trait_id);
}
csearch::each_implementation_for_trait(&tcx.sess.cstore, trait_id, |implementation_def_id| {
let impl_items = csearch::get_impl_items(&tcx.sess.cstore, implementation_def_id);
let trait_ref = impl_trait_ref(tcx, implementation_def_id).unwrap();
// Record the trait->implementation mapping.
def.record_impl(tcx, implementation_def_id, trait_ref);
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
for impl_item_def_id in &impl_items {
let method_def_id = impl_item_def_id.def_id();
match impl_or_trait_item(tcx, method_def_id) {
MethodTraitItem(method) => {
if let Some(source) = method.provided_source {
tcx.provided_method_sources
.borrow_mut()
.insert(method_def_id, source);
}
}
_ => {}
}
}
// Store the implementation info.
tcx.impl_items.borrow_mut().insert(implementation_def_id, impl_items);
});
def.flags.set(def.flags.get() | TraitFlags::IMPLS_VALID);
}
/// Given the def_id of an impl, return the def_id of the trait it implements.
/// If it implements no trait, return `None`.
pub fn trait_id_of_impl(tcx: &ctxt,
def_id: ast::DefId)
-> Option<ast::DefId> {
ty::impl_trait_ref(tcx, def_id).map(|tr| tr.def_id)
}
/// If the given def ID describes a method belonging to an impl, return the
/// ID of the impl that the method belongs to. Otherwise, return `None`.
pub fn impl_of_method(tcx: &ctxt, def_id: ast::DefId)
-> Option<ast::DefId> {
if def_id.krate != LOCAL_CRATE {
return match csearch::get_impl_or_trait_item(tcx,
def_id).container() {
TraitContainer(_) => None,
ImplContainer(def_id) => Some(def_id),
};
}
match tcx.impl_or_trait_items.borrow().get(&def_id).cloned() {
Some(trait_item) => {
match trait_item.container() {
TraitContainer(_) => None,
ImplContainer(def_id) => Some(def_id),
}
}
None => None
}
}
/// If the given def ID describes an item belonging to a trait (either a
/// default method or an implementation of a trait method), return the ID of
/// the trait that the method belongs to. Otherwise, return `None`.
pub fn trait_of_item(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> {
if def_id.krate != LOCAL_CRATE {
return csearch::get_trait_of_item(&tcx.sess.cstore, def_id, tcx);
}
match tcx.impl_or_trait_items.borrow().get(&def_id).cloned() {
Some(impl_or_trait_item) => {
match impl_or_trait_item.container() {
TraitContainer(def_id) => Some(def_id),
ImplContainer(def_id) => trait_id_of_impl(tcx, def_id),
}
}
None => None
}
}
/// If the given def ID describes an item belonging to a trait, (either a
/// default method or an implementation of a trait method), return the ID of
/// the method inside trait definition (this means that if the given def ID
/// is already that of the original trait method, then the return value is
/// the same).
/// Otherwise, return `None`.
pub fn trait_item_of_item(tcx: &ctxt, def_id: ast::DefId)
-> Option<ImplOrTraitItemId> {
let impl_item = match tcx.impl_or_trait_items.borrow().get(&def_id) {
Some(m) => m.clone(),
None => return None,
};
let name = impl_item.name();
match trait_of_item(tcx, def_id) {
Some(trait_did) => {
let trait_items = ty::trait_items(tcx, trait_did);
trait_items.iter()
.position(|m| m.name() == name)
.map(|idx| ty::trait_item(tcx, trait_did, idx).id())
}
None => None
}
}
/// Creates a hash of the type `Ty` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) -> u64 {
let mut state = SipHasher::new();
helper(tcx, ty, svh, &mut state);
return state.finish();
fn helper<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh,
state: &mut SipHasher) {
macro_rules! byte { ($b:expr) => { ($b as u8).hash(state) } }
macro_rules! hash { ($e:expr) => { $e.hash(state) } }
let region = |state: &mut SipHasher, r: Region| {
match r {
ReStatic => {}
ReLateBound(db, BrAnon(i)) => {
db.hash(state);
i.hash(state);
}
ReEmpty |
ReEarlyBound(..) |
ReLateBound(..) |
ReFree(..) |
ReScope(..) |
ReInfer(..) => {
tcx.sess.bug("unexpected region found when hashing a type")
}
}
};
let did = |state: &mut SipHasher, did: DefId| {
let h = if ast_util::is_local(did) {
svh.clone()
} else {
tcx.sess.cstore.get_crate_hash(did.krate)
};
h.as_str().hash(state);
did.node.hash(state);
};
let mt = |state: &mut SipHasher, mt: mt| {
mt.mutbl.hash(state);
};
let fn_sig = |state: &mut SipHasher, sig: &Binder<FnSig<'tcx>>| {
let sig = anonymize_late_bound_regions(tcx, sig).0;
for a in &sig.inputs { helper(tcx, *a, svh, state); }
if let ty::FnConverging(output) = sig.output {
helper(tcx, output, svh, state);
}
};
maybe_walk_ty(ty, |ty| {
match ty.sty {
ty_bool => byte!(2),
ty_char => byte!(3),
ty_int(i) => {
byte!(4);
hash!(i);
}
ty_uint(u) => {
byte!(5);
hash!(u);
}
ty_float(f) => {
byte!(6);
hash!(f);
}
ty_str => {
byte!(7);
}
ty_enum(d, _) => {
byte!(8);
did(state, d);
}
ty_uniq(_) => {
byte!(9);
}
ty_vec(_, Some(n)) => {
byte!(10);
n.hash(state);
}
ty_vec(_, None) => {
byte!(11);
}
ty_ptr(m) => {
byte!(12);
mt(state, m);
}
ty_rptr(r, m) => {
byte!(13);
region(state, *r);
mt(state, m);
}
ty_bare_fn(opt_def_id, ref b) => {
byte!(14);
hash!(opt_def_id);
hash!(b.unsafety);
hash!(b.abi);
fn_sig(state, &b.sig);
return false;
}
ty_trait(ref data) => {
byte!(17);
did(state, data.principal_def_id());
hash!(data.bounds);
let principal = anonymize_late_bound_regions(tcx, &data.principal).0;
for subty in principal.substs.types.iter() {
helper(tcx, *subty, svh, state);
}
return false;
}
ty_struct(d, _) => {
byte!(18);
did(state, d);
}
ty_tup(ref inner) => {
byte!(19);
hash!(inner.len());
}
ty_param(p) => {
byte!(20);
hash!(p.space);
hash!(p.idx);
hash!(token::get_name(p.name));
}
ty_infer(_) => unreachable!(),
ty_err => byte!(21),
ty_closure(d, _) => {
byte!(22);
did(state, d);
}
ty_projection(ref data) => {
byte!(23);
did(state, data.trait_ref.def_id);
hash!(token::get_name(data.item_name));
}
}
true
});
}
}
impl Variance {
pub fn to_string(self) -> &'static str {
match self {
Covariant => "+",
Contravariant => "-",
Invariant => "o",
Bivariant => "*",
}
}
}
/// Construct a parameter environment suitable for static contexts or other contexts where there
/// are no free type/lifetime parameters in scope.
pub fn empty_parameter_environment<'a,'tcx>(cx: &'a ctxt<'tcx>) -> ParameterEnvironment<'a,'tcx> {
ty::ParameterEnvironment { tcx: cx,
free_substs: Substs::empty(),
caller_bounds: Vec::new(),
implicit_region_bound: ty::ReEmpty,
selection_cache: traits::SelectionCache::new(), }
}
/// Constructs and returns a substitution that can be applied to move from
/// the "outer" view of a type or method to the "inner" view.
/// In general, this means converting from bound parameters to
/// free parameters. Since we currently represent bound/free type
/// parameters in the same way, this only has an effect on regions.
pub fn construct_free_substs<'a,'tcx>(
tcx: &'a ctxt<'tcx>,
generics: &Generics<'tcx>,
free_id: ast::NodeId)
-> Substs<'tcx>
{
// map T => T
let mut types = VecPerParamSpace::empty();
push_types_from_defs(tcx, &mut types, generics.types.as_slice());
let free_id_outlive = region::DestructionScopeData::new(free_id);
// map bound 'a => free 'a
let mut regions = VecPerParamSpace::empty();
push_region_params(&mut regions, free_id_outlive, generics.regions.as_slice());
return Substs {
types: types,
regions: subst::NonerasedRegions(regions)
};
fn push_region_params(regions: &mut VecPerParamSpace<ty::Region>,
all_outlive_extent: region::DestructionScopeData,
region_params: &[RegionParameterDef])
{
for r in region_params {
regions.push(r.space, ty::free_region_from_def(all_outlive_extent, r));
}
}
fn push_types_from_defs<'tcx>(tcx: &ty::ctxt<'tcx>,
types: &mut VecPerParamSpace<Ty<'tcx>>,
defs: &[TypeParameterDef<'tcx>]) {
for def in defs {
debug!("construct_parameter_environment(): push_types_from_defs: def={:?}",
def.repr(tcx));
let ty = ty::mk_param_from_def(tcx, def);
types.push(def.space, ty);
}
}
}
/// See `ParameterEnvironment` struct def'n for details
pub fn construct_parameter_environment<'a,'tcx>(
tcx: &'a ctxt<'tcx>,
span: Span,
generics: &ty::Generics<'tcx>,
generic_predicates: &ty::GenericPredicates<'tcx>,
free_id: ast::NodeId)
-> ParameterEnvironment<'a, 'tcx>
{
//
// Construct the free substs.
//
let free_substs = construct_free_substs(tcx, generics, free_id);
let free_id_outlive = region::DestructionScopeData::new(free_id);
//
// Compute the bounds on Self and the type parameters.
//
let bounds = generic_predicates.instantiate(tcx, &free_substs);
let bounds = liberate_late_bound_regions(tcx, free_id_outlive, &ty::Binder(bounds));
let predicates = bounds.predicates.into_vec();
debug!("construct_parameter_environment: free_id={:?} free_subst={:?} predicates={:?}",
free_id,
free_substs.repr(tcx),
predicates.repr(tcx));
//
// Finally, we have to normalize the bounds in the environment, in
// case they contain any associated type projections. This process
    // can yield errors if illegal associated types were put in, like
    // `<i32 as Foo>::Bar` where `i32` does not implement `Foo`. We
    // report these errors right here; this doesn't actually feel
    // right to me, because constructing the environment feels like a
    // kind of an "idempotent" action, but I'm not sure where would be
// a better place. In practice, we construct environments for
// every fn once during type checking, and we'll abort if there
// are any errors at that point, so after type checking you can be
// sure that this will succeed without errors anyway.
//
let unnormalized_env = ty::ParameterEnvironment {
tcx: tcx,
free_substs: free_substs,
implicit_region_bound: ty::ReScope(free_id_outlive.to_code_extent()),
caller_bounds: predicates,
selection_cache: traits::SelectionCache::new(),
};
let cause = traits::ObligationCause::misc(span, free_id);
traits::normalize_param_env_or_error(unnormalized_env, cause)
}
impl BorrowKind {
pub fn from_mutbl(m: ast::Mutability) -> BorrowKind {
match m {
ast::MutMutable => MutBorrow,
ast::MutImmutable => ImmBorrow,
}
}
/// Returns a mutability `m` such that an `&m T` pointer could be used to obtain this borrow
/// kind. Because borrow kinds are richer than mutabilities, we sometimes have to pick a
/// mutability that is stronger than necessary so that it at least *would permit* the borrow in
/// question.
pub fn to_mutbl_lossy(self) -> ast::Mutability {
match self {
MutBorrow => ast::MutMutable,
ImmBorrow => ast::MutImmutable,
// We have no type corresponding to a unique imm borrow, so
// use `&mut`. It gives all the capabilities of an `&uniq`
// and hence is a safe "over approximation".
UniqueImmBorrow => ast::MutMutable,
}
}
pub fn to_user_str(&self) -> &'static str {
match *self {
MutBorrow => "mutable",
ImmBorrow => "immutable",
UniqueImmBorrow => "uniquely immutable",
}
}
}
impl<'tcx> ctxt<'tcx> {
pub fn is_method_call(&self, expr_id: ast::NodeId) -> bool {
self.method_map.borrow().contains_key(&MethodCall::expr(expr_id))
}
pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture> {
Some(self.upvar_capture_map.borrow().get(&upvar_id).unwrap().clone())
}
}
impl<'a,'tcx> mc::Typer<'tcx> for ParameterEnvironment<'a,'tcx> {
fn node_ty(&self, id: ast::NodeId) -> mc::McResult<Ty<'tcx>> {
Ok(ty::node_id_to_type(self.tcx, id))
}
fn expr_ty_adjusted(&self, expr: &ast::Expr) -> mc::McResult<Ty<'tcx>> {
Ok(ty::expr_ty_adjusted(self.tcx, expr))
}
fn node_method_ty(&self, method_call: ty::MethodCall) -> Option<Ty<'tcx>> {
self.tcx.method_map.borrow().get(&method_call).map(|method| method.ty)
}
fn node_method_origin(&self, method_call: ty::MethodCall)
-> Option<ty::MethodOrigin<'tcx>>
{
self.tcx.method_map.borrow().get(&method_call).map(|method| method.origin.clone())
}
fn adjustments(&self) -> &RefCell<NodeMap<ty::AutoAdjustment<'tcx>>> {
&self.tcx.adjustments
}
fn is_method_call(&self, id: ast::NodeId) -> bool {
self.tcx.is_method_call(id)
}
fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option<region::CodeExtent> {
self.tcx.region_maps.temporary_scope(rvalue_id)
}
fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture> {
self.tcx.upvar_capture(upvar_id)
}
fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool {
type_moves_by_default(self, span, ty)
}
}
impl<'a,'tcx> ClosureTyper<'tcx> for ty::ParameterEnvironment<'a,'tcx> {
fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> {
self
}
fn closure_kind(&self,
def_id: ast::DefId)
-> Option<ty::ClosureKind>
{
Some(self.tcx.closure_kind(def_id))
}
fn closure_type(&self,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>)
-> ty::ClosureTy<'tcx>
{
self.tcx.closure_type(def_id, substs)
}
fn closure_upvars(&self,
def_id: ast::DefId,
substs: &Substs<'tcx>)
-> Option<Vec<ClosureUpvar<'tcx>>>
{
closure_upvars(self, def_id, substs)
}
}
/// The category of explicit self.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum ExplicitSelfCategory {
StaticExplicitSelfCategory,
ByValueExplicitSelfCategory,
ByReferenceExplicitSelfCategory(Region, ast::Mutability),
ByBoxExplicitSelfCategory,
}
/// Pushes all the lifetimes in the given type onto the given list. A
/// "lifetime in a type" is a lifetime specified by a reference or a lifetime
/// in a list of type substitutions. This does *not* traverse into nominal
/// types, nor does it resolve fictitious types.
pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec<ty::Region>,
ty: Ty) {
walk_ty(ty, |ty| {
match ty.sty {
ty_rptr(region, _) => {
accumulator.push(*region)
}
ty_trait(ref t) => {
accumulator.push_all(t.principal.0.substs.regions().as_slice());
}
ty_enum(_, substs) |
ty_struct(_, substs) => {
accum_substs(accumulator, substs);
}
ty_closure(_, substs) => {
accum_substs(accumulator, substs);
}
ty_bool |
ty_char |
ty_int(_) |
ty_uint(_) |
ty_float(_) |
ty_uniq(_) |
ty_str |
ty_vec(_, _) |
ty_ptr(_) |
ty_bare_fn(..) |
ty_tup(_) |
ty_projection(_) |
ty_param(_) |
ty_infer(_) |
ty_err => {
}
}
});
fn accum_substs(accumulator: &mut Vec<Region>, substs: &Substs) {
match substs.regions {
subst::ErasedRegions => {}
subst::NonerasedRegions(ref regions) => {
for region in regions.iter() {
accumulator.push(*region)
}
}
}
}
}
/// A free variable referred to in a function.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct Freevar {
/// The variable being accessed free.
pub def: def::Def,
// First span where it is accessed (there can be multiple).
pub span: Span
}
pub type FreevarMap = NodeMap<Vec<Freevar>>;
pub type CaptureModeMap = NodeMap<ast::CaptureClause>;
// Trait method resolution
pub type TraitMap = NodeMap<Vec<DefId>>;
// Map from the NodeId of a glob import to a list of items which are actually
// imported.
pub type GlobMap = HashMap<NodeId, HashSet<Name>>;
pub fn with_freevars<T, F>(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where
F: FnOnce(&[Freevar]) -> T,
{
match tcx.freevars.borrow().get(&fid) {
None => f(&[]),
Some(d) => f(&d[..])
}
}
impl<'tcx> AutoAdjustment<'tcx> {
pub fn is_identity(&self) -> bool {
match *self {
AdjustReifyFnPointer |
AdjustUnsafeFnPointer => false,
AdjustDerefRef(ref r) => r.is_identity(),
}
}
}
impl<'tcx> AutoDerefRef<'tcx> {
pub fn is_identity(&self) -> bool {
self.autoderefs == 0 && self.unsize.is_none() && self.autoref.is_none()
}
}
/// Replace any late-bound regions bound in `value` with free variants attached to scope-id
/// `scope_id`.
pub fn liberate_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
all_outlive_scope: region::DestructionScopeData,
value: &Binder<T>)
-> T
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
replace_late_bound_regions(
tcx, value,
|br| ty::ReFree(ty::FreeRegion{scope: all_outlive_scope, bound_region: br})).0
}
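// Illustration (added note, not in the original source): liberation maps
// `for<'a> fn(&'a isize)` to `fn(&'free isize)`, where `'free` is a `ReFree`
// region outliving `all_outlive_scope`; contrast `erase_late_bound_regions`
// below, which maps every bound region to `'static` instead.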
pub fn count_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
value: &Binder<T>)
-> usize
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
let (_, skol_map) = replace_late_bound_regions(tcx, value, |_| ty::ReStatic);
skol_map.len()
}
pub fn binds_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
value: &Binder<T>)
-> bool
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
count_late_bound_regions(tcx, value) > 0
}
/// Flattens two binding levels into one. So `for<'a> for<'b> Foo`
/// becomes `for<'a,'b> Foo`.
pub fn flatten_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
bound2_value: &Binder<Binder<T>>)
-> Binder<T>
where T: TypeFoldable<'tcx> + Repr<'tcx>
{
let bound0_value = bound2_value.skip_binder().skip_binder();
let value = ty_fold::fold_regions(tcx, bound0_value, |region, current_depth| {
match region {
ty::ReLateBound(debruijn, br) if debruijn.depth >= current_depth => {
// should be true if no escaping regions from bound2_value
assert!(debruijn.depth - current_depth <= 1);
ty::ReLateBound(DebruijnIndex::new(current_depth), br)
}
_ => {
region
}
}
});
Binder(value)
}
pub fn no_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
value: &Binder<T>)
-> Option<T>
where T : TypeFoldable<'tcx> + Repr<'tcx> + Clone
{
if binds_late_bound_regions(tcx, value) {
None
} else {
Some(value.0.clone())
}
}
/// Replace any late-bound regions bound in `value` with `'static`. Useful in trans but also
/// method lookup and a few other places where precise region relationships are not required.
pub fn erase_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
value: &Binder<T>)
-> T
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
replace_late_bound_regions(tcx, value, |_| ty::ReStatic).0
}
/// Rewrite any late-bound regions so that they are anonymous. Region numbers are
/// assigned starting at 1 and increasing monotonically in the order traversed
/// by the fold operation.
///
/// The chief purpose of this function is to canonicalize regions so that two
/// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become
/// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and
/// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization.
pub fn anonymize_late_bound_regions<'tcx, T>(
tcx: &ctxt<'tcx>,
sig: &Binder<T>)
-> Binder<T>
where T : TypeFoldable<'tcx> + Repr<'tcx>,
{
let mut counter = 0;
ty::Binder(replace_late_bound_regions(tcx, sig, |_| {
counter += 1;
ReLateBound(ty::DebruijnIndex::new(1), BrAnon(counter))
}).0)
}
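// Illustrative usage (assumed, not from the original source; `tcx`, `sig_a`
// and `sig_b` are hypothetical values in scope):
//
//     // sig_a: for<'a, 'b> fn(&'a isize, &'b isize)
//     // sig_b: for<'a, 'b> fn(&'b isize, &'a isize)
//     let a = anonymize_late_bound_regions(tcx, &sig_a);
//     let b = anonymize_late_bound_regions(tcx, &sig_b);
//     // both now bind BrAnon(1) and BrAnon(2) in traversal order, so they
//     // are structurally identical and can be compared directly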
/// Replaces the late-bound regions in `binder` that are bound by that binder, using `mapf` to pick each replacement; returns the rewritten value along with the map of replacements.
pub fn replace_late_bound_regions<'tcx, T, F>(
tcx: &ty::ctxt<'tcx>,
binder: &Binder<T>,
mut mapf: F)
-> (T, FnvHashMap<ty::BoundRegion,ty::Region>)
where T : TypeFoldable<'tcx> + Repr<'tcx>,
F : FnMut(BoundRegion) -> ty::Region,
{
debug!("replace_late_bound_regions({})", binder.repr(tcx));
let mut map = FnvHashMap();
// Note: fold the field `0`, not the binder, so that late-bound
// regions bound by `binder` are considered free.
let value = ty_fold::fold_regions(tcx, &binder.0, |region, current_depth| {
debug!("region={}", region.repr(tcx));
match region {
ty::ReLateBound(debruijn, br) if debruijn.depth == current_depth => {
let region = *map.entry(br).or_insert_with(|| mapf(br));
if let ty::ReLateBound(debruijn1, br) = region {
// If the callback returns a late-bound region,
// that region should always use depth 1. Then we
// adjust it to the correct depth.
assert_eq!(debruijn1.depth, 1);
ty::ReLateBound(debruijn, br)
} else {
region
}
}
_ => {
region
}
}
});
debug!("resulting map: {:?} value: {:?}", map, value.repr(tcx));
(value, map)
}
impl DebruijnIndex {
pub fn new(depth: u32) -> DebruijnIndex {
assert!(depth > 0);
DebruijnIndex { depth: depth }
}
pub fn shifted(&self, amount: u32) -> DebruijnIndex {
DebruijnIndex { depth: self.depth + amount }
}
}
impl<'tcx> Repr<'tcx> for AutoAdjustment<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
AdjustReifyFnPointer => {
format!("AdjustReifyFnPointer")
}
AdjustUnsafeFnPointer => {
format!("AdjustUnsafeFnPointer")
}
AdjustDerefRef(ref data) => {
data.repr(tcx)
}
}
}
}
impl<'tcx> Repr<'tcx> for AutoDerefRef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("AutoDerefRef({}, unsize={}, {})",
self.autoderefs, self.unsize.repr(tcx), self.autoref.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for AutoRef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
AutoPtr(a, b) => {
format!("AutoPtr({},{:?})", a.repr(tcx), b)
}
AutoUnsafe(ref a) => {
format!("AutoUnsafe({:?})", a)
}
}
}
}
impl<'tcx> Repr<'tcx> for TyTrait<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TyTrait({},{})",
self.principal.repr(tcx),
self.bounds.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Predicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
Predicate::Trait(ref a) => a.repr(tcx),
Predicate::Equate(ref pair) => pair.repr(tcx),
Predicate::RegionOutlives(ref pair) => pair.repr(tcx),
Predicate::TypeOutlives(ref pair) => pair.repr(tcx),
Predicate::Projection(ref pair) => pair.repr(tcx),
}
}
}
pub fn make_substs_for_receiver_types<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_ref: &ty::TraitRef<'tcx>,
method: &ty::Method<'tcx>)
-> subst::Substs<'tcx>
{
/*!
* Substitutes the values for the receiver's type parameters
* that are found in method, leaving the method's type parameters
* intact.
*/
let meth_tps: Vec<Ty> =
method.generics.types.get_slice(subst::FnSpace)
.iter()
.map(|def| ty::mk_param_from_def(tcx, def))
.collect();
let meth_regions: Vec<ty::Region> =
method.generics.regions.get_slice(subst::FnSpace)
.iter()
.map(|def| def.to_early_bound_region())
.collect();
trait_ref.substs.clone().with_method(meth_tps, meth_regions)
}
#[derive(Copy, Clone)]
pub enum CopyImplementationError {
FieldDoesNotImplementCopy(ast::Name),
VariantDoesNotImplementCopy(ast::Name),
TypeIsStructural,
TypeHasDestructor,
}
pub fn can_type_implement_copy<'a,'tcx>(param_env: &ParameterEnvironment<'a, 'tcx>,
span: Span,
self_type: Ty<'tcx>)
-> Result<(),CopyImplementationError>
{
let tcx = param_env.tcx;
let did = match self_type.sty {
ty::ty_struct(struct_did, substs) => {
let fields = ty::struct_fields(tcx, struct_did, substs);
for field in &fields {
if type_moves_by_default(param_env, span, field.mt.ty) {
return Err(FieldDoesNotImplementCopy(field.name))
}
}
struct_did
}
ty::ty_enum(enum_did, substs) => {
let enum_variants = ty::enum_variants(tcx, enum_did);
for variant in &*enum_variants {
for variant_arg_type in &variant.args {
let substd_arg_type =
variant_arg_type.subst(tcx, substs);
if type_moves_by_default(param_env, span, substd_arg_type) {
return Err(VariantDoesNotImplementCopy(variant.name))
}
}
}
enum_did
}
_ => return Err(TypeIsStructural),
};
if ty::has_dtor(tcx, did) {
return Err(TypeHasDestructor)
}
Ok(())
}
// FIXME(#20298) -- all of these types basically walk various
// structures to test whether types/regions are reachable with various
// properties. It should be possible to express them in terms of one
// common "walker" trait or something.
pub trait RegionEscape {
fn has_escaping_regions(&self) -> bool {
self.has_regions_escaping_depth(0)
}
fn has_regions_escaping_depth(&self, depth: u32) -> bool;
}
impl<'tcx> RegionEscape for Ty<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
ty::type_escapes_depth(*self, depth)
}
}
impl<'tcx> RegionEscape for Substs<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.types.has_regions_escaping_depth(depth) ||
self.regions.has_regions_escaping_depth(depth)
}
}
impl<'tcx,T:RegionEscape> RegionEscape for VecPerParamSpace<T> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
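        // Values in FnSpace sit inside the fn's own binder, so a region must
        // escape one extra binding level (hence depth + 1) to escape overall.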
self.iter_enumerated().any(|(space, _, t)| {
if space == subst::FnSpace {
t.has_regions_escaping_depth(depth+1)
} else {
t.has_regions_escaping_depth(depth)
}
})
}
}
impl<'tcx> RegionEscape for TypeScheme<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.ty.has_regions_escaping_depth(depth)
}
}
impl RegionEscape for Region {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.escapes_depth(depth)
}
}
impl<'tcx> RegionEscape for GenericPredicates<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.predicates.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for Predicate<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
match *self {
Predicate::Trait(ref data) => data.has_regions_escaping_depth(depth),
Predicate::Equate(ref data) => data.has_regions_escaping_depth(depth),
Predicate::RegionOutlives(ref data) => data.has_regions_escaping_depth(depth),
Predicate::TypeOutlives(ref data) => data.has_regions_escaping_depth(depth),
Predicate::Projection(ref data) => data.has_regions_escaping_depth(depth),
}
}
}
impl<'tcx,P:RegionEscape> RegionEscape for traits::Obligation<'tcx,P> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.predicate.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for TraitRef<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.substs.types.iter().any(|t| t.has_regions_escaping_depth(depth)) ||
self.substs.regions.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for subst::RegionSubsts {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
match *self {
subst::ErasedRegions => false,
subst::NonerasedRegions(ref r) => {
r.iter().any(|t| t.has_regions_escaping_depth(depth))
}
}
}
}
impl<'tcx,T:RegionEscape> RegionEscape for Binder<T> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.0.has_regions_escaping_depth(depth + 1)
}
}
impl<'tcx> RegionEscape for EquatePredicate<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.0.has_regions_escaping_depth(depth) || self.1.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for TraitPredicate<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.trait_ref.has_regions_escaping_depth(depth)
}
}
impl<T:RegionEscape,U:RegionEscape> RegionEscape for OutlivesPredicate<T,U> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.0.has_regions_escaping_depth(depth) || self.1.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for ProjectionPredicate<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.projection_ty.has_regions_escaping_depth(depth) ||
self.ty.has_regions_escaping_depth(depth)
}
}
impl<'tcx> RegionEscape for ProjectionTy<'tcx> {
fn has_regions_escaping_depth(&self, depth: u32) -> bool {
self.trait_ref.has_regions_escaping_depth(depth)
}
}
impl<'tcx> Repr<'tcx> for ty::ProjectionPredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ProjectionPredicate({}, {})",
self.projection_ty.repr(tcx),
self.ty.repr(tcx))
}
}
pub trait HasProjectionTypes {
fn has_projection_types(&self) -> bool;
}
impl<'tcx,T:HasProjectionTypes> HasProjectionTypes for Vec<T> {
fn has_projection_types(&self) -> bool {
self.iter().any(|p| p.has_projection_types())
}
}
impl<'tcx,T:HasProjectionTypes> HasProjectionTypes for VecPerParamSpace<T> {
fn has_projection_types(&self) -> bool {
self.iter().any(|p| p.has_projection_types())
}
}
impl<'tcx> HasProjectionTypes for ClosureTy<'tcx> {
fn has_projection_types(&self) -> bool {
self.sig.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for ClosureUpvar<'tcx> {
fn has_projection_types(&self) -> bool {
self.ty.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for ty::InstantiatedPredicates<'tcx> {
fn has_projection_types(&self) -> bool {
self.predicates.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for Predicate<'tcx> {
fn has_projection_types(&self) -> bool {
match *self {
Predicate::Trait(ref data) => data.has_projection_types(),
Predicate::Equate(ref data) => data.has_projection_types(),
Predicate::RegionOutlives(ref data) => data.has_projection_types(),
Predicate::TypeOutlives(ref data) => data.has_projection_types(),
Predicate::Projection(ref data) => data.has_projection_types(),
}
}
}
impl<'tcx> HasProjectionTypes for TraitPredicate<'tcx> {
fn has_projection_types(&self) -> bool {
self.trait_ref.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for EquatePredicate<'tcx> {
fn has_projection_types(&self) -> bool {
self.0.has_projection_types() || self.1.has_projection_types()
}
}
impl HasProjectionTypes for Region {
fn has_projection_types(&self) -> bool {
false
}
}
impl<T:HasProjectionTypes,U:HasProjectionTypes> HasProjectionTypes for OutlivesPredicate<T,U> {
fn has_projection_types(&self) -> bool {
self.0.has_projection_types() || self.1.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for ProjectionPredicate<'tcx> {
fn has_projection_types(&self) -> bool {
self.projection_ty.has_projection_types() || self.ty.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for ProjectionTy<'tcx> {
fn has_projection_types(&self) -> bool {
self.trait_ref.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for Ty<'tcx> {
fn has_projection_types(&self) -> bool {
ty::type_has_projection(*self)
}
}
impl<'tcx> HasProjectionTypes for TraitRef<'tcx> {
fn has_projection_types(&self) -> bool {
self.substs.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for subst::Substs<'tcx> {
fn has_projection_types(&self) -> bool {
self.types.iter().any(|t| t.has_projection_types())
}
}
impl<'tcx,T> HasProjectionTypes for Option<T>
where T : HasProjectionTypes
{
fn has_projection_types(&self) -> bool {
self.iter().any(|t| t.has_projection_types())
}
}
impl<'tcx,T> HasProjectionTypes for Rc<T>
where T : HasProjectionTypes
{
fn has_projection_types(&self) -> bool {
(**self).has_projection_types()
}
}
impl<'tcx,T> HasProjectionTypes for Box<T>
where T : HasProjectionTypes
{
fn has_projection_types(&self) -> bool {
(**self).has_projection_types()
}
}
impl<T> HasProjectionTypes for Binder<T>
where T : HasProjectionTypes
{
fn has_projection_types(&self) -> bool {
self.0.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for FnOutput<'tcx> {
fn has_projection_types(&self) -> bool {
match *self {
FnConverging(t) => t.has_projection_types(),
FnDiverging => false,
}
}
}
impl<'tcx> HasProjectionTypes for FnSig<'tcx> {
fn has_projection_types(&self) -> bool {
self.inputs.iter().any(|t| t.has_projection_types()) ||
self.output.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for field<'tcx> {
fn has_projection_types(&self) -> bool {
self.mt.ty.has_projection_types()
}
}
impl<'tcx> HasProjectionTypes for BareFnTy<'tcx> {
fn has_projection_types(&self) -> bool {
self.sig.has_projection_types()
}
}
pub trait ReferencesError {
fn references_error(&self) -> bool;
}
impl<T:ReferencesError> ReferencesError for Binder<T> {
fn references_error(&self) -> bool {
self.0.references_error()
}
}
impl<T:ReferencesError> ReferencesError for Rc<T> {
fn references_error(&self) -> bool {
(&**self).references_error()
}
}
impl<'tcx> ReferencesError for TraitPredicate<'tcx> {
fn references_error(&self) -> bool {
self.trait_ref.references_error()
}
}
impl<'tcx> ReferencesError for ProjectionPredicate<'tcx> {
fn references_error(&self) -> bool {
self.projection_ty.trait_ref.references_error() || self.ty.references_error()
}
}
impl<'tcx> ReferencesError for TraitRef<'tcx> {
fn references_error(&self) -> bool {
self.input_types().iter().any(|t| t.references_error())
}
}
impl<'tcx> ReferencesError for Ty<'tcx> {
fn references_error(&self) -> bool {
type_is_error(*self)
}
}
impl<'tcx> ReferencesError for Predicate<'tcx> {
fn references_error(&self) -> bool {
match *self {
Predicate::Trait(ref data) => data.references_error(),
Predicate::Equate(ref data) => data.references_error(),
Predicate::RegionOutlives(ref data) => data.references_error(),
Predicate::TypeOutlives(ref data) => data.references_error(),
Predicate::Projection(ref data) => data.references_error(),
}
}
}
impl<A,B> ReferencesError for OutlivesPredicate<A,B>
where A : ReferencesError, B : ReferencesError
{
fn references_error(&self) -> bool {
self.0.references_error() || self.1.references_error()
}
}
impl<'tcx> ReferencesError for EquatePredicate<'tcx>
{
fn references_error(&self) -> bool {
self.0.references_error() || self.1.references_error()
}
}
impl ReferencesError for Region
{
fn references_error(&self) -> bool {
false
}
}
impl<'tcx> Repr<'tcx> for ClosureTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ClosureTy({},{},{})",
self.unsafety,
self.sig.repr(tcx),
self.abi)
}
}
impl<'tcx> Repr<'tcx> for ClosureUpvar<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ClosureUpvar({},{})",
self.def.repr(tcx),
self.ty.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for field<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("field({},{})",
self.name.repr(tcx),
self.mt.repr(tcx))
}
}
impl<'a, 'tcx> Repr<'tcx> for ParameterEnvironment<'a, 'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ParameterEnvironment(\
free_substs={}, \
implicit_region_bound={}, \
caller_bounds={})",
self.free_substs.repr(tcx),
self.implicit_region_bound.repr(tcx),
self.caller_bounds.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ObjectLifetimeDefault {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ObjectLifetimeDefault::Ambiguous => format!("Ambiguous"),
ObjectLifetimeDefault::Specific(ref r) => r.repr(tcx),
}
}
}<|fim▁end|> | // compute an *intermediate* contents for, e.g., Option<List> of |
<|file_name|>zhanzhuanxc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'ByStudent'
def zhanzhuanxc(p,q,e):
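    # Computes the RSA private exponent d = e^(-1) mod (p-1)*(q-1) via the
    # extended Euclidean algorithm ("zhanzhuan xiangchu" is the Chinese name
    # for Euclid's algorithm). Assumes gcd(e, (p-1)*(q-1)) == 1.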
def egcd(a, b):
x,y, u,v = 0,1, 1,0
while a != 0:
q, r = b//a, b%a
m, n = x-u*q, y-v*q
b,a, x,y, u,v = a,r, u,v, m,n
gcd = b
return gcd, x, y
def modinv(a, m):
gcd, x, y = egcd(a, m)
if gcd != 1:
return None # modular inverse does not exist
else:
return x % m
phi_n = (p - 1) * (q - 1)
d = modinv(e, phi_n)<|fim▁hole|><|fim▁end|> | return int(d)
# print zhanzhuanxc(18443,49891,19) |
<|file_name|>OverflowSet.Example.scss.d.ts<|end_file_name|><|fim▁begin|>declare const styles: {
overflowLinks: string;
overflowButton: string;<|fim▁hole|><|fim▁end|> | isEnabled: string;
isOpened: string;
};
export = styles; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 CSIRO<|fim▁hole|># [email protected]
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
#<|fim▁end|> | # Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia |
<|file_name|>all_permutations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
class AllPermutations(object):
def __init__(self, arr):
self.arr = arr
def all_permutations(self):
results = []
used = []
self._all_permutations(self.arr, used, results)
return results
def _all_permutations(self, to_use, used, results):
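        # Base case: nothing left to place, so `used` is a complete
        # permutation. (The loop below is a no-op when `to_use` is empty.)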
if len(to_use) == 0:
results.append(used)
for i, x in enumerate(to_use):
new_used = used + [x]
new_to_use = to_use[:i] + to_use[i+1:]
self._all_permutations(new_to_use, new_used, results)
<|fim▁hole|> arr = [1, 2, 3, 4]
ap = AllPermutations(arr)
results = ap.all_permutations()
for x in results:
print x
print len(results)
if __name__ == "__main__":
main()<|fim▁end|> | def main(): |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import pytest
# ``py.test --runslow`` causes the entire testsuite to be run, including tests
# that are decorated with ``@slow`` (scaffolding tests).
# see http://pytest.org/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option # Noqa
## def pytest_addoption(parser):
## parser.addoption("--runslow", action="store_true", help="run slow tests")
## slow = pytest.mark.skipif(
## not pytest.config.getoption("--runslow"),<|fim▁hole|>## )<|fim▁end|> | ## reason="need --runslow option to run" |
<|file_name|>reader.rs<|end_file_name|><|fim▁begin|>use rustc_serialize::json::{Json, Object};
use std::fs::File;
use super::responses::Responses;
use super::outcome::Outcome;
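// Note: `val_or_err!` (used throughout this file) is assumed to be a helper
// macro defined elsewhere in this crate that matches a pattern and otherwise
// returns `Err` with the given message.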
pub struct Phase {
pub operation: Responses,
pub outcome: Outcome,
}
impl Phase {
fn from_json(object: &Object) -> Result<Phase, String> {
let operation = val_or_err!(object.get("responses"),
Some(&Json::Array(ref array)) => try!(Responses::from_json(array)),
"No `responses` array found.");
let outcome = val_or_err!(object.get("outcome"),
Some(&Json::Object(ref obj)) => try!(Outcome::from_json(obj)),
"No `outcome` object found.");
Ok(Phase{ operation: operation, outcome: outcome })
}
}
pub struct Suite {
pub uri: String,
pub phases: Vec<Phase>,
}
fn get_phases(object: &Object) -> Result<Vec<Phase>, String> {
let array = val_or_err!(object.get("phases"),
Some(&Json::Array(ref array)) => array.clone(),
"No `phases` array found");
let mut phases = vec![];
for json in array {
let obj = val_or_err!(json,
Json::Object(ref obj) => obj.clone(),
"`phases` array must only contain objects");
let phase = match Phase::from_json(&obj) {
Ok(phase) => phase,
Err(s) => return Err(s)
};
phases.push(phase);
}
Ok(phases)
}
pub trait SuiteContainer {
fn from_file(path: &str) -> Result<Self, String>;
fn get_suite(&self) -> Result<Suite, String>;
}
impl SuiteContainer for Json {<|fim▁hole|> fn from_file(path: &str) -> Result<Json, String> {
let mut file = File::open(path).ok().expect(&format!("Unable to open file: {}", path));
Ok(Json::from_reader(&mut file).ok().expect(&format!("Invalid JSON file: {}", path)))
}
fn get_suite(&self) -> Result<Suite, String> {
let object = val_or_err!(self,
&Json::Object(ref object) => object.clone(),
"`get_suite` requires a JSON object");
let uri = val_or_err!(object.get("uri"),
Some(&Json::String(ref s)) => s.clone(),
"`get_suite` requires a connection uri");
let phases = try!(get_phases(&object));
Ok(Suite { uri: uri, phases: phases })
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .sanic import Sanic
from .blueprints import Blueprint<|fim▁hole|>
__version__ = '0.1.9'
__all__ = ['Sanic', 'Blueprint']<|fim▁end|> | |
<|file_name|>email.service.configure.route.js<|end_file_name|><|fim▁begin|>var mailServiceRoutes = (function () {
'use strict';
var HTTPStatus = require('http-status'),
express = require('express'),
tokenAuthMiddleware = require('../middlewares/token.authentication.middleware'),
roleAuthMiddleware = require('../middlewares/role.authorization.middleware'),
messageConfig = require('../configs/api.message.config'),
emailServiceController = require('../controllers/email.service.server.controller'),
mailServiceRouter = express.Router();
mailServiceRouter.route('/')
/**
* @api {get} /api/emailservice/ Get Email Service Setting Configuration Info
* @apiPermission admin
* @apiName getMailServiceConfig
* @apiGroup EmailServiceSetting
* @apiDescription Retrieves the Email Service setting Information Object if exists, else return empty object
* @apiVersion 0.0.1
* @apiHeader (AuthorizationHeader) {String} authorization x-access-token value.
* @apiHeaderExample {json} Header-Example:
*{
* "x-access-token": "yJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c"
* }
*
* @apiExample {curl} Example usage:
* curl -i http://localhost:3000/api/emailservice
*
*
* @apiSuccess {String} _id object id of the email service configuration data
* @apiSuccess {String} serviceProviderType Type of Email service Providers. Email Service Providers can be any one of - 'mailgun', 'postmark', 'mandrill', 'sendgrid', 'amazon' or 'normal' email sending mechanism using google smtp
* @apiSuccess {String} host hostname or IP address to connect to (defaults to 'localhost'). for [normal smtp]
* @apiSuccess {Number} port port to connect to (defaults to 25 or 465). for [normal smtp]
* @apiSuccess {String} authUserName authentication username, mainly google email address for google smtp. for [normal smtp]
* @apiSuccess {String} authPassword password for the gmail account or user. for [normal smtp]
* @apiSuccess {String} api_Key secret unique key to access the email service provider api. needed for [mandrill, mailgun, Amazon, sendGrid, postmark]
     * @apiSuccess {String} api_Secret secret unique key to access the email service provider api. needed for [Amazon]
* @apiSuccess {String} api_User username of the user registered in the email service provider user database.
* @apiSuccess {String} domain domain name of the email service provider. [mailgun]
* @apiSuccess {Date} addedOn date at which email service configuration is saved.
* @apiSuccess {Number} rateLimit limits the message count to be sent in a second (defaults to false) - available only if pool is set to true. needed for [Amazon ses]
* @apiSuccess {Boolean} pool if set to true uses pooled connections (defaults to false), otherwise creates a new connection for every e-mail.
* @apiSuccess {Boolean} secure if true the connection will only use TLS. If false (the default), TLS may still be upgraded to if available via the STARTTLS command. for [normal smtp]
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "57357eb98b22c55e361176a2",
* "serviceProviderType": "mailgun",
* "host": "smtp.gmail.com",
* "port": 8000,
* "authUserName": "[email protected]",
* "authPassword": "lakdehe@123",
* "api_Key": "key-dfsew",
* "api_Secret": "api-fdsfsd",
* "api_User": "shrawan",
* "domain": "sandbox73ad601fcdd74461b1c46820a59b2374.mailgun.org",
* "addedOn": "2016-05-13T07:14:01.496Z",
* "rateLimit": 300,
* "pool": false,
* "secure": true
* }
*
* @apiError (UnAuthorizedError) {String} message authentication token was not supplied
     * @apiError (UnAuthorizedError) {Boolean} isToken to check if a token is supplied or not; returns true if a token is supplied, else returns false
* @apiError (UnAuthorizedError) {Boolean} success true if jwt token verifies
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "isToken": true,
* "success": false,
* "message": "Authentication failed"
* }
*
*
* @apiError (UnAuthorizedError_1) {String} message You are not authorized to access this api route.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to access this api route."
* }
*
* @apiError (UnAuthorizedError_2) {String} message You are not authorized to perform this action.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to perform this action"
* }
*
*
* @apiError (NotFound) {String} message Email service configuration not found
*
* @apiErrorExample Error-Response:
* HTTP/1.1 404 Not Found
* {
* "message": "Email service configuration not found"
* }
*
* @apiError (InternalServerError) {String} message Internal Server Error
*
* @apiErrorExample Error-Response:
* HTTP/1.1 500 Internal Server Error
* {
* "message": "Internal Server Error"
* }
*
*/
.get( tokenAuthMiddleware.authenticate, roleAuthMiddleware.authorize, getMailServiceConfig )
/**
* @api {post} /api/emailservice/ Post Email Service Configuration Info
* @apiPermission admin
* @apiName postMailServiceConfig
* @apiGroup EmailServiceSetting
     * @apiParam {String} serviceProviderType Mandatory. Type of Email service Providers. Email Service Providers can be any one of - 'mailgun', 'postmark', 'mandrill', 'sendgrid', 'amazon' or 'normal' email sending mechanism using google smtp
* @apiDescription saves email service configuration setting information to the database so that we can send email to our users
* @apiVersion 0.0.1
* @apiHeader (AuthorizationHeader) {String} authorization x-access-token value.
* @apiHeaderExample {json} Header-Example:
*{
* "x-access-token": "yJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c"
* }
*
* @apiExample {curl} Example usage:
*
* curl \
* -v \
* -X POST \
* http://localhost:3000/api/emailservice \
* -H 'Content-Type: application/json' \
* -H 'x-access-token: eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c2JlYXQuY29tIiwidXNlcm5hbWUiOiJzdXBlcmFkbWluIiwiX2lkIjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwidXNlckNvbmZpcm1lZCI6ZmFsc2UsImJsb2NrZWQiOmZhbHNlLCJkZWxldGVkIjpmYWxzZSwiYWRkZWRPbiI6IjIwMTYtMDctMDhUMDc6NTQ6MDMuNzY2WiIsInR3b0ZhY3RvckF1dGhFbmFibGVkIjpmYWxzZSwidXNlclJvbGUiOiJhZG1pbiIsImFjdGl2ZSI6dHJ1ZX0sImNsYWltcyI6eyJzdWJqZWN0IjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwiaXNzdWVyIjoiaHR0cDovL2xvY2FsaG9zdDozMDAwIiwicGVybWlzc2lvbnMiOlsic2F2ZSIsInVwZGF0ZSIsInJlYWQiLCJkZWxldGUiXX0sImlhdCI6MTQ2ODUzMzgzMiwiZXhwIjoxNDY4NTUzODMyLCJpc3MiOiI1NzdmNWMxYjU4Njk5MDJkNjdlYjIyYTgifQ.bmHC9pUtN1aOZUOc62nNfywggLBUfpLhs0CyMuunhEpVJq4WLYZ7fcr2Ap8xn0WYL_yODPPuSYGIFZ8uk4nilA' \
* -d '{"serviceProviderType":"mailgun","domain":"www.mailgun.com","api_Key":"helloapikey123456"}'
*
* @apiSuccess {String} message Email service configuration saved successfully.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "message": "Email service configuration saved successfully"
* }
*
* @apiError (UnAuthorizedError) {String} message authentication token was not supplied
     * @apiError (UnAuthorizedError) {Boolean} isToken to check if a token is supplied or not; returns true if a token is supplied, else returns false
* @apiError (UnAuthorizedError) {Boolean} success true if jwt token verifies
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "isToken": true,
* "success": false,
* "message": "Authentication failed"
* }
*
*
* @apiError (UnAuthorizedError_1) {String} message You are not authorized to access this api route.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to access this api route."
* }
*
* @apiError (UnAuthorizedError_2) {String} message You are not authorized to perform this action.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to perform this action"
* }
*
*
     * @apiError (AlreadyExists) {String} message Email Service setting configuration already exists; only the existing data can be updated. New inserts are not allowed
*
* @apiErrorExample Error-Response:
* HTTP/1.1 409 Conflict
* {
* "message": "You can only update email service configuration setting"
* }
*
*
     * @apiError (BadRequest) {String[]} message Email service configuration setting post method throws an error if serviceProviderType is not provided, or on invalid data entry for host, port, authUserName, domain and rateLimit
*
* @apiErrorExample Error-Response:
* HTTP/1.1 400 Bad Request
* {
* "message": "[{"param":"serviceProviderType","msg":"Email service provider is required","value":""},{"param":"domain","msg":"Invalid domain","value":"www."}]"
* }
*
* @apiError (InternalServerError) {String} message Internal Server Error
*
* @apiErrorExample Error-Response:
* HTTP/1.1 500 Internal Server Error
* {
* "message": "Internal Server Error"
* }
*
*/
.post( tokenAuthMiddleware.authenticate, roleAuthMiddleware.authorize, emailServiceController.postMailServiceConfig );
    // middleware function that will be executed for every route below this to fetch the email service configuration setting object, using the id as a parameter
mailServiceRouter.use('/:mailServiceConfigId', tokenAuthMiddleware.authenticate, roleAuthMiddleware.authorize, function(req, res, next){
emailServiceController.getMailServiceConfigByID(req)
.then(function(mailServiceConfig){
//saving in request object so that it can be used for other operations like updating data using put and patch method
if(mailServiceConfig){
req.mailService = mailServiceConfig;
next();
return null;// return a non-undefined value to signal that we didn't forget to return promise
}else{
res.status(HTTPStatus.NOT_FOUND);
res.json({
message: messageConfig.emailService.notFound
});
}
})
.catch(function(err){
return next(err);
});
});
mailServiceRouter.route('/:mailServiceConfigId')
/**
* @api {get} /api/emailservice/:mailServiceConfigId Get Email Service Setting Configuration Info by id
* @apiPermission admin
* @apiName getMailServiceConfigByID
* @apiGroup EmailServiceSetting
*
*
* @apiParam {String} mailServiceConfigId object id of the email service data
*
     * @apiParamExample {json} Request-Example:
* {
* "mailServiceConfigId": "57889ae9585d9632523f1234"
* }
*
*
* @apiDescription Retrieves the Email Service setting Information Object by id if exists, else return empty object
* @apiVersion 0.0.1
* @apiHeader (AuthorizationHeader) {String} authorization x-access-token value.
* @apiHeaderExample {json} Header-Example:
*{
* "x-access-token": "yJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c"
* }
*
* @apiExample {curl} Example usage:
* curl -i http://localhost:3000/api/emailservice/57357eb98b22c55e361176a2 \
* -H 'x-access-token: eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c2JlYXQuY29tIiwidXNlcm5hbWUiOiJzdXBlcmFkbWluIiwiX2lkIjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwidXNlckNvbmZpcm1lZCI6ZmFsc2UsImJsb2NrZWQiOmZhbHNlLCJkZWxldGVkIjpmYWxzZSwiYWRkZWRPbiI6IjIwMTYtMDctMDhUMDc6NTQ6MDMuNzY2WiIsInR3b0ZhY3RvckF1dGhFbmFibGVkIjpmYWxzZSwidXNlclJvbGUiOiJhZG1pbiIsImFjdGl2ZSI6dHJ1ZX0sImNsYWltcyI6eyJzdWJqZWN0IjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwiaXNzdWVyIjoiaHR0cDovL2xvY2FsaG9zdDozMDAwIiwicGVybWlzc2lvbnMiOlsic2F2ZSIsInVwZGF0ZSIsInJlYWQiLCJkZWxldGUiXX0sImlhdCI6MTQ2ODMxNjg4MiwiZXhwIjoxNDY4MzM2ODgyLCJpc3MiOiI1NzdmNWMxYjU4Njk5MDJkNjdlYjIyYTgifQ.agd70Nk8y4bcORqzQP4eTSZW_3lN9TpC9zIpKM5j98RkNqS43qVPRQyN3DfRS6CKblHyvYASisvQGpCvJSyfgw'
*
*
*
* @apiSuccess {String} _id object id of the email service configuration data
* @apiSuccess {String} serviceProviderType Type of Email service Providers. Email Service Providers can be any one of - 'mailgun', 'postmark', 'mandrill', 'sendgrid', 'amazon' or 'normal' email sending mechanism using google smtp<|fim▁hole|> * @apiSuccess {String} host hostname or IP address to connect to (defaults to 'localhost'). for [normal smtp]
* @apiSuccess {Number} port port to connect to (defaults to 25 or 465). for [normal smtp]
* @apiSuccess {String} authUserName authentication username, mainly google email address for google smtp. for [normal smtp]
* @apiSuccess {String} authPassword password for the gmail account or user. for [normal smtp]
* @apiSuccess {String} api_Key secret unique key to access the email service provider api. needed for [mandrill, mailgun, Amazon, sendGrid, postmark]
* @apiSuccess {String} api_Secret secret unique key to access the email service provider api.needed for [Amaazon;]
* @apiSuccess {String} api_User username of the user registered in the email service provider user database.
* @apiSuccess {String} domain domain name of the email service provider. [mailgun]
* @apiSuccess {Date} addedOn date at which email service configuration is saved.
* @apiSuccess {Number} rateLimit limits the message count to be sent in a second (defaults to false) - available only if pool is set to true. needed for [Amazon ses]
* @apiSuccess {Boolean} pool if set to true uses pooled connections (defaults to false), otherwise creates a new connection for every e-mail.
* @apiSuccess {Boolean} secure if true the connection will only use TLS. If false (the default), TLS may still be upgraded to if available via the STARTTLS command. for [normal smtp]
*
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "57357eb98b22c55e361176a2",
* "serviceProviderType": "mailgun",
* "host": "smtp.gmail.com",
* "port": 8000,
* "authUserName": "[email protected]",
* "authPassword": "lakdehe@123",
* "api_Key": "key-dfsew",
* "api_Secret": "api-fdsfsd",
* "api_User": "shrawan",
* "domain": "sandbox73ad601fcdd74461b1c46820a59b2374.mailgun.org",
* "addedOn": "2016-05-13T07:14:01.496Z",
* "rateLimit": 300,
* "pool": false,
* "secure": true
* }
*
* @apiError (UnAuthorizedError) {String} message authentication token was not supplied
     * @apiError (UnAuthorizedError) {Boolean} isToken to check if a token is supplied or not; returns true if a token is supplied, else returns false
* @apiError (UnAuthorizedError) {Boolean} success true if jwt token verifies
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "isToken": true,
* "success": false,
* "message": "Authentication failed"
* }
*
*
* @apiError (UnAuthorizedError_1) {String} message You are not authorized to access this api route.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to access this api route."
* }
*
* @apiError (UnAuthorizedError_2) {String} message You are not authorized to perform this action.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to perform this action"
* }
*
*
* @apiError (NotFound) {String} message Email service configuration not found
*
* @apiErrorExample Error-Response:
* HTTP/1.1 404 Not Found
* {
* "message": "Email service configuration not found"
* }
*
* @apiError (InternalServerError) {String} message Internal Server Error
*
* @apiErrorExample Error-Response:
* HTTP/1.1 500 Internal Server Error
* {
* "message": "Internal Server Error"
* }
*
*/
.get(function(req, res){
res.status(HTTPStatus.OK);
res.json(req.mailService);
})
/**
* @api {put} /api/emailservice/:mailServiceConfigId Updates Email Service Configuration Info
* @apiPermission admin
* @apiName updateMailService
* @apiGroup EmailServiceSetting
*
*
* @apiParam {String} mailServiceConfigId object id of the email service data
*
     * @apiParamExample {json} Request-Example:
* {
* "mailServiceConfigId": "57889ae9585d9632523f1234"
* }
*
*
* @apiDescription Updates existing email service configuration setting information to the database so that we can send email to our users
* @apiVersion 0.0.1
* @apiHeader (AuthorizationHeader) {String} authorization x-access-token value.
* @apiHeaderExample {json} Header-Example:
*{
* "x-access-token": "yJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c"
* }
*
* @apiExample {curl} Example usage:
*
* curl \
* -v \
* -X PUT \
* http://localhost:3000/api/emailservice/5788105fd519f49e17f0579f \
* -H 'Content-Type: application/json' \
* -H 'x-access-token: eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7ImVtYWlsIjoiaGVsbG9AYml0c2JlYXQuY29tIiwidXNlcm5hbWUiOiJzdXBlcmFkbWluIiwiX2lkIjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwidXNlckNvbmZpcm1lZCI6ZmFsc2UsImJsb2NrZWQiOmZhbHNlLCJkZWxldGVkIjpmYWxzZSwiYWRkZWRPbiI6IjIwMTYtMDctMDhUMDc6NTQ6MDMuNzY2WiIsInR3b0ZhY3RvckF1dGhFbmFibGVkIjpmYWxzZSwidXNlclJvbGUiOiJhZG1pbiIsImFjdGl2ZSI6dHJ1ZX0sImNsYWltcyI6eyJzdWJqZWN0IjoiNTc3ZjVjMWI1ODY5OTAyZDY3ZWIyMmE4IiwiaXNzdWVyIjoiaHR0cDovL2xvY2FsaG9zdDozMDAwIiwicGVybWlzc2lvbnMiOlsic2F2ZSIsInVwZGF0ZSIsInJlYWQiLCJkZWxldGUiXX0sImlhdCI6MTQ2ODUzMzgzMiwiZXhwIjoxNDY4NTUzODMyLCJpc3MiOiI1NzdmNWMxYjU4Njk5MDJkNjdlYjIyYTgifQ.bmHC9pUtN1aOZUOc62nNfywggLBUfpLhs0CyMuunhEpVJq4WLYZ7fcr2Ap8xn0WYL_yODPPuSYGIFZ8uk4nilA' \
* -d '{"serviceProviderType":"postmark","domain":"www.mailgun.com","api_Key":"helloapikey123456"}'
*
* @apiSuccess {String} message Email service configuration updated successfully.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "message": "Email service configuration updated successfully"
* }
*
* @apiError (UnAuthorizedError) {String} message authentication token was not supplied
     * @apiError (UnAuthorizedError) {Boolean} isToken to check if a token is supplied or not; returns true if a token is supplied, else returns false
* @apiError (UnAuthorizedError) {Boolean} success true if jwt token verifies
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "isToken": true,
* "success": false,
* "message": "Authentication failed"
* }
*
*
* @apiError (UnAuthorizedError_1) {String} message You are not authorized to access this api route.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to access this api route."
* }
*
* @apiError (UnAuthorizedError_2) {String} message You are not authorized to perform this action.
*
* @apiErrorExample Error-Response:
* HTTP/1.1 401 Unauthorized
* {
* "message": "You are not authorized to perform this action"
* }
*
*
*
     * @apiError (BadRequest) {String[]} message Email service configuration setting put method throws an error if serviceProviderType is not provided, or on invalid data entry for host, port, authUserName, domain and rateLimit
*
* @apiErrorExample Error-Response:
* HTTP/1.1 400 Bad Request
* {
* "message": "[{"param":"serviceProviderType","msg":"Email service provider is required","value":""},{"param":"domain","msg":"Invalid domain","value":"www."}]"
* }
*
* @apiError (InternalServerError) {String} message Internal Server Error
*
* @apiErrorExample Error-Response:
* HTTP/1.1 500 Internal Server Error
* {
* "message": "Internal Server Error"
* }
*
*/
.put( tokenAuthMiddleware.authenticate, roleAuthMiddleware.authorize, emailServiceController.updateMailService );
    // function declaration to return the email service configuration object to the client if it exists, else return a not-found message
function getMailServiceConfig(req, res, next) {
emailServiceController.getMailServiceConfig()
.then(function(mailServiceConfig){
//if exists, return data in json format
if (mailServiceConfig) {
res.status(HTTPStatus.OK);
res.json(mailServiceConfig);
} else {
res.status(HTTPStatus.NOT_FOUND);
res.json({
message: messageConfig.emailService.notFound
});
}
})
.catch(function(err){
return next(err);
});
}
return mailServiceRouter;
})();
module.exports = mailServiceRoutes;<|fim▁end|> | |
<|file_name|>calculator.rs<|end_file_name|><|fim▁begin|>use std::collections::{VecDeque, HashMap};
use std::str::FromStr;
use std::rc::Rc;
/// The description of a calculator operation and how to execute it.
#[derive(Clone)]
pub struct OpSpec {
/// A counted reference to the function itself.
    /// Takes the calculator's current state and returns the updated state
    /// together with the list of messages to print.
pub op: Rc<Fn(Calculator) -> (Calculator, Vec<String>)>,
/// A help string displayed for this operation when the user asks for help.
pub help: String
}
/// Stores the state of a calculator at any given time.
#[derive(Clone)]
pub struct Calculator {
/// A stack to place values in.
pub stack: VecDeque<f64>,
/// The operations this calculator supports.
/// The key in this map specifies a string the user writes to invoke the operation.
/// An operation's key string should not have any spaces in it.
pub ops: HashMap<String, OpSpec>,
}
/// Used to encode a boolean as f64 (true -> 1.0, false -> 0.0).
fn bool_to_f64(b: bool) -> f64 {
if b {1.0} else {0.0}
}
/// Used to decode a boolean previously stored as f64 (0.0 -> false, != 0.0 -> true).
fn f64_to_bool(f: f64) -> bool {
f != 0.0
}
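// Round trip: bool_to_f64(true) == 1.0 and f64_to_bool(1.0) == true; note
// that any non-zero f64 (including NaN) decodes to `true`.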
impl Calculator {
/// Constructs a new calculator with a basic set of operations available.
pub fn new() -> Self {
let mut calc = Calculator {
stack: VecDeque::new(),
ops: HashMap::new(),
};
let make_binop = |name: String, binop: Box<Fn(f64, f64) -> f64>, help: String|
OpSpec{
op: Rc::new(move |mut calc: Calculator| {
let len = calc.stack.len();
if len < 2 {
(calc, vec![format!("'{}' requires stack size >= 2, current = {}", name, len)])
} else {
let a = calc.stack.pop_back().unwrap();
let b = calc.stack.pop_back().unwrap();
calc.stack.push_back(binop(a, b));
(calc, vec![])
}
}),
help: String::from(help)
};
let make_unop = |name: String, unop: Box<Fn(f64) -> f64>, help: String|
OpSpec{
op: Rc::new(move |mut calc: Calculator| {
match calc.stack.pop_back() {
None => (calc, vec![format!("'{}' requires a non-empty stack", name)]),
Some(x) => {
calc.stack.push_back(unop(x));
(calc, vec![])
}
}
}),
help: String::from(help)
};
// binary floating point operations
calc.ops.insert(String::from("-"),
make_binop(String::from("Subtract"),
Box::new(|a, b| a - b),
String::from("push(pop - pop)")));
calc.ops.insert(String::from("+"),
make_binop(String::from("Add"),
Box::new(|a, b| a + b),
String::from("push(pop + pop)")));
calc.ops.insert(String::from("*"),
make_binop(String::from("Multiply"),
Box::new(|a, b| a * b),
String::from("push(pop * pop)")));
calc.ops.insert(String::from("/"),
make_binop(String::from("Divide"),
Box::new(|a, b| a / b),
String::from("push(pop / pop)")));
calc.ops.insert(String::from("^"),
make_binop(String::from("Exponentiate"),
Box::new(|a, b| a.powf(b)),
String::from("push(pop ^ pop)")));
calc.ops.insert(String::from("*e^"),
make_binop(String::from("Times Ten to the ..."),
Box::new(|a, b| a * (10.0 as f64).powf(b)),
String::from("push(pop * (10 ^ pop))")));
calc.ops.insert(String::from("/e^"),
make_binop(String::from("Divided by Ten to the ..."),
Box::new(|a, b| a / (10.0 as f64).powf(b)),
String::from("push(pop / (10 ^ pop))")));
calc.ops.insert(String::from("log"),
make_binop(String::from("Logarithm"),
Box::new(|a, b| a.log(b)),
String::from("push(log base pop of pop")));
calc.ops.insert(String::from(">"),
make_binop(String::from("Greater Than"),
Box::new(|a, b| bool_to_f64(a > b)),
String::from("push(pop > pop)")));
calc.ops.insert(String::from("<"),
make_binop(String::from("Less Than"),
Box::new(|a, b| bool_to_f64(a < b)),
String::from("push(pop < pop)")));
calc.ops.insert(String::from("=="),
make_binop(String::from("Equal?"),
Box::new(|a, b| bool_to_f64(a == b)),
String::from("push(pop == pop)")));
// binary logical operations
calc.ops.insert(String::from("nand"),
make_binop(String::from("Nand"),
Box::new(|a, b| bool_to_f64(
!(f64_to_bool(a) && f64_to_bool(b))
)),
String::from("push(not(pop and pop))")));
calc.ops.insert(String::from("and"),
make_binop(String::from("And"),
Box::new(|a, b| bool_to_f64(
f64_to_bool(a) && f64_to_bool(b)
)),
String::from("push(pop and pop)")));
calc.ops.insert(String::from("or"),
make_binop(String::from("Or"),
Box::new(|a, b| bool_to_f64(
f64_to_bool(a) || f64_to_bool(b)
)),
String::from("push(pop or pop)")));
calc.ops.insert(String::from("xor"),
make_binop(String::from("Xor"),
Box::new(|a, b| bool_to_f64(
f64_to_bool(a) ^ f64_to_bool(b)
)),
String::from("push(pop xor pop)")));
// unary floating point operations
calc.ops.insert(String::from("neg"),
make_unop(String::from("Negate"),
Box::new(|x| -x),
String::from("push(whether pip is finite)")));
calc.ops.insert(String::from("ln"),<|fim▁hole|> make_unop(String::from("Logarithm Base 2"),
Box::new(|x| x.log2()),
String::from("push(lg(pop))")));
calc.ops.insert(String::from("inf?"),
make_unop(String::from("Infinite?"),
Box::new(|x| bool_to_f64(x.is_infinite())),
String::from("push(whether pop is infinite)")));
calc.ops.insert(String::from("nan?"),
make_unop(String::from("Not A Number?"),
Box::new(|x| bool_to_f64(x.is_nan())),
String::from("push(whether pop is NaN)")));
calc.ops.insert(String::from("sign"),
make_unop(String::from("Sign"),
Box::new(|x| x.signum()),
String::from("push(sign of pop)")));
calc.ops.insert(String::from("fin?"),
make_unop(String::from("Finite?"),
Box::new(|x| bool_to_f64(x.is_finite())),
String::from("push(whether pip is finite)")));
// unary logical operations
calc.ops.insert(String::from("not"), make_unop(
String::from("Not"),
Box::new(|x| bool_to_f64(!f64_to_bool(x))),
String::from("push(not(pop))")));
// stack manipulation and printing
calc.ops.insert(String::from("print"),
OpSpec{
op: Rc::new(|mut calc| match calc.stack.pop_back() {
None => (calc, vec![String::from("The stack is empty")]),
Some(n) => (calc, vec![format!("{}", n)]),
}),
help: String::from("print(pop)")
}
);
calc.ops.insert(String::from("cp"),
OpSpec{
op: Rc::new(|mut calc| match calc.stack.back().map(|n| n.clone()) {
None => (calc, vec![String::from("'Copy' requires a non-empty stack")]),
Some(n) => {
calc.stack.push_back(n);
(calc, vec![])
}
}),
help: String::from("push(copy(pop))")
}
);
calc.ops.insert(String::from("swap"),
OpSpec{
op: Rc::new(|mut calc| {
let len = calc.stack.len();
if len < 2 {
(calc, vec![format!("'Swap' requires stack size >= 2, current = {}", len)])
} else {
let a = calc.stack.pop_back().unwrap();
let b = calc.stack.pop_back().unwrap();
calc.stack.push_back(a);
calc.stack.push_back(b);
(calc, vec![])
}
}),
help: String::from("a = pop, b = pop, push(a), push(b)")
}
);
calc.ops.insert(String::from("help"),
OpSpec{
op: Rc::new(|calc| {
let msgs: Vec<String> = calc.ops.iter().map(|(k, v)| {
format!("{}:\n\t{}\n", k, v.help)
}).collect();
(calc, msgs)
}),
help: String::from("Display this help message"),
}
);
calc
}
/// Executes a single token on the calculator, returning its new state and some messages.
/// This implementation simply selects an operation based on the provided token
/// and returns the result of executing that operation on the calculator's current state.
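    /// Illustrative example (added, not part of the original source):
    ///
    ///     let calc = Calculator::new();
    ///     let (calc, _) = calc.exec(String::from("2"));
    ///     let (calc, _) = calc.exec(String::from("3"));
    ///     let (calc, _) = calc.exec(String::from("+"));
    ///     let (_calc, msgs) = calc.exec(String::from("print"));
    ///     assert_eq!(msgs, vec![String::from("5")]);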
pub fn exec(mut self, token: String) -> (Self, Vec<String>) {
match f64::from_str(&(*token)) {
Ok(num) => {
self.stack.push_back(num);
(self, vec![])
}
Err(_) => {
let op = self.ops.get(&token).map(|x| x.clone());
match op {
None => (self, vec![format!("Unknown command '{}'", &token)]),
Some(op_spec) => (op_spec.op)(self)
}
}
}
}
}<|fim▁end|> | make_unop(String::from("Natural Logarithm"),
Box::new(|x| x.ln()),
String::from("push(ln(pop))")));
calc.ops.insert(String::from("lg"), |
<|file_name|>shBrushPython.js<|end_file_name|><|fim▁begin|>/**
* SyntaxHighlighter
* http://alexgorbatchev.com/SyntaxHighlighter
*
* SyntaxHighlighter is donationware. If you are using it, please donate.
* http://alexgorbatchev.com/SyntaxHighlighter/donate.html
*
* @version
* 3.0.90 (Sat, 18 Jun 2016 21:01:41 GMT)
*
* @copyright
* Copyright (C) 2004-2013 Alex Gorbatchev.
*
* @license
* Dual licensed under the MIT and GPL licenses.
*/
;(function()
{
// CommonJS
SyntaxHighlighter = SyntaxHighlighter || (typeof require !== 'undefined'? require('shCore').SyntaxHighlighter : null);
function Brush()
{
// Contributed by Gheorghe Milas and Ahmad Sherif
var keywords = 'and assert break class continue def del elif else ' +
'except exec finally for from global if import in is ' +
'lambda not or pass raise return try yield while';
var funcs = '__import__ abs all any apply basestring bin bool buffer callable ' +
'chr classmethod cmp coerce compile complex delattr dict dir ' +
'divmod enumerate eval execfile file filter float format frozenset ' +
'getattr globals hasattr hash help hex id input int intern ' +
'isinstance issubclass iter len list locals long map max min next ' +
'object oct open ord pow print property range raw_input reduce ' +
'reload repr reversed round set setattr slice sorted staticmethod ' +
'str sum super tuple type type unichr unicode vars xrange zip';
var special = 'None True False self cls class_';
this.regexList = [
{ regex: SyntaxHighlighter.regexLib.singleLinePerlComments, css: 'comments' },
{ regex: /^\s*@\w+/gm, css: 'decorator' },
{ regex: /(['\"]{3})([^\1])*?\1/gm, css: 'comments' },
{ regex: /"(?!")(?:\.|\\\"|[^\""\n])*"/gm, css: 'string' },
{ regex: /'(?!')(?:\.|(\\\')|[^\''\n])*'/gm, css: 'string' },
{ regex: /\+|\-|\*|\/|\%|=|==/gm, css: 'keyword' },
{ regex: /\b\d+\.?\w*/g, css: 'value' },
{ regex: new RegExp(this.getKeywords(funcs), 'gmi'), css: 'functions' },
{ regex: new RegExp(this.getKeywords(keywords), 'gm'), css: 'keyword' },
{ regex: new RegExp(this.getKeywords(special), 'gm'), css: 'color1' }
];
this.forHtmlScript(SyntaxHighlighter.regexLib.aspScriptTags);
};
Brush.prototype = new SyntaxHighlighter.Highlighter();
Brush.aliases = ['py', 'python'];
SyntaxHighlighter.brushes.Python = Brush;<|fim▁hole|><|fim▁end|> |
// CommonJS
typeof(exports) != 'undefined' ? exports.Brush = Brush : null;
})(); |
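// Typical page wiring (sketch; assumes the standard SyntaxHighlighter 3.x setup):
//   <pre class="brush: py">print("hello")</pre>
//   SyntaxHighlighter.all();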
<|file_name|>XtraDrugType.java<|end_file_name|><|fim▁begin|>//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2020.05.20 at 02:10:33 PM CEST
//
package ch.fd.invoice450.request;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
 * <p>Java class for xtraDrugType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="xtraDrugType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="indicated" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="iocm_category">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <enumeration value="A"/>
* <enumeration value="B"/>
* <enumeration value="C"/>
* <enumeration value="D"/>
* <enumeration value="E"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="delivery" default="first">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <enumeration value="first"/>
* <enumeration value="repeated"/>
* <enumeration value="permanent"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="regulation_attributes" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" default="0" />
* <attribute name="limitation" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "xtraDrugType")
public class XtraDrugType {
@XmlAttribute(name = "indicated")
protected Boolean indicated;
@XmlAttribute(name = "iocm_category")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
protected String iocmCategory;
@XmlAttribute(name = "delivery")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
protected String delivery;
@XmlAttribute(name = "regulation_attributes")
@XmlSchemaType(name = "unsignedInt")
protected Long regulationAttributes;
@XmlAttribute(name = "limitation")
protected Boolean limitation;
/**
     * Gets the value of the indicated property.
*
* @return
* possible object is
* {@link Boolean }
*
*/
public Boolean isIndicated() {
return indicated;
}
/**
     * Sets the value of the indicated property.
*
* @param value
* allowed object is
* {@link Boolean }
*
*/
public void setIndicated(Boolean value) {
this.indicated = value;
}
/**<|fim▁hole|> *
* @return
* possible object is
* {@link String }
*
*/
public String getIocmCategory() {
return iocmCategory;
}
/**
     * Sets the value of the iocmCategory property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setIocmCategory(String value) {
this.iocmCategory = value;
}
/**
     * Gets the value of the delivery property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDelivery() {
if (delivery == null) {
return "first";
} else {
return delivery;
}
}
/**
     * Sets the value of the delivery property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDelivery(String value) {
this.delivery = value;
}
/**
     * Gets the value of the regulationAttributes property.
*
* @return
* possible object is
* {@link Long }
*
*/
public long getRegulationAttributes() {
if (regulationAttributes == null) {
return 0L;
} else {
return regulationAttributes;
}
}
/**
     * Sets the value of the regulationAttributes property.
*
* @param value
* allowed object is
* {@link Long }
*
*/
public void setRegulationAttributes(Long value) {
this.regulationAttributes = value;
}
/**
     * Gets the value of the limitation property.
*
* @return
* possible object is
* {@link Boolean }
*
*/
public Boolean isLimitation() {
return limitation;
}
/**
     * Sets the value of the limitation property.
*
* @param value
* allowed object is
* {@link Boolean }
*
*/
public void setLimitation(Boolean value) {
this.limitation = value;
}
}<|fim▁end|> | * Ruft den Wert der iocmCategory-Eigenschaft ab. |
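// Hypothetical usage sketch (illustrative; not part of the JAXB-generated source):
//   XtraDrugType drug = new XtraDrugType();
//   drug.setDelivery("repeated");         // schema allows first|repeated|permanent
//   drug.setRegulationAttributes(0L);     // unsignedInt, defaults to 0 when unset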
<|file_name|>mdichild.cpp<|end_file_name|><|fim▁begin|>#include <QtWidgets>
#include "mdichild.h"
MdiChild::MdiChild()
{
setAttribute(Qt::WA_DeleteOnClose);
isUntitled = true;
}
void MdiChild::newFile()
{
static int sequenceNumber = 1;
isUntitled = true;
curFile = tr("document%1.txt").arg(sequenceNumber++);
setWindowTitle(curFile + "[*]");
connect(document(), &QTextDocument::contentsChanged,
this, &MdiChild::documentWasModified);
}
bool MdiChild::loadFile(const QString &fileName)
{
QFile file(fileName);
if (!file.open(QFile::ReadOnly | QFile::Text)) {
QMessageBox::warning(this, tr("MDI"),
tr("Cannot read file %1:\n%2.")
.arg(fileName)
.arg(file.errorString()));<|fim▁hole|> }
QTextStream in(&file);
QApplication::setOverrideCursor(Qt::WaitCursor);
setPlainText(in.readAll());
QApplication::restoreOverrideCursor();
setCurrentFile(fileName);
connect(document(), &QTextDocument::contentsChanged,
this, &MdiChild::documentWasModified);
return true;
}
bool MdiChild::save()
{
if (isUntitled) {
return saveAs();
} else {
return saveFile(curFile);
}
}
bool MdiChild::saveAs()
{
QString fileName = QFileDialog::getSaveFileName(this, tr("Save As"),
curFile);
if (fileName.isEmpty())
return false;
return saveFile(fileName);
}
bool MdiChild::saveFile(const QString &fileName)
{
QFile file(fileName);
if (!file.open(QFile::WriteOnly | QFile::Text)) {
QMessageBox::warning(this, tr("MDI"),
tr("Cannot write file %1:\n%2.")
.arg(QDir::toNativeSeparators(fileName), file.errorString()));
return false;
}
QTextStream out(&file);
QApplication::setOverrideCursor(Qt::WaitCursor);
out << toPlainText();
QApplication::restoreOverrideCursor();
setCurrentFile(fileName);
return true;
}
QString MdiChild::userFriendlyCurrentFile()
{
return strippedName(curFile);
}
void MdiChild::closeEvent(QCloseEvent *event)
{
if (maybeSave()) {
event->accept();
} else {
event->ignore();
}
}
void MdiChild::documentWasModified()
{
setWindowModified(document()->isModified());
}
bool MdiChild::maybeSave()
{
if (!document()->isModified())
return true;
const QMessageBox::StandardButton ret
= QMessageBox::warning(this, tr("MDI"),
tr("'%1' has been modified.\n"
"Do you want to save your changes?")
.arg(userFriendlyCurrentFile()),
QMessageBox::Save | QMessageBox::Discard
| QMessageBox::Cancel);
switch (ret) {
case QMessageBox::Save:
return save();
case QMessageBox::Cancel:
return false;
default:
break;
}
return true;
}
void MdiChild::setCurrentFile(const QString &fileName)
{
curFile = QFileInfo(fileName).canonicalFilePath();
isUntitled = false;
document()->setModified(false);
setWindowModified(false);
setWindowTitle(userFriendlyCurrentFile() + "[*]");
}
QString MdiChild::strippedName(const QString &fullFileName)
{
return QFileInfo(fullFileName).fileName();
}<|fim▁end|> | return false; |
<|file_name|>config.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export const FIREBASE_ROOT = 'https://sweltering-inferno-2727.firebaseio.com/'; |
<|file_name|>LoginController.js<|end_file_name|><|fim▁begin|>import { connect } from 'react-redux'
import { increment, doubleAsync,callApiAsync } from '../modules/Login'
/* This is a container component. Notice it does not contain any JSX,
nor does it import React. This component is **only** responsible for
wiring in the actions and state necessary to render a presentational
component - in this case, the counter: */
import Login from '../views/LoginView'
/* Object of action creators (can also be function that returns object).
Keys will be passed as props to presentational components. Here we are
implementing our wrapper around increment; the component doesn't care */
const mapDispatchToProps = {
increment : () => increment(1),
doubleAsync,
callApiAsync
}
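/* For example, the wired-up creators arrive in LoginView as props:
   this.props.increment() dispatches increment(1) through the store. */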
const mapStateToProps = (state) => ({
counter : state.counter,<|fim▁hole|>
/* Note: mapStateToProps is where you should use `reselect` to create selectors, ie:
import { createSelector } from 'reselect'
const counter = (state) => state.counter
const tripleCount = createSelector(counter, (count) => count * 3)
const mapStateToProps = (state) => ({
counter: tripleCount(state)
})
Selectors can compute derived data, allowing Redux to store the minimal possible state.
Selectors are efficient. A selector is not recomputed unless one of its arguments change.
Selectors are composable. They can be used as input to other selectors.
https://github.com/reactjs/reselect */
export default connect(mapStateToProps, mapDispatchToProps)(Login)<|fim▁end|> | jsonResult: state.jsonResult
}) |
<|file_name|>tcp-connect-timeouts.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-pretty
// compile-flags:--test
// exec-env:RUST_TEST_TASKS=1
// Tests for the connect_timeout() function on a TcpStream. This runs with only
// one test task to ensure that errors are timeouts, not file descriptor
// exhaustion.
#![allow(experimental)]<|fim▁hole|>
use std::io::*;
use std::io::test::*;
use std::io;
use std::time::Duration;
use std::sync::mpsc::channel;
use std::thread::Thread;
#[cfg_attr(target_os = "freebsd", ignore)]
fn eventual_timeout() {
let addr = next_test_ip4();
let (tx1, rx1) = channel();
let (_tx2, rx2) = channel::<()>();
let _t = Thread::spawn(move|| {
let _l = TcpListener::bind(addr).unwrap().listen();
tx1.send(()).unwrap();
let _ = rx2.recv();
});
rx1.recv().unwrap();
let mut v = Vec::new();
for _ in range(0u, 10000) {
match TcpStream::connect_timeout(addr, Duration::milliseconds(100)) {
Ok(e) => v.push(e),
Err(ref e) if e.kind == io::TimedOut => return,
Err(e) => panic!("other error: {}", e),
}
}
panic!("never timed out!");
}
fn timeout_success() {
let addr = next_test_ip4();
let _l = TcpListener::bind(addr).unwrap().listen();
assert!(TcpStream::connect_timeout(addr, Duration::milliseconds(1000)).is_ok());
}
fn timeout_error() {
let addr = next_test_ip4();
assert!(TcpStream::connect_timeout(addr, Duration::milliseconds(1000)).is_err());
}
fn connect_timeout_zero() {
let addr = next_test_ip4();
assert!(TcpStream::connect_timeout(addr, Duration::milliseconds(0)).is_err());
}
fn connect_timeout_negative() {
let addr = next_test_ip4();
assert!(TcpStream::connect_timeout(addr, Duration::milliseconds(-1)).is_err());
}<|fim▁end|> | #![reexport_test_harness_main = "test_main"]
#![allow(unused_imports)] |
<|file_name|>Six_zebra_models30110.py<|end_file_name|><|fim▁begin|>import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((14490.9, 3029.12, 3060.83), (0.7, 0.7, 0.7), 507.685)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((15116.6, 3760.52, 2692.79), (0.7, 0.7, 0.7), 479.978)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((13383.1, 4090.52, 3479.5), (0.7, 0.7, 0.7), 681.834)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((11225.1, 4429.85, 4322.38), (0.7, 0.7, 0.7), 522.532)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((10554.9, 4586.88, 4596.63), (0, 1, 0), 751.925)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((12326, 4639.28, 5766.95), (0.7, 0.7, 0.7), 437.001)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((11370.4, 6195.88, 6398.83), (0.7, 0.7, 0.7), 710.767)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((12294.8, 7399.36, 7205.16), (0.7, 0.7, 0.7), 762.077)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((11975.4, 8902.22, 7425.72), (0.7, 0.7, 0.7), 726.799)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((11370.5, 10459.7, 8176.74), (0.7, 0.7, 0.7), 885.508)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((11026.4, 11464.8, 6823.83), (0.7, 0.7, 0.7), 778.489)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((12450.8, 13012.5, 6529.84), (0.7, 0.7, 0.7), 790.333)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((13874.2, 14489.8, 6268.22), (0.7, 0.7, 0.7), 707.721)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((14235.6, 13007.5, 5753.75), (0.7, 0.7, 0.7), 651.166)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((13215.9, 13710.7, 6892.86), (0.7, 0.7, 0.7), 708.61)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((11980.5, 13227.6, 7763.68), (0.7, 0.7, 0.7), 490.595)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((11432, 11904.7, 8032.92), (0.7, 0.7, 0.7), 591.565)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((10650.9, 10579.8, 8390.35), (0.7, 0.7, 0.7), 581.287)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((11842.6, 9383.56, 9068.83), (0.7, 0.7, 0.7), 789.529)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')<|fim▁hole|> s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((9961.01, 7958.91, 11407.6), (0.7, 0.7, 0.7), 1083.56)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((9762.12, 8128.95, 13085.8), (0.7, 0.7, 0.7), 504.258)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((9105.04, 8055.92, 11818.5), (0.7, 0.7, 0.7), 805.519)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((8098.96, 8945.37, 10136.6), (0.7, 0.7, 0.7), 631.708)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((7113.44, 10486.1, 9008.71), (0.7, 0.7, 0.7), 805.942)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((6624.41, 11283.8, 8528.05), (1, 0.7, 0), 672.697)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((5149.07, 9466.37, 7162.96), (0.7, 0.7, 0.7), 797.863)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((3504.19, 8941.41, 6449.57), (1, 0.7, 0), 735.682)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((3197.3, 7852.11, 7001.47), (0.7, 0.7, 0.7), 602.14)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((2170.99, 6197.96, 8273.91), (0.7, 0.7, 0.7), 954.796)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((2865.17, 6382.4, 7807.34), (0.7, 0.7, 0.7), 1021.88)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((1678.42, 6425.8, 7006.61), (0.7, 0.7, 0.7), 909.323)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((135.134, 4970.61, 6205.73), (0.7, 0.7, 0.7), 621.049)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((571.58, 4380.79, 4957.66), (0.7, 0.7, 0.7), 525.154)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((1541.14, 3448.69, 4309.93), (0.7, 0.7, 0.7), 890.246)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((1849.42, 1776.94, 3839.31), (0.7, 0.7, 0.7), 671.216)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((2383.37, 290.48, 4448.44), (0.7, 0.7, 0.7), 662.672)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((2421.7, 991.476, 5898.51), (0.7, 0.7, 0.7), 646.682)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((1032.67, 1633.14, 5847.07), (0.7, 0.7, 0.7), 769.945)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((1237.16, 3532.79, 5257.35), (0.7, 0.7, 0.7), 606.92)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((553.737, 3534.85, 4213.84), (0.7, 0.7, 0.7), 622.571)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((1601.81, 4124.33, 4851.28), (0.7, 0.7, 0.7), 466.865)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((2243.52, 3644.67, 4704.5), (0.7, 0.7, 0.7), 682.933)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((1631.44, 3925.35, 4647.59), (0.7, 0.7, 0.7), 809.326)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((1142.92, 5369.1, 5674.5), (0.7, 0.7, 0.7), 796.72)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((2626.92, 7819.11, 5319.34), (0.7, 0.7, 0.7), 870.026)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((3047.34, 9026.17, 3950.98), (0.7, 0.7, 0.7), 909.577)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((3457.58, 9145.33, 2883.25), (0, 1, 0), 500.536)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((2644.32, 8950.16, 1073.97), (0.7, 0.7, 0.7), 725.276)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((846.027, 9108.82, -893.839), (0.7, 0.7, 0.7), 570.331)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((-6.51386, 7800.76, -208.332), (0.7, 0.7, 0.7), 492.203)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((-40.7095, 8772.6, 2535.54), (0, 1, 0), 547.7)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((421.267, 8145, 2491.85), (0.7, 0.7, 0.7), 581.921)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((115.381, 6285.36, 2181.62), (0.7, 0.7, 0.7), 555.314)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((658.606, 4858.23, 1911.62), (0.7, 0.7, 0.7), 404.219)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((2406.42, 4665.42, 2429.8), (0.7, 0.7, 0.7), 764.234)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])<|fim▁end|> | marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((11007.5, 8508.02, 10007.1), (0.7, 0.7, 0.7), 623.587)
if "particle_20 geometry" not in marker_sets: |
<|file_name|>serviceworkerjob.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A Job is an abstraction of an async operation in service worker lifecycle propagation.
//! Each Job is uniquely identified by its scope_url, and is keyed accordingly under
//! the script thread. The script thread contains a JobQueue, which stores all scheduled Jobs
//! by multiple service worker clients in a Vec.
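//
// Rough lifecycle sketch (illustrative only; the promise/client construction and
// the `job_queue` handle below are assumptions, not actual Servo API):
//
//   let job = Job::create_job(JobType::Register, scope_url, script_url, promise, &client);
//   job_queue.schedule_job(job, &script_thread); // later: run_job(..), then finish_job(..)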
use dom::bindings::cell::DOMRefCell;
use dom::bindings::error::Error;
use dom::bindings::js::JS;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::client::Client;
use dom::promise::Promise;
use dom::serviceworkerregistration::ServiceWorkerRegistration;
use dom::urlhelper::UrlHelper;
use script_thread::ScriptThread;
use servo_url::ServoUrl;
use std::cmp::PartialEq;
use std::collections::HashMap;
use std::rc::Rc;
use task_source::TaskSource;
use task_source::dom_manipulation::DOMManipulationTaskSource;
#[derive(Clone, Copy, Debug, JSTraceable, PartialEq)]
pub enum JobType {
Register,
Unregister,
Update
}
#[derive(Clone)]
pub enum SettleType {
Resolve(Trusted<ServiceWorkerRegistration>),
Reject(Error)
}
#[must_root]
#[derive(JSTraceable)]
pub struct Job {
pub job_type: JobType,
pub scope_url: ServoUrl,
pub script_url: ServoUrl,
pub promise: Rc<Promise>,
pub equivalent_jobs: Vec<Job>,
// client can be a window client, worker client so `Client` will be an enum in future
pub client: JS<Client>,
pub referrer: ServoUrl
}
impl Job {
#[allow(unrooted_must_root)]
// https://w3c.github.io/ServiceWorker/#create-job-algorithm
pub fn create_job(job_type: JobType,
scope_url: ServoUrl,
script_url: ServoUrl,
promise: Rc<Promise>,
client: &Client) -> Job {
Job {
job_type: job_type,
scope_url: scope_url,
script_url: script_url,
promise: promise,
equivalent_jobs: vec![],
client: JS::from_ref(client),
referrer: client.creation_url()
}
}
#[allow(unrooted_must_root)]
pub fn append_equivalent_job(&mut self, job: Job) {
self.equivalent_jobs.push(job);
}
}
impl PartialEq for Job {
// Equality criteria as described in https://w3c.github.io/ServiceWorker/#dfn-job-equivalent
fn eq(&self, other: &Self) -> bool {
let same_job = self.job_type == other.job_type;
if same_job {
match self.job_type {
JobType::Register | JobType::Update => {
self.scope_url == other.scope_url && self.script_url == other.script_url
},
JobType::Unregister => self.scope_url == other.scope_url
}
} else {
false
}
}
}
#[must_root]
#[derive(JSTraceable)]
pub struct JobQueue(pub DOMRefCell<HashMap<ServoUrl, Vec<Job>>>);
impl JobQueue {
pub fn new() -> JobQueue {
JobQueue(DOMRefCell::new(HashMap::new()))<|fim▁hole|> #[allow(unrooted_must_root)]
// https://w3c.github.io/ServiceWorker/#schedule-job-algorithm
pub fn schedule_job(&self, job: Job, script_thread: &ScriptThread) {
debug!("scheduling {:?} job", job.job_type);
let mut queue_ref = self.0.borrow_mut();
let job_queue = queue_ref.entry(job.scope_url.clone()).or_insert(vec![]);
// Step 1
if job_queue.is_empty() {
let scope_url = job.scope_url.clone();
job_queue.push(job);
let _ = script_thread.schedule_job_queue(scope_url);
debug!("queued task to run newly-queued job");
} else {
// Step 2
let mut last_job = job_queue.pop().unwrap();
if job == last_job && !last_job.promise.is_fulfilled() {
last_job.append_equivalent_job(job);
job_queue.push(last_job);
debug!("appended equivalent job");
} else {
// restore the popped last_job
job_queue.push(last_job);
// and push this new job to job queue
job_queue.push(job);
debug!("pushed onto job queue job");
}
}
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/ServiceWorker/#run-job-algorithm
pub fn run_job(&self, scope_url: ServoUrl, script_thread: &ScriptThread) {
debug!("running a job");
let url = {
let queue_ref = self.0.borrow();
let front_job = {
let job_vec = queue_ref.get(&scope_url);
job_vec.unwrap().first().unwrap()
};
let front_scope_url = front_job.scope_url.clone();
match front_job.job_type {
JobType::Register => self.run_register(front_job, scope_url, script_thread),
JobType::Update => self.update(front_job, script_thread),
JobType::Unregister => unreachable!(),
};
front_scope_url
};
self.finish_job(url, script_thread);
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/ServiceWorker/#register-algorithm
fn run_register(&self, job: &Job, scope_url: ServoUrl, script_thread: &ScriptThread) {
debug!("running register job");
// Step 1-3
if !UrlHelper::is_origin_trustworthy(&job.script_url) {
// Step 1.1
reject_job_promise(job,
Error::Type("Invalid script ServoURL".to_owned()),
script_thread.dom_manipulation_task_source());
// Step 1.2 (see run_job)
return;
} else if job.script_url.origin() != job.referrer.origin() || job.scope_url.origin() != job.referrer.origin() {
// Step 2.1/3.1
reject_job_promise(job,
Error::Security,
script_thread.dom_manipulation_task_source());
// Step 2.2/3.2 (see run_job)
return;
}
// Step 4-5
if let Some(reg) = script_thread.handle_get_registration(&job.scope_url) {
// Step 5.1
if reg.get_uninstalling() {
reg.set_uninstalling(false);
}
// Step 5.3
if let Some(ref newest_worker) = reg.get_newest_worker() {
if (&*newest_worker).get_script_url() == job.script_url {
// Step 5.3.1
resolve_job_promise(job, &*reg, script_thread.dom_manipulation_task_source());
// Step 5.3.2 (see run_job)
return;
}
}
} else {
// Step 6.1
let global = &*job.client.global();
let pipeline = global.pipeline_id();
let new_reg = ServiceWorkerRegistration::new(&*global, &job.script_url, scope_url);
script_thread.handle_serviceworker_registration(&job.scope_url, &*new_reg, pipeline);
}
// Step 7
self.update(job, script_thread)
}
#[allow(unrooted_must_root)]
// https://w3c.github.io/ServiceWorker/#finish-job-algorithm
pub fn finish_job(&self, scope_url: ServoUrl, script_thread: &ScriptThread) {
debug!("finishing previous job");
let run_job = if let Some(job_vec) = (*self.0.borrow_mut()).get_mut(&scope_url) {
assert_eq!(job_vec.first().as_ref().unwrap().scope_url, scope_url);
let _ = job_vec.remove(0);
!job_vec.is_empty()
} else {
warn!("non-existent job vector for Servourl: {:?}", scope_url);
false
};
if run_job {
debug!("further jobs in queue after finishing");
self.run_job(scope_url, script_thread);
}
}
// https://w3c.github.io/ServiceWorker/#update-algorithm
fn update(&self, job: &Job, script_thread: &ScriptThread) {
debug!("running update job");
// Step 1
let reg = match script_thread.handle_get_registration(&job.scope_url) {
Some(reg) => reg,
None => {
let err_type = Error::Type("No registration to update".to_owned());
// Step 2.1
reject_job_promise(job, err_type, script_thread.dom_manipulation_task_source());
// Step 2.2 (see run_job)
return;
}
};
// Step 2
if reg.get_uninstalling() {
let err_type = Error::Type("Update called on an uninstalling registration".to_owned());
// Step 2.1
reject_job_promise(job, err_type, script_thread.dom_manipulation_task_source());
// Step 2.2 (see run_job)
return;
}
// Step 3
let newest_worker = reg.get_newest_worker();
let newest_worker_url = newest_worker.as_ref().map(|w| w.get_script_url());
// Step 4
if newest_worker_url.as_ref() == Some(&job.script_url) && job.job_type == JobType::Update {
let err_type = Error::Type("Invalid script ServoURL".to_owned());
// Step 4.1
reject_job_promise(job, err_type, script_thread.dom_manipulation_task_source());
// Step 4.2 (see run_job)
return;
}
// Step 8
if let Some(newest_worker) = newest_worker {
job.client.set_controller(&*newest_worker);
// Step 8.1
resolve_job_promise(job, &*reg, script_thread.dom_manipulation_task_source());
// Step 8.2 present in run_job
}
// TODO Step 9 (create new service worker)
}
}
fn settle_job_promise(promise: &Promise, settle: SettleType) {
match settle {
SettleType::Resolve(reg) => promise.resolve_native(&*reg.root()),
SettleType::Reject(err) => promise.reject_error(err),
};
}
#[allow(unrooted_must_root)]
fn queue_settle_promise_for_job(job: &Job, settle: SettleType, task_source: &DOMManipulationTaskSource) {
let global = job.client.global();
let promise = TrustedPromise::new(job.promise.clone());
// FIXME(nox): Why are errors silenced here?
let _ = task_source.queue(
task!(settle_promise_for_job: move || {
let promise = promise.root();
settle_job_promise(&promise, settle)
}),
&*global,
);
}
// https://w3c.github.io/ServiceWorker/#reject-job-promise-algorithm
// https://w3c.github.io/ServiceWorker/#resolve-job-promise-algorithm
fn queue_settle_promise(job: &Job, settle: SettleType, task_source: &DOMManipulationTaskSource) {
// Step 1
queue_settle_promise_for_job(job, settle.clone(), task_source);
// Step 2
for job in &job.equivalent_jobs {
queue_settle_promise_for_job(job, settle.clone(), task_source);
}
}
fn reject_job_promise(job: &Job, err: Error, task_source: &DOMManipulationTaskSource) {
queue_settle_promise(job, SettleType::Reject(err), task_source)
}
fn resolve_job_promise(job: &Job, reg: &ServiceWorkerRegistration, task_source: &DOMManipulationTaskSource) {
queue_settle_promise(job, SettleType::Resolve(Trusted::new(reg)), task_source)
}<|fim▁end|> | } |
<|file_name|>create_org_data_czar_policy.py<|end_file_name|><|fim▁begin|>"""
create_org_data_czar_policy.py
Creates an IAM group for an edX org and applies an S3 policy to that group
that allows for read-only access to the group.
"""
import argparse
import boto3
from botocore.exceptions import ClientError
from string import Template
import sys
template = Template("""{
"Version":"2012-10-17",
"Statement": [
{
"Sid": "AllowListingOfOrgFolder",
"Action": ["s3:ListBucket"],
"Effect": "Allow",
"Resource": ["arn:aws:s3:::edx-course-data"],
"Condition":{"StringLike":{"s3:prefix":["$org","$org/*"]}}
},
{
"Sid": "AllowGetBucketLocation",
"Action": ["s3:GetBucketLocation"],
"Effect": "Allow",
"Resource": ["arn:aws:s3:::edx-course-data"]
},
{
"Sid": "AllowGetS3ActionInOrgFolder",
"Effect": "Allow",
"Action": ["s3:GetObject"],
"Resource": ["arn:aws:s3:::edx-course-data/$org/*"]
}
]
}""")
def add_org_group(org, iam_connection):
group_name = "edx-course-data-{org}".format(org=org)
try:
iam_connection.create_group(GroupName=group_name)
except ClientError as bse:
if bse.response['ResponseMetadata']['HTTPStatusCode'] == 409:
pass
else:
print(bse)
try:
iam_connection.put_group_policy(
GroupName=group_name,
PolicyName=group_name,
PolicyDocument=template.substitute(org=org)
)
    except ClientError as bse:
if bse.response['ResponseMetadata']['HTTPStatusCode'] == 409:
pass
else:
print(bse)
print(template.substitute(org=org))
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-o', '--org', help='Name of the org for which to create an IAM '
'role and policy, this should have the same '
'name as the S3 bucket')
group.add_argument('-f', '--file', help='The path to a file containing one org name '
'per line.')
<|fim▁hole|>if args.org:
add_org_group(args.org.rstrip('\n').lower(), iam_connection)
elif args.file:
with open(args.file) as file:
for line in file:
org = line.rstrip('\n').lower()
add_org_group(org, iam_connection)
else:
parser.print_usage()
sys.exit(1)
sys.exit(0)<|fim▁end|> | args = parser.parse_args()
iam_connection = boto3.client('iam') |
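# Example invocations (hypothetical org/file names):
#   python create_org_data_czar_policy.py --org edx
#   python create_org_data_czar_policy.py --file org_names.txt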
<|file_name|>expr_unary.rs<|end_file_name|><|fim▁begin|>#![feature(never_type)]
#![allow(unused_variables)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![deny(unreachable_code)]
<|fim▁hole|> let x: ! = ! { return; }; //~ ERROR unreachable
//~| ERROR cannot apply unary operator `!` to type `!`
}
fn main() { }<|fim▁end|> | fn foo() { |
<|file_name|>util.go<|end_file_name|><|fim▁begin|>package byteutil
import (
"bytes"
// "fmt"
"unsafe"
"github.com/shenwei356/bpool"
)
// ReverseByteSlice reverses a byte slice
func ReverseByteSlice(s []byte) []byte {
// make a copy of s
l := len(s)
t := make([]byte, l)
for i := 0; i < l; i++ {
t[i] = s[i]
}
// reverse
for i, j := 0, l-1; i < j; i, j = i+1, j-1 {
t[i], t[j] = t[j], t[i]
}
return t
}
// ReverseByteSliceInplace reverses a byte slice
func ReverseByteSliceInplace(s []byte) {
// reverse
for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
s[i], s[j] = s[j], s[i]
}
}
var _newline = []byte{'\n'}
// WrapByteSlice wraps byte slice
func WrapByteSlice(s []byte, width int) []byte {
if width < 1 {
return s
}
l := len(s)
if l == 0 {
return s
}
var lines int
if l%width == 0 {
lines = l/width - 1
} else {
lines = int(l / width)
}
// var buffer bytes.Buffer
buffer := bytes.NewBuffer(make([]byte, 0, l+lines))
var start, end int
for i := 0; i <= lines; i++ {
start = i * width
end = (i + 1) * width
if end > l {
end = l
}
buffer.Write(s[start:end])
if i < lines {
// buffer.WriteString("\n")
buffer.Write(_newline)
}
}
return buffer.Bytes()
}
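// Usage sketch: WrapByteSlice([]byte("ACGTACGTAC"), 4) returns
// []byte("ACGT\nACGT\nAC"), i.e. the input wrapped to 4-character lines.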
// WrapByteSlice2 wraps a byte slice; it reuses the supplied bytes.Buffer.
func WrapByteSlice2(s []byte, width int, buffer *bytes.Buffer) ([]byte, *bytes.Buffer) {
if width < 1 {
return s, buffer
}
l := len(s)
if l == 0 {
return s, buffer
}
var lines int
if l%width == 0 {
lines = l/width - 1
} else {
lines = int(l / width)
}
if buffer == nil {
buffer = bytes.NewBuffer(make([]byte, 0, l+lines))
} else {
buffer.Reset()
}
var start, end int
for i := 0; i <= lines; i++ {
start = i * width
end = (i + 1) * width
if end > l {
end = l
}
buffer.Write(s[start:end])
if i < lines {
buffer.Write(_newline)
}
}
return buffer.Bytes(), buffer
}
// BufferedByteSliceWrapper is used to wrap byte slices,
// drawing buffers from a pool of bytes.Buffer objects to reduce GC pressure
type BufferedByteSliceWrapper struct {
pool *bpool.SizedBufferPool
}
// NewBufferedByteSliceWrapper create a new BufferedByteSliceWrapper
func NewBufferedByteSliceWrapper(size, alloc int) *BufferedByteSliceWrapper {
if size < 1 {
panic("buffer number should be > 0")
}
if alloc < 1 {
panic("buffer size should be > 0")
}
return &BufferedByteSliceWrapper{bpool.NewSizedBufferPool(size, alloc)}
}
// NewBufferedByteSliceWrapper2 could pre-alloc space according to length of slice and width
func NewBufferedByteSliceWrapper2(size int, length, width int) *BufferedByteSliceWrapper {
if size < 1 {
// panic("buffer number should be > 0")
size = 1
}
if length < 1 {
// panic("buffer size should be > 0")
length = 1
}
if width <= 0 {
return NewBufferedByteSliceWrapper(size, length)
}
var lines int
if length%width == 0 {
lines = length/width - 1
} else {
lines = int(length / width)
}
return &BufferedByteSliceWrapper{bpool.NewSizedBufferPool(size, length+lines)}
}
// Recycle a buffer
func (w *BufferedByteSliceWrapper) Recycle(b *bytes.Buffer) {
w.pool.Put(b)
}
// Wrap a byte slice. DO NOT FORGET call Recycle() with the returned buffer
func (w *BufferedByteSliceWrapper) Wrap(s []byte, width int) ([]byte, *bytes.Buffer) {
if width < 1 {
return s, nil
}
l := len(s)
if l == 0 {
return s, nil
}
var lines int
if l%width == 0 {
lines = l/width - 1
} else {
lines = int(l / width)
}
// var buffer bytes.Buffer
// buffer := bytes.NewBuffer(make([]byte, 0, l+lines))
buffer := w.pool.Get()
var start, end int
for i := 0; i <= lines; i++ {
start = i * width
end = (i + 1) * width
if end > l {
end = l
}
buffer.Write(s[start:end])
if i < lines {
// buffer.WriteString("\n")
buffer.Write(_newline)
}
}
return buffer.Bytes(), buffer
}
// WrapByteSliceInplace wraps byte slice in place.
// Sadly, it's too slow. Never use this!
func WrapByteSliceInplace(s []byte, width int) []byte {
if width < 1 {
return s
}
var l, lines int
l = len(s)
if l%width == 0 {
lines = l/width - 1
} else {
lines = int(l / width)
}
var end int
j := 0
for i := 0; i <= lines; i++ {
end = (i+1)*width + j
if end >= l {
break
}
// fmt.Printf("len:%d, lines:%d, i:%d, j:%d, end:%d\n", l, lines, i, j, end)<|fim▁hole|> if i < lines {
// https://github.com/golang/go/wiki/SliceTricks
// Sadly, it's too slow
// s = append(s, []byte(" ")[0])
// copy(s[end+1:], s[end:])
// s[end] = []byte("\n")[0]
// slow too
s = append(s[:end], append([]byte("\n"), s[end:]...)...)
l = len(s)
if l%width == 0 {
lines = l/width - 1
} else {
lines = int(l / width)
}
j++
}
}
return s
}
// SubSlice provides similar slice indexing as python with one exception
// that end could be equal to 0.
// So we could get the last element by SubSlice(s, -1, 0)
// or get the whole element by SubSlice(s, 0, 0)
func SubSlice(slice []byte, start int, end int) []byte {
if start == 0 && end == 0 {
return slice
}
if start == end || (start < 0 && end > 0) {
return []byte{}
}
l := len(slice)
s, e := start, end
if s < 0 {
s = l + s
if s < 1 {
s = 0
}
}
if e < 0 {
e = l + e
if e < 0 {
e = 0
}
}
if e == 0 || e > l {
e = l
}
return slice[s:e]
}
// ByteToLower lowers a byte
func ByteToLower(b byte) byte {
if b <= '\u007F' {
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
return b
}
return b
}
// ByteToUpper upper a byte
func ByteToUpper(b byte) byte {
if b <= '\u007F' {
if 'a' <= b && b <= 'z' {
b -= 'a' - 'A'
}
return b
}
return b
}
// MakeQuerySlice is used to replace map.
// see: http://blog.shenwei.me/map-is-not-the-fastest-in-go/
func MakeQuerySlice(letters []byte) []byte {
max := -1
for i := 0; i < len(letters); i++ {
j := int(letters[i])
if max < j {
max = j
}
}
querySlice := make([]byte, max+1)
for i := 0; i < len(letters); i++ {
querySlice[int(letters[i])] = letters[i]
}
return querySlice
}
// Split splits a byte slice by giveen letters.
// It's much faster than regexp.Split
func Split(slice []byte, letters []byte) [][]byte {
querySlice := MakeQuerySlice(letters)
results := [][]byte{}
tmp := []byte{}
var j int
var value byte
var sliceSize = len(querySlice)
for _, b := range slice {
j = int(b)
if j >= sliceSize { // not delimiter byte
tmp = append(tmp, b)
continue
}
value = querySlice[j]
if value == 0 { // not delimiter byte
tmp = append(tmp, b)
continue
} else {
if len(tmp) > 0 {
results = append(results, tmp)
tmp = []byte{}
}
}
}
if len(tmp) > 0 {
results = append(results, tmp)
}
return results
}
// Bytes2Str convert byte slice to string without GC. Warning: it's unsafe!!!
func Bytes2Str(b []byte) string {
return *(*string)(unsafe.Pointer(&b))
}
// CountBytes counts given ASCII characters in a byte slice
func CountBytes(seq, letters []byte) int {
if len(letters) == 0 || len(seq) == 0 {
return 0
}
// do not use map
querySlice := make([]byte, 256)
for i := 0; i < len(letters); i++ {
querySlice[int(letters[i])] = letters[i]
}
var g byte
var n int
for i := 0; i < len(seq); i++ {
g = querySlice[int(seq[i])]
if g > 0 { // not gap
n++
}
}
return n
}<|fim▁end|> | |
<|file_name|>test_tables_client_v1beta1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pandas
import pytest
from google.api_core import exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud import automl_v1beta1
from google.cloud.automl_v1beta1.proto import data_types_pb2
PROJECT = "project"
REGION = "region"
LOCATION_PATH = "projects/{}/locations/{}".format(PROJECT, REGION)
class TestTablesClient(object):
def tables_client(
self, client_attrs={}, prediction_client_attrs={}, gcs_client_attrs={}
):
client_mock = mock.Mock(**client_attrs)
prediction_client_mock = mock.Mock(**prediction_client_attrs)
gcs_client_mock = mock.Mock(**gcs_client_attrs)
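        # These mocks stand in for the generated AutoMl/Prediction/GCS clients,
        # so each test can assert on exact call arguments without network access.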
return automl_v1beta1.TablesClient(
client=client_mock,
prediction_client=prediction_client_mock,
gcs_client=gcs_client_mock,
project=PROJECT,
region=REGION,
)
def test_list_datasets_empty(self):
client = self.tables_client(
{
"list_datasets.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_datasets_not_empty(self):
datasets = ["some_dataset"]
client = self.tables_client(
{
"list_datasets.return_value": datasets,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_dataset"
    def test_get_dataset_no_value(self):
        client = self.tables_client({}, {})
        with pytest.raises(ValueError):
            client.get_dataset()
        client.auto_ml_client.get_dataset.assert_not_called()
def test_get_dataset_name(self):
dataset_actual = "dataset"
client = self.tables_client({"get_dataset.return_value": dataset_actual}, {})
dataset = client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
assert dataset == dataset_actual
def test_get_no_dataset(self):
client = self.tables_client(
{"get_dataset.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
def test_get_dataset_from_empty_list(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list_not_found(self):
client = self.tables_client(
{"list_datasets.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
dataset = client.get_dataset(dataset_display_name="my_dataset")
assert dataset.display_name == "my_dataset"
def test_get_dataset_from_list_ambiguous(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="my_dataset"),
mock.Mock(display_name="not_my_dataset"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_dataset(dataset_display_name="my_dataset")
def test_create_dataset(self):
client = self.tables_client(
{
"location_path.return_value": LOCATION_PATH,
"create_dataset.return_value": mock.Mock(display_name="name"),
},
{},
)
metadata = {"metadata": "values"}
dataset = client.create_dataset("name", metadata=metadata)
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.create_dataset.assert_called_with(
LOCATION_PATH, {"display_name": "name", "tables_dataset_metadata": metadata}
)
assert dataset.display_name == "name"
def test_delete_dataset(self):
dataset = mock.Mock()
dataset.configure_mock(name="name")
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset=dataset)
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_delete_dataset_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
client.delete_dataset(dataset_display_name="not_found")
client.auto_ml_client.delete_dataset.assert_not_called()
def test_delete_dataset_name(self):
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset_name="name")
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_export_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.export_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.export_data.assert_not_called()
def test_export_gcs_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", gcs_output_uri_prefix="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"gcs_destination": {"output_uri_prefix": "uri"}}
)
def test_export_bq_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", bigquery_output_uri="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"bigquery_destination": {"output_uri": "uri"}}
)
def test_import_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.import_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_not_called()
def test_import_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "uri",
}
)
dataframe = pandas.DataFrame({})
client.import_data(
project=PROJECT,
region=REGION,
dataset_name="name",
pandas_dataframe=dataframe,
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "uri"
client.import_data(dataset_name="name", pandas_dataframe=dataframe)
assert client.gcs_client is mockInstance
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uris(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris=["uri", "uri"])
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri", "uri"]}}
)
def test_import_bq_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", bigquery_input_uri="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"bigquery_source": {"input_uri": "uri"}}
)
def test_list_table_specs(self):
client = self.tables_client({"list_table_specs.return_value": None}, {})
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_list_table_specs_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("not found")}, {}
)
with pytest.raises(exceptions.NotFound):
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_get_table_spec(self):
client = self.tables_client({}, {})
client.get_table_spec("name")
client.auto_ml_client.get_table_spec.assert_called_with("name")
def test_get_column_spec(self):
client = self.tables_client({}, {})
client.get_column_spec("name")
client.auto_ml_client.get_column_spec.assert_called_with("name")
def test_list_column_specs(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [],
},
{},
)
client.list_column_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
def test_update_column_spec_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):<|fim▁hole|> client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_display_name_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.update_column_spec(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_name_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column/2", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(dataset_name="name", column_spec_name="column/2")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column/2", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column", nullable=True
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code", "nullable": True},
}
)
def test_update_column_spec_type_code(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code2"}}
)
def test_update_column_spec_type_code_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=True,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": True},
}
)
def test_update_column_spec_type_code_nullable_false(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=False,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": False},
}
)
def test_set_target_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="2",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_target_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_set_weight_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
try:
client.set_weight_column(
dataset_name="name", column_spec_display_name="column2"
)
except exceptions.NotFound:
pass
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_weight_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="1",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_weight_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_weight_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_weight_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": None,
"ml_use_column_spec_id": "3",
},
}
)
def test_set_test_train_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_test_train_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_test_train_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": None,
},
}
)
def test_set_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_time_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": "3"}
)
def test_clear_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
},
{},
)
client.clear_time_column(dataset_name="name")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": None}
)
def test_get_model_evaluation(self):
client = self.tables_client({}, {})
ds = client.get_model_evaluation(model_evaluation_name="x")
client.auto_ml_client.get_model_evaluation.assert_called_with("x")
def test_list_model_evaluations_empty(self):
client = self.tables_client({"list_model_evaluations.return_value": []}, {})
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert ds == []
def test_list_model_evaluations_not_empty(self):
evaluations = ["eval"]
client = self.tables_client(
{
"list_model_evaluations.return_value": evaluations,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert len(ds) == 1
assert ds[0] == "eval"
def test_list_models_empty(self):
client = self.tables_client(
{
"list_models.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_models_not_empty(self):
models = ["some_model"]
client = self.tables_client(
{
"list_models.return_value": models,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_model"
def test_get_model_name(self):
model_actual = "model"
client = self.tables_client({"get_model.return_value": model_actual}, {})
model = client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
assert model == model_actual
def test_get_no_model(self):
client = self.tables_client(
{"get_model.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
def test_get_model_from_empty_list(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list_not_found(self):
client = self.tables_client(
{"list_models.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_model"),
]
},
{},
)
model = client.get_model(model_display_name="my_model")
assert model.display_name == "my_model"
def test_get_model_from_list_ambiguous(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="my_model"),
mock.Mock(display_name="not_my_model"),
mock.Mock(display_name="my_model"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_model(model_display_name="my_model")
def test_delete_model(self):
model = mock.Mock()
model.configure_mock(name="name")
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model=model)
client.auto_ml_client.delete_model.assert_called_with("name")
def test_delete_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
client.delete_model(model_display_name="not_found")
client.auto_ml_client.delete_model.assert_not_called()
def test_delete_model_name(self):
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model_name="name")
client.auto_ml_client.delete_model.assert_called_with("name")
def test_deploy_model_no_args(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.deploy_model()
client.auto_ml_client.deploy_model.assert_not_called()
def test_deploy_model(self):
client = self.tables_client({}, {})
client.deploy_model(model_name="name")
client.auto_ml_client.deploy_model.assert_called_with("name")
def test_deploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.deploy_model(model_display_name="name")
client.auto_ml_client.deploy_model.assert_not_called()
def test_undeploy_model(self):
client = self.tables_client({}, {})
client.undeploy_model(model_name="name")
client.auto_ml_client.undeploy_model.assert_called_with("name")
def test_undeploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.undeploy_model(model_display_name="name")
client.auto_ml_client.undeploy_model.assert_not_called()
def test_create_model(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1000
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {"train_budget_milli_node_hours": 1000},
},
)
def test_create_model_include_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock1],
},
},
)
def test_create_model_exclude_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
exclude_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock2],
},
},
)
def test_create_model_invalid_hours_small(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_hours_large(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
train_budget_milli_node_hours=1000000,
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_no_dataset(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model("my_model", train_budget_milli_node_hours=1000)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_include_exclude(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["a"],
exclude_column_spec_names=["b"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_predict_from_array(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(["1"], model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model", {"row": {"values": [{"string_value": "1"}]}}, None
)
def test_predict_from_dict(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1", "b": "2"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
None,
)
def test_predict_from_dict_with_feature_importance(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(
{"a": "1", "b": "2"}, model_name="my_model", feature_importance=True
)
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
{"feature_importance": "true"},
)
def test_predict_from_dict_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"null_value": 0}]}},
None,
)
def test_predict_all_types(self):
float_type = mock.Mock(type_code=data_types_pb2.FLOAT64)
timestamp_type = mock.Mock(type_code=data_types_pb2.TIMESTAMP)
string_type = mock.Mock(type_code=data_types_pb2.STRING)
array_type = mock.Mock(type_code=data_types_pb2.ARRAY)
struct_type = mock.Mock(type_code=data_types_pb2.STRUCT)
category_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_float = mock.Mock(display_name="float", data_type=float_type)
column_spec_timestamp = mock.Mock(
display_name="timestamp", data_type=timestamp_type
)
column_spec_string = mock.Mock(display_name="string", data_type=string_type)
column_spec_array = mock.Mock(display_name="array", data_type=array_type)
column_spec_struct = mock.Mock(display_name="struct", data_type=struct_type)
column_spec_category = mock.Mock(
display_name="category", data_type=category_type
)
column_spec_null = mock.Mock(display_name="null", data_type=category_type)
model_metadata = mock.Mock(
input_feature_column_specs=[
column_spec_float,
column_spec_timestamp,
column_spec_string,
column_spec_array,
column_spec_struct,
column_spec_category,
column_spec_null,
]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(
{
"float": 1.0,
"timestamp": "EST",
"string": "text",
"array": [1],
"struct": {"a": "b"},
"category": "a",
"null": None,
},
model_name="my_model",
)
client.prediction_client.predict.assert_called_with(
"my_model",
{
"row": {
"values": [
{"number_value": 1.0},
{"string_value": "EST"},
{"string_value": "text"},
{"list_value": [1]},
{"struct_value": {"a": "b"}},
{"string_value": "a"},
{"null_value": 0},
]
}
},
None,
)
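        # The assertion above pins down the value conversion these tests assume
        # the client performs when building a prediction row:
        #
        #     FLOAT64                       -> {"number_value": v}
        #     TIMESTAMP / STRING / CATEGORY -> {"string_value": v}
        #     ARRAY                         -> {"list_value": v}
        #     STRUCT                        -> {"struct_value": v}
        #     missing or None               -> {"null_value": 0}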
def test_predict_from_array_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
with pytest.raises(ValueError):
client.predict([], model_name="my_model")
client.prediction_client.predict.assert_not_called()
def test_batch_predict_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "gs://input",
}
)
dataframe = pandas.DataFrame({})
client.batch_predict(
project=PROJECT,
region=REGION,
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "gs://input"
dataframe = pandas.DataFrame({})
client.batch_predict(
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_gcs(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_bigquery(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
bigquery_input_uri="bq://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"bigquery_source": {"input_uri": "bq://input"}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
def test_batch_predict_mixed(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
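        # As the three tests above show, input and output routing are selected
        # independently: GCS URIs map to "gcs_source"/"gcs_destination" payloads
        # and BigQuery URIs to "bigquery_source"/"bigquery_destination", so a
        # mixed GCS-in / BigQuery-out job is accepted.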
def test_batch_predict_missing_input_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_input_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
bigquery_input_uri=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_model(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.batch_predict(
model_display_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_no_model(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output"
)
client.auto_ml_client.list_models.assert_not_called()
client.prediction_client.batch_predict.assert_not_called()
def test_auto_ml_client_credentials(self):
credentials_mock = mock.Mock()
patch_auto_ml_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient"
)
with patch_auto_ml_client as MockAutoMlClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, auto_ml_client_kwargs = MockAutoMlClient.call_args
assert "credentials" in auto_ml_client_kwargs
assert auto_ml_client_kwargs["credentials"] == credentials_mock
def test_prediction_client_credentials(self):
credentials_mock = mock.Mock()
patch_prediction_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
)
with patch_prediction_client as MockPredictionClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "credentials" in prediction_client_kwargs
assert prediction_client_kwargs["credentials"] == credentials_mock
def test_prediction_client_client_info(self):
client_info_mock = mock.Mock()
patch_prediction_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
)
with patch_prediction_client as MockPredictionClient:
client = automl_v1beta1.TablesClient(client_info=client_info_mock)
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "client_info" in prediction_client_kwargs
assert prediction_client_kwargs["client_info"] == client_info_mock<|fim▁end|> | client.update_column_spec(dataset_name="name", column_spec_name="column2")
client.auto_ml_client.list_table_specs.assert_called_with("name") |
<|file_name|>extern-call-deep2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc;
use std::task;
mod rustrt {
use std::libc;
extern {
pub fn rust_dbg_call(cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t,
data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
count(data - 1u) + 1u
}
}
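// cb and count are mutually recursive across the C boundary: count calls
// rust_dbg_call, which invokes cb, which calls count again, so every level of
// the countdown crosses the FFI boundary. The #[fixed_stack_segment]
// attribute below is the old segmented-stack era requirement that gives the
// foreign call a large enough stack to run on.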
#[fixed_stack_segment] #[inline(never)]<|fim▁hole|> }
}
pub fn main() {
// Make sure we're on a task with small Rust stacks (main currently
// has a large stack)
do task::spawn {
let result = count(1000u);
info!("result = %?", result);
assert_eq!(result, 1000u);
};
}<|fim▁end|> | fn count(n: uint) -> uint {
unsafe {
info!("n = %?", n);
rustrt::rust_dbg_call(cb, n) |
<|file_name|>jinja2.py<|end_file_name|><|fim▁begin|># Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import sys
import jinja2
from django.conf import settings
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.utils import six
from django.utils.module_loading import import_string
from .base import BaseEngine
from .utils import csrf_input_lazy, csrf_token_lazy
class Jinja2(BaseEngine):
app_dirname = 'jinja2'
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
super(Jinja2, self).__init__(params)
environment = options.pop('environment', 'jinja2.Environment')
environment_cls = import_string(environment)
options.setdefault('autoescape', True)
options.setdefault('loader', jinja2.FileSystemLoader(self.template_dirs))
options.setdefault('auto_reload', settings.DEBUG)
options.setdefault('undefined',
jinja2.DebugUndefined if settings.DEBUG else jinja2.Undefined)
self.env = environment_cls(**options)
def from_string(self, template_code):
return Template(self.env.from_string(template_code))
def get_template(self, template_name):
try:
return Template(self.env.get_template(template_name))
except jinja2.TemplateNotFound as exc:
six.reraise(
TemplateDoesNotExist,
TemplateDoesNotExist(exc.name, backend=self),
sys.exc_info()[2],
)
except jinja2.TemplateSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
six.reraise(TemplateSyntaxError, new, sys.exc_info()[2])
class Template(object):
def __init__(self, template):
self.template = template
self.origin = Origin(
name=template.filename, template_name=template.name,<|fim▁hole|>
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context['request'] = request
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
return self.template.render(context)
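    # Rough usage sketch (the config keys are assumptions based on BaseEngine's
    # expectations, not shown in this file):
    #
    #     engine = Jinja2({"NAME": "jinja2", "DIRS": [], "APP_DIRS": False,
    #                      "OPTIONS": {}})
    #     html = engine.from_string("Hello {{ name }}!").render({"name": "x"})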
class Origin(object):
"""
A container to hold debug information as described in the template API
documentation.
"""
def __init__(self, name, template_name):
self.name = name
self.template_name = template_name
def get_exception_info(exception):
"""
Formats exception information for display on the debug page using the
structure described in the template API documentation.
"""
context_lines = 10
lineno = exception.lineno
lines = list(enumerate(exception.source.strip().split("\n"), start=1))
during = lines[lineno - 1][1]
total = len(lines)
top = max(0, lineno - context_lines - 1)
bottom = min(total, lineno + context_lines)
return {
'name': exception.filename,
'message': exception.message,
'source_lines': lines[top:bottom],
'line': lineno,
'before': '',
'during': during,
'after': '',
'total': total,
'top': top,
'bottom': bottom,
}<|fim▁end|> | ) |
<|file_name|>Store2.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import xml.sax.saxutils as xml
def soap_header(from_member_name, friendly_name, proxy, msnp_ver, build_ver,
to_member_name, message_number, security_token, app_id,
lock_key):
"""Returns the SOAP xml header"""
# FIXME : escape the parameters
return """<From memberName="%(from_member_name)s" friendlyName="%(friendly_name)s" xml:lang="en-US" proxy="%(proxy)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/" msnpVer="%(msnp_ver)s" buildVer="%(build_ver)s"/>
<To memberName="%(to_member_name)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Ticket passport="%(passport)s" appid="%(app_id)s" lockkey="%(lock_key)s" xmlns="http://messenger.msn.com/ws/2004/09/oim/"/>
<Sequence xmlns="http://schemas.xmlsoap.org/ws/2003/03/rm">
<Identifier xmlns="http://schemas.xmlsoap.org/ws/2002/07/utility">
http://messenger.msn.com
</Identifier>
<MessageNumber>%(message_number)s</MessageNumber>
</Sequence>""" % { 'from_member_name' : from_member_name,
'friendly_name' : friendly_name,
'proxy' : proxy,
'msnp_ver' : msnp_ver,
'build_ver' : build_ver,
'to_member_name' : to_member_name,
'passport' : xml.escape(security_token),
'app_id' : app_id,
'lock_key' : lock_key,
'message_number' : message_number }
def transport_headers():
"""Returns a dictionary, containing transport (http) headers
to use for the request"""
return {}
def soap_action():
"""Returns the SOAPAction value to pass to the transport
or None if no SOAPAction needs to be specified"""
return "http://messenger.live.com/ws/2006/09/oim/Store2"
def soap_body(message_type, message_content):
"""Returns the SOAP xml body"""
return """<MessageType xmlns="http://messenger.msn.com/ws/2004/09/oim/"><|fim▁hole|> </Content>""" % (message_type, message_content)
def process_response(soap_response):
return True<|fim▁end|> | %s
</MessageType>
<Content xmlns="http://messenger.msn.com/ws/2004/09/oim/">
%s |
<|file_name|>test_trigger.py<|end_file_name|><|fim▁begin|># Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from unittest import mock
from oslo_config import cfg
from solum.api.controllers.v1 import trigger
from solum.tests import base
from solum.tests import fakes
@mock.patch('pecan.request', new_callable=fakes.FakePecanRequest)
@mock.patch('pecan.response', new_callable=fakes.FakePecanResponse)
@mock.patch('solum.api.controllers.v1.trigger.app_handler'
'.AppHandler')
class TestTriggerController(base.BaseTestCase):
def test_trigger_get_workflow_with_empty_body(self, assem_mock,
resp_mock, request_mock):
obj = trigger.TriggerController()
workflow = obj._get_workflow({})
self.assertIsNone(workflow)
def test_trigger_get_workflow_with_deploy(self, assem_mock,
resp_mock, request_mock):
obj = trigger.TriggerController()
query = {'workflow': 'deploy'}
workflow = obj._get_workflow(query)
self.assertEqual(['deploy'], list(workflow))
def test_trigger_get_workflow_with_build_deploy(self, assem_mock,
resp_mock, request_mock):
obj = trigger.TriggerController()
query = {'workflow': 'build+deploy'}
workflow = obj._get_workflow(query)
self.assertEqual(['build', 'deploy'], list(workflow))
def test_trigger_get_workflow_with_all(self, assem_mock,
resp_mock, request_mock):
obj = trigger.TriggerController()
query = {'workflow': 'unittest+build+deploy'}
workflow = obj._get_workflow(query)
self.assertEqual(['unittest', 'build', 'deploy'], list(workflow))
def test_trigger_get_workflow_with_invalid_stage(self, assem_mock,
resp_mock, request_mock):
obj = trigger.TriggerController()
query = {'workflow': 'unittest+unitunitunittest'}
workflow = obj._get_workflow(query)
self.assertEqual(['unittest'], list(workflow))
def test_trigger_process_request_private_repo(self, assem_mock,
resp_mock, request_mock):
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'comment': {'commit_id': 'asdf',
'body': ' SOLUM retry tests ',
'user': {'login': 'u'}},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': True}}<|fim▁hole|> commit_sha, collab_url = obj._process_request(body_dict)
self.assertIsNone(collab_url)
self.assertEqual('asdf', commit_sha)
def test_trigger_process_request_on_valid_pub_repo(self,
assem_mock, resp_mock,
request_mock):
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'comment': {'commit_id': 'asdf',
'body': 'solum retry tests',
'user': {'login': 'u'}},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': False}}
obj = trigger.TriggerController()
commit_sha, collab_url = obj._process_request(body_dict)
self.assertEqual('https://api.github.com/repos/u/r/collaborators/u',
collab_url)
self.assertEqual('asdf', commit_sha)
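        # The expansion asserted above is plain URI-template substitution on
        # GitHub's collaborators_url:
        #     '.../collaborators{/collaborator}' + login 'u'
        #         -> '.../collaborators/u'
        # i.e. roughly collab_url.replace('{/collaborator}', '/' + login).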
@mock.patch('solum.common.policy.check')
def test_trigger_post_with_empty_body(self, mock_policy, assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(400, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_github_webhook(self, mock_policy, assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
body_dict = {'sender': {'url': 'https://api.github.com'},
'action': 'opened',
'pull_request': {'head': {'sha': 'asdf'}},
'repository': {'statuses_url': status_url}}
expected_st_url = 'https://api.github.com/repos/u/r/statuses/asdf'
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(202, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
tw.assert_called_once_with('test_id', 'asdf', expected_st_url, None,
workflow=None)
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_github_comment_webhook(self, mock_policy,
assem_mock, resp_mock,
request_mock):
mock_policy.return_value = True
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'action': 'created',
'comment': {'commit_id': 'asdf',
'body': ' SOLUM retry tests ',
'user': {'login': 'u'}},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': True}}
expected_st_url = 'https://api.github.com/repos/u/r/statuses/asdf'
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(202, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
tw.assert_called_once_with('test_id', 'asdf', expected_st_url, None,
workflow=None)
@mock.patch('httplib2.Http.request')
@mock.patch('solum.common.policy.check')
    # mock.patch decorators apply bottom-up, so the policy.check mock arrives
    # first after self and the Http.request mock second.
    def test_trigger_post_on_mismatch_comment_pub_repo(self, mock_policy,
                                                       http_mock,
                                                       assem_mock, resp_mock,
                                                       request_mock):
mock_policy.return_value = True
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'action': 'created',
'comment': {'commit_id': 'asdf',
'body': 'solum is awesome',
'user': {'login': 'u'}},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': False}}
request_mock.body = json.dumps(body_dict)
http_mock.return_value = ({'status': '204'}, '') # a collaborator
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(403, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('httplib2.Http.request')
@mock.patch('solum.common.policy.check')
    def test_trigger_post_on_valid_comment_pub_repo(self, mock_policy,
                                                    http_mock,
                                                    assem_mock, resp_mock,
                                                    request_mock):
mock_policy.return_value = True
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'action': 'created',
'comment': {'commit_id': 'asdf',
'body': 'solum retry tests',
'user': {'login': 'u'}},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': False}}
expected_st_url = 'https://api.github.com/repos/u/r/statuses/asdf'
expected_clb_url = 'https://api.github.com/repos/u/r/collaborators/u'
request_mock.body = json.dumps(body_dict)
http_mock.return_value = ({'status': '204'}, '') # Valid collaborator
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(202, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
tw.assert_called_once_with('test_id', 'asdf', expected_st_url,
expected_clb_url, workflow=None)
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_comment_missing_login(self, mock_policy,
assem_mock, resp_mock,
request_mock):
mock_policy.return_value = True
cfg.CONF.api.rebuild_phrase = "solum retry tests"
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
collab_url = ('https://api.github.com/repos/u/r/' +
'collaborators{/collaborator}')
body_dict = {'sender': {'url': 'https://api.github.com'},
'comment': {'commit_id': 'asdf',
'body': 'solum retry tests',
'user': 'MISSING_LOGIN'},
'repository': {'statuses_url': status_url,
'collaborators_url': collab_url,
'private': False}}
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(400, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_wrong_github_webhook(self, mock_policy,
assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
status_url = 'https://api.github.com/repos/u/r/statuses/{sha}'
body_dict = {'sender': {'url': 'https://api.github.com'},
'pull_request': {'head': {'sha': 'asdf'}},
'repository': {'HACKED_statuses_url': status_url}}
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(400, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_unknown_git_webhook(self, mock_policy, assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
body_dict = {"pull_request": {"head": {"sha": "asdf"}}}
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(501, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_non_github_webhook(self, mock_policy, assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
body_dict = {"sender": {"url": "https://non-github.com"},
"pull_request": {"head": {"sha": "asdf"}}}
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(501, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called
@mock.patch('solum.common.policy.check')
def test_trigger_post_on_github_ping_webhook(self, mock_policy, assem_mock,
resp_mock, request_mock):
mock_policy.return_value = True
body_dict = {"sender": {"url": "https://api.github.com"},
"zen": "Keep it logically awesome."}
request_mock.body = json.dumps(body_dict)
obj = trigger.TriggerController()
obj.post('test_id')
self.assertEqual(501, resp_mock.status)
tw = assem_mock.return_value.trigger_workflow
assert not tw.called<|fim▁end|> | obj = trigger.TriggerController() |
<|file_name|>closures-in.js<|end_file_name|><|fim▁begin|>M.profile("generators");
function* forOfBlockScope() {
let a = [1, 2, 3, 4, 5, 6, 7, 8];
let b = [10, 11, 12, 13, 14, 15, 16];
const funs = [];
for (const i of a) {
let j = 0;
funs.push(function* iter() {
yield `fo1: ${i} ${j++}`;
});
}
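  // With `const i` and `let j`, each iteration gets a fresh binding, so every
  // generator pushed above captures its own (i, j) pair. The `var` variant
  // below shares a single function-scoped binding, so all of its generators
  // observe the final loop values instead.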
for (var i of a) {
var j = 0;
funs.push(function* iter() {
yield `fo2: ${i} ${j++}`;
});
}
for (const i of a) {
for (let j of b) {
funs.push(function* iter() {
yield `fo3: ${i} ${j++}`;
});
}
}
for (const i of a) {
for (let j of b) {
yield `fo4: ${j}`;
funs.push(function* iter() {<|fim▁hole|> for (const i of a) {
yield `fo6: ${i}`;
for (let j of b) {
funs.push(function* iter() {
yield `fo7: ${i} ${j++}`;
});
}
}
for (const i of a) {
yield `fo8 ${i}`;
for (let j of b) {
yield `fo9: ${i}`;
funs.push(function* iter() {
yield `fo10: ${i} ${j++}`;
});
}
}
for (const i of funs) yield* i();
funs.length = 0;
for (const i of a) {
funs.push(function* iter() {
yield `fo11: ${i}`;
});
}
for (const i of a) {
yield `fo12 ${i}`;
funs.push(function* iter() {
yield `fo13 ${i}`;
});
}
let k = 0;
for (const i of a) {
yield `fo14 ${i} ${k} {m}`;
let m = k;
k++;
if (k === 3) continue;
if (k === 5) break;
funs.push(function* iter() {
yield `fo15 ${i} ${k} {m}`;
});
}
k = 0;
up1: for (const i of a) {
let m = k;
k++;
for (const j of b) {
let n = m;
m++;
if (k === 3) continue up1;
if (k === 5) break up1;
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo18: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
k = 0;
up2: for (const i of a) {
let m = 0;
k++;
yield `fo16: ${i} ${k} ${m}`;
for (const j of b) {
let n = m;
m++;
if (k === 3) continue up2;
if (k === 5) break up2;
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo18: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
k = 0;
up3: for (const i of a) {
let m = 0;
k++;
for (const j of b) {
let n = m;
m++;
yield `fo19 ${i} ${j} ${k} ${m} ${n}`;
if (k === 3) {
continue up3;
}
if (k === 5) break up3;
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo20: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
bl1: {
let k = 0;
yield `fo21: ${i} ${k}`;
up4: for (const i of a) {
let m = 0;
k++;
yield `fo22: ${i} ${k} ${m}`;
for (const j of b) {
let n = m;
m++;
yield `fo23 ${i} ${j} ${k} ${m} ${n}`;
if (k === 3) continue up4;
if (k === 5) break bl1;
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo24: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
}
bl2: {
let k = 0;
yield `fo25`;
up5: for (const i of a) {
let m = 0;
k++;
yield `fo26: ${i} ${k} ${m}`;
for (const j of b) {
let n = m;
m++;
yield `fo27 ${i} ${j} ${k} ${m} ${n}`;
if (k === 3) continue up5;
if (k === 5) break bl2;
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo28: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
}
bl3: {
let k = 0;
up6: for (const i of a) {
let m = 0;
k++;
yield `fo29: ${i} ${k} ${m}`;
for (const j of b) {
let n = m;
m++;
yield `fo30 ${i} ${j} ${k} ${m} ${n}`;
if (k === 3) continue up6;
if (k === 5) {
for (const i of funs) yield* i();
return `r: ${i} ${j} ${k} ${m} ${n}`;
}
if (n === 3) continue;
if (n === 5) break;
funs.push(function* iter() {
n++;
yield `fo31: ${i} ${j} ${k} ${m} ${n}`;
});
}
}
}
}<|fim▁end|> | yield `fo5: ${i} ${j++}`;
});
}
} |
<|file_name|>edp_engine.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.service.edp import base_engine
from sahara.utils import edp
class FakeJobEngine(base_engine.JobEngine):
def cancel_job(self, job_execution):
pass
def get_job_status(self, job_execution):
pass
def run_job(self, job_execution):
return 'engine_job_id', edp.JOB_STATUS_SUCCEEDED, None
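    # Return contract inferred from this stub (not from engine documentation):
    # run_job yields a 3-tuple of (engine-side job id, terminal EDP status,
    # updated job info or None).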
def run_scheduled_job(self, job_execution):
pass
def validate_job_execution(self, cluster, job, data):
pass
@staticmethod
def get_possible_job_config(job_type):
return None
@staticmethod
def get_supported_job_types():
return edp.JOB_TYPES_ALL<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
<|file_name|>report.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include <fstream>
#include <iomanip>
#include <map>
#include <sstream>
#include <string>
#include "jw.hpp"
namespace Zee {
class Report {
public:
Report(std::string title, std::string rowTitle)
: title_(title), rowTitle_(rowTitle) {
rowSize_ = rowTitle_.size();
}
void addColumn(std::string colName, std::string texName = "") {
columns_.push_back(colName);
if (!texName.empty())
columnsTex_.push_back(texName);
else
columnsTex_.push_back(colName);
columnWidth_[colName] = colName.size();
}
void addRow(std::string row) {
entries_[row] = std::map<std::string, std::string>();
if (row.size() > rowSize_) {
rowSize_ = row.size();
}
}
template <typename T>
void addResult(std::string row, std::string column, T result) {
if (entries_.find(row) == entries_.end()) {
JWLogError << "Trying to add result to non-existing row" << endLog;
return;
}
std::stringstream ss;<|fim▁hole|> ss << std::fixed << std::setprecision(1) << result;
entries_[row][column] = ss.str();
entriesTex_[row][column] = ss.str();
if (ss.str().size() > columnWidth_[column]) {
columnWidth_[column] = ss.str().size();
}
}
void addResult(std::string row, std::string column, std::string result,
std::string texResult) {
addResult(row, column, result);
entriesTex_[row][column] = texResult;
}
void print() {
JWLogResult << title_ << endLog;
unsigned int lineSize = rowSize_ + 4;
for (auto col : columnWidth_) {
lineSize += col.second + 2;
}
std::string hline = "";
for (unsigned int i = 0; i < lineSize; ++i) hline.push_back('-');
auto addElement = [](int width, std::stringstream& result,
std::string entry) {
result << std::left << std::setprecision(1) << std::setw(width)
<< std::setfill(' ') << entry;
};
std::stringstream ss;
addElement(rowSize_ + 2, ss, rowTitle_);
ss << "| ";
for (auto& col : columns_) addElement(columnWidth_[col] + 2, ss, col);
JWLogInfo << hline << endLog;
JWLogInfo << ss.str() << endLog;
JWLogInfo << hline << endLog;
for (auto& rowCols : entries_) {
std::stringstream rowSs;
addElement(rowSize_ + 2, rowSs, rowCols.first);
rowSs << "| ";
for (auto& col : columns_) {
addElement(columnWidth_[col] + 2, rowSs, rowCols.second[col]);
}
JWLogInfo << rowSs.str() << endLog;
}
JWLogInfo << hline << endLog;
}
void saveToCSV();
void readFromCSV();
void saveToTex(std::string filename) {
auto replaceTex = [](std::string entry) {
std::string texEntry = entry;
auto pos = entry.find("+-");
if (pos != std::string::npos) {
texEntry =
texEntry.substr(0, pos) + "\\pm" + texEntry.substr(pos + 2);
}
pos = entry.find("%");
if (pos != std::string::npos) {
texEntry =
texEntry.substr(0, pos) + "\\%" + texEntry.substr(pos + 1);
}
return texEntry;
};
std::ofstream fout(filename);
fout << "\\begin{table}" << std::endl;
fout << "\\centering" << std::endl;
fout << "\\begin{tabular}{|l|";
for (unsigned int i = 0; i < columns_.size(); ++i) {
fout << "l";
fout << ((i < (columns_.size() - 1)) ? " " : "|}");
}
fout << std::endl << "\\hline" << std::endl;
fout << "\\textbf{" << rowTitle_ << "} & ";
for (unsigned int i = 0; i < columns_.size(); ++i) {
fout << "$" << columnsTex_[i] << "$";
fout << ((i < (columns_.size() - 1)) ? " & " : "\\\\");
}
fout << std::endl;
fout << "\\hline" << std::endl;
for (auto& rowCols : entriesTex_) {
fout << "\\verb|" << rowCols.first << "| & ";
for (unsigned int i = 0; i < columns_.size(); ++i) {
fout << "$" << replaceTex(rowCols.second[columns_[i]]) << "$";
fout << ((i < (columns_.size() - 1)) ? " & " : "\\\\");
}
fout << std::endl;
}
fout << "\\hline" << std::endl;
fout << "\\end{tabular}" << std::endl;
fout << "\\caption{\\ldots}" << std::endl;
fout << "\\end{table}" << std::endl;
}
private:
std::string title_;
std::string rowTitle_;
std::map<std::string, std::map<std::string, std::string>> entries_;
std::map<std::string, std::map<std::string, std::string>> entriesTex_;
std::vector<std::string> columns_;
std::vector<std::string> columnsTex_;
std::map<std::string, unsigned int> columnWidth_;
unsigned int rowSize_;
};
} // namespace Zee<|fim▁end|> | |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package backup
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol"
"github.com/aws/aws-sdk-go/private/protocol/restjson"
)
// Backup provides the API operation methods for making requests to
// AWS Backup. See this package's package overview docs
// for details on the service.
//
// Backup methods are safe to use concurrently. It is not safe to
// modify or mutate any of the struct's properties though.
type Backup struct {
*client.Client
}
// Used for custom client initialization logic
var initClient func(*client.Client)
// Used for custom request initialization logic
var initRequest func(*request.Request)
// Service information constants
const (
ServiceName = "Backup" // Name of service.
EndpointsID = "backup" // ID to lookup a service endpoint with.
ServiceID = "Backup" // ServiceID is a unique identifier of a specific service.
)
// New creates a new instance of the Backup client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
// mySession := session.Must(session.NewSession())
//
// // Create a Backup client from just a session.
// svc := backup.New(mySession)
//
// // Create a Backup client with additional configuration
// svc := backup.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *Backup {
c := p.ClientConfig(EndpointsID, cfgs...)
return newClient(*c.Config, c.Handlers, c.PartitionID, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, partitionID, endpoint, signingRegion, signingName string) *Backup {
svc := &Backup{
Client: client.New(
cfg,
metadata.ClientInfo{
ServiceName: ServiceName,
ServiceID: ServiceID,
SigningName: signingName,
SigningRegion: signingRegion,
PartitionID: partitionID,
Endpoint: endpoint,<|fim▁hole|> ),
}
// Handlers
svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
svc.Handlers.Build.PushBackNamed(restjson.BuildHandler)
svc.Handlers.Unmarshal.PushBackNamed(restjson.UnmarshalHandler)
svc.Handlers.UnmarshalMeta.PushBackNamed(restjson.UnmarshalMetaHandler)
svc.Handlers.UnmarshalError.PushBackNamed(
protocol.NewUnmarshalErrorHandler(restjson.NewUnmarshalTypedError(exceptionFromCode)).NamedHandler(),
)
// Run custom client initialization if present
if initClient != nil {
initClient(svc.Client)
}
return svc
}
// newRequest creates a new request for a Backup operation and runs any
// custom request initialization.
func (c *Backup) newRequest(op *request.Operation, params, data interface{}) *request.Request {
req := c.NewRequest(op, params, data)
// Run custom request initialization if present
if initRequest != nil {
initRequest(req)
}
return req
}<|fim▁end|> | APIVersion: "2018-11-15",
},
handlers, |
<|file_name|>SchemasManagementTest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Rastislav Szabo <[email protected]>, Lukas Macko <[email protected]>"
__copyright__ = "Copyright 2016, Cisco Systems, Inc."
__license__ = "Apache 2.0"
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# sysrepod must be in PATH
from ConcurrentHelpers import *
import subprocess
import TestModule
import libsysrepoPython3
class SysrepoctlTester(SysrepoTester):
sysrepoctl = "{}/src/sysrepoctl".format(os.path.realpath(os.curdir))
def installModuleStep(self, yang_file, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "-i", "--yang={0}".format(yang_file), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertEqual(rc, 0)
def uninstallModuleFailStep(self, module_name, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "--uninstall", "--module={0}".format(module_name), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertNotEquals(rc, 0)
def uninstallModuleStep(self, module_name, log_level = sr.SR_LL_INF):
self.process = subprocess.Popen([self.sysrepoctl, "--uninstall", "--module={0}".format(module_name), "-L {0}".format(log_level)])
rc = self.process.wait()
self.tc.assertEqual(rc, 0)
class SchemasManagementTest(unittest.TestCase):
@classmethod<|fim▁hole|> TestModule.create_test_module()
def test_ModuleLoading(self):
"""Schemas are loaded on demand. Try to send multiple requests targeting the same model
simultaneously. All of them should receive correct data.
"""
tm = TestManager()
srd = SysrepodDaemonTester("Srd")
tester1 = SysrepoTester("First", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester2 = SysrepoTester("Second", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester3 = SysrepoTester("Third", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester4 = SysrepoTester("Fourth", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
srd.add_step(srd.startDaemonStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
tester4.add_step(tester4.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.restartConnection)
tester2.add_step(tester2.restartConnection)
tester3.add_step(tester3.restartConnection)
tester4.add_step(tester4.restartConnection)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester2.add_step(tester2.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester3.add_step(tester3.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester4.add_step(tester4.getItemsStepExpectedCount, "/test-module:main/*", 19)
srd.add_step(srd.stopDaemonStep)
tm.add_tester(srd)
tm.add_tester(tester1)
tm.add_tester(tester2)
tm.add_tester(tester3)
tm.add_tester(tester4)
tm.run()
def test_module_uninstall(self):
"""A schema can not be uninstalled until it is used by a session.
Test simulates the request of sysrepoctl trying to uninstall/install module.
"""
tmp_file = "/tmp/test-module.yang" # used to reinstall 'test-module' after uninstall
dep_file = "/tmp/referenced-data.yang" # 'test-module' depends on 'referenced-data'
tm = TestManager()
srd = SysrepodDaemonTester("Srd")
tester1 = SysrepoTester("First", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester2 = SysrepoTester("Second", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
tester3 = SysrepoTester("Third", sr.SR_DS_STARTUP, sr.SR_CONN_DAEMON_REQUIRED, False)
admin = SysrepoctlTester()
srd.add_step(srd.startDaemonStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
admin.add_step(admin.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.restartConnection)
tester2.add_step(tester2.restartConnection)
tester3.add_step(tester3.restartConnection)
admin.add_step(admin.waitStep)
srd.add_step(srd.waitStep)
tester1.add_step(tester1.getItemsStepExpectedCount, "/test-module:main/*", 19)
tester2.add_step(tester2.setItemStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
tester3.add_step(tester3.lockModelStep, "test-module")
admin.add_step(admin.waitStep)
#unsuccessful try to uninstall
srd.add_step(srd.waitStep)
tester1.add_step(tester1.waitStep)
tester2.add_step(tester2.waitStep)
tester3.add_step(tester3.waitStep)
admin.add_step(admin.uninstallModuleFailStep, "test-module")
#export schema to file before uninstall and release lock
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester1.add_step(tester1.getSchemaToFileStep, "test-module", tmp_file)
tester2.add_step(tester2.getSchemaToFileStep, "referenced-data", dep_file)
tester3.add_step(tester3.unlockModelStep, "test-module")
#testers 1,2 close the session, tester 3 releases the lock -> module can be uninstalled
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester1.add_step(tester1.stopSession)
tester2.add_step(tester2.stopSession)
tester3.add_step(tester3.waitStep)
#uninstall succeed
srd.add_step(srd.waitStep)
admin.add_step(admin.uninstallModuleStep, "test-module")
tester3.add_step(tester3.waitStep)
#module is uninstalled
srd.add_step(srd.waitStep)
admin.add_step(admin.waitStep)
tester3.add_step(tester3.setItemFailStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
#install module back
srd.add_step(srd.waitStep)
admin.add_step(admin.installModuleStep, tmp_file)
tester3.add_step(tester3.waitStep)
#request work again
srd.add_step(srd.waitStep)
tester3.add_step(tester3.setItemStep, "/test-module:main/string", sr.Val("abcd", sr.SR_STRING_T))
srd.add_step(srd.stopDaemonStep)
tm.add_tester(srd)
tm.add_tester(tester1)
tm.add_tester(tester2)
tm.add_tester(tester3)
tm.add_tester(admin)
tm.run()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | def setUpClass(self): |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png")]<|fim▁hole|>extern crate futures;
#[macro_use]
extern crate log;
extern crate rusoto_core;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*;<|fim▁end|> | //! <p>Use the AWS Elemental MediaTailor SDK to configure scalable ad insertion for your live and VOD content. With AWS Elemental MediaTailor, you can serve targeted ads to viewers while maintaining broadcast quality in over-the-top (OTT) video applications. For information about using the service, including detailed information about the settings covered in this guide, see the AWS Elemental MediaTailor User Guide.<p>Through the SDK, you manage AWS Elemental MediaTailor configurations the same as you do through the console. For example, you specify ad insertion behavior and mapping information for the origin server and the ad decision server (ADS).</p>
//!
//! If you're using the service, you're probably looking for [MediaTailorClient](struct.MediaTailorClient.html) and [MediaTailor](trait.MediaTailor.html).
|
<|file_name|>zerigo.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'ZerigoDNSDriver'
]
import copy
import base64
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
from xml.etree import ElementTree as ET
from libcloud.utils.misc import merge_valid_keys, get_new_obj
from libcloud.utils.xml import findtext, findall
from libcloud.common.base import XmlResponse, ConnectionUserAndKey
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.types import MalformedResponseError
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import ZoneDoesNotExistError, RecordDoesNotExistError
from libcloud.dns.base import DNSDriver, Zone, Record
API_HOST = 'ns.zerigo.com'
API_VERSION = '1.1'
API_ROOT = '/api/%s/' % (API_VERSION)
VALID_ZONE_EXTRA_PARAMS = ['notes', 'tag-list', 'ns1', 'slave-nameservers']
VALID_RECORD_EXTRA_PARAMS = ['notes', 'ttl', 'priority']
# Number of items per page (maximum limit is 1000)
ITEMS_PER_PAGE = 100
class ZerigoError(LibcloudError):
def __init__(self, code, errors):
self.code = code
self.errors = errors or []
def __str__(self):
return 'Errors: %s' % (', '.join(self.errors))
def __repr__(self):
return ('<ZerigoError response code=%s, errors count=%s>' % (
self.code, len(self.errors)))
class ZerigoDNSResponse(XmlResponse):
def success(self):
return self.status in [httplib.OK, httplib.CREATED, httplib.ACCEPTED]
def parse_error(self):
status = int(self.status)
if status == 401:
if not self.body:
raise InvalidCredsError(str(self.status) + ': ' + self.error)
else:
raise InvalidCredsError(self.body)
elif status == 404:
context = self.connection.context
if context['resource'] == 'zone':
raise ZoneDoesNotExistError(value='', driver=self,
zone_id=context['id'])
elif context['resource'] == 'record':
raise RecordDoesNotExistError(value='', driver=self,
record_id=context['id'])
elif status != 503:
try:
body = ET.XML(self.body)
except:
raise MalformedResponseError('Failed to parse XML',
body=self.body)
errors = []
for error in findall(element=body, xpath='error'):
errors.append(error.text)
raise ZerigoError(code=status, errors=errors)
return self.body
class ZerigoDNSConnection(ConnectionUserAndKey):
host = API_HOST
secure = True
responseCls = ZerigoDNSResponse
def add_default_headers(self, headers):
auth_b64 = base64.b64encode(b('%s:%s' % (self.user_id, self.key)))
headers['Authorization'] = 'Basic %s' % (auth_b64.decode('utf-8'))
return headers
def request(self, action, params=None, data='', headers=None,
method='GET'):
if not headers:
headers = {}
if not params:
params = {}
if method in ("POST", "PUT"):
headers = {'Content-Type': 'application/xml; charset=UTF-8'}
return super(ZerigoDNSConnection, self).request(action=action,
params=params,
data=data,
method=method,
headers=headers)
class ZerigoDNSDriver(DNSDriver):
type = Provider.ZERIGO
name = 'Zerigo DNS'
website = 'http://www.zerigo.com/'
connectionCls = ZerigoDNSConnection
RECORD_TYPE_MAP = {
RecordType.A: 'A',
RecordType.AAAA: 'AAAA',
RecordType.CNAME: 'CNAME',
RecordType.MX: 'MX',
RecordType.REDIRECT: 'REDIRECT',
RecordType.TXT: 'TXT',
RecordType.SRV: 'SRV',
RecordType.NAPTR: 'NAPTR',
RecordType.NS: 'NS',
RecordType.PTR: 'PTR',
RecordType.SPF: 'SPF',
RecordType.GEO: 'GEO',
RecordType.URL: 'URL',
}
def iterate_zones(self):
return self._get_more('zones')
def iterate_records(self, zone):
return self._get_more('records', zone=zone)
def get_zone(self, zone_id):
path = API_ROOT + 'zones/%s.xml' % (zone_id)
self.connection.set_context({'resource': 'zone', 'id': zone_id})
data = self.connection.request(path).object
zone = self._to_zone(elem=data)
return zone
def get_record(self, zone_id, record_id):
zone = self.get_zone(zone_id=zone_id)
self.connection.set_context({'resource': 'record', 'id': record_id})
path = API_ROOT + 'hosts/%s.xml' % (record_id)
data = self.connection.request(path).object
record = self._to_record(elem=data, zone=zone)
return record
def create_zone(self, domain, type='master', ttl=None, extra=None):
"""
Create a new zone.
Provider API docs:
https://www.zerigo.com/docs/apis/dns/1.1/zones/create
@inherits: :class:`DNSDriver.create_zone`
"""
path = API_ROOT + 'zones.xml'
zone_elem = self._to_zone_elem(domain=domain, type=type, ttl=ttl,
extra=extra)
data = self.connection.request(action=path,
data=ET.tostring(zone_elem),
method='POST').object
zone = self._to_zone(elem=data)
return zone
def update_zone(self, zone, domain=None, type=None, ttl=None, extra=None):
"""
Update an existing zone.
Provider API docs:
https://www.zerigo.com/docs/apis/dns/1.1/zones/update
@inherits: :class:`DNSDriver.update_zone`
"""
if domain:
raise LibcloudError('Domain cannot be changed', driver=self)
path = API_ROOT + 'zones/%s.xml' % (zone.id)
zone_elem = self._to_zone_elem(domain=domain, type=type, ttl=ttl,
extra=extra)
response = self.connection.request(action=path,
data=ET.tostring(zone_elem),
method='PUT')
assert response.status == httplib.OK
merged = merge_valid_keys(params=copy.deepcopy(zone.extra),
valid_keys=VALID_ZONE_EXTRA_PARAMS,
extra=extra)
updated_zone = get_new_obj(obj=zone, klass=Zone,
attributes={'type': type,
'ttl': ttl,
'extra': merged})
return updated_zone
def create_record(self, name, zone, type, data, extra=None):
"""
Create a new record.
Provider API docs:
https://www.zerigo.com/docs/apis/dns/1.1/hosts/create
@inherits: :class:`DNSDriver.create_record`
"""
path = API_ROOT + 'zones/%s/hosts.xml' % (zone.id)
record_elem = self._to_record_elem(name=name, type=type, data=data,
extra=extra)
response = self.connection.request(action=path,
data=ET.tostring(record_elem),
method='POST')
assert response.status == httplib.CREATED
record = self._to_record(elem=response.object, zone=zone)
return record
def update_record(self, record, name=None, type=None, data=None,
extra=None):
path = API_ROOT + 'hosts/%s.xml' % (record.id)
record_elem = self._to_record_elem(name=name, type=type, data=data,
extra=extra)
response = self.connection.request(action=path,
data=ET.tostring(record_elem),
method='PUT')
assert response.status == httplib.OK
merged = merge_valid_keys(params=copy.deepcopy(record.extra),
valid_keys=VALID_RECORD_EXTRA_PARAMS,
extra=extra)
updated_record = get_new_obj(obj=record, klass=Record,
attributes={'type': type,
'data': data,
'extra': merged})
return updated_record
def delete_zone(self, zone):
path = API_ROOT + 'zones/%s.xml' % (zone.id)
self.connection.set_context({'resource': 'zone', 'id': zone.id})
response = self.connection.request(action=path, method='DELETE')
return response.status == httplib.OK
def delete_record(self, record):
path = API_ROOT + 'hosts/%s.xml' % (record.id)
self.connection.set_context({'resource': 'record', 'id': record.id})
response = self.connection.request(action=path, method='DELETE')
return response.status == httplib.OK
def ex_get_zone_by_domain(self, domain):
"""
Retrieve a zone object by the domain name.
:param domain: The domain which should be used
:type domain: ``str``
:rtype: :class:`Zone`
"""
path = API_ROOT + 'zones/%s.xml' % (domain)
self.connection.set_context({'resource': 'zone', 'id': domain})
data = self.connection.request(path).object
zone = self._to_zone(elem=data)
return zone
def ex_force_slave_axfr(self, zone):
"""
Force a zone transfer.
:param zone: Zone which should be used.
:type zone: :class:`Zone`
:rtype: :class:`Zone`
"""
path = API_ROOT + 'zones/%s/force_slave_axfr.xml' % (zone.id)
self.connection.set_context({'resource': 'zone', 'id': zone.id})
response = self.connection.request(path, method='POST')
assert response.status == httplib.ACCEPTED
return zone
def _to_zone_elem(self, domain=None, type=None, ttl=None, extra=None):
zone_elem = ET.Element('zone', {})
if domain:
domain_elem = ET.SubElement(zone_elem, 'domain')
domain_elem.text = domain
if type:
ns_type_elem = ET.SubElement(zone_elem, 'ns-type')
if type == 'master':
ns_type_elem.text = 'pri_sec'
elif type == 'slave':
if not extra or 'ns1' not in extra:
raise LibcloudError('ns1 extra attribute is required ' +
'when zone type is slave', driver=self)
ns_type_elem.text = 'sec'
ns1_elem = ET.SubElement(zone_elem, 'ns1')
ns1_elem.text = extra['ns1']
elif type == 'std_master':
# TODO: Each driver should provide supported zone types
# Slave name servers are elsewhere
if not extra or 'slave-nameservers' not in extra:
raise LibcloudError('slave-nameservers extra ' +
'attribute is required when zone ' +
'type is std_master', driver=self)
ns_type_elem.text = 'pri'
slave_nameservers_elem = ET.SubElement(zone_elem,
'slave-nameservers')
slave_nameservers_elem.text = extra['slave-nameservers']
if ttl:
default_ttl_elem = ET.SubElement(zone_elem, 'default-ttl')
default_ttl_elem.text = str(ttl)
if extra and 'tag-list' in extra:
tags = extra['tag-list']
tags_elem = ET.SubElement(zone_elem, 'tag-list')
tags_elem.text = ' '.join(tags)
return zone_elem
def _to_record_elem(self, name=None, type=None, data=None, extra=None):
record_elem = ET.Element('host', {})
if name:
name_elem = ET.SubElement(record_elem, 'hostname')
name_elem.text = name
if type is not None:
type_elem = ET.SubElement(record_elem, 'host-type')
type_elem.text = self.RECORD_TYPE_MAP[type]
if data:
data_elem = ET.SubElement(record_elem, 'data')
data_elem.text = data
if extra:
if 'ttl' in extra:
ttl_elem = ET.SubElement(record_elem, 'ttl',
{'type': 'integer'})
ttl_elem.text = str(extra['ttl'])
if 'priority' in extra:
# Only MX and SRV records support priority
priority_elem = ET.SubElement(record_elem, 'priority',
{'type': 'integer'})
priority_elem.text = str(extra['priority'])
if 'notes' in extra:
notes_elem = ET.SubElement(record_elem, 'notes')
notes_elem.text = extra['notes']
return record_elem
def _to_zones(self, elem):
zones = []
for item in findall(element=elem, xpath='zone'):
zone = self._to_zone(elem=item)
zones.append(zone)
return zones
def _to_zone(self, elem):
id = findtext(element=elem, xpath='id')
domain = findtext(element=elem, xpath='domain')
type = findtext(element=elem, xpath='ns-type')
type = 'master' if type.find('pri') == 0 else 'slave'
ttl = findtext(element=elem, xpath='default-ttl')
hostmaster = findtext(element=elem, xpath='hostmaster')
custom_ns = findtext(element=elem, xpath='custom-ns')
custom_nameservers = findtext(element=elem, xpath='custom-nameservers')
notes = findtext(element=elem, xpath='notes')
nx_ttl = findtext(element=elem, xpath='nx-ttl')
slave_nameservers = findtext(element=elem, xpath='slave-nameservers')
tags = findtext(element=elem, xpath='tag-list')
tags = tags.split(' ') if tags else []
extra = {'hostmaster': hostmaster, 'custom-ns': custom_ns,
'custom-nameservers': custom_nameservers, 'notes': notes,
'nx-ttl': nx_ttl, 'slave-nameservers': slave_nameservers,
'tags': tags}
zone = Zone(id=str(id), domain=domain, type=type, ttl=int(ttl),
driver=self, extra=extra)
return zone
def _to_records(self, elem, zone):
records = []
for item in findall(element=elem, xpath='host'):
record = self._to_record(elem=item, zone=zone)
records.append(record)
return records
def _to_record(self, elem, zone):
id = findtext(element=elem, xpath='id')
name = findtext(element=elem, xpath='hostname')
type = findtext(element=elem, xpath='host-type')
type = self._string_to_record_type(type)
data = findtext(element=elem, xpath='data')
notes = findtext(element=elem, xpath='notes', no_text_value=None)
state = findtext(element=elem, xpath='state', no_text_value=None)
fqdn = findtext(element=elem, xpath='fqdn', no_text_value=None)
priority = findtext(element=elem, xpath='priority', no_text_value=None)
ttl = findtext(element=elem, xpath='ttl', no_text_value=None)
if not name:
name = None
if ttl:
ttl = int(ttl)
extra = {'notes': notes, 'state': state, 'fqdn': fqdn,
'priority': priority, 'ttl': ttl}
record = Record(id=id, name=name, type=type, data=data,
zone=zone, driver=self, extra=extra)
return record
def _get_more(self, rtype, **kwargs):
exhausted = False
last_key = None
while not exhausted:
items, last_key, exhausted = self._get_data(rtype, last_key,
**kwargs)
for item in items:
yield item
def _get_data(self, rtype, last_key, **kwargs):
# Note: last_key in this case really is a "last_page".
# TODO: Update base driver and change last_key to something more
# generic - e.g. marker
params = {}
params['per_page'] = ITEMS_PER_PAGE
params['page'] = last_key + 1 if last_key else 1
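# Example of the paging math (assuming ITEMS_PER_PAGE = 100): listing 250
# records requests page=1, 2, 3; after page 3, 3 * 100 >= 250 (taken from
# the x-query-count header below), so _get_more() stops iterating.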
if rtype == 'zones':
path = API_ROOT + 'zones.xml'
response = self.connection.request(path, params=params)
transform_func = self._to_zones
elif rtype == 'records':
zone = kwargs['zone']
path = API_ROOT + 'zones/%s/hosts.xml' % (zone.id)
self.connection.set_context({'resource': 'zone', 'id': zone.id})
response = self.connection.request(path, params=params)
transform_func = self._to_records
exhausted = False
result_count = int(response.headers.get('x-query-count', 0))
if (params['page'] * ITEMS_PER_PAGE) >= result_count:
exhausted = True<|fim▁hole|> if response.status == httplib.OK:
items = transform_func(elem=response.object, **kwargs)
return items, params['page'], exhausted
else:
return [], None, True<|fim▁end|> | |
<|file_name|>45_regularexpressions.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"fmt"
"regexp"
)
func main() {<|fim▁hole|>
fmt.Println(r.MatchString("peach"))
fmt.Println(r.FindString("peach punch"))
fmt.Println(r.FindStringIndex("peach punch"))
fmt.Println(r.FindStringSubmatch("peach punch"))
fmt.Println(r.FindStringSubmatchIndex("peach punch"))
fmt.Println(r.FindAllString("peach punch pinch", -1))
fmt.Println(r.FindAllStringSubmatchIndex("peach punch pinch", -1))
fmt.Println(r.FindAllString("peach punch pinch", 2))
fmt.Println(r.Match([]byte("peach")))
r = regexp.MustCompile("p([a+z])ch")
fmt.Println(r)
fmt.Println(r.ReplaceAllString("a peach", "<fruit>"))
in := []byte("a peach")
out := r.ReplaceAllFunc(in, bytes.ToUpper)
fmt.Println(string(out))
}<|fim▁end|> | match, _ := regexp.MatchString("p([a-z]+)ch", "peach")
fmt.Println(match)
r, _ := regexp.Compile("p([a-z]+)ch") |
<|file_name|>options.js<|end_file_name|><|fim▁begin|>const Combinatorics = require('js-combinatorics');
const widths = [null, 20, 60];
const heights = [null, 40, 80];
const horizontalAlignments = [null, 'left', 'right', 'center'];
const verticalAlignments = [null, 'bottom', 'top', 'center'];
const orients = [null, 'horizontal', 'vertical'];
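// Sketch of what cartesianProduct produces (illustrative, tiny inputs):
// Combinatorics.cartesianProduct([20, 60], ['left']).toArray()
// // => [[20, 'left'], [60, 'left']] -- one array per option combination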
const caps = [null, 10, 20];<|fim▁hole|> combinations: Combinatorics.cartesianProduct(
widths, // Bar chart needs a width
heights.slice(1),
horizontalAlignments,
verticalAlignments
).toArray()
},
boxPlot: {
keys: ['width', 'orient', 'cap'],
combinations: Combinatorics.cartesianProduct(
widths,
orients,
caps
).toArray()
},
candlestick: {
keys: ['width'],
combinations: Combinatorics.cartesianProduct(
widths
).toArray()
},
errorBar: {
keys: ['width', 'orient'],
combinations: Combinatorics.cartesianProduct(
widths,
orients
).toArray()
},
ohlc: {
keys: ['width', 'orient'],
combinations: Combinatorics.cartesianProduct(
widths,
orients
).toArray()
}
};
module.exports = results;<|fim▁end|> |
const results = {
bar: {
keys: ['width', 'height', 'horizontalAlign', 'verticalAlign'], |
<|file_name|>key.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2012 The Paccoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <map>
#include <openssl/ecdsa.h>
#include <openssl/obj_mac.h>
#include "key.h"
#include "util.h"
// Generate a private key from just the secret parameter
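// (Informal sketch: the EC_POINT_mul call below computes pub_key = priv_key * G,
// where G is the generator point carried by the curve's EC_GROUP.)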
int EC_KEY_regenerate_key(EC_KEY *eckey, BIGNUM *priv_key)
{
int ok = 0;
BN_CTX *ctx = NULL;
EC_POINT *pub_key = NULL;
if (!eckey) return 0;
const EC_GROUP *group = EC_KEY_get0_group(eckey);
if ((ctx = BN_CTX_new()) == NULL)
goto err;
pub_key = EC_POINT_new(group);
if (pub_key == NULL)
goto err;
if (!EC_POINT_mul(group, pub_key, priv_key, NULL, NULL, ctx))
goto err;
EC_KEY_set_private_key(eckey,priv_key);
EC_KEY_set_public_key(eckey,pub_key);
ok = 1;
err:
if (pub_key)
EC_POINT_free(pub_key);
if (ctx != NULL)
BN_CTX_free(ctx);
return(ok);
}
// Perform ECDSA key recovery (see SEC1 4.1.6) for curves over (mod p)-fields
// recid selects which key is recovered
// if check is nonzero, additional checks are performed
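// (Informal sketch of the recovery below: a candidate point R is rebuilt from r
// -- x = r + (recid/2)*order, y parity from recid%2 -- and the key is then
// Q = r^-1 * (s*R - e*G), which the code forms as eor*G + sor*R.)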
int ECDSA_SIG_recover_key_GFp(EC_KEY *eckey, ECDSA_SIG *ecsig, const unsigned char *msg, int msglen, int recid, int check)
{
if (!eckey) return 0;
int ret = 0;
BN_CTX *ctx = NULL;
BIGNUM *x = NULL;
BIGNUM *e = NULL;
BIGNUM *order = NULL;
BIGNUM *sor = NULL;
BIGNUM *eor = NULL;
BIGNUM *field = NULL;
EC_POINT *R = NULL;
EC_POINT *O = NULL;
EC_POINT *Q = NULL;
BIGNUM *rr = NULL;
BIGNUM *zero = NULL;
int n = 0;
int i = recid / 2;
const EC_GROUP *group = EC_KEY_get0_group(eckey);
if ((ctx = BN_CTX_new()) == NULL) { ret = -1; goto err; }
BN_CTX_start(ctx);
order = BN_CTX_get(ctx);
if (!EC_GROUP_get_order(group, order, ctx)) { ret = -2; goto err; }
x = BN_CTX_get(ctx);
if (!BN_copy(x, order)) { ret=-1; goto err; }
if (!BN_mul_word(x, i)) { ret=-1; goto err; }
if (!BN_add(x, x, ecsig->r)) { ret=-1; goto err; }
field = BN_CTX_get(ctx);
if (!EC_GROUP_get_curve_GFp(group, field, NULL, NULL, ctx)) { ret=-2; goto err; }
if (BN_cmp(x, field) >= 0) { ret=0; goto err; }
if ((R = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
if (!EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)) { ret=0; goto err; }
if (check)
{
if ((O = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
if (!EC_POINT_mul(group, O, NULL, R, order, ctx)) { ret=-2; goto err; }
if (!EC_POINT_is_at_infinity(group, O)) { ret = 0; goto err; }
}
if ((Q = EC_POINT_new(group)) == NULL) { ret = -2; goto err; }
n = EC_GROUP_get_degree(group);
e = BN_CTX_get(ctx);
if (!BN_bin2bn(msg, msglen, e)) { ret=-1; goto err; }
if (8*msglen > n) BN_rshift(e, e, 8-(n & 7));
zero = BN_CTX_get(ctx);
if (!BN_zero(zero)) { ret=-1; goto err; }
if (!BN_mod_sub(e, zero, e, order, ctx)) { ret=-1; goto err; }
rr = BN_CTX_get(ctx);
if (!BN_mod_inverse(rr, ecsig->r, order, ctx)) { ret=-1; goto err; }
sor = BN_CTX_get(ctx);
if (!BN_mod_mul(sor, ecsig->s, rr, order, ctx)) { ret=-1; goto err; }
eor = BN_CTX_get(ctx);
if (!BN_mod_mul(eor, e, rr, order, ctx)) { ret=-1; goto err; }
if (!EC_POINT_mul(group, Q, eor, R, sor, ctx)) { ret=-2; goto err; }
if (!EC_KEY_set_public_key(eckey, Q)) { ret=-2; goto err; }
ret = 1;
err:
if (ctx) {
BN_CTX_end(ctx);
BN_CTX_free(ctx);
}
if (R != NULL) EC_POINT_free(R);
if (O != NULL) EC_POINT_free(O);
if (Q != NULL) EC_POINT_free(Q);
return ret;
}
void CKey::SetCompressedPubKey()
{
EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED);
fCompressedPubKey = true;
}
void CKey::Reset()
{
fCompressedPubKey = false;
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
if (pkey == NULL)
throw key_error("CKey::CKey() : EC_KEY_new_by_curve_name failed");
fSet = false;
}
CKey::CKey()
{
Reset();
}
CKey::CKey(const CKey& b)
{
pkey = EC_KEY_dup(b.pkey);
if (pkey == NULL)
throw key_error("CKey::CKey(const CKey&) : EC_KEY_dup failed");
fSet = b.fSet;
}
CKey& CKey::operator=(const CKey& b)
{
if (!EC_KEY_copy(pkey, b.pkey))
throw key_error("CKey::operator=(const CKey&) : EC_KEY_copy failed");
fSet = b.fSet;
return (*this);
}
CKey::~CKey()
{
EC_KEY_free(pkey);
}
bool CKey::IsNull() const
{
return !fSet;
}
bool CKey::IsCompressed() const
{
return fCompressedPubKey;
}
void CKey::MakeNewKey(bool fCompressed)
{
if (!EC_KEY_generate_key(pkey))
throw key_error("CKey::MakeNewKey() : EC_KEY_generate_key failed");
if (fCompressed)
SetCompressedPubKey();
fSet = true;
}
bool CKey::SetPrivKey(const CPrivKey& vchPrivKey)
{
const unsigned char* pbegin = &vchPrivKey[0];
if (!d2i_ECPrivateKey(&pkey, &pbegin, vchPrivKey.size()))
return false;
fSet = true;
return true;
}
bool CKey::SetSecret(const CSecret& vchSecret, bool fCompressed)
{
EC_KEY_free(pkey);
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
if (pkey == NULL)
throw key_error("CKey::SetSecret() : EC_KEY_new_by_curve_name failed");
if (vchSecret.size() != 32)
throw key_error("CKey::SetSecret() : secret must be 32 bytes");
BIGNUM *bn = BN_bin2bn(&vchSecret[0],32,BN_new());
if (bn == NULL)
throw key_error("CKey::SetSecret() : BN_bin2bn failed");
if (!EC_KEY_regenerate_key(pkey,bn))
{
BN_clear_free(bn);
throw key_error("CKey::SetSecret() : EC_KEY_regenerate_key failed");
}
BN_clear_free(bn);
fSet = true;
if (fCompressed || fCompressedPubKey)
SetCompressedPubKey();
return true;
}
CSecret CKey::GetSecret(bool &fCompressed) const
{
CSecret vchRet;
vchRet.resize(32);
const BIGNUM *bn = EC_KEY_get0_private_key(pkey);
if (bn == NULL)
throw key_error("CKey::GetSecret() : EC_KEY_get0_private_key failed");
int nBytes = BN_num_bytes(bn);
int n=BN_bn2bin(bn,&vchRet[32 - nBytes]);
if (n != nBytes)
throw key_error("CKey::GetSecret(): BN_bn2bin failed");
fCompressed = fCompressedPubKey;
return vchRet;
}
CPrivKey CKey::GetPrivKey() const
{
int nSize = i2d_ECPrivateKey(pkey, NULL);
if (!nSize)
throw key_error("CKey::GetPrivKey() : i2d_ECPrivateKey failed");
CPrivKey vchPrivKey(nSize, 0);
unsigned char* pbegin = &vchPrivKey[0];
if (i2d_ECPrivateKey(pkey, &pbegin) != nSize)
throw key_error("CKey::GetPrivKey() : i2d_ECPrivateKey returned unexpected size");
return vchPrivKey;
}
bool CKey::SetPubKey(const std::vector<unsigned char>& vchPubKey)
{
const unsigned char* pbegin = &vchPubKey[0];
if (!o2i_ECPublicKey(&pkey, &pbegin, vchPubKey.size()))
return false;
fSet = true;
if (vchPubKey.size() == 33)
SetCompressedPubKey();
return true;
}
std::vector<unsigned char> CKey::GetPubKey() const
{
int nSize = i2o_ECPublicKey(pkey, NULL);
if (!nSize)
throw key_error("CKey::GetPubKey() : i2o_ECPublicKey failed");
std::vector<unsigned char> vchPubKey(nSize, 0);
unsigned char* pbegin = &vchPubKey[0];
if (i2o_ECPublicKey(pkey, &pbegin) != nSize)
throw key_error("CKey::GetPubKey() : i2o_ECPublicKey returned unexpected size");
return vchPubKey;
}
bool CKey::Sign(uint256 hash, std::vector<unsigned char>& vchSig)
{
unsigned int nSize = ECDSA_size(pkey);
vchSig.resize(nSize); // Make sure it is big enough
if (!ECDSA_sign(0, (unsigned char*)&hash, sizeof(hash), &vchSig[0], &nSize, pkey))
{
vchSig.clear();
return false;
}
vchSig.resize(nSize); // Shrink to fit actual size
return true;
}
// create a compact signature (65 bytes), which allows reconstructing the used public key
// The format is one header byte, followed by two times 32 bytes for the serialized r and s values.
// The header byte: 0x1B = first key with even y, 0x1C = first key with odd y,
// 0x1D = second key with even y, 0x1E = second key with odd y
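// Layout sketch (derived from the code below; indices inclusive):
//   vchSig[0]      header = 27 + recid (+4 when the public key is compressed)
//   vchSig[1..32]  r, big-endian, left-padded with zero bytes
//   vchSig[33..64] s, big-endian, left-padded with zero bytes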
bool CKey::SignCompact(uint256 hash, std::vector<unsigned char>& vchSig)
{
bool fOk = false;
ECDSA_SIG *sig = ECDSA_do_sign((unsigned char*)&hash, sizeof(hash), pkey);
if (sig==NULL)
return false;
vchSig.clear();
vchSig.resize(65,0);
int nBitsR = BN_num_bits(sig->r);
int nBitsS = BN_num_bits(sig->s);
if (nBitsR <= 256 && nBitsS <= 256)
{
int nRecId = -1;<|fim▁hole|> if (fCompressedPubKey)
keyRec.SetCompressedPubKey();
if (ECDSA_SIG_recover_key_GFp(keyRec.pkey, sig, (unsigned char*)&hash, sizeof(hash), i, 1) == 1)
if (keyRec.GetPubKey() == this->GetPubKey())
{
nRecId = i;
break;
}
}
if (nRecId == -1)
throw key_error("CKey::SignCompact() : unable to construct recoverable key");
vchSig[0] = nRecId+27+(fCompressedPubKey ? 4 : 0);
BN_bn2bin(sig->r,&vchSig[33-(nBitsR+7)/8]);
BN_bn2bin(sig->s,&vchSig[65-(nBitsS+7)/8]);
fOk = true;
}
ECDSA_SIG_free(sig);
return fOk;
}
// reconstruct public key from a compact signature
// This is only slightly more CPU intensive than just verifying it.
// If this function succeeds, the recovered public key is guaranteed to be valid
// (the signature is a valid signature of the given data for that key)
bool CKey::SetCompactSignature(uint256 hash, const std::vector<unsigned char>& vchSig)
{
if (vchSig.size() != 65)
return false;
int nV = vchSig[0];
if (nV<27 || nV>=35)
return false;
ECDSA_SIG *sig = ECDSA_SIG_new();
BN_bin2bn(&vchSig[1],32,sig->r);
BN_bin2bn(&vchSig[33],32,sig->s);
EC_KEY_free(pkey);
pkey = EC_KEY_new_by_curve_name(NID_secp256k1);
if (nV >= 31)
{
SetCompressedPubKey();
nV -= 4;
}
if (ECDSA_SIG_recover_key_GFp(pkey, sig, (unsigned char*)&hash, sizeof(hash), nV - 27, 0) == 1)
{
fSet = true;
ECDSA_SIG_free(sig);
return true;
}
return false;
}
bool CKey::Verify(uint256 hash, const std::vector<unsigned char>& vchSig)
{
// -1 = error, 0 = bad sig, 1 = good
if (ECDSA_verify(0, (unsigned char*)&hash, sizeof(hash), &vchSig[0], vchSig.size(), pkey) != 1)
return false;
return true;
}
bool CKey::VerifyCompact(uint256 hash, const std::vector<unsigned char>& vchSig)
{
CKey key;
if (!key.SetCompactSignature(hash, vchSig))
return false;
if (GetPubKey() != key.GetPubKey())
return false;
return true;
}
bool CKey::IsValid()
{
if (!fSet)
return false;
bool fCompr;
CSecret secret = GetSecret(fCompr);
CKey key2;
key2.SetSecret(secret, fCompr);
return GetPubKey() == key2.GetPubKey();
}<|fim▁end|> | for (int i=0; i<4; i++)
{
CKey keyRec;
keyRec.fSet = true; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------
# jsonrpc - jsonrpc interface for XBMC-compatible remotes
# -----------------------------------------------------------------------
# $Id$
#
# JSONRPC and XBMC eventserver to be used for XBMC-compatible
# remotes. Only tested with Yatse so far. If something is not working,
# do not blame the remote, blame this plugin.<|fim▁hole|># -----------------------------------------------------------------------
# Freevo - A Home Theater PC framework
# Copyright (C) 2014 Dirk Meyer, et al.
#
# First Edition: Dirk Meyer <https://github.com/Dischi>
# Maintainer: Dirk Meyer <https://github.com/Dischi>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# ----------------------------------------------------------------------- */
# python imports
import os
import logging
import socket
import urllib
# kaa imports
import kaa
import kaa.beacon
# freevo imports
from ... import core as freevo
# get logging object
log = logging.getLogger('freevo')
# generic functions
import utils
import eventserver
# jsonrpc callbacks
import videolibrary as VideoLibrary
import player as Player
import playlist as Playlist
class PluginInterface( freevo.Plugin ):
"""
JSONRPC and XBMC eventserver to be used for XBMC-compatible remotes
"""
@kaa.coroutine()
def plugin_activate(self, level):
"""
Activate the plugin
"""
super(PluginInterface, self).plugin_activate(level)
self.httpserver = freevo.get_plugin('httpserver')
if not self.httpserver:
raise RuntimeError('httpserver plugin not running')
self.httpserver.server.add_json_handler('/jsonrpc', self.jsonrpc)
self.httpserver.server.add_handler('/image/', self.provide_image)
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._sock.bind(('', freevo.config.plugin.jsonrpc.eventserver))
udp = kaa.Socket()
udp.wrap(self._sock, kaa.IO_READ | kaa.IO_WRITE)
udp.signals['read'].connect(eventserver.handle)
utils.imagedir = (yield kaa.beacon.get_db_info())['directory']
utils.cachedir = os.path.join(os.environ['HOME'], '.thumbnails')
self.api = {}
for module in ('VideoLibrary', 'Player', 'Playlist'):
for name in dir(eval(module)):
method = getattr(eval(module), name)
if callable(method) and not name.startswith('_'):
self.api[module + '.' + name] = method
@kaa.coroutine()
def provide_image(self, path, **attributes):
"""
HTTP callback for images
"""
filename = ''
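# Path shapes handled by the branches below (inferred, not a formal spec):
#   beacon/<path under the beacon image directory>
#   cache/<path under ~/.thumbnails>
#   thumbnail/<beacon type>/<beacon id>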
path = urllib.unquote(path)
if path.startswith('beacon'):
filename = os.path.join(utils.imagedir, path[7:])
if path.startswith('cache'):
filename = os.path.join(utils.cachedir, path[6:])
if path.startswith('thumbnail'):
item = yield kaa.beacon.query(id=int(path.split('/')[2]), type=path.split('/')[1])
if len(item) != 1:
log.error('beacon returned wrong results')
yield None
thumbnail = item[0].get('thumbnail')
if thumbnail.needs_update or 1:
yield kaa.inprogress(thumbnail.create(priority=kaa.beacon.Thumbnail.PRIORITY_HIGH))
filename = thumbnail.large
if filename:
if os.path.isfile(filename):
yield open(filename).read(), None, None
log.error('no file: %s' % filename)
yield None
else:
yield None
def Application_GetProperties(self, properties):
"""
JsonRPC Callback Application.GetProperties
"""
result = {}
for prop in properties:
if prop == 'version':
result[prop] = {"major": 16,"minor": 0,"revision": "a5f3a99", "tag": "stable"}
elif prop == 'volume':
result[prop] = 100
elif prop == 'muted':
result[prop] = eventserver.muted
else:
raise AttributeError('unsupported property: %s' % prop)
return result
def Settings_GetSettingValue(self, setting):
"""
JsonRPC Settings.GetSettingValue (MISSING)
"""
return {}
def XBMC_GetInfoBooleans(self, booleans):
"""
JsonRPC Callback XBMC.GetInfoBooleans
"""
result = {}
for b in booleans:
if b == 'System.Platform.Linux':
result[b] = True
else:
result[b] = False
return result
def XBMC_GetInfoLabels(self, labels):
"""
JsonRPC Callback XBMC.GetInfoLabels
"""
result = {}
for l in labels:
# FIXME: use correct values for all these labels
if l == 'System.BuildVersion':
result[l] = "13.1"
elif l == 'System.KernelVersion':
result[l] = "Linux 3.11.0"
elif l == 'MusicPlayer.Codec':
result[l] = ""
elif l == 'MusicPlayer.SampleRate':
result[l] = ""
elif l == 'MusicPlayer.BitRate':
result[l] = ""
else:
raise AttributeError('unsupported label: %s' % l)
return result
def XBMC_Ping(self):
"""
JsonRPC Ping
"""
return ''
def JSONRPC_Ping(self):
"""
JsonRPC Ping
"""
return ''
def GUI_ActivateWindow(self, window, parameters=None):
"""
Switch Menu Type
"""
window = window.lower()
if window == 'pictures':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('image', event_source='user')
elif window == 'musiclibrary':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('audio', event_source='user')
elif window == 'videos':
if parameters and parameters[0] == 'MovieTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'movie', event_source='user')
if parameters and parameters[0] == 'TvShowTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'tv', event_source='user')
elif window == 'home':
freevo.Event(freevo.MENU_GOTO_MAINMENU).post(event_source='user')
else:
log.error('ActivateWindow: unsupported window: %s' % window)
@kaa.coroutine()
def jsonrpc(self, path, **attributes):
"""
HTTP callback for /jsonrpc
"""
if not attributes:
# supported XBMC API version
yield {"major": 6,"minor": 14,"patch": 3}
method = attributes.get('method')
params = attributes.get('params')
result = None
if method.startswith('Input'):
callback = eventserver.input(method[6:].lower(), params)
yield {'jsonrpc': '2.0', 'result': 'OK', 'id': attributes.get('id')}
callback = self.api.get(method, None) or getattr(self, method.replace('.', '_'), None)
if callback:
# log.info('%s(%s)' % (method, params))
if params is None:
result = callback()
else:
result = callback(**params)
if isinstance(result, kaa.InProgress):
result = yield result
else:
raise AttributeError('unsupported method: %s' % method)
yield {'jsonrpc': '2.0', 'result': result, 'id': attributes.get('id')}<|fim▁end|> | #
# Not all API calls are implemented yet.
# |
<|file_name|>add_tags.go<|end_file_name|><|fim▁begin|>package tasks
// This file is generated by methodGenerator.
// DO NOT MOTIFY THIS FILE.
import (
"net/url"
"github.com/kawaken/go-rtm/methods"<|fim▁hole|>)
// AddTags returns "rtm.tasks.addTags" method instance.
func AddTags(timeline string, listID string, taskseriesID string, taskID string, tags string) *methods.Method {
name := "rtm.tasks.addTags"
p := url.Values{}
p.Add("method", name)
p.Add("timeline", timeline)
p.Add("list_id", listID)
p.Add("taskseries_id", taskseriesID)
p.Add("task_id", taskID)
p.Add("tags", tags)
return &methods.Method{Name: name, Params: p}
}<|fim▁end|> | |
<|file_name|>CodeFormatter.py<|end_file_name|><|fim▁begin|># @author Avtandil Kikabidze
# @copyright Copyright (c) 2008-2015, Avtandil Kikabidze aka LONGMAN ([email protected])
# @link http://longman.me
# @license The MIT License (MIT)
import os
import sys
import sublime
import sublime_plugin
st_version = 2
if sublime.version() == '' or int(sublime.version()) > 3000:
st_version = 3
reloader_name = 'codeformatter.reloader'
# ST3 loads each package as a module, so it needs an extra prefix
if st_version == 3:
reloader_name = 'CodeFormatter.' + reloader_name
from imp import reload
if reloader_name in sys.modules:
reload(sys.modules[reloader_name])
try:
# Python 3
from .codeformatter.formatter import Formatter
except (ValueError):
# Python 2
from codeformatter.formatter import Formatter
# fix for ST2
cprint = globals()['__builtins__']['print']
debug_mode = False
def plugin_loaded():
cprint('CodeFormatter: Plugin Initialized')
# settings = sublime.load_settings('CodeFormatter.sublime-settings')
# debug_mode = settings.get('codeformatter_debug', False)
# if debug_mode:
# from pprint import pprint
# pprint(settings)
# debug_write('Debug mode enabled')
# debug_write('Platform ' + sublime.platform() + ' ' + sublime.arch())
# debug_write('Sublime Version ' + sublime.version())
# debug_write('Settings ' + pprint(settings))
if (sublime.platform() != 'windows'):
import stat
path = (
sublime.packages_path() +
'/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'
)
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IEXEC)
if st_version == 2:
plugin_loaded()
class CodeFormatterCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax=None, saving=None):
run_formatter(self.view, edit, syntax=syntax, saving=saving)
class CodeFormatterOpenTabsCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax=None):
window = sublime.active_window()
for view in window.views():
run_formatter(view, edit, quiet=True)
class CodeFormatterEventListener(sublime_plugin.EventListener):
def on_pre_save(self, view):
view.run_command('code_formatter', {'saving': True})
class CodeFormatterShowPhpTransformationsCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax=False):
import subprocess
import re
platform = sublime.platform()
settings = sublime.load_settings('CodeFormatter.sublime-settings')
opts = settings.get('codeformatter_php_options')
php_path = 'php'
if ('php_path' in opts and opts['php_path']):
php_path = opts['php_path']
php55_compat = False
if ('php55_compat' in opts and opts['php55_compat']):
php55_compat = opts['php55_compat']
cmd = []
cmd.append(str(php_path))
if php55_compat:
cmd.append(
'{}/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'.format(
sublime.packages_path()))
else:
cmd.append(
'{}/CodeFormatter/codeformatter/lib/phpbeautifier/phpf.phar'.format(
sublime.packages_path()))
cmd.append('--list')
#print(cmd)
stderr = ''
stdout = ''
try:
if (platform == 'windows'):
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
p = subprocess.Popen(
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo,
shell=False, creationflags=subprocess.SW_HIDE)
else:
p = subprocess.Popen(
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
except Exception as e:
stderr = str(e)
if (not stderr and not stdout):
stderr = 'Error while gethering list of php transformations'
if len(stderr) == 0 and len(stdout) > 0:
text = stdout.decode('utf-8')
text = re.sub(
'Usage:.*?PASSNAME', 'Available PHP Tranformations:', text)
window = self.view.window()
pt = window.get_output_panel('paneltranformations')
pt.set_read_only(False)
pt.insert(edit, pt.size(), text)
window.run_command(
'show_panel', {'panel': 'output.paneltranformations'})
else:
show_error('Formatter error:\n' + stderr)
def run_formatter(view, edit, *args, **kwargs):
if view.is_scratch():
show_error('File is scratch')
return
# default parameters
syntax = kwargs.get('syntax')
saving = kwargs.get('saving', False)
quiet = kwargs.get('quiet', False)
formatter = Formatter(view, syntax)
if not formatter.exists():
if not quiet and not saving:
show_error('Formatter for this file type ({}) not found.'.format(
formatter.syntax))
return<|fim▁hole|> file_text = sublime.Region(0, view.size())
file_text_utf = view.substr(file_text).encode('utf-8')
if (len(file_text_utf) == 0):
return
stdout, stderr = formatter.format(file_text_utf)
if len(stderr) == 0 and len(stdout) > 0:
view.replace(edit, file_text, stdout)
elif not quiet:
show_error('Format error:\n' + stderr)
def console_write(text, prefix=False):
if prefix:
sys.stdout.write('CodeFormatter: ')
sys.stdout.write(text + '\n')
def debug_write(text, prefix=False):
console_write(text, True)
def show_error(text):
sublime.error_message(u'CodeFormatter\n\n%s' % text)<|fim▁end|> |
if (saving and not formatter.format_on_save_enabled()):
return
|
<|file_name|>test_ws.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
###############################################################################
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
###############################################################################
import ws
import unittest
class TestCollapse(unittest.TestCase):
def test_collapse(self):
result = ws.collapse(" ")
self.assertEqual(result, "")
result = ws.collapse(" foo")
self.assertEqual(result, "foo")
result = ws.collapse("foo ")
self.assertEqual(result, "foo")
result = ws.collapse(" foo bar ")
self.assertEqual(result, "foo bar")<|fim▁hole|>
result = ws.collapse("foo\t\nbar\r")
self.assertEqual(result, "foo bar")
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>auto-ref-sliceable.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Pushable<T> {
fn push_val(&mut self, t: T);
}
impl<T> Pushable<T> for ~[T] {
fn push_val(&mut self, t: T) {
self.push(t);
}
}
pub fn main() {
let mut v = ~[1];
v.push_val(2);<|fim▁hole|><|fim▁end|> | v.push_val(3);
assert!(v == ~[1, 2, 3]);
} |
<|file_name|>SdFile.cpp<|end_file_name|><|fim▁begin|>/**
* Marlin 3D Printer Firmware
* Copyright (C) 2016 MarlinFirmware [https://github.com/MarlinFirmware/Marlin]
*
* Based on Sprinter and grbl.
* Copyright (C) 2011 Camiel Gubbels / Erik van der Zalm
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
/**
* Arduino SdFat Library
* Copyright (C) 2009 by William Greiman
*
* This file is part of the Arduino Sd2Card Library
*/
#include "../inc/MarlinConfig.h"
#if ENABLED(SDSUPPORT)
#include "SdFile.h"
/**
* Create a file object and open it in the current working directory.
*
* \param[in] path A path with a valid 8.3 DOS name for a file to be opened.
*
* \param[in] oflag Values for \a oflag are constructed by a bitwise-inclusive
* OR of open flags. see SdBaseFile::open(SdBaseFile*, const char*, uint8_t).
*/
SdFile::SdFile(const char* path, uint8_t oflag) : SdBaseFile(path, oflag) { }
/**
* Write data to an open file.
*
* \note Data is moved to the cache but may not be written to the
* storage device until sync() is called.
*
* \param[in] buf Pointer to the location of the data to be written.
*
* \param[in] nbyte Number of bytes to write.
*
* \return For success write() returns the number of bytes written, always
* \a nbyte. If an error occurs, write() returns -1. Possible errors
* include write() is called before a file has been opened, write is called
* for a read-only file, device is full, a corrupt file system or an I/O error.
*
*/
int16_t SdFile::write(const void* buf, uint16_t nbyte) { return SdBaseFile::write(buf, nbyte); }
/**
* Write a byte to a file. Required by the Arduino Print class.
* \param[in] b the byte to be written.
* Use writeError to check for errors.
*/
#if ARDUINO >= 100
size_t SdFile::write(uint8_t b) { return SdBaseFile::write(&b, 1); }
#else
void SdFile::write(uint8_t b) { SdBaseFile::write(&b, 1); }
#endif
/**
* Write a string to a file. Used by the Arduino Print class.
* \param[in] str Pointer to the string.
* Use writeError to check for errors.
*/
void SdFile::write(const char* str) { SdBaseFile::write(str, strlen(str)); }
/**
* Write a PROGMEM string to a file.
* \param[in] str Pointer to the PROGMEM string.
* Use writeError to check for errors.
*/
void SdFile::write_P(PGM_P str) {
for (uint8_t c; (c = pgm_read_byte(str)); str++) write(c);
}
/**
* Write a PROGMEM string followed by CR/LF to a file.
* \param[in] str Pointer to the PROGMEM string.
* Use writeError to check for errors.
*/
void SdFile::writeln_P(PGM_P str) {
write_P(str);
write_P(PSTR("\r\n"));
}<|fim▁hole|><|fim▁end|> |
#endif // SDSUPPORT |
<|file_name|>condition_list_widget.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
KHotKeys
Copyright (C) 1999-2001 Lubos Lunak <[email protected]>
Distributed under the terms of the GNU General Public License version 2.
****************************************************************************/
#define _CONDITIONS_LIST_WIDGET_CPP_
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "condition_list_widget.h"
<|fim▁hole|>#include <qlineedit.h>
#include <qpopupmenu.h>
#include <kdebug.h>
#include <klocale.h>
#include <kmessagebox.h>
#include <khlistview.h>
#include <conditions.h>
#include "windowdef_list_widget.h"
#include "kcmkhotkeys.h"
namespace KHotKeys {
// Condition_list_widget
Condition_list_widget::Condition_list_widget(QWidget *parent_P, const char *name_P) : Condition_list_widget_ui(parent_P, name_P), selected_item(NULL)
{
conditions.setAutoDelete(true);
QPopupMenu *popup = new QPopupMenu; // CHECKME looks like setting parent doesn't work
popup->insertItem(i18n("Active Window..."), TYPE_ACTIVE_WINDOW);
popup->insertItem(i18n("Existing Window..."), TYPE_EXISTING_WINDOW);
popup->insertItem(i18n("Not_condition", "Not"), TYPE_NOT);
popup->insertItem(i18n("And_condition", "And"), TYPE_AND);
popup->insertItem(i18n("Or_condition", "Or"), TYPE_OR);
connect(conditions_listview, SIGNAL(doubleClicked(QListViewItem *, const QPoint &, int)), this, SLOT(modify_pressed()));
connect(popup, SIGNAL(activated(int)), SLOT(new_selected(int)));
new_button->setPopup(popup);
conditions_listview->header()->hide();
conditions_listview->addColumn("");
conditions_listview->setSorting(-1);
conditions_listview->setRootIsDecorated(true); // CHECKME
conditions_listview->setForceSelect(true);
copy_button->setEnabled(false);
modify_button->setEnabled(false);
delete_button->setEnabled(false);
clear_data();
// KHotKeys::Module::changed()
connect(new_button, SIGNAL(clicked()), module, SLOT(changed()));
connect(copy_button, SIGNAL(clicked()), module, SLOT(changed()));
connect(modify_button, SIGNAL(clicked()), module, SLOT(changed()));
connect(delete_button, SIGNAL(clicked()), module, SLOT(changed()));
connect(comment_lineedit, SIGNAL(textChanged(const QString &)), module, SLOT(changed()));
}
Condition_list_widget::~Condition_list_widget()
{
delete new_button->popup();
}
void Condition_list_widget::clear_data()
{
comment_lineedit->clear();
conditions.clear();
conditions_listview->clear();
}
void Condition_list_widget::set_data(const Condition_list *data_P)
{
if(data_P == NULL)
{
clear_data();
return;
}
comment_lineedit->setText(data_P->comment());
conditions.clear();
conditions_listview->clear();
insert_listview_items(data_P, conditions_listview, NULL);
#ifdef KHOTKEYS_DEBUG
kdDebug(1217) << "Condition_list_widget::set_data():" << endl;
Condition::debug_list(conditions);
#endif
}
void Condition_list_widget::insert_listview_items(const Condition_list_base *parent_P, QListView *parent1_P, Condition_list_item *parent2_P)
{
Condition_list_item *prev = NULL;
for(Condition_list_base::Iterator it(*parent_P); *it; ++it)
{
prev = create_listview_item(*it, parent1_P, parent2_P, prev, true);
if(Condition_list_base *group = dynamic_cast< Condition_list_base * >(*it))
insert_listview_items(group, NULL, prev);
}
}
Condition_list *Condition_list_widget::get_data(Action_data_base *data_P) const
{
#ifdef KHOTKEYS_DEBUG
kdDebug(1217) << "Condition_list_widget::get_data():" << endl;
Condition::debug_list(conditions);
#endif
// CHECKME TODO hmm, this will probably need the children here too :(
Condition_list *list = new Condition_list(comment_lineedit->text(), data_P);
get_listview_items(list, conditions_listview->firstChild());
return list;
}
void Condition_list_widget::get_listview_items(Condition_list_base *list_P, QListViewItem *first_item_P) const
{
list_P->clear();
for(QListViewItem *pos = first_item_P; pos != NULL; pos = pos->nextSibling())
{
Condition *cond = static_cast< Condition_list_item * >(pos)->condition()->copy(list_P);
if(Condition_list_base *group = dynamic_cast< Condition_list_base * >(cond))
get_listview_items(group, pos->firstChild());
}
}
void Condition_list_widget::new_selected(int type_P)
{
Condition_list_item *parent = NULL;
Condition_list_item *after = NULL;
if(selected_item && selected_item->condition())
{
Condition_list_base *tmp = dynamic_cast< Condition_list_base * >(selected_item->condition());
if(tmp && tmp->accepts_children())
{
int ret = KMessageBox::questionYesNoCancel(NULL, i18n("A group is selected.\nAdd the new condition in this selected group?"),
QString::null, i18n("Add in Group"), i18n("Ignore Group"));
if(ret == KMessageBox::Cancel)
return;
else if(ret == KMessageBox::Yes)
parent = selected_item;
else
parent = NULL;
}
}
if(parent == NULL && selected_item != NULL && selected_item->parent() != NULL)
{
parent = static_cast< Condition_list_item * >(selected_item->parent());
after = selected_item;
}
Condition_list_base *parent_cond = parent ? static_cast< Condition_list_base * >(parent->condition()) : NULL;
assert(!parent || dynamic_cast< Condition_list_base * >(parent->condition()));
Condition_dialog *dlg = NULL;
Condition *condition = NULL;
switch(type_P)
{
case TYPE_ACTIVE_WINDOW: // Active_window_condition
dlg = new Active_window_condition_dialog(new Active_window_condition(new Windowdef_list(""), parent_cond)); // CHECKME NULL
break;
case TYPE_EXISTING_WINDOW: // Existing_window_condition
dlg = new Existing_window_condition_dialog(new Existing_window_condition(new Windowdef_list(""), parent_cond)); // CHECKME NULL
break;
case TYPE_NOT: // Not_condition
condition = new Not_condition(parent_cond);
break;
case TYPE_AND: // And_condition
condition = new And_condition(parent_cond);
break;
case TYPE_OR: // Or_condition
condition = new Or_condition(parent_cond);
break;
}
if(dlg != NULL)
{
condition = dlg->edit_condition();
delete dlg;
}
if(condition != NULL)
{
if(parent != NULL)
conditions_listview->setSelected(create_listview_item(condition, NULL, parent, after, false), true);
else
conditions_listview->setSelected(create_listview_item(condition, conditions_listview, NULL, selected_item, false), true);
}
}
void Condition_list_widget::copy_pressed()
{
if(!selected_item)
return;
conditions_listview->setSelected(create_listview_item(selected_item->condition()->copy(selected_item->condition()->parent()),
selected_item->parent() ? NULL : conditions_listview,
static_cast< Condition_list_item * >(selected_item->parent()), selected_item, true),
true);
}
void Condition_list_widget::delete_pressed()
{
if(selected_item)
{
conditions.remove(selected_item->condition()); // we own it
delete selected_item; // CHECKME this should hopefully trigger the signals for enable()
selected_item = NULL;
}
}
void Condition_list_widget::modify_pressed()
{
if(!selected_item)
return;
edit_listview_item(selected_item);
}
void Condition_list_widget::current_changed(QListViewItem *item_P)
{
// if( item_P == selected_item )
// return;
selected_item = static_cast< Condition_list_item * >(item_P);
// conditions_listview->setSelected( selected_item, true );
copy_button->setEnabled(selected_item != NULL);
delete_button->setEnabled(selected_item != NULL);
if(selected_item != NULL)
{ // not,and,or can't be modified
if(dynamic_cast< Not_condition * >(selected_item->condition()) == NULL && dynamic_cast< And_condition * >(selected_item->condition()) == NULL
&& dynamic_cast< Or_condition * >(selected_item->condition()) == NULL)
{
modify_button->setEnabled(true);
}
else
modify_button->setEnabled(false);
}
else
modify_button->setEnabled(false);
}
Condition_list_item *Condition_list_widget::create_listview_item(Condition *condition_P, QListView *parent1_P, Condition_list_item *parent2_P,
QListViewItem *after_P, bool copy_P)
{
#ifdef KHOTKEYS_DEBUG
kdDebug(1217) << "Condition_list_widget::create_listview_item():" << endl;
Condition::debug_list(conditions);
kdDebug(1217) << kdBacktrace() << endl;
#endif
Condition *new_cond = copy_P ? condition_P->copy(parent2_P ? static_cast< Condition_list_base * >(parent2_P->condition()) : NULL) : condition_P;
assert(!copy_P || !parent2_P || dynamic_cast< Condition_list_base * >(parent2_P->condition()));
// CHECKME should no longer be needed
/* if( after_P == NULL )
{
if( parent1_P == NULL )
return new Condition_list_item( parent2_P, new_win );
else
return new Condition_list_item( parent1_P, new_win );
}
else*/
{
if(parent1_P == NULL)
{
parent2_P->setOpen(true);
if(new_cond->parent() == NULL) // own only toplevels, they own the rest
conditions.append(new_cond); // we own it, not the listview
return new Condition_list_item(parent2_P, after_P, new_cond);
}
else
{
if(new_cond->parent() == NULL)
conditions.append(new_cond); // we own it, not the listview
return new Condition_list_item(parent1_P, after_P, new_cond);
}
}
}
void Condition_list_widget::edit_listview_item(Condition_list_item *item_P)
{
Condition_dialog *dlg = NULL;
if(Active_window_condition *condition = dynamic_cast< Active_window_condition * >(item_P->condition()))
dlg = new Active_window_condition_dialog(condition);
else if(Existing_window_condition *condition = dynamic_cast< Existing_window_condition * >(item_P->condition()))
dlg = new Existing_window_condition_dialog(condition);
else if(dynamic_cast< Not_condition * >(item_P->condition()) != NULL)
return;
else if(dynamic_cast< And_condition * >(item_P->condition()) != NULL)
return;
else if(dynamic_cast< Or_condition * >(item_P->condition()) != NULL)
return;
else // CHECKME TODO add the remaining condition types
assert(false);
Condition *new_condition = dlg->edit_condition();
if(new_condition != NULL)
{
Condition *old_cond = item_P->condition();
item_P->set_condition(new_condition);
int pos = conditions.find(old_cond);
if(pos >= 0)
{
conditions.remove(pos); // we own it
conditions.insert(pos, new_condition);
}
item_P->widthChanged(0);
conditions_listview->repaintItem(item_P);
}
#ifdef KHOTKEYS_DEBUG
kdDebug(1217) << "Condition_list_widget::edit_listview_item():" << endl;
Condition::debug_list(conditions);
#endif
delete dlg;
}
// Condition_list_item
QString Condition_list_item::text(int column_P) const
{
return column_P == 0 ? condition()->description() : QString::null;
}
// Active_window_condition_dialog
Active_window_condition_dialog::Active_window_condition_dialog(Active_window_condition *condition_P)
: KDialogBase(NULL, NULL, true, i18n("Window Details"), Ok | Cancel), condition(NULL)
{
widget = new Windowdef_list_widget(this);
widget->set_data(condition_P->window());
setMainWidget(widget);
}
Condition *Active_window_condition_dialog::edit_condition()
{
exec();
return condition;
}
void Active_window_condition_dialog::accept()
{
KDialogBase::accept();
condition = new Active_window_condition(widget->get_data(), NULL); // CHECKME NULL ?
}
// Existing_window_condition_dialog
Existing_window_condition_dialog::Existing_window_condition_dialog(Existing_window_condition *condition_P)
: KDialogBase(NULL, NULL, true, i18n("Window Details"), Ok | Cancel), condition(NULL)
{
widget = new Windowdef_list_widget(this);
widget->set_data(condition_P->window());
setMainWidget(widget);
}
Condition *Existing_window_condition_dialog::edit_condition()
{
exec();
return condition;
}
void Existing_window_condition_dialog::accept()
{
KDialogBase::accept();
condition = new Existing_window_condition(widget->get_data(), NULL); // CHECKME NULL ?
}
} // namespace KHotKeys
#include "condition_list_widget.moc"<|fim▁end|> | #include <assert.h>
#include <qpushbutton.h>
#include <qheader.h> |
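// Editor's note: a hypothetical caller sketch for the widget above. Only
// set_data() and get_data() are taken from this file; Some_tab,
// condition_widget and the surrounding methods are placeholder names.
// get_data() returns a freshly allocated Condition_list that the caller
// owns, while set_data() keeps internal copies of the passed conditions.
void Some_tab::load(const Condition_list *list_P)
{
    condition_widget->set_data(list_P); // widget copies the conditions
}

Condition_list *Some_tab::save(Action_data_base *data_P)
{
    return condition_widget->get_data(data_P); // caller owns the result
}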
<|file_name|>bitcoin_ar.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ar" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Nanolite</source>
<translation>عن Nanolite</translation>
</message>
<message>
<location line="+39"/>
<source><b>Nanolite</b> version</source>
<translation>نسخة <b>Nanolite</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Nanolite developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>دفتر العناوين</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>أنقر على الماوس مرتين لتعديل عنوان</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>قم بعمل عنوان جديد</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>قم بنسخ العنوان المختار حاليا لحافظة النظام</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Nanolite addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Nanolite address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Nanolite address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&أمسح</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Nanolite addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR NanoliteS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Nanolite will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your Nanolites from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about Nanolite</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Nanolite address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Nanolite</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Nanolite</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Nanolite</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Nanolite addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Nanolite addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Nanolite client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Nanolite network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Nanolite address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Nanolite can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Nanolite address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Nanolite-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Nanolite after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Nanolite on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Nanolite client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Nanolite network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Nanolite.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Nanolite addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Nanolite.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Nanolite network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start Nanolite: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Nanolite-Qt help message to get a list with possible Nanolite command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Nanolite - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Nanolite Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Nanolite debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Nanolite RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message><|fim▁hole|> <source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. KNhAMQRmJwZKmojp2uc7TmH8EqxVWpvW7w)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Nanolite address (e.g. KNhAMQRmJwZKmojp2uc7TmH8EqxVWpvW7w)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. KNhAMQRmJwZKmojp2uc7TmH8EqxVWpvW7w)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Nanolite address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. KNhAMQRmJwZKmojp2uc7TmH8EqxVWpvW7w)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Nanolite address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Nanolite address (e.g. KNhAMQRmJwZKmojp2uc7TmH8EqxVWpvW7w)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Nanolite signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Nanolite developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Nanolite version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or Nanolited</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: Nanolite.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: Nanolited.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9348 or testnet: 19348)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9347 or testnet: 19347)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=Nanoliterpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Nanolite Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Nanolite is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Nanolite will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Nanolite Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Nanolite</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Nanolite to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Nanolite is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | <location line="+1"/> |
<|file_name|>dictutils.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class DictUtils(object):
'''
    Provides dict helper utilities
'''
@staticmethod
def exclude(dct, keys=[]):
"""
        Removes given items from the dict
        @param dct: the dict to look at
@param keys: the keys of items to pop
@return: updated dict<|fim▁hole|> for key in keys:
                if key in dct:
dct.pop(key)<|fim▁end|> | """
if dct: |
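# A minimal, hypothetical usage sketch of DictUtils.exclude above: the sample
# dict and key names are invented for illustration, and the call relies only
# on the in-place mutation visible in the method body (no return value is used,
# even though the docstring mentions one).
conn_opts = {"host": "192.0.2.7", "port": 54321, "password": "secret"}
DictUtils.exclude(conn_opts, keys=["password", "missing_key"])
print(conn_opts)  # {'host': '192.0.2.7', 'port': 54321} -- absent keys are skipped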
<|file_name|>missed.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
# encoding: UTF-8
from six import iteritems
from collections import defaultdict
from json import dumps
from ..config import CONFIG
class MissedKeys(object):
""" Object append missed key"""
def __init__(self, client):
self.client = client<|fim▁hole|> Args:
key (Key): translation key
            source_path (string, optional): source path under which the key has to be registered
"""
source_path = CONFIG['default_source'] if source_path is None else source_path
self.key_folder[source_path].add(key)
def register(self, source_path):
self.key_folder[source_path] = set()
def prepare(self):
""" Dict repr of keys list """
ret = []
for source, keys in iteritems(self.key_folder):
ret.append({
'source': source,
'keys': [key.as_dict for key in keys]})
return ret
def submit(self, missed_keys):
""" Submit keys over API
Args:
            missed_keys (list): payload of missed keys, as built by prepare()
"""
return self.client.post('sources/register_keys', params={'source_keys': dumps(missed_keys), 'options': '{"realtime": true}'})
def submit_all(self):
""" Submit all missed keys to server """
if len(self.key_folder) == 0:
return
ret = self.submit(self.prepare())
self.key_folder = defaultdict(set)
return ret<|fim▁end|> | self.key_folder = defaultdict(set)
def append(self, key, source_path=None):
""" Add key to missed |
<|file_name|>test_avhrr_l0_hrpt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2021 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the hrpt reader."""
import os
import unittest
from contextlib import suppress
from tempfile import NamedTemporaryFile
from unittest import mock
import numpy as np
import xarray as xr
from satpy.readers.hrpt import HRPTFile, dtype
from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher
from satpy.tests.utils import make_dataid
NUMBER_OF_SCANS = 10
SWATH_WIDTH = 2048
class TestHRPTWithFile(unittest.TestCase):
"""Test base class with writing a fake file."""
def setUp(self) -> None:
"""Set up the test case."""
test_data = np.ones(NUMBER_OF_SCANS, dtype=dtype)
# Channel 3a
test_data["id"]["id"][:5] = 891
# Channel 3b
test_data["id"]["id"][5:] = 890
with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file:
self.filename = hrpt_file.name
test_data.tofile(hrpt_file)
def tearDown(self) -> None:
"""Tear down the test case."""
with suppress(OSError):
os.remove(self.filename)
def _get_dataset(self, dataset_id):
fh = HRPTFile(self.filename, {}, {})
return fh.get_dataset(dataset_id, {})
class TestHRPTReading(TestHRPTWithFile):
"""Test case for reading hrpt data."""
def test_reading(self):
"""Test that data is read."""
fh = HRPTFile(self.filename, {}, {})
assert fh._data is not None
class TestHRPTGetUncalibratedData(TestHRPTWithFile):
"""Test case for reading uncalibrated hrpt data."""
def _get_channel_1_counts(self):
return self._get_dataset(make_dataid(name='1', calibration='counts'))
def test_get_dataset_returns_a_dataarray(self):
"""Test that get_dataset returns a dataarray."""
result = self._get_channel_1_counts()<|fim▁hole|> result = self._get_channel_1_counts()
assert result.attrs['platform_name'] == 'NOAA 19'
def test_no_calibration_values_are_1(self):
"""Test that the values of non-calibrated data is 1."""
result = self._get_channel_1_counts()
assert (result.values == 1).all()
def fake_calibrate_solar(data, *args, **kwargs):
"""Fake calibration."""
del args, kwargs
return data * 25.43 + 3
def fake_calibrate_thermal(data, *args, **kwargs):
"""Fake calibration."""
del args, kwargs
return data * 35.43 + 3
class CalibratorPatcher(PygacPatcher):
"""Patch pygac."""
def setUp(self) -> None:
"""Patch pygac's calibration."""
super().setUp()
# Import things to patch here to make them patchable. Otherwise another function
# might import it first which would prevent a successful patch.
from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
self.Calibrator = Calibrator
self.calibrate_thermal = calibrate_thermal
self.calibrate_thermal.side_effect = fake_calibrate_thermal
self.calibrate_solar = calibrate_solar
self.calibrate_solar.side_effect = fake_calibrate_solar
class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile):
"""Test case with patched calibration routines and a synthetic file."""
def setUp(self) -> None:
"""Set up the test case."""
CalibratorPatcher.setUp(self)
TestHRPTWithFile.setUp(self)
def tearDown(self):
"""Tear down the test case."""
CalibratorPatcher.tearDown(self)
TestHRPTWithFile.tearDown(self)
class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated reflectances from hrpt data."""
def _get_channel_1_reflectance(self):
"""Get the channel 1 reflectance."""
dataset_id = make_dataid(name='1', calibration='reflectance')
return self._get_dataset(dataset_id)
def test_calibrated_reflectances_values(self):
"""Test the calibrated reflectance values."""
result = self._get_channel_1_reflectance()
np.testing.assert_allclose(result.values, 28.43)
class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated brightness temperature from hrpt data."""
def _get_channel_4_bt(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='4', calibration='brightness_temperature')
return self._get_dataset(dataset_id)
def test_calibrated_bt_values(self):
"""Test the calibrated reflectance values."""
result = self._get_channel_4_bt()
np.testing.assert_allclose(result.values, 38.43)
class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile):
"""Test case for reading calibrated brightness temperature from hrpt data."""
def _get_channel_3b_bt(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3b', calibration='brightness_temperature')
return self._get_dataset(dataset_id)
def _get_channel_3a_reflectance(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3a', calibration='reflectance')
return self._get_dataset(dataset_id)
def _get_channel_3a_counts(self):
"""Get the channel 4 bt."""
dataset_id = make_dataid(name='3a', calibration='counts')
return self._get_dataset(dataset_id)
def test_channel_3b_masking(self):
"""Test that channel 3b is split correctly."""
result = self._get_channel_3b_bt()
assert np.isnan(result.values[:5]).all()
assert np.isfinite(result.values[5:]).all()
def test_channel_3a_masking(self):
"""Test that channel 3a is split correctly."""
result = self._get_channel_3a_reflectance()
assert np.isnan(result.values[5:]).all()
assert np.isfinite(result.values[:5]).all()
def test_uncalibrated_channel_3a_masking(self):
"""Test that channel 3a is split correctly."""
result = self._get_channel_3a_counts()
assert np.isnan(result.values[5:]).all()
assert np.isfinite(result.values[:5]).all()
class TestHRPTNavigation(TestHRPTWithFile):
"""Test case for computing HRPT navigation."""
def setUp(self) -> None:
"""Set up the test case."""
super().setUp()
self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH))
self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2
def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt):
"""Prepare the mocks."""
Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock()
get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats
@mock.patch.multiple('satpy.readers.hrpt',
Orbital=mock.DEFAULT,
compute_pixels=mock.DEFAULT,
get_lonlatalt=mock.DEFAULT,
SatelliteInterpolator=mock.DEFAULT)
def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
"""Check that latitudes are returned properly."""
self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
dataset_id = make_dataid(name='longitude')
result = self._get_dataset(dataset_id)
assert (result == self.fake_lons).all()
@mock.patch.multiple('satpy.readers.hrpt',
Orbital=mock.DEFAULT,
compute_pixels=mock.DEFAULT,
get_lonlatalt=mock.DEFAULT,
SatelliteInterpolator=mock.DEFAULT)
def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
"""Check that latitudes are returned properly."""
self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
dataset_id = make_dataid(name='latitude')
result = self._get_dataset(dataset_id)
assert (result == self.fake_lats).all()<|fim▁end|> | assert isinstance(result, xr.DataArray)
def test_platform_name(self):
"""Test that the platform name is correct.""" |
<|file_name|>17_removing_text.rs<|end_file_name|><|fim▁begin|>fn main() {
let mut choco = "chocolate".to_string();
println!("{}", choco.len());<|fim▁hole|> // this crap takes an ownership, how does it reallocate the buffer?
// TODO: Know how? Perhaps it just repositions the symbols in current buffer?
choco.drain(3..6).collect::<String>();
println!("{}", choco.len());
}<|fim▁end|> | |
<|file_name|>test_variables.py<|end_file_name|><|fim▁begin|>from tests.base import TestBase
from pascal.program import Program
class TestVariables(TestBase):
def test_pass_valid_var(self):
file_name = "tests/mock_pas/all_var.pas"
pascal_program = Program(file_name)
pascal_program.run()
self.assertEqual(len(pascal_program.symbol_table), 7)
self.assertEqual(pascal_program.symbol_address, 23)
def test_pass_assign(self):
file_name = "tests/mock_pas/variables.pas"<|fim▁hole|><|fim▁end|> | pascal_program = Program(file_name)
pascal_program.run() |
<|file_name|>message.rs<|end_file_name|><|fim▁begin|>use libc::{c_int, size_t, c_void};
use std::mem;
use std::marker::PhantomData;
#[repr(C)]
pub struct nl_msg {
_unused: [u8; 0],
}
#[repr(C)]
pub struct nlmsghdr {
_unused: [u8; 0],
}
#[link(name="nl-3")]
extern "C" {
// Exposed msg functions
fn nlmsg_alloc() -> *const nl_msg;
fn nlmsg_free(msg: *const nl_msg);
fn nlmsg_append(msg: *const nl_msg, data: *const c_void, len: size_t, pad: c_int) -> i32;
fn nlmsg_put(msg: *const nl_msg, pid: u32, seq: u32, mtype: c_int, payload: c_int, flags: c_int) -> *const nlmsghdr;
fn nlmsg_datalen(nlh: *const nlmsghdr) -> i32;
fn nlmsg_next(nlh: *const nlmsghdr, remaining: *const i32) -> *const nlmsghdr;
fn nlmsg_inherit(nlh: *const nlmsghdr) -> *const nl_msg;
fn nlmsg_hdr(msg: *const nl_msg) -> *const nlmsghdr;
fn nlmsg_ok(msg: *const nl_msg) -> u32;
fn nlmsg_data(msg: *const nlmsghdr) -> *const c_void;
}
pub struct NetlinkMessage {
ptr: *const nl_msg,
hdr: *const nlmsghdr,
}
pub struct NetlinkData <T> {
ptr: Option<*const c_void>,
phantom: PhantomData<T>
}
impl <T> NetlinkData <T> {
pub fn new() -> NetlinkData<T> {
NetlinkData {
ptr: None,
phantom: PhantomData
}
}<|fim▁hole|> NetlinkData {
ptr: Some(unsafe{ mem::transmute(data) }),
phantom: PhantomData
}
}
pub fn with_vptr(data: *const c_void) -> NetlinkData<T> {
NetlinkData {
ptr: Some(data),
phantom: PhantomData
}
}
pub fn get(&self) -> Option<&T> {
match self.ptr {
None => None,
Some(vptr) => {
Some( unsafe { mem::transmute(vptr) } )
}
}
}
pub fn set(&mut self, data: &T) {
match self.ptr {
None => {
let p: *const c_void = unsafe{ mem::transmute(data) };
self.ptr = Some( p );
},
_ => return
}
}
pub fn from_vptr(&mut self, data: *const c_void) {
match self.ptr {
None => self.ptr = Some(data),
_ => return
}
}
pub fn to_vptr(&self) -> Option<*const c_void> {
self.ptr
}
}
pub fn contain(ptr: *const nl_msg) -> Option<NetlinkMessage> {
match ptr as isize {
0x0 => None,
_ => Some (
NetlinkMessage {
ptr: ptr,
hdr: unsafe { nlmsg_hdr(ptr) }
})
}
}
pub fn alloc() -> Option<NetlinkMessage> {
let mptr = unsafe { nlmsg_alloc() };
contain(mptr)
}
pub fn free(msg: NetlinkMessage) {
unsafe { nlmsg_free(msg.ptr) }
}
pub fn append<T>(msg: &mut NetlinkMessage, data: &T, len: u32, pad: i32) -> i32 {
unsafe {
let vptr: *const c_void = mem::transmute(data);
nlmsg_append(msg.ptr, vptr, len as size_t, pad as c_int) as i32
}
}
pub fn put(msg: &mut NetlinkMessage, pid: u32, seq: u32, mtype: i32, payload: i32, flags: i32) -> bool {
let hdr = unsafe { nlmsg_put(msg.ptr, pid, seq, mtype as c_int, payload as c_int, flags as c_int) };
match hdr as i32 {
0x0 => false,
_ => {
true
}
}
}
pub fn data_len(msg: &NetlinkMessage) -> i32 {
unsafe { nlmsg_datalen(msg.hdr) }
}
pub fn inherit(msg: &NetlinkMessage) -> NetlinkMessage {
let mptr = unsafe { nlmsg_inherit(msg.hdr) };
NetlinkMessage {
ptr: mptr,
hdr: unsafe { nlmsg_hdr(mptr) }
}
}
pub fn data<T>(msg: &NetlinkMessage, container: &mut NetlinkData<T>) {
unsafe {
let vptr = nlmsg_data(msg.hdr);
container.from_vptr(vptr);
}
}
pub mod expose {
pub fn nl_msg_ptr(msg: &::message::NetlinkMessage) -> *const ::message::nl_msg {
msg.ptr
}
pub fn nlmsghdr_ptr(msg: &::message::NetlinkMessage) -> *const ::message::nlmsghdr {
msg.hdr
}
}<|fim▁end|> |
pub fn with_data<D>(data: &D) -> NetlinkData<T> { |
<|file_name|>PDFTextStreamEngine.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.text;<|fim▁hole|>
import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.contentstream.PDFStreamEngine;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.font.encoding.GlyphList;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDSimpleFont;
import org.apache.pdfbox.pdmodel.font.PDType3Font;
import org.apache.pdfbox.pdmodel.graphics.state.PDGraphicsState;
import java.io.IOException;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.Vector;
import org.apache.pdfbox.contentstream.operator.DrawObject;
import org.apache.pdfbox.contentstream.operator.state.Concatenate;
import org.apache.pdfbox.contentstream.operator.state.Restore;
import org.apache.pdfbox.contentstream.operator.state.Save;
import org.apache.pdfbox.contentstream.operator.state.SetGraphicsStateParameters;
import org.apache.pdfbox.contentstream.operator.state.SetMatrix;
import org.apache.pdfbox.contentstream.operator.text.BeginText;
import org.apache.pdfbox.contentstream.operator.text.EndText;
import org.apache.pdfbox.contentstream.operator.text.SetFontAndSize;
import org.apache.pdfbox.contentstream.operator.text.SetTextHorizontalScaling;
import org.apache.pdfbox.contentstream.operator.text.ShowTextAdjusted;
import org.apache.pdfbox.contentstream.operator.text.ShowTextLine;
import org.apache.pdfbox.contentstream.operator.text.ShowTextLineAndSpace;
import org.apache.pdfbox.contentstream.operator.text.MoveText;
import org.apache.pdfbox.contentstream.operator.text.MoveTextSetLeading;
import org.apache.pdfbox.contentstream.operator.text.NextLine;
import org.apache.pdfbox.contentstream.operator.text.SetCharSpacing;
import org.apache.pdfbox.contentstream.operator.text.SetTextLeading;
import org.apache.pdfbox.contentstream.operator.text.SetTextRenderingMode;
import org.apache.pdfbox.contentstream.operator.text.SetTextRise;
import org.apache.pdfbox.contentstream.operator.text.SetWordSpacing;
import org.apache.pdfbox.contentstream.operator.text.ShowText;
/**
* PDFStreamEngine subclass for advanced processing of text via TextPosition.
*
* @see org.apache.pdfbox.text.TextPosition
* @author Ben Litchfield
* @author John Hewson
*/
class PDFTextStreamEngine extends PDFStreamEngine
{
private static final Log LOG = LogFactory.getLog(PDFTextStreamEngine.class);
private int pageRotation;
private PDRectangle pageSize;
private final GlyphList glyphList;
/**
* Constructor.
*/
PDFTextStreamEngine() throws IOException
{
addOperator(new BeginText());
addOperator(new Concatenate());
addOperator(new DrawObject()); // special text version
addOperator(new EndText());
addOperator(new SetGraphicsStateParameters());
addOperator(new Save());
addOperator(new Restore());
addOperator(new NextLine());
addOperator(new SetCharSpacing());
addOperator(new MoveText());
addOperator(new MoveTextSetLeading());
addOperator(new SetFontAndSize());
addOperator(new ShowText());
addOperator(new ShowTextAdjusted());
addOperator(new SetTextLeading());
addOperator(new SetMatrix());
addOperator(new SetTextRenderingMode());
addOperator(new SetTextRise());
addOperator(new SetWordSpacing());
addOperator(new SetTextHorizontalScaling());
addOperator(new ShowTextLine());
addOperator(new ShowTextLineAndSpace());
// load additional glyph list for Unicode mapping
String path = "org/apache/pdfbox/resources/glyphlist/additional.txt";
InputStream input = GlyphList.class.getClassLoader().getResourceAsStream(path);
glyphList = new GlyphList(GlyphList.getAdobeGlyphList(), input);
}
/**
* This will initialise and process the contents of the stream.
*
* @param page the page to process
* @throws java.io.IOException if there is an error accessing the stream.
*/
@Override
public void processPage(PDPage page) throws IOException
{
this.pageRotation = page.getRotation();
this.pageSize = page.getCropBox();
super.processPage(page);
}
/**
* This method was originally written by Ben Litchfield for PDFStreamEngine.
*/
@Override
protected void showGlyph(Matrix textRenderingMatrix, PDFont font, int code, String unicode,
Vector displacement) throws IOException
{
//
// legacy calculations which were previously in PDFStreamEngine
//
PDGraphicsState state = getGraphicsState();
Matrix ctm = state.getCurrentTransformationMatrix();
float fontSize = state.getTextState().getFontSize();
float horizontalScaling = state.getTextState().getHorizontalScaling() / 100f;
Matrix textMatrix = getTextMatrix();
// 1/2 the bbox is used as the height todo: why?
float glyphHeight = font.getBoundingBox().getHeight() / 2;
// transformPoint from glyph space -> text space
float height = font.getFontMatrix().transformPoint(0, glyphHeight).y;
// (modified) combined displacement, this is calculated *without* taking the character
// spacing and word spacing into account, due to legacy code in TextStripper
float tx = displacement.getX() * fontSize * horizontalScaling;
float ty = 0; // todo: support vertical writing mode
// (modified) combined displacement matrix
Matrix td = Matrix.getTranslateInstance(tx, ty);
// (modified) text rendering matrix
Matrix nextTextRenderingMatrix = td.multiply(textMatrix).multiply(ctm); // text space -> device space
float nextX = nextTextRenderingMatrix.getTranslateX();
float nextY = nextTextRenderingMatrix.getTranslateY();
// (modified) width and height calculations
float dxDisplay = nextX - textRenderingMatrix.getTranslateX();
float dyDisplay = height * textRenderingMatrix.getScalingFactorY();
//
// start of the original method
//
// Note on variable names. There are three different units being used in this code.
// Character sizes are given in glyph units, text locations are initially given in text
// units, and we want to save the data in display units. The variable names should end with
// Text or Disp to represent if the values are in text or disp units (no glyph units are
// saved).
float fontSizeText = getGraphicsState().getTextState().getFontSize();
float horizontalScalingText = getGraphicsState().getTextState().getHorizontalScaling()/100f;
//Matrix ctm = getGraphicsState().getCurrentTransformationMatrix();
float glyphSpaceToTextSpaceFactor = 1 / 1000f;
if (font instanceof PDType3Font)
{
// This will typically be 1000 but in the case of a type3 font
// this might be a different number
glyphSpaceToTextSpaceFactor = 1f / font.getFontMatrix().getScaleX();
}
float spaceWidthText = 0;
try
{
// to avoid crash as described in PDFBOX-614, see what the space displacement should be
spaceWidthText = font.getSpaceWidth() * glyphSpaceToTextSpaceFactor;
}
catch (Throwable exception)
{
LOG.warn(exception, exception);
}
if (spaceWidthText == 0)
{
spaceWidthText = font.getAverageFontWidth() * glyphSpaceToTextSpaceFactor;
// the average space width appears to be higher than necessary so make it smaller
spaceWidthText *= .80f;
}
if (spaceWidthText == 0)
{
spaceWidthText = 1.0f; // if could not find font, use a generic value
}
// the space width has to be transformed into display units
float spaceWidthDisplay = spaceWidthText * fontSizeText * horizontalScalingText *
textRenderingMatrix.getScalingFactorX() * ctm.getScalingFactorX();
// use our additional glyph list for Unicode mapping
unicode = font.toUnicode(code, glyphList);
// when there is no Unicode mapping available, Acrobat simply coerces the character code
// into Unicode, so we do the same. Subclasses of PDFStreamEngine don't necessarily want
// this, which is why we leave it until this point in PDFTextStreamEngine.
if (unicode == null)
{
if (font instanceof PDSimpleFont)
{
char c = (char) code;
unicode = new String(new char[] { c });
}
else
{
// Acrobat doesn't seem to coerce composite font's character codes, instead it
// skips them. See the "allah2.pdf" TestTextStripper file.
return;
}
}
processTextPosition(new TextPosition(pageRotation, pageSize.getWidth(),
pageSize.getHeight(), textRenderingMatrix, nextX, nextY,
dyDisplay, dxDisplay,
spaceWidthDisplay, unicode, new int[] { code } , font, fontSize,
(int)(fontSize * textRenderingMatrix.getScalingFactorX())));
}
/**
* A method provided as an event interface to allow a subclass to perform some specific
* functionality when text needs to be processed.
*
* @param text The text to be processed.
*/
protected void processTextPosition(TextPosition text)
{
// subclasses can override to provide specific functionality
}
}<|fim▁end|> | |
<|file_name|>repeat_with.rs<|end_file_name|><|fim▁begin|>use crate::iter::{FusedIterator, TrustedLen};
/// Creates a new iterator that repeats elements of type `A` endlessly by
/// applying the provided closure, the repeater, `F: FnMut() -> A`.
///
/// The `repeat_with()` function calls the repeater over and over again.
///
/// Infinite iterators like `repeat_with()` are often used with adapters like
/// [`Iterator::take()`], in order to make them finite.
///
/// If the element type of the iterator you need implements [`Clone`], and
/// it is OK to keep the source element in memory, you should instead use
/// the [`repeat()`] function.
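///
/// For instance, with a [`Clone`] element the two are interchangeable
/// (illustrative sketch):
///
/// ```
/// use std::iter;
///
/// let a: Vec<i32> = iter::repeat(7).take(3).collect();
/// let b: Vec<i32> = iter::repeat_with(|| 7).take(3).collect();
/// assert_eq!(a, b);
/// ```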
///
/// An iterator produced by `repeat_with()` is not a [`DoubleEndedIterator`].
/// If you need `repeat_with()` to return a [`DoubleEndedIterator`],<|fim▁hole|>///
/// [`repeat()`]: crate::iter::repeat
/// [`DoubleEndedIterator`]: crate::iter::DoubleEndedIterator
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::iter;
///
/// // let's assume we have some value of a type that is not `Clone`
/// // or which we don't want to have in memory just yet because it is expensive:
/// #[derive(PartialEq, Debug)]
/// struct Expensive;
///
/// // a particular value forever:
/// let mut things = iter::repeat_with(|| Expensive);
///
/// assert_eq!(Some(Expensive), things.next());
/// assert_eq!(Some(Expensive), things.next());
/// assert_eq!(Some(Expensive), things.next());
/// assert_eq!(Some(Expensive), things.next());
/// assert_eq!(Some(Expensive), things.next());
/// ```
///
/// Using mutation and going finite:
///
/// ```rust
/// use std::iter;
///
/// // From the zeroth to the third power of two:
/// let mut curr = 1;
/// let mut pow2 = iter::repeat_with(|| { let tmp = curr; curr *= 2; tmp })
/// .take(4);
///
/// assert_eq!(Some(1), pow2.next());
/// assert_eq!(Some(2), pow2.next());
/// assert_eq!(Some(4), pow2.next());
/// assert_eq!(Some(8), pow2.next());
///
/// // ... and now we're done
/// assert_eq!(None, pow2.next());
/// ```
#[inline]
#[stable(feature = "iterator_repeat_with", since = "1.28.0")]
pub fn repeat_with<A, F: FnMut() -> A>(repeater: F) -> RepeatWith<F> {
RepeatWith { repeater }
}
/// An iterator that repeats elements of type `A` endlessly by
/// applying the provided closure `F: FnMut() -> A`.
///
/// This `struct` is created by the [`repeat_with()`] function.
/// See its documentation for more.
#[derive(Copy, Clone, Debug)]
#[stable(feature = "iterator_repeat_with", since = "1.28.0")]
pub struct RepeatWith<F> {
repeater: F,
}
#[stable(feature = "iterator_repeat_with", since = "1.28.0")]
impl<A, F: FnMut() -> A> Iterator for RepeatWith<F> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
Some((self.repeater)())
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(usize::MAX, None)
}
}
#[stable(feature = "iterator_repeat_with", since = "1.28.0")]
impl<A, F: FnMut() -> A> FusedIterator for RepeatWith<F> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<A, F: FnMut() -> A> TrustedLen for RepeatWith<F> {}<|fim▁end|> | /// please open a GitHub issue explaining your use case. |
<|file_name|>KuhnMunkresWongTest.java<|end_file_name|><|fim▁begin|>/*
* This file is part of ELKI:
* Environment for Developing KDD-Applications Supported by Index-Structures
*
* Copyright (C) 2019
* ELKI Development Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package elki.utilities.datastructures;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Random;
import org.junit.Test;
/**
* Test the Kuhn-Munkres implementation.
*
* @author Erich Schubert
*/
public class KuhnMunkresWongTest {
@Test
public void test1() {
int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.TEST1);
double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += KuhnMunkresTest.TEST1[i][assignment[i]];
}
assertEquals("Assignment not optimal", 55, sum, 0);
}
@Test
public void test2() {
int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.TEST2);
double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += KuhnMunkresTest.TEST2[i][assignment[i]];
}
assertEquals("Assignment not optimal", 4, sum, 0);
}
@Test
public void testNonSq() {
int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.NONSQUARE);
double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += KuhnMunkresTest.NONSQUARE[i][assignment[i]];
}
assertEquals("Assignment not optimal", 637518, sum, 0);
}
@Test
public void testDifficult() {<|fim▁hole|> double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += KuhnMunkresTest.DIFFICULT[i][assignment[i]];
}
assertEquals("Assignment not optimal", 2.24, sum, 1e-4);
}
@Test
public void testDifficult2() {
int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.DIFFICULT2);
double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += KuhnMunkresTest.DIFFICULT2[i][assignment[i]];
}
assertEquals("Assignment not optimal", 0.8802, sum, 1e-4);
}
@Test
public void testLarge() {
long seed = 0L;
Random rnd = new Random(seed);
double[][] mat = new double[100][100];
for(int i = 0; i < mat.length; i++) {
double[] row = mat[i];
for(int j = 0; j < row.length; j++) {
row[j] = Math.abs(rnd.nextDouble());
}
}
int[] assignment = new KuhnMunkresWong().run(mat);
double sum = 0.;
for(int i = 0; i < assignment.length; i++) {
assertTrue("Unassigned row " + i, assignment[i] >= 0);
sum += mat[i][assignment[i]];
}
if(seed == 0) {
if(mat.length == 10 && mat[0].length == 10) {
assertEquals("sum", 1.467733381753002, sum, 1e-8);
// Duration: 0.007970609
}
if(mat.length == 100 && mat[0].length == 100) {
assertEquals("sum", 1.5583906418867581, sum, 1e-8);
// Duration: 0.015696813
}
if(mat.length == 1000 && mat[0].length == 1000) {
assertEquals("sum", 1.6527526146559663, sum, 1e-8);
// Duration: 0.8892345580000001
}
if(mat.length == 10000 && mat[0].length == 10000) {
assertEquals("sum", 1.669458072091596, sum, 1e-8);
// Duration: 3035.95495334
}
}
}
}<|fim▁end|> | int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.DIFFICULT); |
<|file_name|>group.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from collections import defaultdict
DEFAULT_RELEASE = datetime(1970, 1, 1)
_SORT_KEY = lambda eps: eps[0].released or DEFAULT_RELEASE
class PodcastGrouper(object):
"""Groups episodes of two podcasts based on certain features
The results are sorted by release timestamp"""
DEFAULT_RELEASE = datetime(1970, 1, 1)
def __init__(self, podcasts):
if not podcasts or (None in podcasts):<|fim▁hole|> def __get_episodes(self):
episodes = {}
for podcast in self.podcasts:
episodes.update(dict((e.id, e.id) for e in podcast.episode_set.all()))
return episodes
def group(self, get_features):
episodes = self.__get_episodes()
episode_groups = defaultdict(list)
episode_features = map(get_features, episodes.items())
for features, episode_id in episode_features:
episode = episodes[episode_id]
episode_groups[features].append(episode)
groups = sorted(episode_groups.values(), key=_SORT_KEY)
return enumerate(groups)<|fim▁end|> | raise ValueError("podcasts must not be None")
self.podcasts = podcasts
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from models import Article, RelatedLink, Category
# Register your models here.
class RelatedLinkInline(admin.TabularInline):
model = RelatedLink
extra = 1
class ArticleAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("title",)}
search_fields = ['title']
list_display = ['title', 'slug', 'category']
list_filter = ['created', 'modified']
inlines = [
RelatedLinkInline
]<|fim▁hole|>admin.site.register(Article, ArticleAdmin)
admin.site.register(Category)<|fim▁end|> | |
<|file_name|>SessionRegisteredEventDecoder.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2017 ZeXtras S.r.l.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, version 2 of
* the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|> * GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License.
* If not, see <http://www.gnu.org/licenses/>.
*/
import {IDateProvider} from "../../../../../lib/IDateProvider";
import {EventSessionRegistered} from "../../../../events/chat/EventSessionRegistered";
import {OpenChatEventCode} from "../../../../events/chat/OpenChatEventCode";
import {IChatEvent} from "../../../../events/IChatEvent";
import {ISoapEventObject} from "../SoapEventParser";
import {SoapEventDecoder} from "./SoapEventDecoder";
export class SessionRegisteredEventDecoder<T extends IUserCapabilities>
extends SoapEventDecoder<EventSessionRegistered<T>> {
protected mDateProvider: IDateProvider;
constructor(dateProvider: IDateProvider) {
super(OpenChatEventCode.REGISTER_SESSION);
this.mDateProvider = dateProvider;
}
public decodeEvent(
eventObj: ISessionRegisteredEventObj<T>,
originEvent?: IChatEvent,
): EventSessionRegistered<T> {
return new EventSessionRegistered<T>(
eventObj,
this.mDateProvider.getNow(),
);
}
}
export interface ISessionRegisteredEventObj<T extends IUserCapabilities> extends ISoapEventObject {
[key: string]: any;
capabilities?: T;
}
export interface IUserCapabilities {
[key: string]: any;
}<|fim▁end|> | |
<|file_name|>0000_get_requester_ratings_fn.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('crowdsourcing', '0007_auto_20151208_1957'),
]
operations = [
migrations.RunSQL('''
CREATE OR REPLACE FUNCTION get_requester_ratings(IN worker_profile_id INTEGER)
RETURNS TABLE(requester_id INTEGER, requester_rating DOUBLE PRECISION,
requester_avg_rating DOUBLE PRECISION)
AS $$
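-- Illustrative call (hypothetical id): SELECT * FROM get_requester_ratings(42);
-- yields one row per requester with this worker's most recent rating of them
-- plus the average of every other worker's most recent ratings.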
SELECT
r.id,
wr_rating.weight,
avg_wr_rating
FROM crowdsourcing_requester r
LEFT OUTER JOIN (
SELECT
wrr.target_id,
wrr.weight AS weight
FROM crowdsourcing_workerrequesterrating wrr
INNER JOIN (
SELECT
target_id,
MAX(last_updated) AS max_date
FROM crowdsourcing_workerrequesterrating
WHERE origin_type = 'worker' AND origin_id = $1
GROUP BY target_id
) most_recent
ON wrr.target_id = most_recent.target_id AND wrr.last_updated = most_recent.max_date AND
wrr.origin_type = 'worker'
AND wrr.origin_id = $1
) wr_rating ON wr_rating.target_id = r.profile_id
LEFT OUTER JOIN (
SELECT
target_id,
AVG(weight) AS avg_wr_rating
FROM (
SELECT
wrr.target_id,
wrr.weight
FROM crowdsourcing_workerrequesterrating wrr
INNER JOIN (
SELECT
origin_id,
target_id,
MAX(last_updated) AS max_date
FROM crowdsourcing_workerrequesterrating
WHERE origin_id<>$1 AND origin_type='worker'
GROUP BY origin_id, target_id
) most_recent
ON most_recent.origin_id = wrr.origin_id AND most_recent.target_id = wrr.target_id AND
wrr.last_updated = most_recent.max_date
AND wrr.origin_id <> $1 AND wrr.origin_type = 'worker'
) recent_wr_rating
GROUP BY target_id
) avg_wr_rating
ON avg_wr_rating.target_id = r.profile_id;
$$
LANGUAGE SQL
STABLE<|fim▁hole|> RETURNS NULL ON NULL INPUT;
''')
]<|fim▁end|> | |
<|file_name|>test_lmdb_utils.py<|end_file_name|><|fim▁begin|>'''
Created on Jan 06, 2016
@author: kashefy
'''
from nose.tools import assert_greater, assert_equal, assert_is_instance, assert_true
import nideep.iow.lmdb_utils as lu
class TestLMDBConsts:
def test_map_sz(self):
assert_greater(lu.MAP_SZ, 0)
assert_is_instance(lu.MAP_SZ, int)
def test_num_idx_digits(self):
assert_greater(lu.NUM_IDX_DIGITS, 0)
assert_is_instance(lu.NUM_IDX_DIGITS, int)
class TestIdxFormat:
def test_idx_format(self):
assert_greater(len(lu.IDX_FMT), 0)
assert_is_instance(lu.IDX_FMT, str)
assert_true(lu.IDX_FMT.startswith('{'))
assert_true(lu.IDX_FMT.endswith('}'))
def test_idx_format_zero(self):
assert_equal(lu.IDX_FMT.format(0), ''.join(['0'] * lu.NUM_IDX_DIGITS))
def test_idx_format_nonzero(self):
assert_greater(lu.NUM_IDX_DIGITS, 1)
s = ''.join(['0'] * lu.NUM_IDX_DIGITS)
for i in xrange(10):
assert_equal(lu.IDX_FMT.format(i), s[:-1] + '%d' % i)<|fim▁hole|><|fim▁end|> |
for i in xrange(10, 100):
assert_equal(lu.IDX_FMT.format(i), s[:-2] + '%d' % i) |
<|file_name|>connect.js<|end_file_name|><|fim▁begin|>var Q = require('q');
var uuid = require('uuid');
var crypto = require('../../../../crypto/crypto');
function Connect() {
}
var connect = new Connect();
module.exports = connect;
Connect.prototype.init = function(letter, handler) {
console.log(letter);
var deferred = Q.defer();
var back_letter = {
signature: letter.signature,
directive: {
connect: {
init: null
}
}
};
deferred.resolve(back_letter);
return deferred.promise;
};
Connect.prototype.crypto = function(letter, handler) {
let deferred = Q.defer();
let public_str = letter.crypto.public_str;
// use a UUID as the secret
// let secret = uuid.v4();
let secret = '12345678';
let encryptSecret = crypto.publicEncrypt(public_str, secret);
let json = encryptSecret.toJSON();
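// Hybrid-encryption sketch (flow assumed from this module): the peer's RSA
// public key protects the symmetric secret in transit; every later letter is
// then symmetrically ciphered with that secret, as in the test letter below.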
console.log('The secret is:');
console.log(secret);
// console.log(encryptSecret.toString());<|fim▁hole|> setTimeout(function() {
handler.crypto = true;
handler.encryptSecret = secret;
let letter = {
directive: {
test: null
}
};
letter = JSON.stringify(letter);
letter = crypto.cipher(letter, secret);
handler.evenEmitter.emit('letter', letter);
}, 5000);
return deferred.promise;
};<|fim▁end|> | letter.crypto.encryptSecret = JSON.stringify(encryptSecret.toJSON());
deferred.resolve(letter);
|
<|file_name|>bar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | mod foo;
mod baz; |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate libbindgen;
extern crate gcc;
extern crate glob;
use std::env;
use std::path::Path;
fn gen_header() {
let out_dir = env::var("OUT_DIR").unwrap();
let _ = libbindgen::builder()
.header("wrapper.h")
.no_unstable_rust()
// .use_core()
.generate().unwrap()
.write_to_file(Path::new(&out_dir).join("lua.rs"));
}
fn gen_binary() {
let mut conf = gcc::Config::new();
<|fim▁hole|> continue;
}
conf.file(path);
}
conf.compile("liblua.a");
}
fn main() {
gen_header();
gen_binary();
}<|fim▁end|> | for path in glob::glob("../lua/*.c").unwrap().filter_map(Result::ok) {
if path.ends_with("lua.c") || path.ends_with("luac.c") { |
<|file_name|>test_corpora_dictionary.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Unit tests for the `corpora.Dictionary` class.
"""
from collections import Mapping
import logging
import tempfile
import unittest
import os
import os.path
import scipy
import gensim
from gensim.corpora import Dictionary
from six import PY3
from six.moves import zip
# sample data files are located in the same folder
module_path = os.path.dirname(__file__)
def get_tmpfile(suffix):
return os.path.join(tempfile.gettempdir(), suffix)
class TestDictionary(unittest.TestCase):
def setUp(self):
self.texts = [
['human', 'interface', 'computer'],
['survey', 'user', 'computer', 'system', 'response', 'time'],
['eps', 'user', 'interface', 'system'],
['system', 'human', 'system', 'eps'],
['user', 'response', 'time'],
['trees'],
['graph', 'trees'],
['graph', 'minors', 'trees'],
['graph', 'minors', 'survey']]
def testDocFreqOneDoc(self):
texts = [['human', 'interface', 'computer']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}<|fim▁hole|> texts = [['human'], ['human']]
d = Dictionary(texts)
expected = {0: 2}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# three docs
texts = [['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 3}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# four docs
texts = [['human'], ['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 4}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
def testDocFreqForOneDocWithSeveralWord(self):
# two words
texts = [['human', 'cat']]
d = Dictionary(texts)
expected = {0: 1, 1: 1}
self.assertEqual(d.dfs, expected)
# three words
texts = [['human', 'cat', 'minors']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}
self.assertEqual(d.dfs, expected)
def testBuild(self):
d = Dictionary(self.texts)
expected = {0: 2, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 2, 7: 3, 8: 2,
9: 3, 10: 3, 11: 2}
self.assertEqual(d.dfs, expected)
expected = {'computer': 0, 'eps': 8, 'graph': 10, 'human': 1,
'interface': 2, 'minors': 11, 'response': 3, 'survey': 4,
'system': 5, 'time': 6, 'trees': 9, 'user': 7}
self.assertEqual(d.token2id, expected)
def testFilter(self):
d = Dictionary(self.texts)
d.filter_extremes(no_below=2, no_above=1.0, keep_n=4)
expected = {0: 3, 1: 3, 2: 3, 3: 3}
self.assertEqual(d.dfs, expected)
def test_doc2bow(self):
d = Dictionary([["žluťoučký"], ["žluťoučký"]])
# pass a utf8 string
self.assertEqual(d.doc2bow(["žluťoučký"]), [(0, 1)])
# doc2bow must raise a TypeError if passed a string instead of array of strings by accident
self.assertRaises(TypeError, d.doc2bow, "žluťoučký")
# unicode must be converted to utf8
self.assertEqual(d.doc2bow([u'\u017elu\u0165ou\u010dk\xfd']), [(0, 1)])
def test_saveAsText_and_loadFromText(self):
"""`Dictionary` can be saved as textfile and loaded again from textfile. """
tmpf = get_tmpfile('dict_test.txt')
for sort_by_word in [True, False]:
d = Dictionary(self.texts)
d.save_as_text(tmpf, sort_by_word=sort_by_word)
self.assertTrue(os.path.exists(tmpf))
d_loaded = Dictionary.load_from_text(tmpf)
self.assertNotEqual(d_loaded, None)
self.assertEqual(d_loaded.token2id, d.token2id)
def test_from_corpus(self):
"""build `Dictionary` from an existing corpus"""
documents = ["Human machine interface for lab abc computer applications",
"A survey of user opinion of computer system response time",
"The EPS user interface management system",
"System and human system engineering testing of EPS",
"Relation of user perceived response time to error measurement",
"The generation of random binary unordered trees",
"The intersection graph of paths in trees",
"Graph minors IV Widths of trees and well quasi ordering",
"Graph minors A survey"]
stoplist = set('for a of the and to in'.split())
texts = [[word for word in document.lower().split() if word not in stoplist]
for document in documents]
# remove words that appear only once
all_tokens = sum(texts, [])
tokens_once = set(word for word in set(all_tokens) if all_tokens.count(word) == 1)
texts = [[word for word in text if word not in tokens_once]
for text in texts]
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
# Create dictionary from corpus without a token map
dictionary_from_corpus = Dictionary.from_corpus(corpus)
dict_token2id_vals = sorted(dictionary.token2id.values())
dict_from_corpus_vals = sorted(dictionary_from_corpus.token2id.values())
self.assertEqual(dict_token2id_vals, dict_from_corpus_vals)
self.assertEqual(dictionary.dfs, dictionary_from_corpus.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus.num_nnz)
# Create dictionary from corpus with an id=>token map
dictionary_from_corpus_2 = Dictionary.from_corpus(corpus, id2word=dictionary)
self.assertEqual(dictionary.token2id, dictionary_from_corpus_2.token2id)
self.assertEqual(dictionary.dfs, dictionary_from_corpus_2.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus_2.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus_2.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus_2.num_nnz)
# Ensure Sparse2Corpus is compatible with from_corpus
bow = gensim.matutils.Sparse2Corpus(scipy.sparse.rand(10, 100))
dictionary = Dictionary.from_corpus(bow)
self.assertEqual(dictionary.num_docs, 100)
def test_dict_interface(self):
"""Test Python 2 dict-like interface in both Python 2 and 3."""
d = Dictionary(self.texts)
self.assertTrue(isinstance(d, Mapping))
self.assertEqual(list(zip(d.keys(), d.values())), list(d.items()))
# Even in Py3, we want the iter* members.
self.assertEqual(list(d.items()), list(d.iteritems()))
self.assertEqual(list(d.keys()), list(d.iterkeys()))
self.assertEqual(list(d.values()), list(d.itervalues()))
# XXX Do we want list results from the dict members in Py3 too?
if not PY3:
self.assertTrue(isinstance(d.items(), list))
self.assertTrue(isinstance(d.keys(), list))
self.assertTrue(isinstance(d.values(), list))
#endclass TestDictionary
if __name__ == '__main__':
logging.basicConfig(level=logging.WARNING)
unittest.main()<|fim▁end|> | self.assertEqual(d.dfs, expected)
def testDocFreqAndToken2IdForSeveralDocsWithOneWord(self):
# two docs |
<|file_name|>api.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { ParseSourceSpan } from '@angular/compiler';
import * as ts from 'typescript';
export declare enum DiagnosticCategory {
Warning = 0,
Error = 1,
Message = 2,
}
export interface Diagnostic {
message: string;
span?: ParseSourceSpan;
category: DiagnosticCategory;
}
export interface CompilerOptions extends ts.CompilerOptions {
genDir?: string;
basePath?: string;
skipMetadataEmit?: boolean;
strictMetadataEmit?: boolean;
skipTemplateCodegen?: boolean;
flatModuleOutFile?: string;
flatModuleId?: string;
generateCodeForLibraries?: boolean;
annotateForClosureCompiler?: boolean;
annotationsAs?: 'decorators' | 'static fields';
trace?: boolean;
enableLegacyTemplate?: boolean;
}
export interface ModuleFilenameResolver {
/**
* Converts a module name that is used in an `import` to a file path.
* I.e. `path/to/containingFile.ts` containing `import {...} from 'module-name'`.
*/
moduleNameToFileName(moduleName: string, containingFile?: string): string | null;
/**
* Converts a file path to a module name that can be used as an `import`.
* I.e. `path/to/importedFile.ts` should be imported by `path/to/containingFile.ts`.
*
* See ImportResolver.
*/
fileNameToModuleName(importedFilePath: string, containingFilePath: string): string | null;
getNgCanonicalFileName(fileName: string): string;
assumeFileExists(fileName: string): void;
}
export interface CompilerHost extends ts.CompilerHost, ModuleFilenameResolver {
/**<|fim▁hole|> * cause a diagnostics error or an exception to be thrown.
*
* If `loadResource()` is not provided, `readFile()` will be called to load the resource.
*/
readResource?(fileName: string): Promise<string> | string;
}
export declare enum EmitFlags {
DTS = 1,
JS = 2,
Metadata = 4,
I18nBundle = 8,
Summary = 16,
Default = 3,
All = 31,
}
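// Hypothetical usage sketch (only the names declared in this file are real;
// `createProgram` and its host/options arguments are assumed to exist elsewhere):
//
//   const program: Program = createProgram({rootNames, options, host});
//   await program.loadNgStructureAsync();
//   const errors = program.getNgSemanticDiagnostics();
//   if (errors.length === 0) {
//       program.emit({emitFlags: EmitFlags.Default});
//   }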
export interface Program {
/**
* Retrieve the TypeScript program used to produce semantic diagnostics and emit the sources.
*
* Angular structural information is required to produce the program.
*/
getTsProgram(): ts.Program;
/**
* Retrieve options diagnostics for the TypeScript options used to create the program. This is
* faster than calling `getTsProgram().getOptionsDiagnostics()` since it does not need to
* collect Angular structural information to produce the errors.
*/
getTsOptionDiagnostics(cancellationToken?: ts.CancellationToken): ts.Diagnostic[];
/**
* Retrieve options diagnostics for the Angular options used to create the program.
*/
getNgOptionDiagnostics(cancellationToken?: ts.CancellationToken): Diagnostic[];
/**
* Retrieve the syntax diagnostics from TypeScript. This is faster than calling
* `getTsProgram().getSyntacticDiagnostics()` since it does not need to collect Angular structural
* information to produce the errors.
*/
getTsSyntacticDiagnostics(sourceFile?: ts.SourceFile, cancellationToken?: ts.CancellationToken): ts.Diagnostic[];
/**
* Retrieve the diagnostics that verify whether the structure of an Angular application is
* correctly formed. This includes validating Angular annotations and the syntax of referenced
* and embedded HTML and CSS.
*
* Note it is important to display TypeScript semantic diagnostics along with Angular
* structural diagnostics, as an error in the program structure might cause errors detected in
* semantic analysis and a semantic error might cause errors in specifying the program structure.
*
* Angular structural information is required to produce these diagnostics.
*/
getNgStructuralDiagnostics(cancellationToken?: ts.CancellationToken): Diagnostic[];
/**
* Retrieve the semantic diagnostics from TypeScript. This is equivalent to calling
* `getTsProgram().getSemanticDiagnostics()` directly and is included for completeness.
*/
getTsSemanticDiagnostics(sourceFile?: ts.SourceFile, cancellationToken?: ts.CancellationToken): ts.Diagnostic[];
/**
* Retrieve the Angular semantic diagnostics.
*
* Angular structural information is required to produce these diagnostics.
*/
getNgSemanticDiagnostics(fileName?: string, cancellationToken?: ts.CancellationToken): Diagnostic[];
/**
* Load Angular structural information asynchronously. If this method is not called then the
* Angular structural information, including referenced HTML and CSS files, is loaded
* synchronously. If the supplied Angular compiler host returns a promise from `loadResource()`,
* skipping this call will cause a diagnostic error message to be produced or `getTsProgram()` or `emit` to throw.
*/
loadNgStructureAsync(): Promise<void>;
/**
* Retrieve the lazy route references in the program.
*
* Angular structural information is required to produce these routes.
*/
getLazyRoutes(cancellationToken?: ts.CancellationToken): {
[route: string]: string;
};
/**
* Emit the files requested by emitFlags implied by the program.
*
* Angular structural information is required to emit files.
*/
emit({emitFlags, cancellationToken}: {
emitFlags: EmitFlags;
cancellationToken?: ts.CancellationToken;
}): void;
}<|fim▁end|> | * Load a referenced resource either statically or asynchronously. If the host returns a
* `Promise<string>` it is assumed the user of the corresponding `Program` will call
* `loadNgStructureAsync()`. Returning `Promise<string>` outside `loadNgStructureAsync()` will |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Layer'
db.create_table('layers_layer', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 10, 2, 0, 0))),
('updated', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 10, 2, 0, 0))),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50)),
('description', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
('text', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('is_published', self.gf('django.db.models.fields.BooleanField')(default=True)),
('is_external', self.gf('django.db.models.fields.BooleanField')(default=False)),
('center', self.gf('django.contrib.gis.db.models.fields.PointField')(null=True, blank=True)),
('area', self.gf('django.contrib.gis.db.models.fields.PolygonField')(null=True, blank=True)),
('zoom', self.gf('django.db.models.fields.SmallIntegerField')(default=12)),
('organization', self.gf('django.db.models.fields.CharField')(max_length=255)),
('website', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),<|fim▁hole|> ('data', self.gf(u'django_hstore.fields.DictionaryField')(null=True, blank=True)),
))
db.send_create_signal('layers', ['Layer'])
# Adding M2M table for field mantainers on 'Layer'
m2m_table_name = db.shorten_name('layers_layer_mantainers')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('layer', models.ForeignKey(orm['layers.layer'], null=False)),
('profile', models.ForeignKey(orm['profiles.profile'], null=False))
))
db.create_unique(m2m_table_name, ['layer_id', 'profile_id'])
def backwards(self, orm):
# Deleting model 'Layer'
db.delete_table('layers_layer')
# Removing M2M table for field mantainers on 'Layer'
db.delete_table(db.shorten_name('layers_layer_mantainers'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'layers.layer': {
'Meta': {'object_name': 'Layer'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {'null': 'True', 'blank': 'True'}),
'center': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'data': (u'django_hstore.fields.DictionaryField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_external': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mantainers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Profile']", 'symmetrical': 'False', 'blank': 'True'}),
'minimum_distance': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'new_nodes_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'zoom': ('django.db.models.fields.SmallIntegerField', [], {'default': '12'})
},
'profiles.profile': {
'Meta': {'object_name': 'Profile'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
}
}
complete_apps = ['layers']<|fim▁end|> | ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('minimum_distance', self.gf('django.db.models.fields.IntegerField')(default=0)),
('new_nodes_allowed', self.gf('django.db.models.fields.BooleanField')(default=True)), |
<|file_name|>Requests.js<|end_file_name|><|fim▁begin|>/*
Copyright 2015 Ricardo Tubio-Pardavila
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
angular.module('snRequestsDirective', [
'ngMaterial',
'snCommonFilters', 'snApplicationBus',
'snRequestsFilters', 'snControllers', 'snJRPCServices'
])
.controller('snRequestSlotCtrl', [
'$scope', '$mdDialog', '$mdToast', 'satnetRPC', 'snDialog', 'snMessageBus',
/**
* Controller function for handling the SatNet requests dialog.
*
* @param {Object} $scope $scope for the controller
*/
function ($scope, $mdDialog, $mdToast, satnetRPC, snDialog, snMessageBus) {
$scope.gui = {
groundstation_id: '',
spacecraft_id: '',
primary: '',
hide: {
accept: true,
drop: true,
deny: true,
},
slot: {}
};
/**
* Function that handles the process of accepting a given request that
* has already been selected.
*/
$scope.accept = function () {
satnetRPC.rCall(
'gs.operational.accept', [
$scope.groundstation_id, [$scope.slot.identifier]
]
).then(function (results) {
snDialog.toastAction('Confirmed slot #', $scope.slot.identifier);
$scope.slot.state = 'RESERVED';
snMessageBus.send(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.accepted.id, {
gs_id: $scope.gui.groundstation_id,
sc_id: $scope.gui.spacecraft_id,
primary: $scope.gui.primary,
slot: $scope.gui.slot
}
);
}).catch(function (c) {
snDialog.exception('gs.operational.accept', '', c);
});
};
/**
* Function that handles the process of denying a given request that
* has already been selected.
*
* TODO :: Temporary, it has been linked to the drop function so that
* the slot does not stay forever with the DENIED state.
*/
$scope.deny = function () {
satnetRPC.rCall(
'gs.operational.drop', [
$scope.groundstation_id, [$scope.slot.identifier]
]
).then(function (results) {
snDialog.toastAction('Denied slot #', $scope.slot.identifier);
$scope.slot.state = 'FREE';
snMessageBus.send(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.denied.id, {
gs_id: $scope.gui.groundstation_id,
sc_id: $scope.gui.spacecraft_id,
primary: $scope.gui.primary,
slot: $scope.gui.slot
}
);
}).catch(function (c) {
snDialog.exception('gs.operational.drop', '', c);
});
};
/**
* Function that handles the process of dropping a given request that
* has already been booked.
*
* IMPORTANT: This function works both for spacecraft and for
* groundstation slots; therefore, it carries an inherent
* additional level of complexity in order to handle both cases.
*/
$scope.drop = function () {
var rpc = ($scope.gui.primary === 'groundstation') ?
'gs.operational.drop' : 'sc.cancel',
segment_id = ($scope.gui.primary === 'groundstation') ?
$scope.groundstation_id : $scope.spacecraft_id;
satnetRPC.rCall(
rpc, [segment_id, [$scope.slot.identifier]]
).then(function (results) {
snDialog.toastAction('Dropped slot #', $scope.slot.identifier);
$scope.slot.state = 'FREE';
snMessageBus.send(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.dropped.id, {
gs_id: $scope.gui.groundstation_id,
sc_id: $scope.gui.spacecraft_id,
primary: $scope.gui.primary,
slot: $scope.gui.slot
}
);
}).catch(function (c) {
snDialog.exception(rpc, '', c);
});
};
/**
* Function that returns whether o not the "accept" button should be
* displayed, taking into account the state of the controller.
*/
$scope.showAccept = function () {
return ($scope.gui.slot.state === 'SELECTED') &&
!($scope.gui.hide.accept);
};
/**
* Function that returns whether o not the "deny" button should be
* displayed, taking into account the state of the controller.
*/
$scope.showDeny = function () {
return ($scope.gui.slot.state === 'SELECTED') &&
!($scope.gui.hide.deny);
};
/**
* Function that returns whether o not the "drop" button should be
* displayed, taking into account the state of the controller.
*/
$scope.showDrop = function () {
if ($scope.gui.primary === 'spacecraft') {
return !($scope.gui.hide.drop) && (
($scope.gui.slot.state === 'SELECTED') ||
($scope.gui.slot.state === 'RESERVED')
);
} else {
return !($scope.gui.hide.drop) && (
($scope.gui.slot.state === 'RESERVED')
);
}
};
/**
* Initialization of the controller.
*/
$scope.init = function () {
$scope.gui.groundstation_id = $scope.gs;
$scope.gui.spacecraft_id = $scope.sc;
$scope.gui.primary = $scope.primary;
$scope.gui.slot = $scope.slot;
if ( $scope.gui.primary === 'spacecraft' ) {
$scope.gui.hide.drop = false;
} else {
$scope.gui.hide.accept = false;
$scope.gui.hide.deny = false;
$scope.gui.hide.drop = false;
}
};
$scope.init();
}
])
.directive('snRequestSlot',
/**
* Function that creates the directive itself returning the object required
* by Angular.
*
* @returns {Object} Object directive required by Angular, with restrict
* and templateUrl
*/
function () {
return {
restrict: 'E',
templateUrl: 'operations/templates/requests/slot.html',
controller: 'snRequestSlotCtrl',
scope: {
sc: '@',
gs: '@',
primary: '@',
slot: '='
}
};
}
)
.controller('snRequestsDlgCtrl', [
'$scope', '$log', '$mdDialog', 'satnetRPC','snDialog', 'snMessageBus',
/**
* Controller function for handling the SatNet requests dialog.
*
* @param {Object} $scope $scope for the controller
*/
function ($scope, $log, $mdDialog, satnetRPC, snDialog, snMessageBus) {
$scope.events = {
requests: {
accepted: {
id: snMessageBus.createName(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.accepted.id
)
},
denied: {
id: snMessageBus.createName(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.denied.id
)
},
dropped: {
id: snMessageBus.createName(
snMessageBus.CHANNELS.requests.id,
snMessageBus.EVENTS.dropped.id
)
}
}
};
/**
* This function finds the given slot within the dictionary/array of
* slots within this controller.
*
* @param {String} segmentId Identifier of the segment
* @param {String} slotId Identifier of the slot
*/
$scope._findSlot = function (segmentId, slotId) {
var slots = $scope.gui.slots[segmentId];
if ((slots === undefined) || (slots.length === 0)) {
throw 'No slots for ss = ' + segmentId;
}<|fim▁hole|> return {
index: i,
slot: slots[i]
};
}
}
throw 'Slot not found for ss = ' + segmentId;
};
/**
* Updates the slots dictionary when the slot that triggered the event
* was updated to the "FREE" state.
*
* @param {Object} data The data object attached to the event
*/
$scope._updateFree = function (data) {
var ss_id = (data.primary === 'spacecraft') ?
data.gs_id: data.sc_id,
other_ss_id = (data.primary === 'spacecraft') ?
data.sc_id: data.gs_id,
slot = $scope._findSlot(ss_id, data.slot.identifier),
slot_other = $scope._findSlot(
other_ss_id, data.slot.identifier
);
$scope.gui.slots[ss_id].splice(slot.index, 1);
$scope.gui.slots[other_ss_id].splice(slot_other.index, 1);
};
/**
* Updates the slots dictionary when the slot that triggered the event
* was not updated to the "FREE" state.
*
* @param {Object} data The data object attached to the event
*/
$scope._updateNonFree = function (data) {
var ss_id = (data.primary === 'spacecraft') ?
data.gs_id: data.sc_id,
slot = $scope._findSlot(ss_id, data.slot.identifier);
slot.slot.state = data.slot.state;
};
/**
* CALLBACK
* This function is the callback that handles the event triggered
* whenever a request slot has been accepted, canceled or denied.
*
* @param {String} event The name of the event
* @param {Object} data The data object generated by the event
*/
$scope._updateRequestCb = function (event, data) {
try {
if (data.slot.state === 'FREE') { $scope._updateFree(data); }
else { $scope._updateNonFree(data); }
} catch (e) { $log.info(e); }
};
$scope.$on(
$scope.events.requests.accepted.id, $scope._updateRequestCb
);
$scope.$on(
$scope.events.requests.denied.id, $scope._updateRequestCb
);
$scope.$on(
$scope.events.requests.dropped.id, $scope._updateRequestCb
);
$scope.gui = {
gss: [],
scs: [],
slots: {},
filtered: {}
};
/**
* Function that closes the dialog.
*/
$scope.close = function () { $mdDialog.hide(); };
/**
* This function is used to check whether the given slot has to be
* discarded from amongst the other slots or not.
*
* @param {Object} slot The slot to be checked
* @returns {Boolean} 'true' if the slot has to be discarded
*/
$scope._filterByState = function(slot) {
return (slot.state !== 'SELECTED') && (slot.state !== 'RESERVED');
};
/**
* This function processes the slots received from the server in order
* to adapt them to a more JavaScript "friendly" data structure. It
* stores the results directly in the controller's data section.
*
* @param {String} segmentId Identifier of the segment
* @param {Object} results Object with the results from the server
*/
$scope._processSlots = function (segmentId, results) {
$scope.gui.slots[segmentId] = [];
if ((results === null) || (angular.equals({}, results))) {
return;
}
var ss_id = Object.keys(results)[0],
slots = results[ss_id];
for (var i = 0, L = slots.length; i < L; i++) {
if ($scope._filterByState(slots[i])) {continue;}
slots[i].segment_id = ss_id;
$scope.gui.slots[segmentId].push(slots[i]);
}
};
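// Illustrative shape (hypothetical identifiers): a server response such as
//   { 'sc-test': [ {identifier: 'slot-1', state: 'RESERVED', ...} ] }
// processed for segmentId 'gs-test' ends up stored as
//   $scope.gui.slots['gs-test'] =
//       [ {identifier: 'slot-1', state: 'RESERVED', segment_id: 'sc-test', ...} ];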
/**
* This function retrieves the operational slots from the server for a
* given segment and stores them internally in a single list for the
* controller.
* IMPORTANT: It processes the list so that it adds the reference to
* the other segment related in the slot by placing its id inside the
* object of the slot rather than as a key to access the slot.
* IMPORTANT 2: It filters out all the slots whose states are neither
* 'SELECTED' nor 'RESERVED'.
*
* @param segmentType String that indicates whether the reference
* segment is a ground station ('gs') or a
* spacecraft ('sc')
* @param segmentId String Identifier of the segment
*/
$scope._pullSlots = function (segmentType, segmentId) {
var rpc_name = segmentType + '.operational';
satnetRPC.rCall(rpc_name, [segmentId]).then(function (results) {
$scope._processSlots(segmentId, results);
}).catch(function (cause) {
snDialog.exception(segmentType + '.operational', '-', cause);
});
};
/**
* Retrieves the slots for all the ground stations owned by the
* currently logged-in user.
*/
$scope._pullGsSlots = function () {
satnetRPC.rCall('gs.list.mine', []).then(function (results) {
$scope.gui.gss = results;
for (var i = 0, l = $scope.gui.gss.length;i < l; i++) {
$scope._pullSlots('gs', $scope.gui.gss[i]);
}
}).catch(function (cause) {
snDialog.exception('gs.list.mine', '-', cause);
});
};
/**
* Retrieves the slots for all the spacecraft owned by the
* currently logged-in user.
*/
$scope._pullScSlots = function () {
satnetRPC.rCall('sc.list.mine', []).then(function (results) {
$scope.gui.scs = results;
for (var i = 0, l = $scope.gui.scs.length; i < l; i++ ) {
$scope._pullSlots('sc', $scope.gui.scs[i]);
}
}).catch(function (cause) {
snDialog.exception('sc.list.mine', '-', cause);
});
};
/**
* Initialization of the controller.
*/
$scope.init = function () {
$scope._pullGsSlots();
$scope._pullScSlots();
};
$scope.init();
}
])
.controller('snRequestsCtrl', [
'$scope', '$mdDialog',
/**
* Controller function for opening the SatNet requests dialog.
*
* @param {Object} $scope $scope for the controller
* @param {Object} $mdDialog Angular material Dialog service
*/
function ($scope, $mdDialog) {
/**
* Function that opens the dialog when the snRequests button is
* clicked.
*/
$scope.openDialog = function () {
$mdDialog.show({
templateUrl: 'operations/templates/requests/list.html',
controller: 'snRequestsDlgCtrl'
});
};
}
])
.directive('snRequests',
/**
* Function that creates the directive itself returning the object required
* by Angular.
*
* @returns {Object} Object directive required by Angular, with restrict
* and templateUrl
*/
function () {
return {
restrict: 'E',
templateUrl: 'operations/templates/requests/menu.html',
controller: 'snRequestsCtrl'
};
}
);<|fim▁end|> |
for (var i = 0, L = slots.length; i < L; i++) {
if (slots[i].identifier === slotId) { |
<|file_name|>test_unit_expectmax.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
import numpy as np
import pytest
import sys
import chronostar.likelihood
sys.path.insert(0,'..')
from chronostar import expectmax as em
from chronostar.synthdata import SynthData
from chronostar.component import SphereComponent
from chronostar import tabletool
from chronostar import expectmax
import chronostar.synthdata as syn
# import chronostar.retired2.measurer as ms
# import chronostar.retired2.converter as cv
#
# def test_calcMedAndSpan():
# """
# Test that the median, and +- 34th percentiles is found correctly
# """
# dx = 10.
# dv = 5.
# dummy_mean = np.array([10,10,10, 5, 5, 5,np.log(dx),np.log(dv),20])
# dummy_std = np.array([1.,1.,1.,1.,1.,1.,0.5, 0.5, 3.])
# assert len(dummy_mean) == len(dummy_std)
# npars = len(dummy_mean)
#
# nsteps = 10000
# nwalkers = 18
#
# dummy_chain = np.array([np.random.randn(nsteps)*std + mean
# for (std, mean) in zip(dummy_std, dummy_mean)]).T
# np.repeat(dummy_chain, 18, axis=0).reshape(nwalkers,nsteps,npars)
#
# med_and_span = em.calcMedAndSpan(dummy_chain)
# assert np.allclose(dummy_mean, med_and_span[:,0], atol=0.1)
# approx_stds = 0.5*(med_and_span[:,1] - med_and_span[:,2])
# assert np.allclose(dummy_std, approx_stds, atol=0.1)
def test_calcMembershipProbs():
"""
Even more basic. Checks that differing overlaps are
correctly mapped to memberships.
"""
# case 1
star_ols = [10, 10]
assert np.allclose([.5,.5], em.calc_membership_probs(np.log(star_ols)))
# case 2
star_ols = [10, 30]
assert np.allclose([.25,.75], em.calc_membership_probs(np.log(star_ols)))
# case 3
star_ols = [10, 10, 20]
assert np.allclose([.25, .25, .5],
em.calc_membership_probs(np.log(star_ols)))
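# Worked arithmetic behind the cases above (memberships are the raw overlaps
# normalised to sum to one): case 2 gives 10/(10+30) = 0.25 and 30/40 = 0.75;
# case 3 gives 10/40 = 0.25, 10/40 = 0.25 and 20/40 = 0.5.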
def test_expectation():
"""
Super basic: generates stars from two separate associations and checks
membership allocation is correct
"""
age = 1e-5
ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
comp1 = SphereComponent(ass_pars1)
ass_pars2 = np.array([100., 0, 0, 20, 0, 0, 5., 2., age])
comp2 = SphereComponent(ass_pars2)
starcounts = [100,100]
synth_data = SynthData(pars=[ass_pars1, ass_pars2],
starcounts=starcounts)
synth_data.synthesise_everything()
tabletool.convert_table_astro2cart(synth_data.table)
true_memb_probs = np.zeros((np.sum(starcounts), 2))
true_memb_probs[:starcounts[0], 0] = 1.
true_memb_probs[starcounts[0]:, 1] = 1.<|fim▁hole|> # star_means, star_covs = tabletool.buildDataFromTable(synth_data.astr_table)
# all_lnols = em.getAllLnOverlaps(
# synth_data.astr_table, [comp1, comp2]
# )
fitted_memb_probs = em.expectation(
tabletool.build_data_dict_from_table(synth_data.table),
[comp1, comp2]
)
assert np.allclose(true_memb_probs, fitted_memb_probs, atol=1e-10)
'''
@pytest.mark.skip
def test_fit_many_comps_gradient_descent_with_multiprocessing():
"""
Added by MZ 2020 - 07 - 13
Test if maximisation works when using gradient descent and multiprocessing.
"""
age = 1e-5
ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
comp1 = SphereComponent(ass_pars1)
starcounts = [100,]
synth_data = SynthData(pars=[ass_pars1,],
starcounts=starcounts)
synth_data.synthesise_everything()
tabletool.convert_table_astro2cart(synth_data.table)
true_memb_probs = np.zeros((np.sum(starcounts), 2))
true_memb_probs[:starcounts[0], 0] = 1.
true_memb_probs[starcounts[0]:, 1] = 1.
ncomps = len(starcounts)
best_comps, med_and_spans, memb_probs = \
expectmax.fit_many_comps(synth_data.table, ncomps,
rdir='test_gradient_descent_multiprocessing',
#~ init_memb_probs=None,
#~ init_comps=None,
trace_orbit_func=None,
optimisation_method='Nelder-Mead',
nprocess_ncomp = True,
)
'''
@pytest.mark.skip(reason='Too long for unit tests. Put this in integration instead')
def test_maximisation_gradient_descent_with_multiprocessing_tech():
"""
Added by MZ 2020 - 07 - 13
Test if maximisation works when using gradient descent and multiprocessing.
NOTE: this is not a test of whether maximisation returns appropriate results;
it only tests that the code runs without errors. This is mainly to test
multiprocessing.
"""
age = 1e-5
ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
comp1 = SphereComponent(ass_pars1)
starcounts = [100,]
synth_data = SynthData(pars=[ass_pars1,],
starcounts=starcounts)
synth_data.synthesise_everything()
tabletool.convert_table_astro2cart(synth_data.table)
true_memb_probs = np.zeros((np.sum(starcounts), 1))
true_memb_probs[:starcounts[0], 0] = 1.
#~ true_memb_probs[starcounts[0]:, 1] = 1.
ncomps = len(starcounts)
noise = np.random.rand(ass_pars1.shape[0])*5
all_init_pars = [ass_pars1 + noise]
new_comps, all_samples, _, all_init_pos, success_mask =\
expectmax.maximisation(synth_data.table, ncomps,
true_memb_probs, 100, 'iter00',
all_init_pars,
optimisation_method='Nelder-Mead',
nprocess_ncomp=True,
)
# TODO: test if new_comps, all_samples, _, all_init_pos, success_mask are of the right format.
# def test_background_overlaps():
# """
# Author: Marusa Zerjal, 2019 - 05 - 26
# Compare background overlap with KDE and background overlap with tiny covariance matrix
# :return:
# """
# background_means = tabletool.build_data_dict_from_table(kernel_density_input_datafile,
# only_means=True,
# )
# ln_bg_ols_kde = em.get_kernel_densities(background_means,
# # star_means, )
if __name__=='__main__':
test_maximisation_gradient_descent_with_multiprocessing_tech()<|fim▁end|> | |
<|file_name|>bug244.go<|end_file_name|><|fim▁begin|>// $G $D/$F.go && $L $F.$A && ./$A.out
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
var nf int
var ng int
func f() (int, int, int) {
nf++
return 1, 2, 3
}<|fim▁hole|>}
var x, y, z = f()
var m = make(map[int]int)
var v, ok = m[g()]
func main() {
if x != 1 || y != 2 || z != 3 || nf != 1 || v != 0 || ok != false || ng != 1 {
println("x=", x, " y=", y, " z=", z, " nf=", nf, " v=", v, " ok=", ok, " ng=", ng)
panic("fail")
}
}<|fim▁end|> |
func g() int {
ng++
return 4 |
<|file_name|>directives.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('myApp.view1')
.directive('articolo', [function(){
return{
restrict: "EAC",
scope: {
articolo: "=news",
searchKey: "=searchKey"
},<|fim▁hole|><|fim▁end|> | templateUrl: 'app/components/view1/view1_articolo.html'
}
}])
; |
<|file_name|>run_all.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
from openfisca_france_data import default_config_files_directory as config_files_directory
from openfisca_france_data.input_data_builders.build_openfisca_survey_data import ( # analysis:ignore
step_01_pre_processing as pre_processing,
step_02_imputation_loyer as imputation_loyer,
step_03_fip as fip,
step_04_famille as famille,
step_05_foyer as foyer,
step_06_rebuild as rebuild,
step_07_invalides as invalides,
step_08_final as final,
)
from openfisca_france_data.temporary import get_store
from openfisca_survey_manager.surveys import Survey
from openfisca_survey_manager.survey_collections import SurveyCollection
log = logging.getLogger(__name__)
<|fim▁hole|> pre_processing.create_enfants_a_naitre(year = year)
# try:
# imputation_loyer.imputation_loyer(year = year)
# except Exception, e:
# log.info('Do not impute loyer because of the following error: \n {}'.format(e))
# pass
fip.create_fip(year = year)
famille.famille(year = year)
foyer.sif(year = year)
foyer.foyer_all(year = year)
rebuild.create_totals_first_pass(year = year)
rebuild.create_totals_second_pass(year = year)
rebuild.create_final(year = year)
invalides.invalide(year = year)
final.final(year = year, check = check)
temporary_store = get_store(file_name = 'erfs')
data_frame = temporary_store['input_{}'.format(year)]
# Saving the data_frame
openfisca_survey_collection = SurveyCollection(name = "openfisca", config_files_directory = config_files_directory)
output_data_directory = openfisca_survey_collection.config.get('data', 'output_directory')
survey_name = "openfisca_data_{}".format(year)
table = "input"
hdf5_file_path = os.path.join(os.path.dirname(output_data_directory), "{}.h5".format(survey_name))
survey = Survey(
name = survey_name,
hdf5_file_path = hdf5_file_path,
)
survey.insert_table(name = table, data_frame = data_frame)
openfisca_survey_collection.surveys.append(survey)
collections_directory = openfisca_survey_collection.config.get('collections', 'collections_directory')
json_file_path = os.path.join(collections_directory, 'openfisca.json')
openfisca_survey_collection.dump(json_file_path = json_file_path)
if __name__ == '__main__':
import time
start = time.time()
logging.basicConfig(level = logging.INFO, filename = 'run_all.log', filemode = 'w')
run_all(year = 2009, check = False)
log.info("Script finished after {}".format(time.time() - start))
print time.time() - start<|fim▁end|> | def run_all(year = None, check = False):
assert year is not None
pre_processing.create_indivim_menagem(year = year) |
<|file_name|>en.js<|end_file_name|><|fim▁begin|>{
"CMSMAIN.WARNINGSAVEPAGESBEFOREADDING" : "You have to save a page before adding children underneath it",
"CMSMAIN.CANTADDCHILDREN" : "You can't add children to the selected node",
"CMSMAIN.ERRORADDINGPAGE" : "Error adding page",
"CMSMAIN.FILTEREDTREE" : "Filtered tree to only show changed pages",
"CMSMAIN.ERRORFILTERPAGES" : "Could not filter tree to only show changed pages<br />%s",
"CMSMAIN.ERRORUNFILTER" : "Unfiltered tree",
"CMSMAIN.PUBLISHINGPAGES" : "Publishing pages...",
"CMSMAIN.SELECTONEPAGE" : "Please select at least 1 page.",
"CMSMAIN.ERRORPUBLISHING" : "Error publishing pages",
"CMSMAIN.REALLYDELETEPAGES" : "Do you really want to delete the %s marked pages?",
"CMSMAIN.DELETINGPAGES" : "Deleting pages...",
"CMSMAIN.ERRORDELETINGPAGES": "Error deleting pages",
"CMSMAIN.PUBLISHING" : "Publishing...",<|fim▁hole|> "CMSMAIN.SAVING" : "saving...",
"CMSMAIN.SELECTMOREPAGES" : "You have %s pages selected.\n\nDo you really want to perform this action?",
"CMSMAIN.ALERTCLASSNAME": "The page type will be updated after the page is saved",
"CMSMAIN.URLSEGMENTVALIDATION": "URLs can only be made up of letters, digits and hyphens.",
"AssetAdmin.BATCHACTIONSDELETECONFIRM": "Do you really want to delete %s folders?",
"AssetTableField.REALLYDELETE": "Do you really want to delete the marked files?",
"AssetTableField.MOVING": "Moving %s file(s)",
"CMSMAIN.AddSearchCriteria": "Add Criteria",
"WidgetAreaEditor.TOOMANY": "Sorry, you have reached the maximum number of widgets in this area",
"AssetAdmin.ConfirmDelete": "Do you really want to delete this folder and all contained files?",
"Folder.Name": "Folder name",
"Tree.AddSubPage": "Add new page here",
"Tree.Duplicate": "Duplicate",
"Tree.EditPage": "Edit",
"Tree.ThisPageOnly": "This page only",
"Tree.ThisPageAndSubpages": "This page and subpages",
"Tree.ShowAsList": "Show children as list",
"CMSMain.ConfirmRestoreFromLive": "Do you really want to copy the published content to the draft site?",
"CMSMain.RollbackToVersion": "Do you really want to roll back to version #%s of this page?",
"URLSEGMENT.Edit": "Edit",
"URLSEGMENT.OK": "OK",
"URLSEGMENT.Cancel": "Cancel",
"URLSEGMENT.UpdateURL": "Update URL"
}<|fim▁end|> | "CMSMAIN.RESTORING": "Restoring...",
"CMSMAIN.ERRORREVERTING": "Error reverting to live content", |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>function ajaxChimpCallback(a) {
if ("success" === a.result) {
$(".beta-request-result").show();
$(".beta-request-form").hide();
$(".beta-request-title").hide();
$.featherlight.current().close();
}
	else
	{
		if (a.msg.indexOf("already subscribed") >= 0) {
			$(".beta-request-form").hide();
			$(".beta-request-title").hide();
			$(".beta-request-already-subscribed").show();
		} else {
			$(".beta-request-error").show();
		}
		$(".beta-request-btn").html("Invite me");
	}
};
function contactLightbox()
{
var configuration = ({
afterOpen: function(event)
{
$('body').toggleClass('body-open-modal');
setContactTabindex();
sendContactMessage();
},
afterClose: function(event)
{
$('body').toggleClass('body-open-modal');
}
});
$('body').on('click', '.open-contact-form', function(event)
{
event.preventDefault();
$.featherlight('#contactLightbox', configuration);
});
}
function setContactTabindex()
{
var $form = $('.featherlight-content form.sendingContactMessage');
$form.find('input[name=from_name]').focus().attr('tabindex', 1);
$form.find('input[name=from_email]').attr('tabindex', 2);
$form.find('textarea[name=message]').attr('tabindex', 3);
}
function setBetaTabIndex()<|fim▁hole|>{
var $form = $('.beta-request-form');
$form.find('.first-name').focus().attr('tabindex', 1);
$form.find('.email').attr('tabindex', 2);
}
function sendContactMessage()
{
$('.featherlight-content form.sendingContactMessage').validate({
rules: {
from_name: "required",
from_email: {
required: true,
email: true
},
message: "required"
},
messages: {
from_name: "Please enter your name",
from_email: "Please enter a valid email address",
message: "Please enter a message."
},
submitHandler: function(form, event) {
event.preventDefault();
var $form = $('.featherlight-content form.sendingContactMessage'),
service_id = "default_service",
template_id = "trado_contact_message",
				currentModal = $.featherlight.current(),
				// keep params inside the var list instead of leaking an implicit global
				params = $form.serializeArray().reduce(function(obj, item) {
obj[item.name] = item.value;
return obj;
}, {});
$form.find('input').prop('disabled', true);
$form.find('textarea').prop('disabled', true);
$form.find("button").text("Sending...");
$('#errors, #success').html('');
emailjs.send(service_id,template_id,params)
.then(function(){
$form.find('#success').html('<p>Message has been sent. We will get back to you within 24 hours.</p>');
setTimeout(function(){
currentModal.close();
$form.find('input').prop('disabled', false);
$form.find('textarea').prop('disabled', false);
$form.find("button").text("Send");
}, 5000);
}, function(err) {
$form.find('input').prop('disabled', false);
$form.find('textarea').prop('disabled', false);
$form.find("#errors").html('<p>' + JSON.parse(err.text).service_error + '</p>');
$form.find("button").text("Send");
});
}
});
}
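// Shows/hides the fixed "scrolling" header once the page is scrolled past a
// 150px threshold, and initialises the mobile (slicknav) menu.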
function scrollingNavbar()
{
$(window).on('scroll', function() {
var y_scroll_pos = window.pageYOffset;
var scroll_pos_test = 150; // set to whatever you want it to be
if(y_scroll_pos > scroll_pos_test)
{
$('header.scrolling').fadeIn();
$('#home-layout .slicknav_menu').addClass('home-scrolling');
}
else
{
$('header.scrolling').stop().fadeOut();
$('#home-layout .slicknav_menu').removeClass('home-scrolling');
}
});
$('.menu').slicknav({
label: "",
brand: "<a href='/'><img src=\"https://dlczmkt02tnnw.cloudfront.net/trado-promo/assets/img/cropped.png\" height=\"100\"></a>"
});
}
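// Wires the Mailchimp beta-request form (via jquery.ajaxchimp) and shows the
// newsletter lightbox once per visitor per day, tracked by the 'tradoPopup' cookie.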
function betaLightbox()
{
$(".beta-request-form").ajaxChimp({
url: "https://tomdallimore.us9.list-manage.com/subscribe/post?u=b141eef8b30b7dc5813bd752a&id=95c7eadbb9",
callback: ajaxChimpCallback
});
$(".beta-request-form").submit(function() {
ga("send", "event", "invite", "request");
$(".beta-request-btn").html("<i class='fa fa-spinner fa-spin'></i>");
$(".beta-request-error").hide();
$(".beta-request-already-subscribed").hide();
});
if (!readCookie('tradoPopup'))
{
var configuration = ({
afterOpen: function(event)
{
$('body').toggleClass('body-open-modal');
setBetaTabIndex();
sendContactMessage();
},
afterClose: function(event)
{
$('body').toggleClass('body-open-modal');
}
});
setTimeout( function()
{
$.featherlight('#newsletterLightbox', configuration);
createCookie('tradoPopup','1',1);
}, 3000);
}
}
$(document).ready(function() {
contactLightbox();
betaLightbox();
scrollingNavbar();
if(!$('html').hasClass('touch'))
{
$(".first-name").first().focus();
}else{
bouncefix.add('html');
}
$('[data-ga="true"]').click(function()
{
var dataCategory = $(this).attr('data-event-category'),
dataAction = $(this).attr('data-event-action');
if(dataCategory == '' || dataAction == '')
{
return false;
}
else
{
ga("send", "event", dataCategory, dataAction);
}
});
});
jQuery.fn.capitalize = function() {
return $(this).each(function(a, b) {
$(b).keyup(function(a) {
var b = a.target,
c = $(this).val(),
d = b.selectionStart,
e = b.selectionEnd;
$(this).val(c.replace(/^(.)|(\s|\-)(.)/g, function(a) {
return a.toUpperCase()
})), b.setSelectionRange(d, e)
})
}), this
};
$(".first-name").capitalize();
$('#documentation .content, #documentation .sidebar').theiaStickySidebar(
{
additionalMarginTop: 120
});
// cookies
function createCookie(name,value,days) {
var expires = "";
if (days) {
var date = new Date();
date.setTime(date.getTime() + (days*24*60*60*1000));
expires = "; expires=" + date.toUTCString();
}
document.cookie = name + "=" + value + expires + "; path=/";
}
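// Illustrative usage of the cookie helpers (values are examples only):
//   createCookie('tradoPopup', '1', 1); // persist a one-day flag
//   readCookie('tradoPopup');           // -> "1" until the cookie expires, else null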
function readCookie(name) {
var nameEQ = name + "=";
var ca = document.cookie.split(';');
for(var i=0;i < ca.length;i++) {
var c = ca[i];
while (c.charAt(0)==' ') c = c.substring(1,c.length);
if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length,c.length);
}
return null;
}<|fim▁end|> | |
<|file_name|>contratoservidor.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required, permission_required,\
user_passes_test
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context import RequestContext
from project.tramitacao.forms import FormTipoCaixa
from django.http import HttpResponseRedirect
from project.tramitacao.models import Tbcontratoservidor, Tbtipocaixa
# AuthUser is referenced below but was never imported in the original file;
# this import path is an assumption based on the surrounding project layout.
from project.tramitacao.models import AuthUser
from django.contrib import messages
from project.tramitacao.admin import verificar_permissao_grupo
from django.http.response import HttpResponse
from project.tramitacao.relatorio_base import relatorio_csv_base, relatorio_ods_base,\
relatorio_ods_base_header, relatorio_pdf_base,\
relatorio_pdf_base_header_title, relatorio_pdf_base_header
from odslib import ODS
nome_relatorio = "relatorio_tipo_caixa"
response_consulta = "/sicop/restrito/tipo_caixa/consulta/"
titulo_relatorio = "Relatorio dos Tipos de Caixa"
planilha_relatorio = "Tipos de Caixa"
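# CRUD and report (PDF/ODS/CSV) views. Each "consulta" view stores its result
# set in the session under `nome_relatorio` so the report views below can
# reuse it without re-querying the database.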
@permission_required('sicop.tipo_caixa_consulta', login_url='/excecoes/permissao_negada/', raise_exception=True)
def consulta(request):
if request.method == "POST":
nome = request.POST['nmcontrato']
lista = Tbcontratoservidor.objects.all()#.filter( nmtipocaixa__icontains=nome, tbdivisao__id = AuthUser.objects.get( pk = request.user.id ).tbdivisao.id )
else:
lista = Tbcontratoservidor.objects.all()
lista = lista.order_by( 'nmcontrato' )
    # store the query result in the session, preparing for the report/PDF
request.session['relatorio_tipo_caixa'] = lista
return render_to_response('controle/servidor/contratoservidor/consulta.html' ,{'lista':lista}, context_instance = RequestContext(request))
@permission_required('sicop.tipo_caixa_cadastro', login_url='/excecoes/permissao_negada/', raise_exception=True)
def cadastro(request):
if request.method == "POST":
next = request.GET.get('next', '/')
if validacao(request):
f_tipocaixa = Tbtipocaixa(
nmtipocaixa = request.POST['nmtipocaixa'],
desctipocaixa = request.POST['desctipocaixa'],
tbdivisao = AuthUser.objects.get( pk = request.user.id ).tbdivisao
)
f_tipocaixa.save()
if next == "/":
return HttpResponseRedirect("/sicop/restrito/tipo_caixa/consulta/")
else:
return HttpResponseRedirect( next )
return render_to_response('sicop/restrito/tipo_caixa/cadastro.html',{}, context_instance = RequestContext(request))
@permission_required('sicop.tipo_caixa_edicao', login_url='/excecoes/permissao_negada/', raise_exception=True)
def edicao(request, id):
instance = get_object_or_404(Tbtipocaixa, id=id)
if request.method == "POST":
if validacao(request):
f_tipocaixa = Tbtipocaixa(
id = instance.id,
nmtipocaixa = request.POST['nmtipocaixa'],
desctipocaixa = request.POST['desctipocaixa'],
tbdivisao = AuthUser.objects.get( pk = request.user.id ).tbdivisao
)
f_tipocaixa.save()
return HttpResponseRedirect("/sicop/restrito/tipo_caixa/edicao/"+str(id)+"/")
return render_to_response('sicop/restrito/tipo_caixa/edicao.html', {"tipocaixa":instance}, context_instance = RequestContext(request))
@permission_required('sicop.tipo_caixa_consulta', login_url='/excecoes/permissao_negada/', raise_exception=True)
def relatorio_pdf(request):
    # build the list object with the fields to show in the report/PDF
lista = request.session[nome_relatorio]
if lista:
response = HttpResponse(mimetype='application/pdf')
doc = relatorio_pdf_base_header(response, nome_relatorio)
elements=[]
dados = relatorio_pdf_base_header_title(titulo_relatorio)
dados.append( ('NOME','DESCRICAO') )
for obj in lista:
dados.append( ( obj.nmtipocaixa , obj.desctipocaixa ) )
return relatorio_pdf_base(response, doc, elements, dados)
else:
return HttpResponseRedirect(response_consulta)
@permission_required('sicop.tipo_caixa_consulta', login_url='/excecoes/permissao_negada/', raise_exception=True)
def relatorio_ods(request):
    # build the list object with the fields to show in the report/PDF
lista = request.session[nome_relatorio]
if lista:
ods = ODS()
sheet = relatorio_ods_base_header(planilha_relatorio, titulo_relatorio, ods)
# subtitle
sheet.getCell(0, 1).setAlignHorizontal('center').stringValue( 'Nome' ).setFontSize('14pt')
sheet.getCell(1, 1).setAlignHorizontal('center').stringValue( 'Descricao' ).setFontSize('14pt')
sheet.getRow(1).setHeight('20pt')
        # SECTION CUSTOMIZED FOR EACH QUERY
#DADOS<|fim▁hole|> x = 0
for obj in lista:
sheet.getCell(0, x+2).setAlignHorizontal('center').stringValue(obj.nmtipocaixa)
sheet.getCell(1, x+2).setAlignHorizontal('center').stringValue(obj.desctipocaixa)
x += 1
        # SECTION CUSTOMIZED FOR EACH QUERY
relatorio_ods_base(ods, planilha_relatorio)
# generating response
response = HttpResponse(mimetype=ods.mimetype.toString())
response['Content-Disposition'] = 'attachment; filename='+nome_relatorio+'.ods'
ods.save(response)
return response
else:
return HttpResponseRedirect( response_consulta )
@permission_required('sicop.tipo_caixa_consulta', login_url='/excecoes/permissao_negada/', raise_exception=True)
def relatorio_csv(request):
# montar objeto lista com os campos a mostrar no relatorio/pdf
lista = request.session[nome_relatorio]
if lista:
response = HttpResponse(content_type='text/csv')
writer = relatorio_csv_base(response, nome_relatorio)
writer.writerow(['Nome', 'Descricao'])
for obj in lista:
writer.writerow([obj.nmtipocaixa, obj.desctipocaixa])
return response
else:
return HttpResponseRedirect( response_consulta )
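# Server-side validation: queues a warning message and returns False when the
# required name field is empty.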
def validacao(request_form):
warning = True
if request_form.POST['nmtipocaixa'] == '':
messages.add_message(request_form,messages.WARNING,'Informe um nome para o tipo caixa')
warning = False
return warning<|fim▁end|> | |
<|file_name|>pptls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*-coding:utf-8-*-
import time<|fim▁hole|>print """Hola! Bienvenido al juego Piedra Papel Tijera Lagarto Spock!\nEstas son las reglas:\n Las tijeras cortan el papel\n El papel cubre a la piedra\n La piedra aplasta al lagarto\n El lagarto envenena a Spock\n Spock destroza las tijeras\n Las tijeras decapitan al lagarto\n El lagarto se come el papel\n El papel refuta a Spock\n Spock vaporiza la piedra\n Y como es habitual... la piedra aplasta las tijeras.\nRecuerda que si escribes algun valor incorrecto pierdes un punto!\nEl primero en llegar a 10 puntos gana!
"""
sleep(2)
print "\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
sleep(1)
while (pc_puntos < 10 and user_puntos < 10):
tu = raw_input("Que eliges? Piedra, papel, tijera, lagarto o Spock:\n('marcador' para ver los puntos)(Control + C para salir)\n\n(Escribe en minusculas)" + tab)
pc = random.choice(depo)
sleep(0.5)
if tu in depo:
print (("\nElegiste {}\nComputadora eligio {}\nAsi que:").format(tu, pc))
elif tu not in depo and tu != "marcador":
print "\nEscribe un valor correcto!\nPierdes un punto"
if tu == pc:
print '\n Es un Empate...\n'
elif tu == 'piedra' and pc == 'tijera':
user_puntos = user_puntos + 1
print "\n Ganaste! Como es habitual... la piedra aplasta las tijeras.\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'papel' and pc == 'piedra':
user_puntos = user_puntos + 1
print "\n Ganaste! Papel cubre a la piedra\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'tijera' and pc == 'papel':
user_puntos = user_puntos + 1
print "\n Ganaste! Tijeras cortan el papel\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'piedra' and pc == 'lagarto':
user_puntos = user_puntos + 1
print "\n Ganaste! La piedra aplasta al lagarto\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'lagarto' and pc == 'spock':
user_puntos = user_puntos + 1
print "\n Ganaste! Lagarto envenena Spock\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'spock' and pc == 'tijera':
user_puntos = user_puntos + 1
print "\n Ganaste! Spock destroza las tijeras\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'tijera' and pc == 'lagarto':
user_puntos = user_puntos + 1
print "\n Ganaste! Las tijeras decapitan al lagarto\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'lagarto' and pc == 'papel':
user_puntos = user_puntos + 1
print "\n Ganaste! El lagarto se come el papel\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'papel' and pc == 'spock':
user_puntos = user_puntos + 1
print "\n Ganaste! El papel refuta a Spock\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'spock' and pc == 'piedra':
user_puntos = user_puntos + 1
print "\n Ganaste! Spock vaporiza la piedra\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == "marcador" and pc == pc:
print "\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
sleep(0.5)
else:
pc_puntos = pc_puntos + 1
print "\n Lo siento, perdiste: {} le gana a {} \n{}\nPierdes un punto...\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(pc, tu, sus, user_puntos, pc_puntos)
print "Acabo el juego...\nEl ganador es...\n "
sleep(2)
if pc_puntos == 10:
print "La computadora!\nGracias por jugar!"
else:
print "Tu!\nGracias por jugar!\nVuelve Pronto!"<|fim▁end|> | from time import sleep
import random
depo, sus, tab, user_puntos, pc_puntos = ["piedra", "papel", "tijera", "lagarto", "spock"], "-" * 35, " " * 4, 0, 0 |
<|file_name|>infer-from-object-issue-26952.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(dead_code)]
#![allow(unused_variables)]
// Test that when we match a trait reference like `Foo<A>: Foo<_#0t>`,<|fim▁hole|>// that, then you get an unconstrained type-variable in `call`.
//
// Also serves as a regression test for issue #26952, though the test
// was derived from another reported regression with the same cause.
use std::marker::PhantomData;
trait Trait<A> { fn foo(&self); }
struct Type<A> { a: PhantomData<A> }
fn as_trait<A>(t: &Type<A>) -> &dyn Trait<A> { loop { } }
fn want<A,T:Trait<A>+?Sized>(t: &T) { }
fn call<A>(p: Type<A>) {
let q = as_trait(&p);
want(q); // parameter A to `want` *would* be unconstrained
}
fn main() { }<|fim▁end|> | // we unify with `_#0t` with `A`. In this code, if we failed to do |
<|file_name|>datasize.go<|end_file_name|><|fim▁begin|>package datasize
import (
"errors"
"fmt"
"strconv"
"strings"
)
type ByteSize uint64
const (
B ByteSize = 1
KB = B << 10
MB = KB << 10
GB = MB << 10
TB = GB << 10
PB = TB << 10
EB = PB << 10
fnUnmarshalText string = "UnmarshalText"
maxUint64 uint64 = (1 << 64) - 1
cutoff uint64 = maxUint64 / 10
)
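// All units are binary (1<<10) multiples: KB here is 1024 B, MB is 1024 KB,
// and so on up to EB.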
var ErrBits = errors.New("unit with capital unit prefix and lower case unit (b) - bits, not bytes ")
func (b ByteSize) Bytes() uint64 {
return uint64(b)
}
func (b ByteSize) KBytes() float64 {
v := b / KB
r := b % KB
return float64(v) + float64(r)/float64(KB)
}
func (b ByteSize) MBytes() float64 {
v := b / MB
r := b % MB
return float64(v) + float64(r)/float64(MB)
}
func (b ByteSize) GBytes() float64 {
v := b / GB
r := b % GB
return float64(v) + float64(r)/float64(GB)
}
func (b ByteSize) TBytes() float64 {
v := b / TB
r := b % TB
return float64(v) + float64(r)/float64(TB)
}
func (b ByteSize) PBytes() float64 {
v := b / PB
r := b % PB
return float64(v) + float64(r)/float64(PB)
}
func (b ByteSize) EBytes() float64 {
v := b / EB
r := b % EB
return float64(v) + float64(r)/float64(EB)
}
func (b ByteSize) String() string {
switch {
case b == 0:
		return "0B"
case b%EB == 0:
return fmt.Sprintf("%dEB", b/EB)
case b%PB == 0:
return fmt.Sprintf("%dPB", b/PB)
case b%TB == 0:
return fmt.Sprintf("%dTB", b/TB)
case b%GB == 0:
return fmt.Sprintf("%dGB", b/GB)
case b%MB == 0:
return fmt.Sprintf("%dMB", b/MB)
case b%KB == 0:
return fmt.Sprintf("%dKB", b/KB)
default:
return fmt.Sprintf("%dB", b)
}
}
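// Illustrative round-trip through the API above (a sketch, not part of the
// package):
//
//	var b ByteSize
//	_ = b.UnmarshalText([]byte("16 GB")) // b == 16 * GB
//	_ = b.String()                       // "16GB"
//	_ = b.HumanReadable()                // "16.0 GB"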
<|fim▁hole|>func (b ByteSize) HR() string {
return b.HumanReadable()
}
func (b ByteSize) HumanReadable() string {
switch {
case b > EB:
return fmt.Sprintf("%.1f EB", b.EBytes())
case b > PB:
return fmt.Sprintf("%.1f PB", b.PBytes())
case b > TB:
return fmt.Sprintf("%.1f TB", b.TBytes())
case b > GB:
return fmt.Sprintf("%.1f GB", b.GBytes())
case b > MB:
return fmt.Sprintf("%.1f MB", b.MBytes())
case b > KB:
return fmt.Sprintf("%.1f KB", b.KBytes())
default:
return fmt.Sprintf("%d B", b)
}
}
func (b ByteSize) MarshalText() ([]byte, error) {
return []byte(b.String()), nil
}
func (b *ByteSize) UnmarshalText(t []byte) error {
var val uint64
var unit string
// copy for error message
t0 := t
var c byte
var i int
ParseLoop:
for i < len(t) {
c = t[i]
switch {
case '0' <= c && c <= '9':
if val > cutoff {
goto Overflow
}
c = c - '0'
val *= 10
if val > val+uint64(c) {
// val+v overflows
goto Overflow
}
val += uint64(c)
i++
default:
if i == 0 {
goto SyntaxError
}
break ParseLoop
}
}
unit = strings.TrimSpace(string(t[i:]))
switch unit {
case "Kb", "Mb", "Gb", "Tb", "Pb", "Eb":
goto BitsError
}
unit = strings.ToLower(unit)
switch unit {
case "", "b", "byte":
// do nothing - already in bytes
case "k", "kb", "kilo", "kilobyte", "kilobytes":
if val > maxUint64/uint64(KB) {
goto Overflow
}
val *= uint64(KB)
case "m", "mb", "mega", "megabyte", "megabytes":
if val > maxUint64/uint64(MB) {
goto Overflow
}
val *= uint64(MB)
case "g", "gb", "giga", "gigabyte", "gigabytes":
if val > maxUint64/uint64(GB) {
goto Overflow
}
val *= uint64(GB)
case "t", "tb", "tera", "terabyte", "terabytes":
if val > maxUint64/uint64(TB) {
goto Overflow
}
val *= uint64(TB)
case "p", "pb", "peta", "petabyte", "petabytes":
if val > maxUint64/uint64(PB) {
goto Overflow
}
val *= uint64(PB)
case "E", "EB", "e", "eb", "eB":
if val > maxUint64/uint64(EB) {
goto Overflow
}
val *= uint64(EB)
default:
goto SyntaxError
}
*b = ByteSize(val)
return nil
Overflow:
*b = ByteSize(maxUint64)
	return &strconv.NumError{Func: fnUnmarshalText, Num: string(t0), Err: strconv.ErrRange}
SyntaxError:
*b = 0
	return &strconv.NumError{Func: fnUnmarshalText, Num: string(t0), Err: strconv.ErrSyntax}
BitsError:
*b = 0
	return &strconv.NumError{Func: fnUnmarshalText, Num: string(t0), Err: ErrBits}
}<|fim▁end|> | |
<|file_name|>TableDelegate.qunit.js<|end_file_name|><|fim▁begin|>/* global QUnit, sinon */
sap.ui.define([
"sap/ui/mdc/Table",
"sap/ui/mdc/table/Column",
"sap/ui/mdc/library",
"../../QUnitUtils",
"../../util/createAppEnvironment",
"sap/ui/fl/write/api/ControlPersonalizationWriteAPI",
"sap/ui/core/Core",
"sap/ui/core/library",
"sap/ui/model/odata/v4/ODataModel",
"sap/ui/model/Filter",
"sap/ui/base/ManagedObjectObserver"
], function(
Table,
Column,
Library,
MDCQUnitUtils,
createAppEnvironment,
ControlPersonalizationWriteAPI,
Core,
coreLibrary,
ODataModel,
Filter,
	ManagedObjectObserver,
	Text
) {
"use strict";
var TableType = Library.TableType;
sap.ui.define("odata.v4.TestDelegate", [
"sap/ui/mdc/odata/v4/TableDelegate"
], function(TableDelegate) {
var TestDelegate = Object.assign({}, TableDelegate);
TestDelegate.updateBindingInfo = function(oMDCTable, oBindingInfo) {
TableDelegate.updateBindingInfo.apply(this, arguments);
oBindingInfo.path = "/ProductList";
};
return TestDelegate;
});
Core.loadLibrary("sap.ui.fl");
var sTableView1 =
'<mvc:View xmlns:mvc="sap.ui.core.mvc" xmlns:m="sap.m" xmlns="sap.ui.mdc" xmlns:mdcTable="sap.ui.mdc.table">' +
'<Table p13nMode="Group,Aggregate" id="myTable" delegate=\'\{ name : "odata.v4.TestDelegate" \}\'>' +
'<columns><mdcTable:Column id="myTable--column0" header="column 0" dataProperty="Name">' +
'<m:Text text="{Name}" id="myTable--text0" /></mdcTable:Column>' +
'<mdcTable:Column id="myTable--column1" header="column 1" dataProperty="Country">' +
'<m:Text text="{Country}" id="myTable--text1" /></mdcTable:Column>' +
'<mdcTable:Column header="column 2" dataProperty="name_country"> ' +
'<m:Text text="{Name}" id="myTable--text2" /></mdcTable:Column></columns> ' +
'</Table></mvc:View>';
var sTableView2 =
'<mvc:View xmlns:mvc="sap.ui.core.mvc" xmlns:m="sap.m" xmlns="sap.ui.mdc" xmlns:mdcTable="sap.ui.mdc.table">' +
'<Table p13nMode="Group,Aggregate" id="myTable" delegate=\'\{ name : "odata.v4.TestDelegate" \}\'>' +
'<columns>' +
'<mdcTable:Column header="column 2" dataProperty="name_country"> ' +
'<m:Text text="{Name}" id="myTable--text2" /></mdcTable:Column></columns> ' +
'</Table></mvc:View>';
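	// Maps each inner column's "-innerColumn" id to the given aggregation state
	// object, in column order; used to assert Plugin#setAggregationInfo calls.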
function createColumnStateIdMap(oTable, aStates) {
var mState = {};
oTable.getColumns().forEach(function(oColumn, iIndex) {
mState[oColumn.getId() + "-innerColumn"] = aStates[iIndex];
});
return mState;
}
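	// Polls fnCheck every 10ms until it returns a truthy value, rejecting after
	// iTimeout ms (default 100).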
function poll(fnCheck, iTimeout) {
return new Promise(function(resolve, reject) {
if (fnCheck()) {
resolve();
return;
}
var iRejectionTimeout = setTimeout(function() {
clearInterval(iCheckInterval);
reject("Polling timeout");
}, iTimeout == null ? 100 : iTimeout);
var iCheckInterval = setInterval(function() {
if (fnCheck()) {
clearTimeout(iRejectionTimeout);
clearInterval(iCheckInterval);
resolve();
}
}, 10);
});
}
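	// Resolves once the inner table has a binding info for its rows/items
	// aggregation, i.e. once the MDC table has been bound.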
function waitForBindingInfo(oTable, iTimeout) {
return poll(function() {
var oInnerTable = oTable._oTable;
return oInnerTable && oInnerTable.getBindingInfo(oTable._getStringType() === "Table" ? "rows" : "items");
}, iTimeout);
}
QUnit.module("Initialization", {
afterEach: function() {
if (this.oTable) {
this.oFetchProperties.restore();
this.oFetchPropertyExtensions.restore();
this.oFetchPropertiesForBinding.restore();
this.oFetchPropertyExtensionsForBinding.restore();
this.oTable.destroy();
}
},
initTable: function(mSettings) {
if (this.oTable) {
this.oTable.destroy();
}
this.oTable = new Table(Object.assign({
delegate: {
name: "odata.v4.TestDelegate"
}
}, mSettings));
return this.oTable.awaitControlDelegate().then(function(oDelegate) {
this.oFetchProperties = sinon.spy(oDelegate, "fetchProperties");
this.oFetchPropertyExtensions = sinon.spy(oDelegate, "fetchPropertyExtensions");
this.oFetchPropertiesForBinding = sinon.spy(oDelegate, "fetchPropertiesForBinding");
this.oFetchPropertyExtensionsForBinding = sinon.spy(oDelegate, "fetchPropertyExtensionsForBinding");
return this.oTable._fullyInitialized();
}.bind(this)).then(function() {
return this.oTable;
}.bind(this));
},
assertFetchPropertyCalls: function(assert, iProperties, iPropertyExtensions, iPropertiesForBinding, oPropertyExtensionsForBinding) {
assert.equal(this.oFetchProperties.callCount, iProperties, "Delegate.fetchProperties calls");
assert.equal(this.oFetchPropertyExtensions.callCount, iPropertyExtensions, "Delegate.fetchPropertyExtensions calls");
assert.equal(this.oFetchPropertiesForBinding.callCount, iPropertiesForBinding, "Delegate.fetchPropertiesForBinding calls");
assert.equal(this.oFetchPropertyExtensionsForBinding.callCount, oPropertyExtensionsForBinding, "Delegate.fetchPropertyExtensionsForBinding calls");
}
});
QUnit.test("GridTable; Grouping and aggregation disabled", function(assert) {
return this.initTable().then(function(oTable) {
assert.notOk(oTable._oTable.getDependents().find(function(oDependent) {
return oDependent.isA("sap.ui.table.plugins.V4Aggregation");
}), "V4Aggregation plugin is not added to the inner table");
this.assertFetchPropertyCalls(assert, 1, 1, 0, 0);
}.bind(this));
});
QUnit.test("GridTable; Grouping and aggregation enabled", function(assert) {
return this.initTable({
p13nMode: ["Group", "Aggregate"]
}).then(function(oTable) {
var oPlugin = oTable._oTable.getDependents()[0];
assert.ok(oPlugin.isA("sap.ui.table.plugins.V4Aggregation"), "V4Aggregation plugin is added to the inner table");
assert.ok(oPlugin.isActive(), "V4Aggregation plugin is active");
var oGroupHeaderFormatter = sinon.spy(oTable.getControlDelegate(), "formatGroupHeader");
oPlugin.getGroupHeaderFormatter()("MyContext", "MyProperty");
assert.ok(oGroupHeaderFormatter.calledOnceWithExactly(oTable, "MyContext", "MyProperty"), "Call Delegate.formatGroupHeader");
oGroupHeaderFormatter.restore();
this.assertFetchPropertyCalls(assert, 2, 2, 1, 1);
}.bind(this));
});
QUnit.test("ResponsiveTable; Grouping and aggregation disabled", function(assert) {
return this.initTable({
type: TableType.ResponsiveTable
}).then(function(oTable) {
assert.notOk(oTable._oTable.getDependents().find(function(oDependent) {
return oDependent.isA("sap.ui.table.plugins.V4Aggregation");
}), "V4Aggregation plugin is not added to the inner table");
this.assertFetchPropertyCalls(assert, 1, 1, 0, 0);
}.bind(this));
});
QUnit.test("ResponsiveTable; Grouping and aggregation enabled", function(assert) {
return this.initTable({
type: TableType.ResponsiveTable,
p13nMode: ["Group", "Aggregate"]
}).then(function(oTable) {
assert.notOk(oTable._oTable.getDependents().find(function(oDependent) {
return oDependent.isA("sap.ui.table.plugins.V4Aggregation");
}), "V4Aggregation plugin is not added to the inner table");
this.assertFetchPropertyCalls(assert, 1, 1, 0, 0);
}.bind(this));
});
QUnit.module("Change table settings", {
beforeEach: function() {
return this.initTable();
},
afterEach: function() {
this.restoreFetchPropertyMethods();
if (this.oTable) {
this.oTable.destroy();
}
},
initTable: function(mSettings) {
if (this.oTable) {
this.oTable.destroy();
}
this.restoreFetchPropertyMethods();
this.oTable = new Table(Object.assign({
delegate: {
name: "odata.v4.TestDelegate"
},
p13nMode: ["Group", "Aggregate"]
}, mSettings));
return this.oTable.awaitControlDelegate().then(function(oDelegate) {
this.oFetchProperties = sinon.spy(oDelegate, "fetchProperties");
this.oFetchPropertyExtensions = sinon.spy(oDelegate, "fetchPropertyExtensions");
this.oFetchPropertiesForBinding = sinon.spy(oDelegate, "fetchPropertiesForBinding");
this.oFetchPropertyExtensionsForBinding = sinon.spy(oDelegate, "fetchPropertyExtensionsForBinding");
return this.oTable._fullyInitialized();
}.bind(this)).then(function() {
return this.oTable;
}.bind(this));
},
assertFetchPropertyCalls: function(assert, iProperties, iPropertyExtensions, iPropertiesForBinding, oPropertyExtensionsForBinding) {
assert.equal(this.oFetchProperties.callCount, iProperties, "Delegate.fetchProperties calls");
assert.equal(this.oFetchPropertyExtensions.callCount, iPropertyExtensions, "Delegate.fetchPropertyExtensions calls");
assert.equal(this.oFetchPropertiesForBinding.callCount, iPropertiesForBinding, "Delegate.fetchPropertiesForBinding calls");
assert.equal(this.oFetchPropertyExtensionsForBinding.callCount, oPropertyExtensionsForBinding, "Delegate.fetchPropertyExtensionsForBinding calls");
},
resetFetchPropertyCalls: function() {
this.oFetchProperties.reset();
this.oFetchPropertyExtensions.reset();
this.oFetchPropertiesForBinding.reset();
this.oFetchPropertyExtensionsForBinding.reset();
},
restoreFetchPropertyMethods: function() {
if (this.oFetchProperties) {
this.oFetchProperties.restore();
this.oFetchPropertyExtensions.restore();
this.oFetchPropertiesForBinding.restore();
this.oFetchPropertyExtensionsForBinding.restore();
}
}
});
QUnit.test("Type", function(assert) {
var that = this;
var oOldPlugin = that.oTable._oTable.getDependents()[0];
this.resetFetchPropertyCalls();
this.oTable.setType(TableType.ResponsiveTable);
return this.oTable._fullyInitialized().then(function() {
assert.notOk(that.oTable._oTable.getDependents().find(function(oDependent) {
return oDependent.isA("sap.ui.table.plugins.V4Aggregation");
}), "V4Aggregation plugin is not added to the inner table");
that.assertFetchPropertyCalls(assert, 0, 0, 0, 0);
that.resetFetchPropertyCalls();
that.oTable.setType(TableType.Table);
return that.oTable._fullyInitialized();
}).then(function() {
var oPlugin = that.oTable._oTable.getDependents()[0];
assert.ok(oPlugin.isA("sap.ui.table.plugins.V4Aggregation"), "V4Aggregation plugin is added to the inner table");
assert.ok(oPlugin.isActive(), "V4Aggregation plugin is active");
assert.notEqual(oPlugin, oOldPlugin, "V4Aggregation plugin is not the same instance");
assert.ok(oOldPlugin.bIsDestroyed, "Old V4Aggregation plugin is destroyed");
var oGroupHeaderFormatter = sinon.spy(that.oTable.getControlDelegate(), "formatGroupHeader");
oPlugin.getGroupHeaderFormatter()("MyContext", "MyProperty");
assert.ok(oGroupHeaderFormatter.calledOnceWithExactly(that.oTable, "MyContext", "MyProperty"), "Call Delegate.formatGroupHeader");
oGroupHeaderFormatter.restore();
that.assertFetchPropertyCalls(assert, 0, 0, 0, 0);
});
});
QUnit.test("GridTable; p13nMode", function(assert) {
var oPlugin = this.oTable._oTable.getDependents()[0];
this.resetFetchPropertyCalls();
this.oTable.setP13nMode();
assert.ok(oPlugin.isA("sap.ui.table.plugins.V4Aggregation"), "V4Aggregation plugin is added to the inner table");
assert.notOk(oPlugin.isActive(), "V4Aggregation plugin is not active");
assert.equal(oPlugin, this.oTable._oTable.getDependents()[0], "V4Aggregation plugin is the same instance");
this.assertFetchPropertyCalls(assert, 0, 0, 0, 0);
this.oTable.setP13nMode(["Group"]);
assert.ok(oPlugin.isA("sap.ui.table.plugins.V4Aggregation"), "V4Aggregation plugin is added to the inner table");
assert.ok(oPlugin.isActive(), "V4Aggregation plugin is active");
assert.equal(oPlugin, this.oTable._oTable.getDependents()[0], "V4Aggregation plugin is the same instance");
this.assertFetchPropertyCalls(assert, 0, 0, 0, 0);
});
QUnit.test("GridTable; Initial activation of analytical p13n modes", function(assert) {
var that = this;
return this.initTable({
p13nMode: []
}).then(function() {
that.resetFetchPropertyCalls();
that.oTable.setP13nMode(["Group"]);
assert.notOk(that.oTable._oTable.getDependents().find(function(oDependent) {
return oDependent.isA("sap.ui.table.plugins.V4Aggregation");
}), "V4Aggregation plugin is not yet added to the inner table");
return new Promise(function(resolve) {
new ManagedObjectObserver(function(oChange) {
oChange.child.setPropertyInfos = resolve;
}).observe(that.oTable._oTable, {
aggregations: ["dependents"]
});
});
}).then(function() {
var oPlugin = that.oTable._oTable.getDependents()[0];
assert.ok(oPlugin.isA("sap.ui.table.plugins.V4Aggregation"), "V4Aggregation plugin is added to the inner table");
assert.ok(oPlugin.isActive(), "V4Aggregation plugin is active");
var oGroupHeaderFormatter = sinon.spy(that.oTable.getControlDelegate(), "formatGroupHeader");
oPlugin.getGroupHeaderFormatter()("MyContext", "MyProperty");
assert.ok(oGroupHeaderFormatter.calledOnceWithExactly(that.oTable, "MyContext", "MyProperty"), "Call Delegate.formatGroupHeader");
oGroupHeaderFormatter.restore();
that.assertFetchPropertyCalls(assert, 1, 1, 1, 1);
});
});
QUnit.module("Basic functionality with JsControlTreeModifier", {
before: function() {
MDCQUnitUtils.stubPropertyInfos(Table.prototype, [{
name: "Name",
label: "Name",
path: "Name",
groupable: true
}, {
name: "Country",
label: "Country",
path: "Country",
groupable: true
}, {
name: "name_country",
label: "Complex Title & Description",
propertyInfos: ["Name", "Country"]
}, {
name: "Value",
label: "Value",
path: "Value",
sortable: false,
filterable: false
}]);
MDCQUnitUtils.stubPropertyExtension(Table.prototype, {
Name: {
defaultAggregate: {}
},
Country: {
defaultAggregate: {}
}
});
},
beforeEach: function() {
return this.createTestObjects().then(function() {
return this.oTable.getEngine().getModificationHandler().waitForChanges({
element: this.oTable
});
}.bind(this));
},
afterEach: function() {
this.destroyTestObjects();
},
after: function() {
MDCQUnitUtils.restorePropertyInfos(Table.prototype);
MDCQUnitUtils.restorePropertyExtension(Table.prototype);
},
createTestObjects: function() {
return createAppEnvironment(sTableView1, "Table").then(function(mCreatedApp){
this.oView = mCreatedApp.view;
this.oUiComponentContainer = mCreatedApp.container;
this.oUiComponentContainer.placeAt("qunit-fixture");
Core.applyChanges();
this.oTable = this.oView.byId('myTable');
ControlPersonalizationWriteAPI.restore({
selector: this.oTable
});
}.bind(this));
},
destroyTestObjects: function() {
this.oUiComponentContainer.destroy();
}
});
QUnit.test("Allowed analytics on column header and tableDelegate API's", function(assert) {
var fColumnPressSpy = sinon.spy(this.oTable, "_onColumnPress");
var oResourceBundle = Core.getLibraryResourceBundle("sap.ui.mdc");
var oTable = this.oTable;
var oPlugin;
var fSetAggregationSpy;
this.oTable.addColumn(new Column({
header: "Value",
dataProperty: "Value",
template: new Text({text: "Value"})
}));
return oTable._fullyInitialized().then(function() {
oTable._oTable.fireEvent("columnSelect", {
column: oTable._oTable.getColumns()[3]
});
return oTable._fullyInitialized().then(function() {
assert.notOk(oTable._oPopover, "ColumnHeaderPopover not created");
fColumnPressSpy.resetHistory();
});
}).then(function() {
oPlugin = oTable._oTable.getDependents()[0];
fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
oTable.setAggregateConditions({
Country: {}
});
oTable.rebind();
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country", "Value"],
groupLevels: [],
grandTotal: ["Country"],
subtotals: ["Country"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
oTable._oTable.fireEvent("columnSelect", {
column: oTable._oTable.getColumns()[0]
});
assert.ok(fColumnPressSpy.calledOnce, "First Column pressed");
return oTable._fullyInitialized();
}).then(function() {
assert.strictEqual(oTable._oPopover.getItems()[0].getLabel(), oResourceBundle.getText("table.SETTINGS_GROUP"),
"The first column has group menu item");
assert.strictEqual(oTable._oPopover.getItems()[1].getLabel(), oResourceBundle.getText("table.SETTINGS_TOTALS"),
"The first column has aggregate menu item");
return new Promise(function(resolve) {
oTable._oPopover.getAggregation("_popover").attachAfterClose(function() {
oTable._oTable.fireEvent("columnSelect", {
column: oTable._oTable.getColumns()[2]
});
resolve();
});
oTable._oPopover.getAggregation("_popover").close();
}).then(function() {
return oTable._fullyInitialized();
});
}).then(function() {
assert.strictEqual(fColumnPressSpy.callCount, 2, "Third Column pressed");
assert.strictEqual(oTable._oPopover.getItems()[0].getItems().length,2, "The last column has complex property with list of two items");
fSetAggregationSpy.reset();
oTable.setGroupConditions({
groupLevels: [
{
"name": "Name"
}
]
});
oTable.rebind();
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country", "Value"],
groupLevels: ["Name"],
grandTotal: ["Country"],
subtotals: ["Country"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.reset();
oTable.insertColumn(new Column({
id: "cl"
}), 2);
oTable.rebind();
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country", "Value"],
groupLevels: ["Name"],
grandTotal: ["Country"],
subtotals: ["Country"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.restore();
});
});
QUnit.test("Grouping enabled on column press", function(assert) {
var oTable = this.oTable;
var done = assert.async();
var fColumnPressSpy = sinon.spy(oTable, "_onColumnPress");
oTable._fullyInitialized().then(function() {
var oInnerColumn = oTable._oTable.getColumns()[0];
oTable._oTable.fireEvent("columnSelect", {
column: oInnerColumn
});
assert.ok(fColumnPressSpy.calledOnce, "First column pressed");
fColumnPressSpy.restore();
oTable._fullyInitialized().then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
var oDelegate = oTable.getControlDelegate();
var fnRebind = oDelegate.rebind;
oDelegate.rebind = function () {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: ["Name"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.restore();
oDelegate.rebind = fnRebind;
done();
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[0].firePress();
});
});
});
QUnit.test("Aggregation enabled on column press", function(assert) {
var oTable = this.oTable;
var fColumnPressSpy = sinon.spy(oTable, "_onColumnPress");
var done = assert.async();
oTable._fullyInitialized().then(function() {
var oInnerSecondColumn = oTable._oTable.getColumns()[1];
oTable._oTable.fireEvent("columnSelect", {
column: oInnerSecondColumn
});
assert.ok(fColumnPressSpy.calledOnce, "First Column pressed");
fColumnPressSpy.restore();
oTable._fullyInitialized().then(function() {
var oDelegate = oTable.getControlDelegate();
var oPlugin = oTable._oTable.getDependents()[0];
var fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
var fnRebind = oDelegate.rebind;
oDelegate.rebind = function () {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: [],
grandTotal: ["Country"],
subtotals: ["Country"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.restore();
oDelegate.rebind = fnRebind;
done();
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[1].firePress();
});
});
});
QUnit.test("Grouping and Aggregation on two columns", function(assert) {
var oTable = this.oTable;
var fColumnPressSpy = sinon.spy(oTable, "_onColumnPress");
var done = assert.async();
oTable._fullyInitialized().then(function() {
var oInnerColumn = oTable._oTable.getColumns()[0];
oTable._oTable.fireEvent("columnSelect", {
column: oInnerColumn
});
assert.ok(fColumnPressSpy.calledOnce, "First Column pressed");
oTable._fullyInitialized().then(function() {
var oDelegate = oTable.getControlDelegate();
var oPlugin = oTable._oTable.getDependents()[0];
var fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
var fnRebind = oDelegate.rebind;
var oInnerSecondColumn = oTable._oTable.getColumns()[1];
oDelegate.rebind = function () {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: ["Name"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fColumnPressSpy.restore();
fSetAggregationSpy.restore();
oDelegate.rebind = fnRebind;
new Promise(function(resolve) {
oTable._oPopover.getAggregation("_popover").attachAfterClose(function() {
oTable._oTable.fireEvent("columnSelect", {
column: oInnerSecondColumn
});
resolve();
});
oTable._oPopover.getAggregation("_popover").close();
}).then(function() {
return oTable._fullyInitialized();
}).then(function() {
var oDelegate = oTable.getControlDelegate();
var oPlugin = oTable._oTable.getDependents()[0];
var fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
var fnRebind = oDelegate.rebind;
oDelegate.rebind = function () {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: ["Name"],
grandTotal: ["Country"],
subtotals: ["Country"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fColumnPressSpy.restore();
fSetAggregationSpy.restore();
oDelegate.rebind = fnRebind;
done();
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[1].firePress();
});
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[0].firePress();
});
});
});
QUnit.test("Grouping and forced aggregation", function(assert) {
var oTable = this.oTable;
var oDelegate;
var oPlugin;
var fSetAggregationSpy;
var fnRebind;
function openColumnMenu(oColumn) {
oTable._oTable.fireEvent("columnSelect", {
column: oColumn
});
// The popover is created async.
return oTable._fullyInitialized();
}
return oTable._fullyInitialized().then(function() {
oDelegate = oTable.getControlDelegate();
oPlugin = oTable._oTable.getDependents()[0];
fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
fnRebind = oDelegate.rebind;
return openColumnMenu(oTable._oTable.getColumns()[0]);
}).then(function() {
return new Promise(function(resolve) {
oDelegate.rebind = function() {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: ["Name"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.reset();
oDelegate.rebind = fnRebind;
resolve();
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[0].firePress();
});
}).then(function() {
return openColumnMenu(oTable._oTable.getColumns()[0]);
}).then(function() {
return new Promise(function(resolve) {
oDelegate.rebind = function() {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country"],
groupLevels: [],
grandTotal: ["Name"],
subtotals: ["Name"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.reset();
oDelegate.rebind = fnRebind;
resolve();
};
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[1].firePress();
Core.byId(oTable.getId() + "-messageBox").getButtons()[0].firePress();
});
});
});
QUnit.test("Sorting restriction", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var sMessage = Core.getLibraryResourceBundle("sap.ui.mdc").getText("table.PERSONALIZATION_DIALOG_SORT_RESTRICTION");
var oState;
var oValidationState;
oState = {items: [{name: "Name"}, {name: "name_country"}]};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.None, "No sorted properties: Validation result");
assert.strictEqual(oValidationState.message, undefined, "No sorted properties: Message text");
oState = {sorters: [{name: "Name"}, {name: "Country"}]};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.Information,
"Sorted properties and no visible columns: Validation result");
assert.strictEqual(oValidationState.message, sMessage,
"Sorted properties and no visible columns: Message text");
oState = {
items: [{name: "Name"}, {name: "Country"}, {name: "name_country"}],
sorters: [{name: "Name"}, {name: "Country"}]
};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.None, "All sorted properties visible: Validation result");
assert.strictEqual(oValidationState.message, undefined, "All sorted properties visible: Message text");
oState = {
items: [{name: "Name"}],
sorters: [{name: "Country"}]
};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.Information, "Sorted property invisible: Validation result");
assert.strictEqual(oValidationState.message, sMessage, "Sorted property invisible: Message text");
oState = {
items: [{name: "Name"}, {name: "name_country"}],
sorters: [{name: "Country"}]
};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.None,
"Sorted property is part of a visible complex property: Validation result");
assert.strictEqual(oValidationState.message, undefined,
"Sorted property is part of a visible complex property: Message text");
oTable.setP13nMode();
oState = {
items: [{name: "Name"}],
sorters: [{name: "Country"}]
};
oValidationState = oTable.validateState(oState, "Sort");
assert.strictEqual(oValidationState.validation, coreLibrary.MessageType.None,
"Sorted property invisible and analytical features not enabled: Validation result");
assert.strictEqual(oValidationState.message, undefined,
"Sorted property invisible and analytical features not enabled: Message text");
});
});
QUnit.test("Group restriction", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oDelegate = oTable.getControlDelegate();
var oResourceBundle = Core.getLibraryResourceBundle("sap.ui.mdc");
var oState, oValidationState;
oState = {
items: [{name: "Name"}, {name: "Country"}, {name: "name_country"}]
};
oValidationState = oTable.validateState(oState, "Group");
assert.equal(oValidationState.validation, coreLibrary.MessageType.None, "No message");
assert.equal(oValidationState.message, undefined, "Message text is not defined");
oState = {
items: [{name: "Name"}],
aggregations: { Name : {}}
};
oValidationState = oTable.validateState(oState, "Group");
assert.equal(oValidationState.validation, coreLibrary.MessageType.Information,
"Information message, Grouping and aggreagtion can't be used simulatneously");
assert.equal(oValidationState.message, oResourceBundle.getText("table.PERSONALIZATION_DIALOG_GROUP_RESTRICTION", "Name"),
"Message text is correct");
oState = {
items: [{name: "Name"}, {name: "name_country"}],
sorters: [{name: "Country"}]
};
oValidationState = oDelegate.validateState(oTable, oState);
assert.equal(oValidationState.validation, coreLibrary.MessageType.None,
"No message, the sorted property is not visible but part of a visible complex property");
assert.equal(oValidationState.message, undefined, "Message text is undefined");
oState = {};
oValidationState = oDelegate.validateState(oTable, oState);
assert.equal(oValidationState.validation, coreLibrary.MessageType.None,
"No message because oState.items is undefined");
assert.equal(oValidationState.message, undefined, "Message text is undefined");
});
});
QUnit.test("Column restriction", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oResourceBundle = Core.getLibraryResourceBundle("sap.ui.mdc");
var oState, oValidationState;
oState = {
items: [{name: "Name"}, {name: "Country"}, {name: "name_country"}]
};
oValidationState = oTable.validateState(oState, "Column");
assert.equal(oValidationState.validation, coreLibrary.MessageType.None, "No message");
assert.equal(oValidationState.message, undefined, "Message text is not defined");
oState = {
items: [{name: "Country"}],
aggregations: { Name : {}}
};
oValidationState = oTable.validateState(oState, "Column");
assert.equal(oValidationState.validation, coreLibrary.MessageType.Information,
"Information message, Cannot remove column when the total is showed for the column");
assert.equal(oValidationState.message, oResourceBundle.getText("table.PERSONALIZATION_DIALOG_TOTAL_RESTRICTION"),
"Message text is correct");
oState = {
items: [{name: "Name"}],
sorters: [{name: "Country"}]
};
oValidationState = oTable.validateState(oState, "Column");
assert.equal(oValidationState.validation, coreLibrary.MessageType.Information,
"Information message, Cannot remove column when the sorters is applied for the column");
assert.equal(oValidationState.message, oResourceBundle.getText("table.PERSONALIZATION_DIALOG_SORT_RESTRICTION", "Name"),
"Message text is correct");
oState = {
items: [{name: "Country"}],
sorters: [{name: "Name"}],
aggregations: { Name : {}}
};
oValidationState = oTable.validateState(oState, "Column");
assert.equal(oValidationState.validation, coreLibrary.MessageType.Information,
"Information message, Cannot remove column when the sorters and totals is shown for the column");
assert.equal(oValidationState.message, oResourceBundle.getText("table.PERSONALIZATION_DIALOG_TOTAL_RESTRICTION") + "\n" +
oResourceBundle.getText("table.PERSONALIZATION_DIALOG_SORT_RESTRICTION", "Name"),
"Message text is correct");
oState = {};
oValidationState = oTable.validateState(oState, "Column");
assert.equal(oValidationState.validation, coreLibrary.MessageType.None,
"No message because oState.items is undefined");
assert.equal(oValidationState.message, undefined, "Message text is undefined");
});
});
QUnit.module("Tests with specific propertyInfos and extensions for binding", {
before: function() {
MDCQUnitUtils.stubPropertyInfos(Table.prototype, [{
name: "Name",
label: "Name",
path: "Name",
groupable: true
}, {
name: "Country",
label: "Country",
path: "Country",
groupable: true
}, {
name: "Value",
label: "Value",
path: "Value"
}, {
name: "name_country",
label: "Complex Title & Description",
propertyInfos: ["Name"]
}]);
MDCQUnitUtils.stubPropertyInfosForBinding(Table.prototype, [{
name: "Name",
label: "Name",
path: "Name",
groupable: true
}, {
name: "Country",
label: "Country",
path: "Country",
groupable: true
}, {
name: "Value",
label: "Value",
path: "Value"
}, {
name: "name_country",
label: "Complex Title & Description",
propertyInfos: ["Name", "Country", "Value"]
}]);
MDCQUnitUtils.stubPropertyExtensionsForBinding(Table.prototype, {
Value: {
defaultAggregate: {}
}
});
},
beforeEach: function() {
return this.createTestObjects().then(function() {
return this.oTable.getEngine().getModificationHandler().waitForChanges({
element: this.oTable
});
}.bind(this));
},
afterEach: function() {
this.destroyTestObjects();
},
after: function() {
MDCQUnitUtils.restorePropertyInfos(Table.prototype);
MDCQUnitUtils.restorePropertyInfosForBinding(Table.prototype);
MDCQUnitUtils.restorePropertyExtensionsForBinding(Table.prototype);
},
createTestObjects: function() {
return createAppEnvironment(sTableView2, "Table").then(function(mCreatedApp){
this.oView = mCreatedApp.view;
this.oUiComponentContainer = mCreatedApp.container;
this.oUiComponentContainer.placeAt("qunit-fixture");
Core.applyChanges();
this.oTable = this.oView.byId('myTable');
ControlPersonalizationWriteAPI.restore({
selector: this.oTable
});
}.bind(this));
},
destroyTestObjects: function() {
this.oUiComponentContainer.destroy();
}
});
QUnit.test("Check column header for analytics buttons", function(assert) {
var fColumnPressSpy = sinon.spy(this.oTable, "_onColumnPress");
var oResourceBundle = Core.getLibraryResourceBundle("sap.ui.mdc");
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oFirstInnerColumn = oTable._oTable.getColumns()[0];
oTable._oTable.fireEvent("columnSelect", {
column: oFirstInnerColumn
});
assert.ok(fColumnPressSpy.calledOnce, "First Column pressed");
return oTable._fullyInitialized();
}).then(function() {
assert.strictEqual(oTable._oPopover.getItems()[0].getLabel(), oResourceBundle.getText("table.SETTINGS_GROUP"),
"The first column has group menu item");
assert.equal(oTable._oPopover.getItems().length, 1, "The first column doesn't have an aggregate menu item");
});
});
QUnit.test("Apply group on column header", function(assert) {
var oTable = this.oTable;
var done = assert.async();
var fColumnPressSpy = sinon.spy(oTable, "_onColumnPress");
oTable._fullyInitialized().then(function() {
var oInnerColumn = oTable._oTable.getColumns()[0];
oTable._oTable.fireEvent("columnSelect", {
column: oInnerColumn
});
assert.ok(fColumnPressSpy.calledOnce, "First column pressed");
fColumnPressSpy.restore();
oTable._fullyInitialized().then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var fSetAggregationSpy = sinon.spy(oPlugin, "setAggregationInfo");
var oDelegate = oTable.getControlDelegate();
var fnRebind = oDelegate.rebind;
oDelegate.rebind = function () {
fnRebind.apply(this, arguments);
assert.ok(fSetAggregationSpy.calledOnceWithExactly({
visible: ["Name", "Country","Value"],
groupLevels: ["Name"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
fSetAggregationSpy.restore();
oDelegate.rebind = fnRebind;
done();
};
var fTableGroupSpy = sinon.spy(oTable, "_onCustomGroup");
oTable._oPopover.getAggregation("_popover").getContent()[0].getContent()[0].firePress();
assert.ok(fTableGroupSpy.calledOnce, "Column group triggered");
if (!fTableGroupSpy.calledOnce) {
done(); // rebind won't be called in this case, so we need to end the test here
}
});
});
});
QUnit.module("Column state to plugin", {
before: function() {
MDCQUnitUtils.stubPropertyInfos(Table.prototype, [
{name: "CountryKey", path: "Country", label: "CountryKey", groupable: true, text: "CountryText"},
{name: "CountryText", path: "CountryText", label: "CountryText", groupable: true},
{name: "CountryKeyAndText", label: "CountryKey+CountryText", propertyInfos: ["CountryKey", "CountryText"]},
{name: "SalesAmount", path: "SalesAmount", label: "SalesAmount", unit: "Currency"},
{name: "Currency", path: "Currency", label: "Currency", groupable: true},
{name: "SalesAmountAndCurrency", label: "SalesAmount+Currency", propertyInfos: ["SalesAmount", "Currency"]},
{name: "SalesAmountAndRegion", label: "SalesAmount+Region", propertyInfos: ["SalesAmount", "Region"]},
{name: "CurrencyAndRegion", label: "Currency+Region", propertyInfos: ["Currency", "Region"]},
{name: "Region", path: "Region", label: "Region", groupable: true},
{name: "RegionText", path: "RegionText", label: "RegionText", groupable: true},
{name: "SalesAmountInLocalCurrency", path: "SalesAmountInLocalCurrency", label: "SalesAmountInLocalCurrency"},
{
name: "SalesAmountAndSalesAmountInLocalCurrency",
label: "SalesAmountAndSalesAmountInLocalCurrency",
propertyInfos: ["SalesAmount", "SalesAmountInLocalCurrency"]
},
{name: "RegionAndRegionText", label: "Region+RegionText", propertyInfos: ["Region", "RegionText"]}
]);
MDCQUnitUtils.stubPropertyExtension(Table.prototype, {
SalesAmount: {defaultAggregate: {}},
Currency: {defaultAggregate: {}}
});
},
beforeEach: function() {
return this.createTestObjects().then(function() {
this.oTable.destroyColumns();
this.oTable.addColumn(new Column({<|fim▁hole|> template: new Text({text: "CountryKey"})
}));
this.oTable.addColumn(new Column({
header: "CountryText",
dataProperty: "CountryText",
template: new Text({text: "CountryText"})
}));
this.oTable.addColumn(new Column({
header: "CountryKey+CountryText",
dataProperty: "CountryKeyAndText",
template: new Text({text: "CountryKey CountryText"})
}));
this.oTable.addColumn(new Column({
header: "SalesAmount",
dataProperty: "SalesAmount",
template: new Text({text: "SalesAmount"})
}));
this.oTable.addColumn(new Column({
header: "Currency",
dataProperty: "Currency",
template: new Text({text: "Currency"})
}));
this.oTable.addColumn(new Column({
header: "SalesAmount+Currency",
dataProperty: "SalesAmountAndCurrency",
template: new Text({text: "SalesAmount Currency"})
}));
this.oTable.addColumn(new Column({
header: "SalesAmount+Region",
dataProperty: "SalesAmountAndRegion",
template: new Text({text: "SalesAmount Region"})
}));
this.oTable.addColumn(new Column({
header: "Currency+Region",
dataProperty: "CurrencyAndRegion",
template: new Text({text: "Currency Region"})
}));
this.oTable.addColumn(new Column({
header: "SalesAmount+SalesAmountInLocalCurrency",
dataProperty: "SalesAmountAndSalesAmountInLocalCurrency",
template: new Text({text: "SalesAmount SalesAmountInLocalCurrency"})
}));
this.oTable.addColumn(new Column({
header: "Region+RegionText",
dataProperty: "RegionAndRegionText",
template: new Text({text: "Region RegionText"})
}));
return this.oTable.getEngine().getModificationHandler().waitForChanges({
element: this.oTable
});
}.bind(this));
},
afterEach: function() {
this.destroyTestObjects();
},
after: function() {
MDCQUnitUtils.restorePropertyInfos(Table.prototype);
MDCQUnitUtils.restorePropertyExtension(Table.prototype);
},
createTestObjects: function() {
return createAppEnvironment(sTableView2, "Table").then(function(mCreatedApp){
this.oView = mCreatedApp.view;
this.oUiComponentContainer = mCreatedApp.container;
this.oUiComponentContainer.placeAt("qunit-fixture");
Core.applyChanges();
this.oTable = this.oView.byId('myTable');
ControlPersonalizationWriteAPI.restore({
selector: this.oTable
});
}.bind(this));
},
destroyTestObjects: function() {
this.oUiComponentContainer.destroy();
}
});
QUnit.test("Aggregate", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var oSetAggregation = sinon.spy(oPlugin, "setAggregationInfo");
oTable.setAggregateConditions({
SalesAmount: {}
});
oTable.rebind();
assert.ok(oSetAggregation.calledOnceWithExactly({
visible: ["CountryKey", "CountryText", "SalesAmount", "Currency", "Region", "SalesAmountInLocalCurrency", "RegionText"],
groupLevels: [],
grandTotal: ["SalesAmount"],
subtotals: ["SalesAmount"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
});
});
QUnit.test("Group", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var oSetAggregation = sinon.spy(oPlugin, "setAggregationInfo");
oTable.setGroupConditions({
groupLevels: [{
name: "CountryKey"
}]
});
oTable.rebind();
assert.ok(oSetAggregation.calledOnceWithExactly({
visible: ["CountryKey", "CountryText", "SalesAmount", "Currency", "Region", "SalesAmountInLocalCurrency", "RegionText"],
groupLevels: ["CountryKey"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
});
});
QUnit.test("Group and aggregate", function(assert) {
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var oSetAggregation = sinon.spy(oPlugin, "setAggregationInfo");
oTable.setGroupConditions({
groupLevels: [{
name: "CountryKey"
}]
});
oTable.setAggregateConditions({
SalesAmount: {}
});
oTable.rebind();
assert.ok(oSetAggregation.calledOnceWithExactly({
visible: ["CountryKey", "CountryText", "SalesAmount", "Currency", "Region", "SalesAmountInLocalCurrency", "RegionText"],
groupLevels: ["CountryKey"],
grandTotal: ["SalesAmount"],
subtotals: ["SalesAmount"],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: true, grandTotal: true},
{subtotals: false, grandTotal: false}
]),
search: undefined
}), "Plugin#setAggregationInfo call");
});
});
QUnit.test("Transformation Search", function(assert) {
var done = assert.async();
var oTable = this.oTable;
return oTable._fullyInitialized().then(function() {
var fnOriginalUpdateBindingInfo = oTable.getControlDelegate().updateBindingInfo;
oTable.getControlDelegate().updateBindingInfo = function(oTable, oBindingInfo) {
fnOriginalUpdateBindingInfo(oTable, oBindingInfo);
oBindingInfo.parameters["$search"] = "Name";
};
return waitForBindingInfo(oTable);
}).then(function() {
var oPlugin = oTable._oTable.getDependents()[0];
var oBindRowsSpy = sinon.spy(oTable._oTable, "bindRows");
var oSetAggregation = sinon.spy(oPlugin, "setAggregationInfo");
oTable.setGroupConditions({ groupLevels: [{ name: "CountryKey" }] }).rebind();
var oBinding = oTable._oTable.getBindingInfo("rows");
assert.notOk(oBinding.parameters["$search"], "$search has been removed from oBinding");
assert.ok(oBindRowsSpy.calledWithExactly(oBinding), "BindRows of inner table called with oBindingInfo without $search parameter");
assert.ok(oSetAggregation.calledOnceWithExactly({
visible: ["CountryKey", "CountryText", "SalesAmount", "Currency", "Region", "SalesAmountInLocalCurrency", "RegionText"],
groupLevels: ["CountryKey"],
grandTotal: [],
subtotals: [],
columnState: createColumnStateIdMap(oTable, [
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false},
{subtotals: false, grandTotal: false}
]),
search: "Name"
}), "Plugin#setAggregationInfo call");
done();
});
});
QUnit.module("v4.TableDelegate#updateBinding", {
before: function() {
MDCQUnitUtils.stubPropertyInfos(Table.prototype, [{
name: "Name",
path: "Name",
label: "Name",
sortable: true,
groupable: true,
filterable: true
}]);
},
beforeEach: function() {
this.oTable = new Table({
autoBindOnInit: false,
p13nMode: ["Column", "Sort", "Filter", "Group", "Aggregate"],
delegate: {
name: "odata.v4.TestDelegate",
payload: {
collectionPath: "/ProductList"
}
}
}).setModel(new ODataModel({
synchronizationMode: "None",
serviceUrl: "serviceUrl/",
operationMode: "Server"
}));
return this.oTable._fullyInitialized().then(function() {
this.oTable.bindRows();
this.oInnerTable = this.oTable._oTable;
this.oRowBinding = this.oTable.getRowBinding();
this.oSetAggregationSpy = sinon.spy(this.oInnerTable.getDependents()[0], "setAggregationInfo");
this.oRebindSpy = sinon.spy(this.oTable.getControlDelegate(), "rebind");
this.oChangeParametersSpy = sinon.spy(this.oRowBinding, "changeParameters");
this.oFilterSpy = sinon.spy(this.oRowBinding, "filter");
this.oSortSpy = sinon.spy(this.oRowBinding, "sort");
}.bind(this));
},
afterEach: function() {
this.oTable.destroy();
this.oSetAggregationSpy.restore();
this.oRebindSpy.restore();
this.oChangeParametersSpy.restore();
this.oFilterSpy.restore();
this.oSortSpy.restore();
},
after: function() {
MDCQUnitUtils.restorePropertyInfos(Table.prototype);
}
});
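// Editor's summary of the tests below: changes to sort, filter, group and
// aggregate state are expected to be applied to the existing row binding
// (oRebindSpy.callCount stays 0); a full rebind is only forced when the
// binding path changes or a parameter such as $$canonicalPath requires it.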
QUnit.test("Sort", function(assert) {
this.oTable.setSortConditions({ sorters: [{ name: "Name", descending: false }] })._rebind();
assert.ok(this.oSortSpy.firstCall.calledWithExactly(this.oTable._getSorters()));
this.oTable.setSortConditions({ sorters: [{ name: "Name", descending: true }] })._rebind();
assert.ok(this.oSortSpy.secondCall.calledWithExactly(this.oTable._getSorters()));
this.oTable.setSortConditions()._rebind();
assert.equal(this.oSortSpy.callCount, 3);
assert.equal(this.oRebindSpy.callCount, 0);
});
QUnit.test("Filter", function(assert) {
var aFilters = [new Filter("Name", "EQ", "a")];
var oUpdateBindingInfoStub = sinon.stub(this.oTable.getControlDelegate(), "updateBindingInfo");
oUpdateBindingInfoStub.callsFake(function (oMDCTable, oBindingInfo) {
oUpdateBindingInfoStub.wrappedMethod.apply(this, arguments);
var oMetadataInfo = oMDCTable.getPayload();
oBindingInfo.path = oMetadataInfo.collectionPath;
oBindingInfo.filters = aFilters;
});
this.oTable._rebind();
assert.ok(this.oFilterSpy.firstCall.calledWithExactly(aFilters, "Application"));
oUpdateBindingInfoStub.restore();
this.oTable._rebind();
assert.ok(this.oFilterSpy.secondCall.calledWithExactly([], "Application"));
assert.equal(this.oRebindSpy.callCount, 0);
});
QUnit.test("Group", function(assert) {
this.oTable.setGroupConditions({ groupLevels: [{ name: "Name" }] })._rebind();
assert.ok(this.oSetAggregationSpy.firstCall.calledWithMatch({ groupLevels: [ "Name" ] }));
this.oTable.setGroupConditions()._rebind();
assert.ok(this.oSetAggregationSpy.secondCall.calledWithMatch( { groupLevels: [] }));
assert.equal(this.oRebindSpy.callCount, 0);
});
QUnit.test("Aggregates", function(assert) {
this.oTable.setAggregateConditions({ Name: {} })._rebind();
assert.ok(this.oSetAggregationSpy.firstCall.calledWithMatch({
grandTotal: [ "Name" ],
subtotals: [ "Name" ]
}));
this.oTable.setAggregateConditions()._rebind();
assert.ok(this.oSetAggregationSpy.secondCall.calledWithMatch( { grandTotal: [], subtotals: [] }));
assert.equal(this.oRebindSpy.callCount, 0);
});
QUnit.test("Parameters", function(assert) {
var oUpdateBindingInfoStub = sinon.stub(this.oTable.getControlDelegate(), "updateBindingInfo");
oUpdateBindingInfoStub.onCall(0).callsFake(function (oMDCTable, oBindingInfo) {
oUpdateBindingInfoStub.wrappedMethod.apply(this, arguments);
var oMetadataInfo = oMDCTable.getPayload();
oBindingInfo.path = oMetadataInfo.collectionPath;
oBindingInfo.parameters.$search = "x";
});
oUpdateBindingInfoStub.onCall(1).callsFake(function (oMDCTable, oBindingInfo) {
oUpdateBindingInfoStub.wrappedMethod.apply(this, arguments);
var oMetadataInfo = oMDCTable.getPayload();
oBindingInfo.path = oMetadataInfo.collectionPath;
oBindingInfo.parameters.$search = undefined;
});
oUpdateBindingInfoStub.onCall(2).callsFake(function (oMDCTable, oBindingInfo) {
oUpdateBindingInfoStub.wrappedMethod.apply(this, arguments);
var oMetadataInfo = oMDCTable.getPayload();
oBindingInfo.path = oMetadataInfo.collectionPath;
oBindingInfo.parameters.$$canonicalPath = true;
});
this.oTable._rebind();
assert.equal(this.oChangeParametersSpy.callCount, 1);
this.oTable._rebind();
assert.equal(this.oChangeParametersSpy.callCount, 2);
assert.equal(this.oRebindSpy.callCount, 0);
this.oTable._rebind();
assert.equal(this.oRebindSpy.callCount, 1);
oUpdateBindingInfoStub.restore();
});
QUnit.test("Add Column", function(assert) {
this.oTable.insertColumn(new Column());
this.oTable._rebind();
assert.equal(this.oChangeParametersSpy.callCount, 0);
assert.equal(this.oFilterSpy.callCount, 0);
assert.equal(this.oSortSpy.callCount, 0);
assert.equal(this.oSetAggregationSpy.callCount, 1);
assert.equal(this.oRebindSpy.callCount, 1);
});
QUnit.test("Change path", function(assert) {
var oUpdateBindingInfoStub = sinon.stub(this.oTable.getControlDelegate(), "updateBindingInfo");
oUpdateBindingInfoStub.onCall(1).callsFake(function (oMDCTable, oBindingInfo) {
oUpdateBindingInfoStub.wrappedMethod.apply(this, arguments);
oBindingInfo.path = oBindingInfo.path + "something_else";
});
this.oTable._rebind();
this.oRebindSpy.resetHistory();
this.oTable._rebind();
assert.equal(this.oRebindSpy.callCount, 1, "Changing the path forces a rebind");
oUpdateBindingInfoStub.restore();
});
});<|fim▁end|> | header: "CountryKey",
dataProperty: "CountryKey", |
<|file_name|>Action.java<|end_file_name|><|fim▁begin|>package no.dusken.momus.model.websocket;<|fim▁hole|> CREATE, UPDATE, DELETE
}<|fim▁end|> |
public enum Action { |
<|file_name|>scheme_Hirajoshi_D.js<|end_file_name|><|fim▁begin|>/* eslint quote-props: ["error", "consistent"] */
// Japanese Hirajoshi scale
// 1-4-2-1-4
// D Eb G A Bb D
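// Derivation (editor's note): starting on D and stacking the semitone steps
// 1-4-2-1-4 gives D -> Eb -> G -> A -> Bb -> D, which matches the note map below.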
export default {
'a': { instrument: 'piano', note: 'a4' },
'b': { instrument: 'piano', note: 'eb3' },
'c': { instrument: 'piano', note: 'd6' },
'd': { instrument: 'piano', note: 'eb4' },
'e': { instrument: 'piano', note: 'd4' },
'f': { instrument: 'piano', note: 'bb5' },
'g': { instrument: 'piano', note: 'd7' },
'h': { instrument: 'piano', note: 'g3' },
'i': { instrument: 'piano', note: 'g5' },
'j': { instrument: 'piano', note: 'bb6' },
'k': { instrument: 'piano', note: 'eb6' },
'l': { instrument: 'piano', note: 'bb4' },
'm': { instrument: 'piano', note: 'a6' },
'n': { instrument: 'piano', note: 'a5' },
'o': { instrument: 'piano', note: 'd5' },
'p': { instrument: 'piano', note: 'a2' },
'q': { instrument: 'piano', note: 'bb2' },
'r': { instrument: 'piano', note: 'a3' },
's': { instrument: 'piano', note: 'd3' },
't': { instrument: 'piano', note: 'g4' },
'u': { instrument: 'piano', note: 'g6' },
'v': { instrument: 'piano', note: 'bb3' },
'w': { instrument: 'piano', note: 'eb5' },
'x': { instrument: 'piano', note: 'eb2' },
'y': { instrument: 'piano', note: 'g2' },
'z': { instrument: 'piano', note: 'eb7' },
'A': { instrument: 'celesta', note: 'a4' },
'B': { instrument: 'celesta', note: 'eb3' },
'C': { instrument: 'celesta', note: 'd6' },
'D': { instrument: 'celesta', note: 'eb4' },
'E': { instrument: 'celesta', note: 'd4' },
'F': { instrument: 'celesta', note: 'bb4' },
'G': { instrument: 'celesta', note: 'd2' },
'H': { instrument: 'celesta', note: 'g3' },
'I': { instrument: 'celesta', note: 'g5' },
'J': { instrument: 'celesta', note: 'bb6' },
'K': { instrument: 'celesta', note: 'eb6' },
'L': { instrument: 'celesta', note: 'bb5' },
'M': { instrument: 'celesta', note: 'a6' },
'N': { instrument: 'celesta', note: 'a5' },
'O': { instrument: 'celesta', note: 'd5' },
'P': { instrument: 'celesta', note: 'a7' },
'Q': { instrument: 'celesta', note: 'bb7' },
'R': { instrument: 'celesta', note: 'a3' },
'S': { instrument: 'celesta', note: 'd3' },
'T': { instrument: 'celesta', note: 'g4' },
'U': { instrument: 'celesta', note: 'g6' },
'V': { instrument: 'celesta', note: 'bb3' },
'W': { instrument: 'celesta', note: 'eb5' },
'X': { instrument: 'celesta', note: 'eb7' },
'Y': { instrument: 'celesta', note: 'g7' },
'Z': { instrument: 'celesta', note: 'eb2' },
'$': { instrument: 'swell', note: 'eb3' },
',': { instrument: 'swell', note: 'eb3' },
'/': { instrument: 'swell', note: 'bb3' },
'\\': { instrument: 'swell', note: 'eb3' },
':': { instrument: 'swell', note: 'g3' },
';': { instrument: 'swell', note: 'bb3' },
'-': { instrument: 'swell', note: 'bb3' },
'+': { instrument: 'swell', note: 'g3' },
'|': { instrument: 'swell', note: 'bb3' },
'{': { instrument: 'swell', note: 'bb3' },
'}': { instrument: 'swell', note: 'eb3' },
'[': { instrument: 'swell', note: 'g3' },
']': { instrument: 'swell', note: 'bb3' },
'%': { instrument: 'swell', note: 'bb3' },
'&': { instrument: 'swell', note: 'eb3' },
'*': { instrument: 'swell', note: 'eb3' },
'^': { instrument: 'swell', note: 'bb3' },
'#': { instrument: 'swell', note: 'g3' },
'!': { instrument: 'swell', note: 'g3' },
'@': { instrument: 'swell', note: 'eb3' },<|fim▁hole|> '`': { instrument: 'swell', note: 'eb3' },
'_': { instrument: 'swell', note: 'g3' },
'"': { instrument: 'swell', note: 'eb3' },
"'": { instrument: 'swell', note: 'bb3' },
'<': { instrument: 'swell', note: 'g3' },
'>': { instrument: 'swell', note: 'g3' },
'.': { instrument: 'swell', note: 'g3' },
'?': { instrument: 'swell', note: 'bb3' },
'0': { instrument: 'fluteorgan', note: 'd3' },
'1': { instrument: 'fluteorgan', note: 'eb3' },
'2': { instrument: 'fluteorgan', note: 'g3' },
'3': { instrument: 'fluteorgan', note: 'a3' },
'4': { instrument: 'fluteorgan', note: 'bb3' },
'5': { instrument: 'fluteorgan', note: 'd2' },
'6': { instrument: 'fluteorgan', note: 'eb2' },
'7': { instrument: 'fluteorgan', note: 'g2' },
'8': { instrument: 'fluteorgan', note: 'a2' },
'9': { instrument: 'fluteorgan', note: 'bb2' },
's1': { instrument: 'swell', note: 'eb3' },
's2': { instrument: 'swell', note: 'g3' },
's3': { instrument: 'swell', note: 'bb3' }
};<|fim▁end|> | '(': { instrument: 'swell', note: 'g3' },
')': { instrument: 'swell', note: 'eb3' },
'=': { instrument: 'swell', note: 'eb3' },
'~': { instrument: 'swell', note: 'g3' }, |
<|file_name|>StreamModule.java<|end_file_name|><|fim▁begin|>package models.message;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import models.SecureTable;
import org.codehaus.jackson.annotate.JsonProperty;
import com.alvazan.orm.api.z8spi.meta.DboTableMeta;
/**
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class StreamModule {
@JsonProperty("module")
@XmlElement(name="module")
public String module;
@JsonProperty("params")
@XmlElement(name="params")
public Map<String, String> params = new HashMap<String, String>();
//Ideally this would follow the composite pattern: StreamModule would be the "Component" and this field
//would be pushed down into a "Container" subtype (previously called StreamAggregation). That does not
//work here because the JSON parser cannot decide whether to unmarshal into StreamModule or the
//Container type, so the field lives on StreamModule and is only used for the Container case.
@JsonProperty("childStreams")
@XmlElement(name="childStreams")
public List<StreamModule> streams = new ArrayList<StreamModule>();
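//Illustrative JSON this class can unmarshal (editor's example; the module
//names and params are made up):
//{
//  "module": "average",
//  "params": {"interval": "60"},
//  "childStreams": [
//    {"module": "rawsource", "params": {"table": "temps"}, "childStreams": []}
//  ]
//}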
public String getModule() {
return module;<|fim▁hole|> public void setModule(String module) {
this.module = module;
}
public Map<String, String> getParams() {
return params;
}
public void setParams(Map<String, String> params) {
this.params = params;
}
public List<StreamModule> getStreams() {
return streams;
}
public void setStreams(List<StreamModule> streams) {
this.streams = streams;
}
@Override
public String toString() {
return "module=" + module;
}
} // Register<|fim▁end|> | }
|
<|file_name|>DateUtil.java<|end_file_name|><|fim▁begin|>package com.eveningoutpost.dexdrip.Models;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
// from package info.nightscout.client.utils;
/**
* Created by mike on 30.12.2015.
*/
/**
* The Class DateUtil. A simple wrapper around SimpleDateFormat to ease converting between ISO date
* strings and Date objects with explicit time zone handling.
*/
public class DateUtil {
private static final String FORMAT_DATE_ISO = "yyyy-MM-dd'T'HH:mm:ss'Z'"; // eg 2017-03-24T22:03:27Z
private static final String FORMAT_DATE_ISO2 = "yyyy-MM-dd'T'HH:mm:ssZ"; // eg 2017-03-27T17:38:14+0300
private static final String FORMAT_DATE_ISO3 = "yyyy-MM-dd'T'HH:mmZ"; // eg 2017-05-12T08:16-0400
/**
* Takes in an ISO date string in the following format (parsed as UTC):
* yyyy-MM-dd'T'HH:mm:ss'Z', e.g. 2017-03-24T22:03:27Z
*
* @param isoDateString the iso date string
* @return the date
* @throws Exception the exception
*/
private static Date fromISODateString(String isoDateString)
throws Exception {
SimpleDateFormat f = new SimpleDateFormat(FORMAT_DATE_ISO);
f.setTimeZone(TimeZone.getTimeZone("UTC"));
return f.parse(isoDateString);
}
private static Date fromISODateString3(String isoDateString)
throws Exception {
SimpleDateFormat f = new SimpleDateFormat(FORMAT_DATE_ISO3);
f.setTimeZone(TimeZone.getTimeZone("UTC"));
return f.parse(isoDateString);
}
private static Date fromISODateString2(String isoDateString)
throws Exception {<|fim▁hole|> return f.parse(isoDateString);
} catch (java.text.ParseException e) {
return fromISODateString3(isoDateString);
}
}
public static Date tolerantFromISODateString(String isoDateString)
throws Exception {
try {
return fromISODateString(isoDateString.replaceFirst("\\.[0-9][0-9][0-9]Z$", "Z"));
} catch (java.text.ParseException e) {
return fromISODateString2(isoDateString);
}
}
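// Illustrative usage (editor's example, based on the formats defined above):
// tolerantFromISODateString("2017-03-24T22:03:27.123Z") // millis stripped, parsed with FORMAT_DATE_ISO as UTC
// tolerantFromISODateString("2017-03-27T17:38:14+0300") // falls back to FORMAT_DATE_ISO2
// tolerantFromISODateString("2017-05-12T08:16-0400")    // falls back to FORMAT_DATE_ISO3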
/**
* Render date
*
* @param date the date obj
* @param format - if not specified, will use FORMAT_DATE_ISO
* @param tz - tz to set to, if not specified uses local timezone
* @return the iso-formatted date string
*/
public static String toISOString(Date date, String format, TimeZone tz) {
if (format == null) format = FORMAT_DATE_ISO;
if (tz == null) tz = TimeZone.getDefault();
DateFormat f = new SimpleDateFormat(format);
f.setTimeZone(tz);
return f.format(date);
}
public static String toISOString(Date date) {
return toISOString(date, FORMAT_DATE_ISO, TimeZone.getTimeZone("UTC"));
}
public static String toISOString(long date) {
return toISOString(new Date(date), FORMAT_DATE_ISO, TimeZone.getTimeZone("UTC"));
}
public static String toNightscoutFormat(long date) {
final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);
format.setTimeZone(TimeZone.getDefault());
return format.format(date);
}
}<|fim▁end|> | try {
SimpleDateFormat f = new SimpleDateFormat(FORMAT_DATE_ISO2);
f.setTimeZone(TimeZone.getTimeZone("UTC")); |
<|file_name|>sqlite_check.py<|end_file_name|><|fim▁begin|># Script Name : sqlite_check.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Runs sanity checks against my SQLite database
import sqlite3 as lite
import sys
import os
dropbox = os.getenv("dropbox")
dbfile = ("Databases\jarvis.db")
master_db = os.path.join(dropbox, dbfile)
con = None
try:
con = lite.connect(master_db)
cur = con.cursor()
cur.execute('SELECT SQLITE_VERSION()')
data = cur.fetchone()
print<|fim▁hole|>except lite.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
con = lite.connect(master_db)
cur = con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
rows = cur.fetchall()
for row in rows:
print row
con = lite.connect(master_db)
cur = con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
while True:
row = cur.fetchone()
if row == None:
break
print
row[0]<|fim▁end|> | "SQLite version: %s" % data
|
<|file_name|>export_node.cpp<|end_file_name|><|fim▁begin|>#include "export_node.hpp"
#include <sstream>
#include "constant_mappings.hpp"
namespace opossum {
ExportNode::ExportNode(const std::string& init_table_name, const std::string& init_file_name,
const FileType init_file_type)
: AbstractNonQueryNode(LQPNodeType::Export),
table_name(init_table_name),
file_name(init_file_name),
file_type(init_file_type) {}
std::string ExportNode::description(const DescriptionMode mode) const {
std::ostringstream stream;
stream << "[Export] Name: '" << table_name << "'";
return stream.str();
}
size_t ExportNode::_on_shallow_hash() const {
auto hash = boost::hash_value(table_name);
boost::hash_combine(hash, file_name);
boost::hash_combine(hash, file_type);
return hash;
}
std::shared_ptr<AbstractLQPNode> ExportNode::_on_shallow_copy(LQPNodeMapping& node_mapping) const {
return ExportNode::make(table_name, file_name, file_type);
}
bool ExportNode::_on_shallow_equals(const AbstractLQPNode& rhs, const LQPNodeMapping& node_mapping) const {
const auto& export_node = static_cast<const ExportNode&>(rhs);<|fim▁hole|>} // namespace opossum<|fim▁end|> | return table_name == export_node.table_name && file_name == export_node.file_name &&
file_type == export_node.file_type;
}
|
<|file_name|>testrunner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Module for running keyword-driven tests
"""
from __future__ import with_statement
import time
import datetime
import re
from adapterlib.ToolProtocol import *
from adapterlib.ToolProtocolHTTP import *
import adapterlib.keyword as keyword
import adapterlib.keywordproxy as keywordproxy
from adapterlib.logger import KeywordLogger
class AdapterCompleter(object):
""" Simple class for doing tab-completion in interactive mode"""
def __init__(self, keywords ):
self.keywords = sorted(keywords)
def complete(self, text, state ):
response = None
if state == 0:
if text:
self.matches = [s for s in self.keywords if s and s.startswith(text)]
else:
self.matches = self.keywords[:]
try:
response = self.matches[state]
except IndexError:
response = None
return response
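# Illustrative usage (editor's example; mirrors the wiring in runInteractive below):
#   import readline
#   readline.set_completer(AdapterCompleter(["kws", "info", "exit"]).complete)
#   readline.parse_and_bind('tab: complete')
# Typing "in<TAB>" at the prompt would then complete to "info".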
class Target(object):
def __init__(self,name,):
self.__name = name
def setup(self):
raise NotImplementedError()
def cleanup(self):
raise NotImplementedError()
@property
def name(self):
return self.__name
def takeScreenShot(self, path):
return False
class TestRunner(object):
"""
TestRunner class is used to run Keyword-driven tests.
The class allows test to be run interactively (given through stdin), from
file or from server.
To run tests from a server, TestRunner uses classes ToolProtocol and
ToolProtocolHTTP.
"""
def __init__(self, targets, delay, record = False ):
"""
Initializer.
@type targets: list
@param targets: list of System under test (SUT) identifiers.
@type delay: float
@param delay: Wait-time between consecutive keywords (in seconds)
@type record: boolean
@param record: Is the test recorded to html-file
"""
self._targetNames = targets
self._targets = []
self.delay = delay
self._rec_process = None
self._kwCount = 1
self._logger = None
self._separator = " "
if record:
self._logger = KeywordLogger()
self._kw_cache = {}
# Special commands listed here for interactive mode completer
self._commands = {}
self._commands["exit"] = ["quit","q","exit"]
self._commands["kws"] = ["list","kws","list full","kws full"]
self._commands["info"] = ["info"]
self._commands["special"] = []
def _setupTestAutomation(self):
"""Sets up test automation environment
@rtype: boolean
@returns: True if success, False otherwise
"""
raise NotImplementedError()
def _cleanupTestAutomation(self):
"""Cleans up test automation environment"""
raise NotImplementedError()
def __setTarget(self,targetName):
if re.match("['\"].*['\"]",targetName):
targetName = targetName[1:-1]
if targetName == "test" or targetName == "testi":
print "Warning: 'test' and 'testi' considered dummy targets."
return True
for t in self._targets:
if t.name == targetName:
self._activeTarget = t
return True
return False
def initTest(self):
"""
Inits a test run.
Creates a log file and starts recording if defined.
"""
print "Setting up testing environment..."
if not self._setupTestAutomation():<|fim▁hole|>
if self._logger:
print "Recording test to a file"
self._logger.startLog()
return True
def _stopTest(self):
"""
Stops a test run.
Closes the log-file and stops recording process.
"""
print "Cleaning up testing environment..."
self._cleanupTestAutomation()
print "clean up complete"
if self._logger:
self._logger.endLog()
print "Test finished"
def endTest(self):
print "Shutting down"
self._stopTest()
def keywordInfo(self, kw ):
kws = self._getKeywords()
if kw in kws:
print kw
self.printKw(kw,"#",kws[kw][1])
def printKw(self,kw,header,text):
print header*len(kw)
print
docstring = text.splitlines()
strip_len = 0
if len(docstring[0]) == 0:
docstring = docstring[1:]
for line in docstring:
if len(line.strip()) > 0:
first_line = line.lstrip()
strip_len = len(line) - len(first_line)
break
for line in docstring:
print line[strip_len:].rstrip()
print
def listKeywords(self, basekw = keyword.Keyword,full=False,header="#"):
kws = self._getKeywords({},basekw)
kws_keys = sorted(kws.keys())
for kw in kws_keys:
print kw
if full:
self.printKw(kw,header,kws[kw][1])
def _getKeywords(self, kw_dictionary = {}, basekw = keyword.Keyword):
use_cache = len(kw_dictionary) == 0
if use_cache and basekw in self._kw_cache:
return self._kw_cache[basekw]
for kw in basekw.__subclasses__():
kw_name = str(kw)[str(kw).rfind('.')+1:str(kw).rfind("'")]
if not kw_name.endswith("Keyword"):
kw_dictionary[kw_name] = (str(kw.__module__),str(kw.__doc__))
self._getKeywords(kw_dictionary,kw)
if use_cache:
self._kw_cache[basekw] = kw_dictionary
return kw_dictionary
def __instantiateKeywordProxyObject(self,kwproxy, kwName,kwAttr,kwproxy_class):
kwobject = None
try:
kwmodule = __import__(kwproxy_class, globals(), locals(), [kwproxy], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(kwmodule,kwproxy)()
if not kwobject.initialize(kwName, kwAttr,self._activeTarget):
kwobject = None
if kwobject:
print 'Recognized keyword: %s' % kwName
print 'Attributes: %s' % kwAttr
except Exception, e:
print e
print "Error: KeywordProxy error"
kwobject = None
return kwobject
def __instantiateKeywordObject(self,kw_name,attributes,kw_class):
kwobject = None
try:
kwmodule = __import__(kw_class, globals(), locals(), [kw_name], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(kwmodule,kw_name)()
print 'Recognized keyword: %s' % kw_name
print 'Attributes: %s' % attributes
if not kwobject.initialize(attributes,self._activeTarget):
print "Invalid parameters"
kwobject = None
except Exception, e:
print e
print "Error: Keyword not recognized!"
kwobject = None
return kwobject
def _instantiateKeyword(self, kwName, kwAttr):
kw_dictionary = self._getKeywords()
kwproxy_dictionary = self._getKeywords({}, keywordproxy.KeywordProxy)
kwobject = None
for kw in kw_dictionary:
if kw.lower() == kwName.lower():
kwobject = self.__instantiateKeywordObject(kw,kwAttr,kw_dictionary[kw][0])
break
else:
for kwproxy in kwproxy_dictionary:
kwobject = self.__instantiateKeywordProxyObject(kwproxy, kwName,kwAttr,kwproxy_dictionary[kwproxy][0])
if kwobject:
break
if not kwobject:
print "Error: Keyword not recognized!"
return kwobject
def __executeKeyword(self, kw):
"""
Executes a single keyword.
Searches a corresponding keyword object from the list of keywords and executes the keyword with that object.
@type kw: string
@param kw: executed keyword
@rtype: boolean or string
@return: True if execution was successful; False if execution was successful but the keyword returned False;
"ERROR" if there was a problem during execution.
"""
print ""
print "Executing keyword: %s" % kw
#Which keyword
result = False
kw = kw.strip()
if kw.startswith("kw_"):
kw = kw[3:].strip()
# Testengine-note: generate-taskswitcher uses space as separator
if kw.startswith("LaunchApp") or kw.startswith("SetTarget"):
if not (kw.startswith("LaunchApp#") or kw.startswith("SetTarget#")):
kw = kw.replace(" ",self._separator,1)
kw_split = kw.split(self._separator,1)
kwName = kw_split[0].strip()
if len(kw_split) == 2:
kwAttr = kw_split[1].strip()
else:
kwAttr = ""
#Changing target
if kwName.lower() == "settarget":
result = self.__setTarget(kwAttr)
print 'result: %s' % str(result)
return result
kwobject = self._instantiateKeyword(kwName,kwAttr)
if not kwobject:
return "ERROR"
startTime = datetime.datetime.now()
result = kwobject.execute()
execTime = datetime.datetime.now() - startTime
print 'result: %s' % str(result)
kwDelay = kwobject.delay
if kwDelay != -1:
if self.delay > kwDelay:
kwDelay = self.delay
time.sleep(kwDelay)
if self._logger:
self._logger.logKeyword(self._activeTarget, kwobject, result, str(execTime))
self._kwCount = self._kwCount + 1
return result
def _handleSpecialCommands(self,command):
return False
def runInteractive(self):
"""
Runs an interactive test.
Keywords are read from stdin.
"""
# Import readline here so the completion mechanism stays optional;
# readline is only available on Unix platforms.
try:
import readline
kws = self._getKeywords({}, keyword.Keyword).keys()
for command_list in self._commands.values():
kws.extend(command_list)
readline.set_completer(AdapterCompleter(kws).complete)
readline.parse_and_bind('tab: complete')
except:
pass
while True:
try:
kw = raw_input(">").strip()
if kw in self._commands["exit"]:
return
elif kw == "":
continue
kw_split = kw.split(" ")
if kw_split[0] in self._commands["kws"]:
if len(kw_split) > 1 and kw_split[1] == "full" and " ".join(kw_split[0:2]) in self._commands["kws"]:
if len(kw_split) == 3:
char = kw_split[2]
else:
char = "#"
self.listKeywords(full=True,header=char)
else:
self.listKeywords(full=False)
elif kw_split[0] in self._commands["info"] and len(kw_split) == 2:
self.keywordInfo(kw_split[1])
elif not self._handleSpecialCommands(kw):
self.__executeKeyword(kw)
except EOFError:
break
def runFromServer(self, address, port, username = None, protocol= None ):
"""
Runs a test from server.
@type address: string
@param address: Address of the server
@type port: integer
@param port: Port of the server
@type username: string
@param username: Username is required when using http or https protocol
@type protocol: string
@param protocol: Protocol that is used in the connection. Options are http and https.
A plain socket is used if the parameter is not given.
"""
toolProtocol = None
#while True:
if(address != None and port != None):
if(protocol):
base,path = address.split("/",1)
toolProtocol = ToolProtocolHTTP()
toolProtocol.init(base,path,port,username,protocol)
else:
toolProtocol = ToolProtocol()
toolProtocol.init(address,port)
if toolProtocol.hasConnection() == False:
#print "Connection to the MBT server failed, reconnecting..."
print "Connection to the MBT server failed."
# time.sleep(5)
return
#else:
# break
while True:
kw = ""
#if passive:
# kw = toolProtocol.receiveKeyword()
#else:
kw = toolProtocol.getKeyword()
if (kw == '' or kw =='\n' or kw == "ERROR"):
return
result = self.__executeKeyword(kw)
if(result == "ERROR"):
toolProtocol.putResult(False)
toolProtocol.bye()
return
toolProtocol.putResult(result)
def runFromFile(self, fileName ):
"""
Runs a test from file.
@type fileName: string
@param fileName: path to the file that contains the test
"""
try:
with open(fileName,'r') as inputFile:
for line in inputFile:
kw = line.strip()
if not kw:
break
result = self.__executeKeyword(kw)
if(result == "ERROR"):
break
except IOError:
print "Error when reading file: %s" % fileName<|fim▁end|> | return False
print "setup complete"
self._activeTarget = self._targets[0] |
<|file_name|>test_ext_bowtie.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Unit tests for functions calling bowtie externally."""
from pathlib2 import Path
import pytest
from imfusion.external import bowtie
class TestBowtieIndex(object):
"""Unit tests for the bowtie_index function."""
def test_call(self, mocker):
"""Tests example call with Path paths."""
mock_run = mocker.patch.object(bowtie, 'run_command')
bowtie.bowtie_index(Path('reference.fa'), Path('genome'))<|fim▁hole|><|fim▁end|> |
mock_run.assert_called_once_with(
args=['bowtie-build', 'reference.fa', 'genome'], log_path=None) |
<|file_name|>contest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'AminHP'
# python imports
import os
import shutil
import zipfile
import StringIO
import base64
# flask imports
from flask import jsonify, request, g, send_file, abort
# project imports
from project import app
from project.extensions import db, auth
from project.modules.datetime import utcnowts
from project.modules.paginator import paginate
from project.models.contest import Contest, Problem, ContestDateTimeError
from project.models.team import Team
from project.models.user import User
from project.forms.problem import UploadProblemBody, UploadTestCase
@app.api_route('', methods=['POST'])
@app.api_validate('contest.create_schema')
@auth.authenticate
def create():
"""
Create Contest
---
tags:
- contest
parameters:
- name: body
in: body
description: Contest information
required: true
schema:
id: ContestCreation
required:
- name
- starts_at
- ends_at
properties:
name:
type: string
example: babyknight
minLength: 1
maxLength: 32
starts_at:
type: integer
description: Contest starts_at (utc timestamp)
ends_at:
type: integer
description: Contest ends_at (utc timestamp)
recaptcha:
type: string
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
201:
description: Successfully created
schema:
$ref: "#/definitions/api_1_contest_list_owner_get_ContestInfo"
400:
description: Bad request
401:
description: Token is invalid or has expired
406:
description: EndTime must be greater than StartTime and StartTime must be greater than CreationTime
409:
description: Contest already exists
"""
json = request.json
try:
obj = Contest()
obj.owner = User.objects.get(pk=g.user_id)
obj.populate(json)
obj.save()
return jsonify(obj.to_json()), 201
except db.NotUniqueError:
return abort(409, "Contest already exists")
except ContestDateTimeError:
return abort(406, "EndTime must be greater than StartTime and StartTime must be greater than CreationTime")
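# Illustrative request (editor's example; the URL prefix depends on how
# app.api_route mounts this blueprint, so the exact path is an assumption):
#   POST .../contest  {"name": "babyknight", "starts_at": 1470000000, "ends_at": 1470003600}
# with a valid token in the Access-Token header.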
@app.api_route('<string:cid>', methods=['GET'])
@auth.authenticate
def info(cid):
"""
Get Contest Info
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Contest information
schema:
id: ContestInfoUser
type: object
properties:
id:
type: string
description: Contest id
name:
type: string
description: Contest name
owner:
description: Owner info
schema:
id: ContestOwnerInfo
type: object
properties:
id:
type: string
description: Owner id
username:
type: string
description: Owner username
created_at:
type: integer
description: Contest created_at (utc timestamp)
starts_at:
type: integer
description: Contest starts_at (utc timestamp)
ends_at:
type: integer
description: Contest ends_at (utc timestamp)
is_active:
type: boolean
description: Contest is_active
is_ended:
type: boolean
description: Contest is_ended
is_owner:
type: boolean
description: Contest is_owner
is_admin:
type: boolean
description: Contest is_admin
pending_teams_num:
type: integer
description: Contest number of pending teams
accepted_teams_num:
type: integer
description: Contest number of accepted teams
joining_status:
type: object
description: Contest user joining status
schema:
properties:
status:
type: integer
description: joining status (0=not_joined, 1=waiting, 2=joined)
team:
schema:
id: TeamAbsInfo
properties:
id:
type: string
description: Team id
name:
type: string
description: Team name
owner:
description: Owner info
schema:
$ref: "#/definitions/api_1_team_info_get_UserAbsInfo"
401:
description: Token is invalid or has expired
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
return jsonify(obj.to_json_user(user_obj)), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('<string:cid>', methods=['PUT'])
@app.api_validate('contest.edit_schema')
@auth.authenticate
def edit(cid):
"""
Edit Contest
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: body
in: body
description: Contest information
required: true
schema:
id: ContestEdition
properties:
name:
type: string
example: babyknight
minLength: 1
maxLength: 32
starts_at:
type: integer
description: Contest starts_at (utc timestamp)
ends_at:
type: integer
description: Contest ends_at (utc timestamp)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully edited
schema:
$ref: "#/definitions/api_1_contest_list_owner_get_ContestInfo"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest does not exist
406:
description: EndTime must be greater than StartTime and StartTime must be greater than CreationTime
409:
description: Contest name already exists
"""
json = request.json
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
if (user_obj != obj.owner) and (not user_obj in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
obj.populate(json)
obj.save()
return jsonify(obj.to_json()), 200
except db.NotUniqueError:
return abort(409, "Contest name already exists")
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
except ContestDateTimeError:
return abort(406, "EndTime must be greater than StartTime and StartTime must be greater than CreationTime")
@app.api_route('<string:cid>/', methods=['DELETE'])
@auth.authenticate
def delete(cid):
"""
Contest Delete
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully deleted
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the contest
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
if user_obj != obj.owner:
return abort(403, "You aren't owner of the contest")
obj.delete()
return '', 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('', methods=['GET'])
@paginate('contests', 20)
@auth.authenticate
def list():
"""
Get All Contests List
---
tags:
- contest
parameters:
- name: page
in: query
type: integer
required: false
description: Page number
- name: per_page
in: query
type: integer
required: false
description: Contest amount per page (default is 10)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of contests
schema:
id: ContestsListUser
type: object
properties:
contests:
type: array
items:
$ref: "#/definitions/api_1_contest_info_get_ContestInfoUser"
meta:
type: object
description: Pagination meta data
properties:
first:
type: string
description: Url for first page of results
last:
type: string
description: Url for last page of results
next:
type: string
description: Url for next page of results
prev:
type: string
description: Url for previous page of results
page:
type: integer
description: Number of the current page
pages:
type: integer
description: All pages count
per_page:
type: integer
description: Item per each page
total:
type: integer
description: Total count of all items
401:
description: Token is invalid or has expired
"""
user_obj = User.objects.get(pk=g.user_id)
contests = Contest.objects.order_by('-starts_at')
result_func = lambda obj: Contest.to_json_user(obj, user_obj)
return contests, result_func
@app.api_route('owner', methods=['GET'])
@paginate('contests', 20)
@auth.authenticate
def list_owner():
"""
Get Owner Contests
---
tags:
- contest
parameters:
- name: page
in: query
type: integer
required: false
description: Page number
- name: per_page
in: query
type: integer
required: false
description: Contest amount per page (default is 10)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of contests
schema:
id: ContestsList
type: object
properties:
contests:
type: array
items:
schema:
id: ContestInfo
type: object
properties:
id:
type: string
description: Contest id
name:
type: string
description: Contest name
owner:
description: Owner info
schema:
id: ContestOwnerInfo
type: object
properties:
id:
type: string
description: Owner id
username:
type: string
description: Owner username
created_at:
type: integer
description: Contest created_at (utc timestamp)
starts_at:
type: integer
description: Contest starts_at (utc timestamp)
ends_at:
type: integer
description: Contest ends_at (utc timestamp)
is_active:
type: boolean
description: Contest is_active
is_ended:
type: boolean
description: Contest is_ended
pending_teams_num:
type: integer
description: Contest number of pending teams
accepted_teams_num:
type: integer
description: Contest number of accepted teams
401:
description: Token is invalid or has expired
"""
user_obj = User.objects.get(pk=g.user_id)
contests = Contest.objects.filter(owner=user_obj).order_by('-starts_at')
result_func = lambda obj: Contest.to_json(obj)
return contests, result_func
@app.api_route('<string:cid>/result', methods=['GET'])
@auth.authenticate
def result(cid):
"""
Get Result
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Result information
schema:
id: ContestResult
type: object
properties:
result:
type: object
properties:
team_id*:
type: object
description: Teams result dictionary (team_id => team_data).
Teams without submission aren't in this dictionary
properties:
penalty:
type: integer
solved_count:
type: integer
problems:
type: object
properties:
problem_id*:
type: object
description: Problems result dictionary (problem_id => problem_data).
Problems without submission aren't in this dictionary
properties:
submitted_at:
type: integer
description: Last submission time (utc timestamp) (default=null)
failed_tries:
type: integer
penalty:
type: integer
solved:
type: boolean
teams:
type: array
description: Teams list (sorted by rank in contest)
items:
schema:
properties:
id:
type: string
description: Team id
name:
type: string
description: Team name
problems:
type: array
description: Problems list
items:
schema:
properties:
id:
type: string
description: Problem id
title:
type: string
description: Problem title
401:
description: Token is invalid or has expired
403:
description: You aren't allowed to see result
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
now = utcnowts()
if not (user_obj == obj.owner or user_obj in obj.admins or \
(now >= obj.starts_at and obj.is_user_in_contest(user_obj)) or \
(now > obj.ends_at)):
return abort(403, "You aren't allowed to see result")
return jsonify(obj.to_json_result()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
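# Illustrative response body (editor's example; the shape follows the swagger
# schema above, the ids and numbers are made up):
# {
#   "result": {"<team_id>": {"penalty": 40, "solved_count": 1,
#                            "problems": {"<problem_id>": {"submitted_at": 1470000000,
#                                                          "failed_tries": 2,
#                                                          "penalty": 40,
#                                                          "solved": true}}}},
#   "teams": [{"id": "<team_id>", "name": "babyknight"}],
#   "problems": [{"id": "<problem_id>", "title": "A"}]
# }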
################################# Team #################################
@app.api_route('team/<string:tid>', methods=['GET'])
@auth.authenticate
def list_team(tid):
"""
Get Contests List of a Team
---
tags:
- contest
parameters:
- name: tid
in: path
type: string
required: true
description: Id of team
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Team contests list
schema:
id: TeamContestsList
type: object
properties:
waiting_contests:
type: array
items:
schema:
$ref: "#/definitions/api_1_contest_list_owner_get_ContestInfo"
joined_contests:
type: array
items:
schema:
$ref: "#/definitions/api_1_contest_list_owner_get_ContestInfo"
401:
description: Token is invalid or has expired
404:
description: Team does not exist
"""
try:
obj = Team.objects.get(pk=tid)
wc = Contest.objects.filter(pending_teams=obj)
wc = [c.to_json() for c in wc]
jc = Contest.objects.filter(accepted_teams=obj)
jc = [c.to_json() for c in jc]
return jsonify(waiting_contests=wc, joined_contests=jc), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Team does not exist")
@app.api_route('<string:cid>/team', methods=['POST'])
@app.api_validate('contest.team_join_schema')
@auth.authenticate
def team_join(cid):
"""
Team Join
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: body
in: body
description: Team Identification
required: true
schema:
id: TeamIdentification
properties:
team_id:
type: string
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Join request sent
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the team
404:
description: Contest or Team does not exist
409:
description: You are already accepted
"""
json = request.json
try:
obj = Contest.objects.get(pk=cid)
team_obj = Team.objects.get(pk=json['team_id'])
if str(team_obj.owner.pk) != g.user_id:
return abort(403, "You aren't owner of the team")
if team_obj in obj.accepted_teams:
return abort(409, "You are already accepted")
obj.update(add_to_set__pending_teams=team_obj)
return '', 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or Team does not exist")
@app.api_route('<string:cid>/team/<string:tid>', methods=['DELETE'])
@auth.authenticate
def team_unjoin(cid, tid):
"""
Team Unjoin
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: tid
in: path
type: string
required: true
description: Id of team
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully unjoined
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the team
404:
description: Contest or Team does not exist
"""
try:
team_obj = Team.objects.get(pk=tid)
obj = Contest.objects.get(pk=cid, pending_teams=team_obj)
if str(team_obj.owner.pk) != g.user_id:
return abort(403, "You aren't owner of the team")
obj.update(pull__pending_teams=team_obj)
return '', 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or Team does not exist")
@app.api_route('<string:cid>/pending_teams', methods=['GET'])
@auth.authenticate
def team_list_pending(cid):
"""
Team Get Pending List
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of pending teams
schema:
id: ContestTeamsList
type: object
properties:
teams:
type: array
items:
schema:
$ref: "#/definitions/api_1_team_info_get_TeamInfo"
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
if (user_obj != obj.owner) and (not user_obj in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
return jsonify(obj.to_json_teams('pending')), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('<string:cid>/accepted_teams', methods=['GET'])
@auth.authenticate
def team_list_accepted(cid):
"""
Team Get Accepted List
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of accepted teams
schema:
$ref: "#/definitions/api_1_contest_team_list_pending_get_ContestTeamsList"
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
if (user_obj != obj.owner) and (not user_obj in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
return jsonify(obj.to_json_teams('accepted')), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('<string:cid>/team/<string:tid>/acceptation', methods=['PATCH'])
@auth.authenticate
def team_accept(cid, tid):
"""
Team Accept
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: tid
in: path
type: string
required: true
description: Id of team
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully accepted
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or Team does not exist
"""
try:
team_obj = Team.objects.get(pk=tid)
obj = Contest.objects.get(pk=cid, pending_teams=team_obj)
user_obj = User.objects.get(pk=g.user_id)
if (user_obj != obj.owner) and (not user_obj in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
obj.update(pull__pending_teams=team_obj, add_to_set__accepted_teams=team_obj)
return '', 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or Team does not exist")
@app.api_route('<string:cid>/team/<string:tid>/acceptation', methods=['DELETE'])
@auth.authenticate
def team_reject(cid, tid):
"""
Team Reject
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: tid
in: path
type: string
required: true
description: Id of team
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully rejected
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or Team does not exist
"""
try:
team_obj = Team.objects.get(pk=tid)
obj = Contest.objects.get(pk=cid, pending_teams=team_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
obj.update(pull__pending_teams=team_obj)
return '', 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or Team does not exist")
@app.api_route('<string:cid>/team/<string:tid>/kick', methods=['DELETE'])
@auth.authenticate
def team_kick(cid, tid):
"""
Team Kick
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: tid
in: path
type: string
required: true
description: Id of team
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully kicked
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or Team does not exist
"""
try:
team_obj = Team.objects.get(pk=tid)
obj = Contest.objects.get(pk=cid, accepted_teams=team_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
obj.update(pull__accepted_teams=team_obj)
return '', 200
except (db.DoesNotExist, db.ValidationError):<|fim▁hole|>
################################# Problem #################################
@app.api_route('<string:cid>/problem', methods=['POST'])
@app.api_validate('contest.problem_create_schema')
@auth.authenticate
def problem_create(cid):
"""
Problem Create
Maximum number of problems can be created is 20
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: body
in: body
description: Problem information
required: true
schema:
id: ProblemCreation
required:
- title
- time_limit
- space_limit
properties:
title:
type: string
example: babyknight
minLength: 1
maxLength: 32
time_limit:
type: number
minimum: 0.1
maximum: 10
description: Problem time limit (seconds)
space_limit:
type: integer
minimum: 16
maximum: 256
description: Problem space limit (mega bytes)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
201:
description: Successfully created
schema:
$ref: "#/definitions/api_1_contest_problem_info_get_ProblemInfo"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest does not exist
406:
description: You can't create more problems
"""
json = request.json
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
if len(obj.problems) >= 20:
return abort(406, "You can't create more problems")
problem_obj = Problem()
problem_obj.populate(json)
problem_obj.save()
obj.update(push__problems=problem_obj)
return jsonify(problem_obj.to_json()), 201
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('<string:cid>/problem/<string:pid>', methods=['GET'])
@auth.authenticate
def problem_info(cid, pid):
"""
Problem Get Info
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Problem information
schema:
id: ProblemInfo
type: object
properties:
id:
type: string
description: Problem id
title:
type: string
description: Problem title
time_limit:
type: number
description: Problem time limit (seconds)
space_limit:
type: integer
description: Problem space limit (mega bytes)
401:
description: Token is invalid or has expired
403:
description: You aren't allowed to see problem
404:
description: Contest or problem does not exist
"""
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
now = utcnowts()
if not (user_obj == obj.owner or user_obj in obj.admins or \
(now >= obj.starts_at and obj.is_user_in_contest(user_obj)) or \
(now > obj.ends_at)):
return abort(403, "You aren't allowed to see problem")
return jsonify(problem_obj.to_json()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
@app.api_route('<string:cid>/problem', methods=['GET'])
@auth.authenticate
def problem_list(cid):
"""
Problem Get List
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of problems
schema:
id: ContestProblemsList
type: object
properties:
problems:
type: array
items:
schema:
id: ProblemAbsInfo
type: object
properties:
id:
type: string
description: Problem id
title:
type: string
description: Problem title
401:
description: Token is invalid or has expired
403:
description: You aren't allowed to see problems
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
now = utcnowts()
if not (user_obj == obj.owner or user_obj in obj.admins or \
(now >= obj.starts_at and obj.is_user_in_contest(user_obj)) or \
(now > obj.ends_at)):
return abort(403, "You aren't allowed to see problems")
return jsonify(obj.to_json_problems()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('<string:cid>/problem/<string:pid>', methods=['PUT'])
@app.api_validate('contest.problem_edit_schema')
@auth.authenticate
def problem_edit(cid, pid):
"""
Problem Edit
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: body
in: body
description: Problem information
required: true
schema:
id: ProblemEdition
properties:
title:
type: string
example: babyknight
minLength: 1
maxLength: 32
time_limit:
type: number
minimum: 0.1
maximum: 10
description: Problem time limit (seconds)
space_limit:
type: integer
minimum: 16
maximum: 256
description: Problem space limit (mega bytes)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully edited
schema:
$ref: "#/definitions/api_1_contest_problem_info_get_ProblemInfo"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or problem does not exist
"""
json = request.json
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
problem_obj.populate(json)
problem_obj.save()
return jsonify(problem_obj.to_json()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
@app.api_route('<string:cid>/problem', methods=['PATCH'])
@app.api_validate('contest.problem_change_order_schema')
@auth.authenticate
def problem_change_order(cid):
"""
Problem Change Order
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: body
in: body
description: Problems order
required: true
schema:
id: ProblemsOrder
required:
- order
properties:
order:
type: array
items:
type: integer
description: order number
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of problems
schema:
$ref: "#/definitions/api_1_contest_problem_list_get_ContestProblemsList"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest does not exist
406:
description: Bad order format
"""
json = request.json
try:
obj = Contest.objects.get(pk=cid)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
        if len(set(json['order'])) != len(json['order']) or \
                len(json['order']) != len(obj.problems):
return abort(406, "Bad order format")
new_problems = []
for i in json['order']:
new_problems.append(obj.problems[i])
obj.problems = new_problems
obj.save()
return jsonify(obj.to_json_problems()), 200
except IndexError:
return abort(406, "Bad order format")
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
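# Usage sketch for problem_change_order (hypothetical payload): to move the
# current third problem to the front, PATCH <cid>/problem with body
# {"order": [2, 0, 1]}; 'order' must be a permutation of the problem indices,
# otherwise the endpoint answers 406.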
@app.api_route('<string:cid>/problem/<string:pid>', methods=['DELETE'])
@auth.authenticate
def problem_delete(cid, pid):
"""
Problem Delete
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully deleted
schema:
$ref: "#/definitions/api_1_contest_problem_list_get_ContestProblemsList"
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or problem does not exist
"""
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
problem_obj.delete()
obj.reload()
return jsonify(obj.to_json_problems()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
@app.api_route('<string:cid>/problem/<string:pid>/body', methods=['POST'])
@auth.authenticate
def problem_upload_body(cid, pid):
"""
Problem Upload Body File
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: body
in: formData
type: file
required: true
description: Problem body file (pdf) (max size is 16M)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully uploaded
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or problem does not exist
413:
description: Request entity too large. (max size is 16M)
415:
description: Supported file type is only application/pdf
"""
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
form = UploadProblemBody()
if not form.validate():
return abort(400, "Bad request")
if not form.validate_file():
return abort(415, "Supported file type is only application/pdf")
file_obj = form.body.data
file_obj.save(problem_obj.body_path)
return "", 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
@app.api_route('<string:cid>/problem/<string:pid>/testcase', methods=['POST'])
@auth.authenticate
def problem_upload_testcase(cid, pid):
"""
Problem Upload Testcase File
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: testcase
in: formData
type: file
required: true
description: Problem testcase file (zip) (max size is 16M)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Successfully uploaded
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner or admin of the contest
404:
description: Contest or problem does not exist
413:
description: Request entity too large. (max size is 16M)
415:
description: Supported file type is only application/zip
"""
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
        if (user_obj != obj.owner) and (user_obj not in obj.admins):
return abort(403, "You aren't owner or admin of the contest")
form = UploadTestCase()
if not form.validate():
return abort(400, "Bad request")
if not form.validate_file():
return abort(415, "Supported file type is only application/zip")
if os.path.exists(problem_obj.testcase_dir):
shutil.rmtree(problem_obj.testcase_dir)
file_obj = form.testcase.data
with zipfile.ZipFile(file_obj) as zf:
zf.extractall(problem_obj.testcase_dir)
return "", 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
@app.api_route('<string:cid>/problem/<string:pid>/body', methods=['GET'])
@auth.authenticate
def problem_download_body(cid, pid):
"""
Problem Download Body File
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: pid
in: path
type: string
required: true
description: Id of problem
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: Problem body file
401:
description: Token is invalid or has expired
403:
description: You aren't allowed to see problem body
404:
        description: Contest or problem does not exist, or the file does not exist
"""
try:
problem_obj = Problem.objects.get(pk=pid)
obj = Contest.objects.get(pk=cid, problems=problem_obj)
user_obj = User.objects.get(pk=g.user_id)
now = utcnowts()
if not (user_obj == obj.owner or user_obj in obj.admins or \
(now >= obj.starts_at and obj.is_user_in_contest(user_obj)) or \
(now > obj.ends_at)):
return abort(403, "You aren't allowed to see problem body")
        data = open(problem_obj.body_path, 'rb').read()
data = base64.b64encode(data)
data_io = StringIO.StringIO(data)
return send_file(data_io, mimetype='application/pdf')
except IOError:
return abort(404, "File does not exist")
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or problem does not exist")
################################# Admin #################################
@app.api_route('<string:cid>/admin', methods=['POST'])
@app.api_validate('contest.admin_add_schema')
@auth.authenticate
def admin_add(cid):
"""
Admin Add
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: body
in: body
        description: Admin information
required: true
schema:
id: AdminIdentificationName
required:
- username
properties:
username:
type: string
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
201:
description: Admin added
schema:
$ref: "#/definitions/api_1_contest_admin_list_get_AdminsList"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the contest
404:
description: Contest or user does not exist
"""
json = request.json
try:
obj = Contest.objects.get(pk=cid)
if str(obj.owner.pk) != g.user_id:
return abort(403, "You aren't owner of the contest")
user_obj = User.objects.get(username=json['username'])
if user_obj != obj.owner:
obj.update(add_to_set__admins=user_obj)
obj.reload()
return jsonify(obj.to_json_admins()), 201
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or user does not exist")
@app.api_route('<string:cid>/admin/<string:uid>', methods=['DELETE'])
@auth.authenticate
def admin_remove(cid, uid):
"""
Admin Remove
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: uid
in: path
type: string
required: true
description: Id of user
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
        description: Successfully removed
schema:
$ref: "#/definitions/api_1_contest_admin_list_get_AdminsList"
400:
description: Bad request
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the contest
404:
description: Contest or user does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
if str(obj.owner.pk) != g.user_id:
return abort(403, "You aren't owner of the contest")
user_obj = User.objects.get(pk=uid)
obj.update(pull__admins=user_obj)
obj.reload()
return jsonify(obj.to_json_admins()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest or user does not exist")
@app.api_route('<string:cid>/admin', methods=['GET'])
@auth.authenticate
def admin_list(cid):
"""
Admin List
---
tags:
- contest
parameters:
- name: cid
in: path
type: string
required: true
description: Id of contest
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of admins
schema:
id: AdminsList
properties:
admins:
type: array
items:
schema:
$ref: "#/definitions/api_1_team_info_get_UserAbsInfo"
401:
description: Token is invalid or has expired
403:
description: You aren't owner of the contest
404:
description: Contest does not exist
"""
try:
obj = Contest.objects.get(pk=cid)
if str(obj.owner.pk) != g.user_id:
return abort(403, "You aren't owner of the contest")
return jsonify(obj.to_json_admins()), 200
except (db.DoesNotExist, db.ValidationError):
return abort(404, "Contest does not exist")
@app.api_route('admin', methods=['GET'])
@paginate('contests', 20)
@auth.authenticate
def admin_contests():
"""
Get Admin Contests
---
tags:
- contest
parameters:
- name: page
in: query
type: integer
required: false
description: Page number
- name: per_page
in: query
type: integer
required: false
        description: Contest amount per page (default is 20)
- name: Access-Token
in: header
type: string
required: true
description: Token of current user
responses:
200:
description: List of contests
schema:
$ref: "#/definitions/api_1_contest_list_owner_get_ContestsList"
401:
description: Token is invalid or has expired
"""
user_obj = User.objects.get(pk=g.user_id)
contests = Contest.objects.filter(admins=user_obj).order_by('-starts_at')
result_func = lambda obj: Contest.to_json(obj)
return contests, result_func<|fim▁end|> | return abort(404, "Contest or Team does not exist") |
<|file_name|>qb_docker_image.scratch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: qb_docker_image
short_description: QB extension of Ansible's `docker_image` module.
description:
- Build, load or pull an image, making the image available for creating containers. Also supports tagging an
image into a repository and archiving an image to a .tar file.
options:
archive_path:
description:
- Use with state C(present) to archive an image to a .tar file.
required: false
version_added: "2.1"
load_path:
description:
- Use with state C(present) to load an image from a .tar file.
required: false
version_added: "2.2"
dockerfile:
description:
- Use with state C(present) to provide an alternate name for the Dockerfile to use when building an image.
default: Dockerfile
required: false
version_added: "2.0"
force:
description:
      - Use with state C(absent) to un-tag and remove all images matching the specified name. Use with state
C(present) to build, load or pull an image when the image already exists.
default: false
required: false
version_added: "2.1"
type: bool
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
required: false
version_added: "2.1"
name:
description:
- "Image name. Name format will be one of: name, repository/name, registry_server:port/name.
When pushing or pulling an image the name can optionally include the tag by appending ':tag_name'."
required: true
path:
description:
- Use with state 'present' to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
aliases:
- build_path
required: false
pull:
description:
- When building an image downloads any updates to the FROM image in Dockerfile.
default: true
required: false
version_added: "2.1"
type: bool
push:
description:
- Push the image to the registry. Specify the registry as part of the I(name) or I(repository) parameter.
default: false
required: false
version_added: "2.2"
type: bool
rm:
description:
- Remove intermediate containers after build.
default: true
required: false
version_added: "2.1"
type: bool
nocache:
description:
- Do not use cache when building an image.
default: false
required: false
type: bool
repository:
description:
- Full path to a repository. Use with state C(present) to tag the image into the repository. Expects
format I(repository:tag). If no tag is provided, will use the value of the C(tag) parameter or I(latest).
required: false
version_added: "2.1"
state:
description:
- Make assertions about the state of an image.
- When C(absent) an image will be removed. Use the force option to un-tag and remove all images
matching the provided name.
- When C(present) check if an image exists using the provided name and tag. If the image is not found or the
force option is used, the image will either be pulled, built or loaded. By default the image will be pulled
from Docker Hub. To build the image, provide a path value set to a directory containing a context and
Dockerfile. To load an image, specify load_path to provide a path to an archive file. To tag an image to a
repository, provide a repository path. If the name contains a repository path, it will be pushed.
- "NOTE: C(build) is DEPRECATED and will be removed in release 2.3. Specifying C(build) will behave the
same as C(present)."
required: false
default: present
choices:
- absent
- present
- build
tag:
description:
- Used to select an image when pulling. Will be added to the image when pushing, tagging or building. Defaults to
I(latest).
- If C(name) parameter format is I(name:tag), then tag value from C(name) will take precedence.
default: latest
required: false
buildargs:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21 and docker-py >= 1.7.0.
required: false
version_added: "2.2"
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
required: false
version_added: "2.1"
suboptions:
memory:
description:
- Set memory limit for build.
memswap:
description:
- Total memory (memory + swap), -1 to disable swap.
cpushares:
description:
- CPU shares (relative weight).
cpusetcpus:
description:
- CPUs in which to allow execution, e.g., "0-3", "0,1".
use_tls:
description:
- "DEPRECATED. Whether to use tls to connect to the docker server. Set to C(no) when TLS will not be used. Set to
C(encrypt) to use TLS. And set to C(verify) to use TLS and verify that the server's certificate is valid for the
server. NOTE: If you specify this option, it will set the value of the tls or tls_verify parameters."
choices:
- no
- encrypt
- verify
default: no
required: false
version_added: "2.0"
try_to_pull:
description:
- Try to pull the image before building. Added by QB.
choices:
- yes
- no
default: yes
required: false
extends_documentation_fragment:
- docker
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "Docker API >= 1.20"
author:
- Pavel Antonov (@softzilla)
- Chris Houseknecht (@chouseknecht)
- James Tanner (@jctanner)
'''
EXAMPLES = '''
- name: pull an image
docker_image:
name: pacur/centos-7
- name: Tag and push to docker hub
docker_image:
name: pacur/centos-7
repository: dcoppenhagan/myimage
tag: 7.0
push: yes
- name: Tag and push to local registry
docker_image:
name: centos
repository: localhost:5000/centos
tag: 7
push: yes
- name: Remove image
docker_image:
state: absent
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
- name: Build an image and push it to a private repo
docker_image:
path: ./sinatra
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
push: yes
- name: Archive image
docker_image:
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
archive_path: my_sinatra.tar
- name: Load image from archive and push to a private registry
docker_image:
name: localhost:5000/myimages/sinatra
tag: v1
push: yes
load_path: my_sinatra.tar
- name: Build an image with buildargs
docker_image:
path: /path/to/build/dir
name: myimage
buildargs:
log_volume: /var/log/myapp
listen_port: 8080
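
# QB addition (illustrative sketch): skip the pre-build pull attempt. The
# try_to_pull option is specific to this module, not upstream docker_image.
- name: Build an image without trying to pull it first
  qb_docker_image:
    path: /path/to/build/dir
    name: myimage
    try_to_pull: no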
'''
RETURN = '''
image:
description: Image inspection results for the affected image.
returned: success
type: dict
sample: {}
'''
import os
import re
import json
import socket
import threading
import logging
from ansible.module_utils.docker_common import HAS_DOCKER_PY_2, AnsibleDockerClient, DockerBaseClass
from ansible.module_utils._text import to_native
try:
if HAS_DOCKER_PY_2:
from docker.auth import resolve_repository_name
else:
from docker.auth.auth import resolve_repository_name
from docker.utils.utils import parse_repository_tag
except ImportError:
# missing docker-py handled in docker_common
pass
import qb.ipc.stdio
import qb.ipc.stdio.logging_
# from qb.ipc.stdio import client as io_client, logging_ as stdio_logging
# import qb.ipc.stdio.logging_
# from qb.ipc.stdio import
logger = qb.ipc.stdio.logging_.getLogger('qb_docker_image')
class QBAnsibleDockerClient( AnsibleDockerClient ):
def try_pull_image(self, name, tag="latest"):
'''
Pull an image
'''
self.log("(Try) Pulling image %s:%s" % (name, tag))
try:
for line in self.pull(name, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('error'):
return None
except Exception as exc:
self.log("Error pulling image %s:%s - %s" % (name, tag, str(exc)))
return None
return self.find_image(name=name, tag=tag)
def log(self, msg, pretty_print=False):
qb_log(msg)
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.archive_path = parameters.get('archive_path')
self.container_limits = parameters.get('container_limits')
self.dockerfile = parameters.get('dockerfile')
self.force = parameters.get('force')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.nocache = parameters.get('nocache')
self.path = parameters.get('path')
self.pull = parameters.get('pull')
self.repository = parameters.get('repository')
self.rm = parameters.get('rm')
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = parameters.get('http_timeout')
self.push = parameters.get('push')
self.buildargs = parameters.get('buildargs')
# QB additions
self.try_to_pull = parameters.get('try_to_pull')
self.logger = qb.ipc.stdio.logging_.getLogger(
'qb_docker_image:ImageManager',
level = logging.DEBUG
)
# If name contains a tag, it takes precedence over tag parameter.
repo, repo_tag = parse_repository_tag(self.name)
if repo_tag:
self.name = repo
self.tag = repo_tag
if self.state in ['present', 'build']:
self.present()
elif self.state == 'absent':
self.absent()
def fail(self, msg):
self.client.fail(msg)
def present(self):
'''
Handles state = 'present', which includes building, loading or pulling
an image, depending on user provided parameters.
:returns None
'''
self.logger.info("Starting state=present...")
image = self.client.find_image(name=self.name, tag=self.tag)
pulled_image = None
if not image or self.force:
if self.try_to_pull:
self.log("Try to pull the image")
self.results['actions'].append(
'Tried to pull image %s:%s' % (self.name, self.tag)
)
self.results['changed'] = True
if not self.check_mode:
pulled_image = self.client.try_pull_image(self.name, tag=self.tag)
if pulled_image:
self.results['actions'].append(
'Pulled image %s:%s' % (self.name, self.tag)
)
self.results['image'] = pulled_image
if pulled_image is None:
if self.path:
# Build the image
if not os.path.isdir(self.path):
self.fail("Requested build path %s could not be found or you do not have access." % self.path)
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.log("Building image %s" % image_name)
self.results['actions'].append("Built image %s from %s" % (image_name, self.path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.build_image()
elif self.load_path:
# Load the image from an archive
if not os.path.isfile(self.load_path):
self.fail("Error loading image %s. Specified path %s does not exist." % (self.name,
self.load_path))
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.results['actions'].append("Loaded image %s from %s" % (image_name, self.load_path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.load_image()
else:
# pull the image
self.results['actions'].append('Pulled image %s:%s' % (self.name, self.tag))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.client.pull_image(self.name, tag=self.tag)
if image and image == self.results['image']:
self.results['changed'] = False
if self.archive_path:
self.archive_image(self.name, self.tag)
# Only push if:
#
# 1. We didn't pull the image (if we did pull it we have no need to
# then push it).
# 2. We have a local image or image result (or what are we pushing)
        # have_image = image or len(self.results['image']) > 0
# 3. Either:
# A. We didn't find any image before doing anything
# B. The resulting image is different
#
        # image_is_different = (
        #     (not image) or
        #     (image[u'Id'] != self.results['image'][u'Id'])
        # )
self.logger.debug("Deciding to push...")
if (
pulled_image is None and (
                image or self.results['image']
) and (
((not image) and self.results['image']) or
                (image and self.results['image'] and image['Id'] != self.results['image']['Id'])
)
):
self.logger.debug("Into push section!")
# self.log("have_image: {}".format(have_image))
# self.log("image_is_different: {}".format(image_is_different))
self.logger.debug("Image", image)
<|fim▁hole|> # elif self.repository:
# self.tag_image(
# self.name,
# self.tag,
# self.repository,
# force=self.force,
# push=self.push
# )
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
image = self.client.find_image(self.name, self.tag)
if image:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
self.results['image']['state'] = 'Deleted'
def archive_image(self, name, tag):
'''
Archive an image to a .tar file. Called when archive_path is passed.
:param name - name of the image. Type: str
:return None
'''
if not tag:
tag = "latest"
image = self.client.find_image(name=name, tag=tag)
if not image:
self.log("archive image: image %s:%s not found" % (name, tag))
return
image_name = "%s:%s" % (name, tag)
self.results['actions'].append('Archived image %s to %s' % (image_name, self.archive_path))
self.results['changed'] = True
if not self.check_mode:
self.log("Getting archive of image %s" % image_name)
try:
image = self.client.get_image(image_name)
except Exception as exc:
self.fail("Error getting image %s - %s" % (image_name, str(exc)))
try:
                with open(self.archive_path, 'wb') as fd:
for chunk in image.stream(2048, decode_content=False):
fd.write(chunk)
except Exception as exc:
self.fail("Error writing image archive %s - %s" % (self.archive_path, str(exc)))
image = self.client.find_image(name=name, tag=tag)
if image:
self.results['image'] = image
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
:param name Name of the image to push.
:param tag Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
self.results['image']['push_status'] = status
def tag_image(self, name, tag, repository, force=False, push=False):
'''
Tag an image into a repository.
:param name: name of the image. required.
:param tag: image tag.
:param repository: path to the repository. required.
:param force: bool. force tagging, even it image already exists with the repository path.
:param push: bool. push the image once it's tagged.
:return: None
'''
repo, repo_tag = parse_repository_tag(repository)
if not repo_tag:
repo_tag = "latest"
if tag:
repo_tag = tag
image = self.client.find_image(name=repo, tag=repo_tag)
found = 'found' if image else 'not found'
self.log("image %s was %s" % (repo, found))
if not image or force:
self.log("tagging %s:%s to %s:%s" % (name, tag, repo, repo_tag))
self.results['changed'] = True
self.results['actions'].append("Tagged image %s:%s to %s:%s" % (name, tag, repo, repo_tag))
if not self.check_mode:
try:
# Finding the image does not always work, especially running a localhost registry. In those
# cases, if we don't set force=True, it errors.
image_name = name
if tag and not re.search(tag, name):
image_name = "%s:%s" % (name, tag)
tag_status = self.client.tag(image_name, repo, tag=repo_tag, force=True)
if not tag_status:
raise Exception("Tag operation failed.")
except Exception as exc:
self.fail("Error: failed to tag image - %s" % str(exc))
self.results['image'] = self.client.find_image(name=repo, tag=repo_tag)
if push:
self.push_image(repo, repo_tag)
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
stream=True,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True
)
build_output = []
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
self.buildargs[key] = to_native(value)
params['buildargs'] = self.buildargs
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if "stream" in line:
build_output.append(line["stream"])
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail(
"Error building %s - code: %s, message: %s, logs: %s" % (
self.name,
errorDetail.get('code'),
errorDetail.get('message'),
build_output))
else:
self.fail("Error building %s - message: %s, logs: %s" % (
self.name, line.get('error'), build_output))
return self.client.find_image(name=self.name, tag=self.tag)
def load_image(self):
'''
Load an image from a .tar archive
:return: image dict
'''
try:
self.log("Opening image %s" % self.load_path)
            image_tar = open(self.load_path, 'rb')
except Exception as exc:
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
try:
self.log("Loading image from %s" % self.load_path)
self.client.load_image(image_tar)
except Exception as exc:
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
try:
image_tar.close()
except Exception as exc:
self.fail("Error closing image %s - %s" % (self.name, str(exc)))
return self.client.find_image(self.name, self.tag)
def log(self, msg, pretty_print=False):
return qb_log(msg)
def warn( self, warning ):
self.results['warnings'].append( str(warning) )
def qb_log( msg ):
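    # Forward a message (plain string or a docker-py stream dict) to the QB
    # stdout IPC socket, newline-terminated; returns False when not connected.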
if not qb.ipc.stdio.client.stdout.connected:
return False
string = None
if isinstance( msg, str ):
string = msg
elif isinstance( msg, dict ):
if 'stream' in msg:
string = msg['stream']
else:
string = json.dumps(
msg,
sort_keys=True,
indent=4,
separators=(',', ': ')
)
if string is not None:
if not string.endswith( u"\n" ):
string = string + u"\n"
qb.ipc.stdio.client.stdout.socket.sendall(string)
return True
def qb_debug(name, message, **payload):
if not qb.ipc.stdio.client.log.connected:
return False
struct = dict(
level='debug',
name=name,
pid=os.getpid(),
thread=threading.current_thread().name,
message=message,
payload=payload,
)
string = json.dumps(struct)
if not string.endswith( u"\n" ):
string = string + u"\n"
qb.ipc.stdio.client.log.socket.sendall(string)
return True
def main():
argument_spec = dict(
archive_path=dict(type='path'),
container_limits=dict(type='dict'),
dockerfile=dict(type='str'),
force=dict(type='bool', default=False),
http_timeout=dict(type='int'),
load_path=dict(type='path'),
name=dict(type='str', required=True),
nocache=dict(type='bool', default=False),
path=dict(type='path', aliases=['build_path']),
pull=dict(type='bool', default=True),
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True),
state=dict(type='str', choices=['absent', 'present', 'build'], default='present'),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', default='no', choices=['no', 'encrypt', 'verify']),
buildargs=dict(type='dict', default=None),
# QB additions
try_to_pull=dict( type='bool', default=True ),
)
client = QBAnsibleDockerClient(
argument_spec=argument_spec,
supports_check_mode=True,
)
results = dict(
changed=False,
actions=[],
image={},
warnings=[],
)
qb.ipc.stdio.client.connect(results['warnings'])
logger.info("HERERERERE", extra=dict(payload=dict(x='ex', y='why?')))
ImageManager(client, results)
client.module.exit_json(**results)
if __name__ == '__main__':
main()<|fim▁end|> | # if self.push and not self.repository:
# self.push_image(self.name, self.tag) |
<|file_name|>capabilities.js<|end_file_name|><|fim▁begin|>function safeMatchMedia(query) {
var m = window.matchMedia(query);
return !!m && m.matches;
}
define('capabilities', [], function() {
var capabilities = {
'JSON': window.JSON && typeof JSON.parse == 'function',
'debug': (('' + document.location).indexOf('dbg') >= 0),
'debug_in_page': (('' + document.location).indexOf('dbginpage') >= 0),
'console': window.console && (typeof window.console.log == 'function'),
'replaceState': typeof history.replaceState === 'function',
'chromeless': window.locationbar && !window.locationbar.visible,
'localStorage': false,
'sessionStorage': false,
'webApps': !!(navigator.mozApps && navigator.mozApps.install),
'app_runtime': !!(
navigator.mozApps &&
typeof navigator.mozApps.html5Implementation === 'undefined'
),
'fileAPI': !!window.FileReader,
'userAgent': navigator.userAgent,
'desktop': false,
'tablet': false,
'mobile': safeMatchMedia('(max-width: 600px)'),
'firefoxAndroid': (navigator.userAgent.indexOf('Firefox') != -1 && navigator.userAgent.indexOf('Android') != -1),
'touch': ('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch,
'nativeScroll': (function() {
return 'WebkitOverflowScrolling' in document.createElement('div').style;
})(),
'performance': !!(window.performance || window.msPerformance || window.webkitPerformance || window.mozPerformance),
'navPay': !!navigator.mozPay,
'webactivities': !!(window.setMessageHandler || window.mozSetMessageHandler),
'firefoxOS': null // This is set below.
};
// We're probably tablet if we have touch and we're larger than mobile.
capabilities.tablet = capabilities.touch && safeMatchMedia('(min-width: 601px)');
// We're probably desktop if we don't have touch and we're larger than some arbitrary dimension.
capabilities.desktop = !capabilities.touch && safeMatchMedia('(min-width: 673px)');
    // Packaged-app installation is supported only on Firefox OS, so this is how we sniff.
capabilities.gaia = !!(capabilities.mobile && navigator.mozApps && navigator.mozApps.installPackage);
capabilities.getDeviceType = function() {
return this.desktop ? 'desktop' : (this.tablet ? 'tablet' : 'mobile');
};
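    // e.g. z.capabilities.getDeviceType() is 'mobile' on screens at most
    // 600px wide, and 'desktop' on wide (>= 673px) non-touch screens.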
if (capabilities.tablet) {
// If we're on tablet, then we're not on desktop.
capabilities.desktop = false;
}
if (capabilities.mobile) {
// If we're on mobile, then we're not on desktop nor tablet.
capabilities.desktop = capabilities.tablet = false;
}
// Detect Firefox OS.
// This will be true if the request is from a Firefox OS phone *or*
// a desktop B2G build with the correct UA pref, such as this:
// https://github.com/mozilla/r2d2b2g/blob/master/prosthesis/defaults/preferences/prefs.js
capabilities.firefoxOS = capabilities.gaia && !capabilities.firefoxAndroid;<|fim▁hole|> if ('localStorage' in window && window.localStorage !== null) {
capabilities.localStorage = true;
}
} catch (e) {
}
try {
if ('sessionStorage' in window && window.sessionStorage !== null) {
capabilities.sessionStorage = true;
}
} catch (e) {
}
return capabilities;
});
z.capabilities = require('capabilities');<|fim▁end|> |
try { |
<|file_name|>mutation-drift-selection.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=1>
# Wright-Fisher model of mutation, selection and random genetic drift
# <markdowncell>
# A Wright-Fisher model has a fixed population size *N* and discrete non-overlapping generations. Each generation, each individual has a random number of offspring whose mean is proportional to the individual's fitness. Each generation, mutation may occur. Mutations may increase or decrease individual's fitness, which affects the chances of that individual's offspring in subsequent generations.
# <markdowncell>
# Here, I'm using a fitness model where some proportion of the time a mutation will have a fixed fitness effect, increasing or decreasing fitness by a fixed amount.
# <headingcell level=2>
# Setup
# <codecell>
import numpy as np
import itertools
# <headingcell level=2>
# Make population dynamic model
# <headingcell level=3>
# Basic parameters
# <codecell>
pop_size = 100
# <codecell>
seq_length = 10
# <codecell>
alphabet = ['A', 'T']
# <codecell>
base_haplotype = "AAAAAAAAAA"
# <codecell>
fitness_effect = 1.1 # fitness effect if a functional mutation occurs
# <codecell>
fitness_chance = 0.1 # chance that a mutation has a fitness effect
# <headingcell level=3>
# Population of haplotypes maps to counts and fitnesses
# <markdowncell>
# Store this as a lightweight Dictionary that maps a string to a count. All the sequences together will have count *N*.
# <codecell>
pop = {}
# <codecell>
pop["AAAAAAAAAA"] = 40
# <codecell>
pop["AAATAAAAAA"] = 30
# <codecell>
pop["AATTTAAAAA"] = 30
# <markdowncell>
# *Map haplotype string to fitness float.*
# <codecell>
fitness = {}
# <codecell>
fitness["AAAAAAAAAA"] = 1.0
# <codecell>
fitness["AAATAAAAAA"] = 1.05
# <codecell>
fitness["AATTTAAAAA"] = 1.10
# <codecell>
pop["AAATAAAAAA"]
# <codecell>
fitness["AAATAAAAAA"]
# <headingcell level=3>
# Add mutation
# <codecell>
mutation_rate = 0.005 # per gen per individual per site
# <codecell>
def get_mutation_count():
mean = mutation_rate * pop_size * seq_length
return np.random.poisson(mean)
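# <markdowncell>
# *Sanity check: with the defaults above, the expected number of mutations per generation is 0.005 × 100 × 10 = 5.*
# <codecell>
get_mutation_count()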
# <codecell>
def get_random_haplotype():
haplotypes = pop.keys()
frequencies = [x/float(pop_size) for x in pop.values()]
total = sum(frequencies)
frequencies = [x / total for x in frequencies]
return np.random.choice(haplotypes, p=frequencies)
# <codecell>
def get_mutant(haplotype):
site = np.random.randint(seq_length)
possible_mutations = list(alphabet)
possible_mutations.remove(haplotype[site])
mutation = np.random.choice(possible_mutations)
new_haplotype = haplotype[:site] + mutation + haplotype[site+1:]
return new_haplotype
# <markdowncell>
# *Mutations have fitness effects*
# <codecell>
def get_fitness(haplotype):
old_fitness = fitness[haplotype]
if (np.random.random() < fitness_chance):
return old_fitness * fitness_effect
else:
return old_fitness
# <codecell>
get_fitness("AAAAAAAAAA")
# <markdowncell>
# *If a mutation event creates a new haplotype, assign it a random fitness.*
# <codecell>
def mutation_event():
haplotype = get_random_haplotype()
if pop[haplotype] > 1:
pop[haplotype] -= 1
new_haplotype = get_mutant(haplotype)
if new_haplotype in pop:
pop[new_haplotype] += 1
else:
pop[new_haplotype] = 1
if new_haplotype not in fitness:
fitness[new_haplotype] = get_fitness(haplotype)
# <codecell>
mutation_event()
# <codecell>
pop
# <codecell>
fitness
# <codecell>
def mutation_step():
mutation_count = get_mutation_count()
for i in range(mutation_count):
mutation_event()
# <headingcell level=3>
# Genetic drift and fitness affect which haplotypes make it to the next generation
# <markdowncell>
# *Fitness weights the multinomial draw.*
# <codecell>
def get_offspring_counts():
haplotypes = pop.keys()
frequencies = [pop[haplotype]/float(pop_size) for haplotype in haplotypes]
fitnesses = [fitness[haplotype] for haplotype in haplotypes]
weights = [x * y for x,y in zip(frequencies, fitnesses)]
total = sum(weights)
weights = [x / total for x in weights]
return list(np.random.multinomial(pop_size, weights))
# <codecell>
get_offspring_counts()
# <codecell>
def offspring_step():
counts = get_offspring_counts()
for (haplotype, count) in zip(pop.keys(), counts):
if (count > 0):
pop[haplotype] = count
else:
del pop[haplotype]
# <headingcell level=3>
# Combine and iterate
# <codecell>
def time_step():
mutation_step()
offspring_step()
# <codecell>
generations = 5
# <codecell>
def simulate():
for i in range(generations):
time_step()
# <headingcell level=3>
# Record
# <markdowncell>
# We want to keep a record of past population frequencies to understand dynamics through time. At each step in the simulation, we append to a history object.
# <codecell>
history = []
# <codecell>
def simulate():
clone_pop = dict(pop)
history.append(clone_pop)
for i in range(generations):
time_step()
clone_pop = dict(pop)
history.append(clone_pop)
# <codecell>
simulate()
# <headingcell level=2>
# Analyze trajectories
# <headingcell level=3>
# Calculate diversity
# <codecell>
def get_distance(seq_a, seq_b):
diffs = 0
length = len(seq_a)
assert len(seq_a) == len(seq_b)
for chr_a, chr_b in zip(seq_a, seq_b):
if chr_a != chr_b:
diffs += 1
return diffs / float(length)
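# <markdowncell>
# *For example, these two haplotypes differ at 3 of 10 sites, so the distance is 0.3:*
# <codecell>
get_distance("AAAAAAAAAA", "AATTTAAAAA")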
# <codecell>
def get_diversity(population):
haplotypes = population.keys()
haplotype_count = len(haplotypes)
diversity = 0
for i in range(haplotype_count):
for j in range(haplotype_count):
haplotype_a = haplotypes[i]
haplotype_b = haplotypes[j]
frequency_a = population[haplotype_a] / float(pop_size)
frequency_b = population[haplotype_b] / float(pop_size)
frequency_pair = frequency_a * frequency_b
diversity += frequency_pair * get_distance(haplotype_a, haplotype_b)
return diversity<|fim▁hole|>
def get_diversity_trajectory():
trajectory = [get_diversity(generation) for generation in history]
return trajectory
# <headingcell level=3>
# Plot diversity
# <codecell>
%matplotlib inline
import matplotlib.pyplot as plt
import matplotlib as mpl
# <codecell>
def diversity_plot():
mpl.rcParams['font.size']=14
trajectory = get_diversity_trajectory()
plt.plot(trajectory, "#447CCD")
plt.ylabel("diversity")
plt.xlabel("generation")
# <headingcell level=3>
# Analyze and plot divergence
# <codecell>
def get_divergence(population):
haplotypes = population.keys()
divergence = 0
for haplotype in haplotypes:
frequency = population[haplotype] / float(pop_size)
divergence += frequency * get_distance(base_haplotype, haplotype)
return divergence
# <codecell>
def get_divergence_trajectory():
trajectory = [get_divergence(generation) for generation in history]
return trajectory
# <codecell>
def divergence_plot():
mpl.rcParams['font.size']=14
trajectory = get_divergence_trajectory()
plt.plot(trajectory, "#447CCD")
plt.ylabel("divergence")
plt.xlabel("generation")
# <headingcell level=3>
# Plot haplotype trajectories
# <codecell>
def get_frequency(haplotype, generation):
pop_at_generation = history[generation]
if haplotype in pop_at_generation:
return pop_at_generation[haplotype]/float(pop_size)
else:
return 0
# <codecell>
def get_trajectory(haplotype):
trajectory = [get_frequency(haplotype, gen) for gen in range(generations)]
return trajectory
# <codecell>
def get_all_haplotypes():
haplotypes = set()
for generation in history:
for haplotype in generation:
haplotypes.add(haplotype)
return haplotypes
# <codecell>
colors = ["#781C86", "#571EA2", "#462EB9", "#3F47C9", "#3F63CF", "#447CCD", "#4C90C0", "#56A0AE", "#63AC9A", "#72B485", "#83BA70", "#96BD60", "#AABD52", "#BDBB48", "#CEB541", "#DCAB3C", "#E49938", "#E68133", "#E4632E", "#DF4327", "#DB2122"]
# <codecell>
colors_lighter = ["#A567AF", "#8F69C1", "#8474D1", "#7F85DB", "#7F97DF", "#82A8DD", "#88B5D5", "#8FC0C9", "#97C8BC", "#A1CDAD", "#ACD1A0", "#B9D395", "#C6D38C", "#D3D285", "#DECE81", "#E8C77D", "#EDBB7A", "#EEAB77", "#ED9773", "#EA816F", "#E76B6B"]
# <codecell>
def stacked_trajectory_plot(xlabel="generation"):
mpl.rcParams['font.size']=18
haplotypes = get_all_haplotypes()
trajectories = [get_trajectory(haplotype) for haplotype in haplotypes]
plt.stackplot(range(generations), trajectories, colors=colors_lighter)
plt.ylim(0, 1)
plt.ylabel("frequency")
plt.xlabel(xlabel)
# <headingcell level=3>
# Plot SNP trajectories
# <codecell>
def get_snp_frequency(site, generation):
minor_allele_frequency = 0.0
pop_at_generation = history[generation]
for haplotype in pop_at_generation.keys():
allele = haplotype[site]
frequency = pop_at_generation[haplotype] / float(pop_size)
if allele != "A":
minor_allele_frequency += frequency
return minor_allele_frequency
# <codecell>
def get_snp_trajectory(site):
trajectory = [get_snp_frequency(site, gen) for gen in range(generations)]
return trajectory
# <markdowncell>
# Find all variable sites.
# <codecell>
def get_all_snps():
snps = set()
for generation in history:
for haplotype in generation:
for site in range(seq_length):
if haplotype[site] != "A":
snps.add(site)
return snps
# <codecell>
def snp_trajectory_plot(xlabel="generation"):
mpl.rcParams['font.size']=18
snps = get_all_snps()
trajectories = [get_snp_trajectory(snp) for snp in snps]
data = []
for trajectory, color in itertools.izip(trajectories, itertools.cycle(colors)):
data.append(range(generations))
data.append(trajectory)
data.append(color)
fig = plt.plot(*data)
plt.ylim(0, 1)
plt.ylabel("frequency")
plt.xlabel(xlabel)
# <headingcell level=2>
# Scale up
# <markdowncell>
# Here, we scale up to more interesting parameter values.
# <codecell>
pop_size = 50
seq_length = 100
generations = 500
mutation_rate = 0.0001 # per gen per individual per site
fitness_effect = 1.1 # fitness effect if a functional mutation occurs
fitness_chance = 0.1 # chance that a mutation has a fitness effect
# <markdowncell>
# In this case there are $\mu$ = 0.01 mutations entering the population every generation.
# <codecell>
seq_length * mutation_rate
# <markdowncell>
# And the population genetic parameter $\theta$, which equals $2N\mu$, is 1.
# <codecell>
2 * pop_size * seq_length * mutation_rate
# <codecell>
base_haplotype = ''.join(["A" for i in range(seq_length)])
pop.clear()
fitness.clear()
del history[:]
pop[base_haplotype] = pop_size
fitness[base_haplotype] = 1.0
# <codecell>
simulate()
# <codecell>
plt.figure(num=None, figsize=(14, 14), dpi=80, facecolor='w', edgecolor='k')
plt.subplot2grid((3,2), (0,0), colspan=2)
stacked_trajectory_plot()
plt.subplot2grid((3,2), (1,0), colspan=2)
snp_trajectory_plot()
plt.subplot2grid((3,2), (2,0))
diversity_plot()
plt.subplot2grid((3,2), (2,1))
divergence_plot()<|fim▁end|> |
# <codecell> |
<|file_name|>WordAlignmentFeatureExtractorFactory.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
/**
* A factory for making word alignment feature extractors
*
* @author John McCrae <[email protected]>
*/
public interface WordAlignmentFeatureExtractorFactory {
/**
* An identifier for this WAFE
* @return The identifier
*/
String id();
/**
     * Create a new word alignment feature extractor
* @param params The parameters of the configuration file
* @return The WAFE
*/
WordAlignmentFeatureExtractor make(Map<String, Object> params);
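
    // Hypothetical usage sketch (how a factory instance is obtained is assumed,
    // not defined by this interface):
    //   WordAlignmentFeatureExtractor wafe = factory.make(Collections.emptyMap());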
}<|fim▁end|> | package org.insightcentre.unlp.naisc.wordalign;
import java.util.Map; |
<|file_name|>MEN.py<|end_file_name|><|fim▁begin|>#Evaluate semantic space against MEN dataset
import sys
import utils
from scipy import stats
import numpy as np
from math import sqrt
#Note: this is scipy's spearman, without tie adjustment
def spearman(x,y):
return stats.spearmanr(x, y)[0]
def readMEN(annotation_file):
pairs=[]
humans=[]
f=open(annotation_file,'r')
for l in f:
l=l.rstrip('\n')
items=l.split()<|fim▁hole|> pairs.append((items[0],items[1]))
humans.append(float(items[2]))
f.close()
return pairs, humans
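# Minimal usage sketch (hypothetical file path; assumes the MEN annotation
# format of "word1 word2 score" per line):
#   pairs, humans = readMEN('MEN_dataset_natural_form_full')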
def compute_men_spearman(dm_dict, annotation_file):
pairs, humans=readMEN(annotation_file)
system_actual=[]
human_actual=[]
count=0
for i in range(len(pairs)):
human=humans[i]
a,b=pairs[i]
if a in dm_dict and b in dm_dict:
cos=utils.cosine_similarity(dm_dict[a],dm_dict[b])
system_actual.append(cos)
human_actual.append(human)
count+=1
sp = spearman(human_actual,system_actual)
return sp,count<|fim▁end|> | |
<|file_name|>twister.rs<|end_file_name|><|fim▁begin|>use std::iter::Iterator;
pub struct Twister {
index: usize,
state: [u32; 624],
}
impl Twister {
pub fn new(seed: u32) -> Twister {
let mut state = [0u32; 624];
state[0] = seed;
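        // MT19937 seeding: state[i] = 1812433253 * (state[i-1] ^ (state[i-1] >> 30)) + i, mod 2^32.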
for idx in 1..state.len() {
state[idx] = (0x6c078965*(state[idx-1] ^ (state[idx-1] >> 30)) as u64
+ idx as u64 & 0xffffffff) as u32;
}
Twister { index: 0, state: state }
}
pub fn raw(index: usize, state: &[u32; 624]) -> Twister {
        Twister { index: index, state: *state } // [u32; 624] is Copy, so a plain deref copies it
}
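
    // The "twist" step: regenerate all 624 state words in place.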
fn generate_numbers(&mut self) {
for idx in 0..self.state.len() {
let y = (self.state[idx] & 0x80000000) +
(self.state[(idx + 1) % self.state.len()] & 0x7fffffff);
self.state[idx] = self.state[(idx + 397) % self.state.len()] ^ (y >> 1);
if y % 2 != 0 {
self.state[idx] ^= 0x9908b0df;
}
}
}
}
impl Iterator for Twister {
type Item = u32;
fn next(&mut self) -> Option<u32> {
if self.index == 0 {
self.generate_numbers();
}
let mut y = self.state[self.index];
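        // MT19937 tempering: shifts and masks improve equidistribution of the raw state word.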
y ^= y >> 11;
y ^= y << 7 & 0x9d2c5680;
y ^= y << 15 & 0xefc60000;
y ^= y >> 18;
self.index = (self.index + 1) % self.state.len();
Some(y)
}
}<|fim▁hole|>#[cfg(test)]
mod tests {
use super::Twister;
#[test]
fn new_0() {
let t = Twister::new(0);
assert_eq!(t.index, 0);
assert_eq!(t.state[0], 0);
}
#[test]
fn first_10_0() {
let t = Twister::new(0);
let first_10 = t.take(10).collect::<Vec<u32>>();
let expected = vec![0x8c7f0aac, 0x97c4aa2f, 0xb716a675, 0xd821ccc0,
0x9a4eb343, 0xdba252fb, 0x8b7d76c3, 0xd8e57d67,
0x6c74a409, 0x9fa1ded3];
assert_eq![first_10, expected];
}
#[test]
fn first_10_24601() {
let t = Twister::new(24601);
let first_10 = t.take(10).collect::<Vec<u32>>();
let expected = vec![0xcbb6cbb6, 0x8bdd34d2, 0x96dd3078, 0x6d9ed4bb,
0x40d704d0, 0x0bcd0625, 0xbf7739dc, 0x51019dcb,
0xdd41362f, 0x3a88e7e6];
assert_eq![first_10, expected];
}
#[test]
fn skip_1000_0() {
let t = Twister::new(0);
let skip_1000 = t.skip(1000).take(10).collect::<Vec<u32>>();
let expected = vec![0x4f751e27, 0x471c2cea, 0x5f7f367b, 0xe515c11c,
0x86647698, 0x06ca2e92, 0xc026fec3, 0xa029b8ac,
0x5560bed3, 0x545ce92d];
assert_eq![skip_1000, expected];
}
}<|fim▁end|> |